diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/CMakeLists.txt b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..17dc38b8a933d0b8fd84e5579426f7933ca45c99
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/CMakeLists.txt
@@ -0,0 +1,41 @@
+# Copyright (c) Huawei Technologies Co., Ltd. 2019. All rights reserved.
+# CMake lowest version requirement
+cmake_minimum_required(VERSION 3.5.1)
+# project information
+project(ascendcl)
+# Compile options
+add_compile_options(-std=c++11)
+# Specify target generation path
+set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "../outputs")
+set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "../outputs")
+set(CMAKE_INSTALL_PREFIX "../../../")
+set(CMAKE_OUTPUT_DIR "out")
+set(CMAKE_CXX_FLAGS_RELEASE "-fPIC -O2 -g -Wall")
+
+ADD_DEFINITIONS("-DENABLE_DVPP_INTERFACE -D_GLIBCXX_USE_CXX11_ABI=0")
+
+# Header path
+include_directories(
+inc
+#/usr/include/gflags
+$ENV{install_path}/acllib/include
+$ENV{install_path}/driver/kernel/libc_sec/include
+/usr/include
+)
+
+# add host lib path
+link_directories($ENV{install_path}/acllib/lib64/stub)
+#link_directories(/usr/local/Ascend/driver/lib64)
+#link_directories(/usr/local/Ascend/atc/lib64)
+#link_directories(/usr/local/lib)
+link_directories(../thirdpart_lib)
+
+# Source files to compile
+add_executable(benchmark main.cpp util.cpp post_process.cpp infer_engine.cpp)
+
+# Shared libraries to link (RC is the offline model to be extended)
+#target_link_libraries(benchmark acl_dvpp ascendcl pthread protobuf cryptopp)
+target_link_libraries(benchmark acl_dvpp ascendcl pthread)
+
+install(TARGETS benchmark DESTINATION ${CMAKE_OUTPUT_DIR})
+
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/defines.h b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/defines.h
new file mode 100644
index 0000000000000000000000000000000000000000..f0be3dcb485269718125445537ce3616c3078d34
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/defines.h
@@ -0,0 +1,48 @@
+/* Generated from defines.h.in during build configuration using CMake. */
+
+// Note: This header file is only used internally. It is not part of public interface!
+//       Any cmakedefine is defined using the -D flag instead when Bazel is used.
+//       For Bazel, this file is thus not used to avoid a private file in $(GENDIR).
+
+#ifndef GFLAGS_DEFINES_H_
+#define GFLAGS_DEFINES_H_
+
+
+// Define if you build this library for a MS Windows OS.
+/* #undef OS_WINDOWS */
+
+// Define if you have the <stdint.h> header file.
+#define HAVE_STDINT_H
+
+// Define if you have the <sys/types.h> header file.
+#define HAVE_SYS_TYPES_H
+
+// Define if you have the <inttypes.h> header file.
+#define HAVE_INTTYPES_H
+
+// Define if you have the <sys/stat.h> header file.
+#define HAVE_SYS_STAT_H
+
+// Define if you have the <unistd.h> header file.
+#define HAVE_UNISTD_H
+
+// Define if you have the <fnmatch.h> header file.
+#define HAVE_FNMATCH_H
+
+// Define if you have the <shlwapi.h> header file (Windows 2000/XP).
+/* #undef HAVE_SHLWAPI_H */
+
+// Define if you have the strtoll function.
+#define HAVE_STRTOLL
+
+// Define if you have the strtoq function.
+/* #undef HAVE_STRTOQ */
+
+// Define if you have the <pthread.h> header file.
+#define HAVE_PTHREAD
+
+// Define if your pthread library defines the type pthread_rwlock_t
+#define HAVE_RWLOCK
+
+
+#endif // GFLAGS_DEFINES_H_
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/gflags.h b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/gflags.h
new file mode 100644
index 0000000000000000000000000000000000000000..4f3168a03d878a16ac0e05d1d240396b9d422c63
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/gflags.h
@@ -0,0 +1,624 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+//    Then, at the command-line:
+//       ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//   thread-safe: it is safe for multiple threads to call this routine
+//     (or, when referring to a class, methods of this class)
+//     concurrently.
+//   thread-hostile: it is not safe for multiple threads to call this
+//     routine (or methods of this class) concurrently.  In gflags,
+//     most thread-hostile routines are intended to be called early in,
+//     or even before, main() -- that is, before threads are spawned.
+//   thread-compatible: it is safe for multiple threads to read from
+//     this variable (when applied to variables), or to call const
+//     methods of this class (when applied to classes), as long as no
+//     other thread is writing to the variable or calling non-const
+//     methods of this class.
+
+#ifndef GFLAGS_GFLAGS_H_
+#define GFLAGS_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+#include "gflags/gflags_declare.h" // IWYU pragma: export
+
+
+// We always want to export variables defined in user code
+#ifndef GFLAGS_DLL_DEFINE_FLAG
+#  if GFLAGS_IS_A_DLL && defined(_MSC_VER)
+#    define GFLAGS_DLL_DEFINE_FLAG __declspec(dllexport)
+#  else
+#    define GFLAGS_DLL_DEFINE_FLAG
+#  endif
+#endif
+
+
+namespace GFLAGS_NAMESPACE {
+
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
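// [Editor's note, not part of the vendored header: a minimal sketch of the
// DEFINE_validator convenience macro declared just below, reusing the
// hypothetical ValidatePort example from the comment above.]
//
//   DEFINE_int32(port, 0, "What port to listen on");
//   DEFINE_validator(port, &ValidatePort);
//
// [The macro expands to a static bool initialized by RegisterFlagValidator,
// so the validator is attached during static initialization, before
// ParseCommandLineFlags() runs.]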
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const bool* flag, bool (*validate_fn)(const char*, bool));
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const int32* flag, bool (*validate_fn)(const char*, int32));
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const uint32* flag, bool (*validate_fn)(const char*, uint32));
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const int64* flag, bool (*validate_fn)(const char*, int64));
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const uint64* flag, bool (*validate_fn)(const char*, uint64));
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const double* flag, bool (*validate_fn)(const char*, double));
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const std::string* flag, bool (*validate_fn)(const char*, const std::string&));
+
+// Convenience macro for the registration of a flag validator
+#define DEFINE_validator(name, validator) \
+    static const bool name##_validator_registered = \
+        GFLAGS_NAMESPACE::RegisterFlagValidator(&FLAGS_##name, validator)
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+//
+// No need to export this data only structure from DLL, avoiding VS warning 4251.
+struct CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on this flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+  const void* flag_ptr;       // pointer to the flag's current value (i.e. FLAGS_foo)
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(user) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// gflags_unittest.sh
+extern GFLAGS_DLL_DECL void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in gflags_reporting.cc.
+extern GFLAGS_DLL_DECL void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern GFLAGS_DLL_DECL void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern GFLAGS_DLL_DECL std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetArgv(int argc, const char** argv);
+
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern GFLAGS_DLL_DECL const std::vector<std::string>& GetArgvs();
+extern GFLAGS_DLL_DECL const char* GetArgv();                    // all of argv as a string
+extern GFLAGS_DLL_DECL const char* GetArgv0();                   // only argv0
+extern GFLAGS_DLL_DECL uint32 GetArgvSum();                      // simple checksum of argv
+extern GFLAGS_DLL_DECL const char* ProgramInvocationName();      // argv0, or "UNKNOWN" if not set
+extern GFLAGS_DLL_DECL const char* ProgramInvocationShortName(); // basename(argv0)
+
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern GFLAGS_DLL_DECL const char* ProgramUsage();               // string set by SetUsageMessage()
+
+// VersionString() is thread-safe as long as SetVersionString() is only
+// called before any threads start.
+extern GFLAGS_DLL_DECL const char* VersionString();              // string set by SetVersionString()
+
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern GFLAGS_DLL_DECL bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern GFLAGS_DLL_DECL bool GetCommandLineFlagInfo(const char* name, CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern GFLAGS_DLL_DECL CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum GFLAGS_DLL_DECL FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not been updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern GFLAGS_DLL_DECL std::string SetCommandLineOption        (const char* name, const char* value);
+extern GFLAGS_DLL_DECL std::string SetCommandLineOptionWithMode(const char* name, const char* value, FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with GFLAGS_ATTRIBUTE_UNUSED because all
+// the work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.  However, its destructor writes to
+// exactly the set of flags that have changed value during its
+// lifetime, so concurrent _direct_ access to those flags
+// (i.e. FLAGS_foo instead of {Get,Set}CommandLineOption()) is unsafe.
+
+class GFLAGS_DLL_DECL FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} __attribute((unused));
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern GFLAGS_DLL_DECL std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern GFLAGS_DLL_DECL
+bool ReadFlagsFromString(const std::string& flagfilecontents,
+                         const char* prog_name,
+                         bool errors_are_fatal);  // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern GFLAGS_DLL_DECL bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern GFLAGS_DLL_DECL bool ReadFromFlagsFile(const std::string& filename, const char* prog_name, bool errors_are_fatal);  // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern GFLAGS_DLL_DECL bool BoolFromEnv(const char *varname, bool defval);
+extern GFLAGS_DLL_DECL int32 Int32FromEnv(const char *varname, int32 defval);
+extern GFLAGS_DLL_DECL uint32 Uint32FromEnv(const char *varname, uint32 defval);
+extern GFLAGS_DLL_DECL int64 Int64FromEnv(const char *varname, int64 defval);
+extern GFLAGS_DLL_DECL uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern GFLAGS_DLL_DECL double DoubleFromEnv(const char *varname, double defval);
+extern GFLAGS_DLL_DECL const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse gflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetUsageMessage(const std::string& usage);
+
+// Sets the version string, which is emitted with --version.
+// For instance: SetVersionString("1.3");
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetVersionString(const std::string& version);
+
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.  Returns the index (into argv)
+// of the first non-flag argument.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern GFLAGS_DLL_DECL uint32 ParseCommandLineFlags(int *argc, char*** argv, bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.  If a flag is
+// defined more than once in the command line or flag file, the last
+// definition is used.  Returns the index (into argv) of the first
+// non-flag argument.  (If remove_flags is true, will always return 1.)
+extern GFLAGS_DLL_DECL uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv, bool remove_flags);
+
+// This is actually defined in gflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern GFLAGS_DLL_DECL void HandleCommandLineHelpFlags();   // in gflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern GFLAGS_DLL_DECL void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.  Only flags
+// registered since the last parse will be recognized.  Any flag value
+// must be provided as part of the argument using "=", not as a
+// separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+extern GFLAGS_DLL_DECL void ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the google perftools heap-checker.  It must only
+// be called when the process is about to exit, and all threads that
+// might access flags are quiescent.  Referencing flags after this is
+// called will have unexpected consequences.  This is not safe to run
+// when multiple threads might be running: the function is
+// thread-hostile.
+extern GFLAGS_DLL_DECL void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying
+//   extern GFLAGS_DLL_DECL bool FLAGS_whatever;
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class GFLAGS_DLL_DECL FlagRegisterer {
+ public:
+  // We instantiate this template ctor for all supported types,
+  // so it is possible to place implementation of the FlagRegisterer ctor in
+  // .cc file.
+  // Calling this constructor with unsupported type will produce linker error.
+  template <typename FlagType>
+  FlagRegisterer(const char* name,
+                 const char* help, const char* filename,
+                 FlagType* current_storage, FlagType* defvalue_storage);
+};
+
+// Force compiler to not generate code for the given template specialization.
+#if defined(_MSC_VER) && _MSC_VER < 1800 // Visual Studio 2013 version 12.0
+  #define GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(type)
+#else
+  #define GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(type)                 \
+    extern template GFLAGS_DLL_DECL FlagRegisterer::FlagRegisterer( \
+        const char* name, const char* help, const char* filename,   \
+        type* current_storage, type* defvalue_storage)
+#endif
+
+// Do this for all supported flag types.
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(bool);
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(int32);
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(uint32);
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(int64);
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(uint64);
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(double);
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(std::string);
+
+#undef GFLAGS_DECLARE_FLAG_REGISTERER_CTOR
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
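// [Editor's note, not part of the vendored header: a minimal sketch of the
// help-stripping option just described. The macro must be visible before the
// first inclusion of this header.]
//
//   #define STRIP_FLAG_HELP 1   // or compile with -DSTRIP_FLAG_HELP=1
//   #include "gflags/gflags.h"
//   DEFINE_bool(verbose, false, "this text never reaches the binary");
//
// [With STRIP_FLAG_HELP set, the MAYBE_STRIPPED_HELP() macro below substitutes
// kStrippedFlagHelp for every DEFINE_* help string.]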
+
+extern GFLAGS_DLL_DECL const char kStrippedFlagHelp[];
+
+
+} // namespace GFLAGS_NAMESPACE
+
+
+#ifndef SWIG // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) \
+  (false ? (txt) : GFLAGS_NAMESPACE::kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help)      \
+  namespace fL##shorttype {                                      \
+    static const type FLAGS_nono##name = value;                  \
+    /* We always want to export defined variables, dll or no */  \
+    GFLAGS_DLL_DEFINE_FLAG type FLAGS_##name = FLAGS_nono##name; \
+    static type FLAGS_no##name = FLAGS_nono##name;               \
+    static GFLAGS_NAMESPACE::FlagRegisterer o_##name(            \
+        #name, MAYBE_STRIPPED_HELP(help), __FILE__,              \
+        &FLAGS_##name, &FLAGS_no##name);                         \
+  }                                                              \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// COMPILE_ASSERT.
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template <typename From> double GFLAGS_DLL_DECL IsBoolFlag(const From& from);
+GFLAGS_DLL_DECL bool IsBoolFlag(bool from);
+} // namespace fLB
+
+// Here are the actual DEFINE_*-macros. The respective DECLARE_*-macros
+// are in a separate include, gflags_declare.h, for reducing
+// the physical transitive size for DECLARE use.
+#define DEFINE_bool(name, val, txt)                                     \
+  namespace fLB {                                                       \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[     \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double))? 1: -1]; \
+  }                                                                     \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DEFINE_int32(name, val, txt) \
+   DEFINE_VARIABLE(GFLAGS_NAMESPACE::int32, I, \
+                   name, val, txt)
+
+#define DEFINE_uint32(name, val, txt) \
+   DEFINE_VARIABLE(GFLAGS_NAMESPACE::uint32, U, \
+                   name, val, txt)
+
+#define DEFINE_int64(name, val, txt) \
+   DEFINE_VARIABLE(GFLAGS_NAMESPACE::int64, I64, \
+                   name, val, txt)
+
+#define DEFINE_uint64(name, val, txt) \
+   DEFINE_VARIABLE(GFLAGS_NAMESPACE::uint64, U64, \
+                   name, val, txt)
+
+#define DEFINE_double(name, val, txt) \
+   DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+
+// Auxiliary class used to explicitly call destructor of string objects
+// allocated using placement new during static program deinitialization.
+// The destructor MUST be an inline function such that the explicit
+// destruction occurs in the same compilation unit as the placement new.
+class StringFlagDestructor {
+  void *current_storage_;
+  void *defvalue_storage_;
+
+public:
+
+  StringFlagDestructor(void *current, void *defvalue)
+  : current_storage_(current), defvalue_storage_(defvalue) {}
+
+  ~StringFlagDestructor() {
+    reinterpret_cast<clstring*>(current_storage_ )->~clstring();
+    reinterpret_cast<clstring*>(defvalue_storage_)->~clstring();
+  }
+};
+
+} // namespace fLS
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10.  See
+//    http://code.google.com/p/google-gflags/issues/detail?id=20
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    using ::fLS::StringFlagDestructor;                                      \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static GFLAGS_NAMESPACE::FlagRegisterer o_##name(                       \
+        #name, MAYBE_STRIPPED_HELP(txt), __FILE__,                          \
+        FLAGS_no##name, new (s_##name[1].s) clstring(*FLAGS_no##name));     \
+    static StringFlagDestructor d_##name(s_##name[0].s, s_##name[1].s);     \
+    extern GFLAGS_DLL_DEFINE_FLAG clstring& FLAGS_##name;                   \
+    using fLS::FLAGS_##name;                                                \
+    clstring& FLAGS_##name = *FLAGS_no##name;                               \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif // SWIG
+
+// Import gflags library symbols into alternative/deprecated namespace(s)
+#include "gflags_gflags.h"
+
+#endif // GFLAGS_GFLAGS_H_
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/gflags_completions.h b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/gflags_completions.h
new file mode 100644
index 0000000000000000000000000000000000000000..15637eb3de853a79eccb9e7ba71771c607767a2e
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/gflags_completions.h
@@ -0,0 +1,119 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programmatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevant
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                            \
+ '/home/build/eng/bash/bash_completions.sh --tab_completion_columns $COLUMNS' \
+  time env binary_name another_binary [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
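// [Editor's note, not part of the vendored header: the completion hook does
// not need to be called directly in typical programs. Since
// HandleCommandLineHelpFlags() runs the check described above and is itself
// invoked by ParseCommandLineFlags(), a standard main() already supports
// --tab_completion_word:]
//
//   int main(int argc, char** argv) {
//     gflags::ParseCommandLineFlags(&argc, &argv, true);
//     // --tab_completion_word, --help, --version are all handled before here.
//     return 0;
//   }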
+
+
+#ifndef GFLAGS_COMPLETIONS_H_
+#define GFLAGS_COMPLETIONS_H_
+
+namespace google {
+extern void HandleCommandLineCompletions(void);
+}
+
+#endif // GFLAGS_COMPLETIONS_H_
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/gflags_declare.h b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/gflags_declare.h
new file mode 100644
index 0000000000000000000000000000000000000000..a9c6759707846f63ab97a66c13cb446975364448
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/gflags_declare.h
@@ -0,0 +1,155 @@
+// Copyright (c) 1999, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+//
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// a command line flag.
+
+#ifndef GFLAGS_DECLARE_H_
+#define GFLAGS_DECLARE_H_
+
+
+// ---------------------------------------------------------------------------
+// Namespace of gflags library symbols.
+#define GFLAGS_NAMESPACE google
+
+// ---------------------------------------------------------------------------
+// Windows DLL import/export.
+
+// Whether gflags library is a DLL.
+//
+// Set to 1 by default when the shared gflags library was built on Windows.
+// Must be overwritten when this header file is used with the optionally also
+// built static library instead; set by CMake's INTERFACE_COMPILE_DEFINITIONS.
+#ifndef GFLAGS_IS_A_DLL
+#  define GFLAGS_IS_A_DLL 1
+#endif
+
+// We always want to import the symbols of the gflags library.
+#ifndef GFLAGS_DLL_DECL
+#  if GFLAGS_IS_A_DLL && defined(_MSC_VER)
+#    define GFLAGS_DLL_DECL __declspec(dllimport)
+#  elif defined(__GNUC__) && __GNUC__ >= 4
+#    define GFLAGS_DLL_DECL __attribute__((visibility("default")))
+#  else
+#    define GFLAGS_DLL_DECL
+#  endif
+#endif
+
+// We always want to import variables declared in user code.
+#ifndef GFLAGS_DLL_DECLARE_FLAG
+#  if GFLAGS_IS_A_DLL && defined(_MSC_VER)
+#    define GFLAGS_DLL_DECLARE_FLAG __declspec(dllimport)
+#  elif defined(__GNUC__) && __GNUC__ >= 4
+#    define GFLAGS_DLL_DECLARE_FLAG __attribute__((visibility("default")))
+#  else
+#    define GFLAGS_DLL_DECLARE_FLAG
+#  endif
+#endif
+
+// ---------------------------------------------------------------------------
+// Flag types
+#include <string>
+#if 1
+#  include <stdint.h>    // the normal place uint32_t is defined
+#elif 1
+#  include <sys/types.h> // the normal place u_int32_t is defined
+#elif 1
+#  include <inttypes.h>  // a third place for uint32_t or u_int32_t
+#endif
+
+namespace GFLAGS_NAMESPACE {
+
+#if 1 // C99
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 0 // BSD
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 0 // Windows
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#  error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+} // namespace GFLAGS_NAMESPACE
+
+
+namespace fLS {
+
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+} // namespace fLS
+
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  /* We always want to import declared variables, dll or no */ \
+  namespace fL##shorttype { extern GFLAGS_DLL_DECLARE_FLAG type FLAGS_##name; } \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_bool(name) \
+  DECLARE_VARIABLE(bool, B, name)
+
+#define DECLARE_int32(name) \
+  DECLARE_VARIABLE(::GFLAGS_NAMESPACE::int32, I, name)
+
+#define DECLARE_uint32(name) \
+  DECLARE_VARIABLE(::GFLAGS_NAMESPACE::uint32, U, name)
+
+#define DECLARE_int64(name) \
+  DECLARE_VARIABLE(::GFLAGS_NAMESPACE::int64, I64, name)
+
+#define DECLARE_uint64(name) \
+  DECLARE_VARIABLE(::GFLAGS_NAMESPACE::uint64, U64, name)
+
+#define DECLARE_double(name) \
+  DECLARE_VARIABLE(double, D, name)
+
+#define DECLARE_string(name) \
+  /* We always want to import declared variables, dll or no */ \
+  namespace fLS { \
+  extern GFLAGS_DLL_DECLARE_FLAG ::fLS::clstring& FLAGS_##name; \
+  } \
+  using fLS::FLAGS_##name
+
+#endif // GFLAGS_DECLARE_H_
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/gflags_gflags.h b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/gflags_gflags.h
new file mode 100644
index 0000000000000000000000000000000000000000..3780704e1caa005e3102c189291b73a1972fa080
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/gflags/gflags_gflags.h
@@ -0,0 +1,99 @@
+// Copyright (c) 2014, Andreas Schuh
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// -----------------------------------------------------------------------------
+// Imports the gflags library symbols into an alternative/deprecated namespace.
+
+#ifndef GFLAGS_GFLAGS_H_
+#  error The internal header gflags_gflags.h may only be included by gflags.h
+#endif
+
+#ifndef GFLAGS_NS_GFLAGS_H_
+#define GFLAGS_NS_GFLAGS_H_
+
+
+namespace gflags {
+
+
+using GFLAGS_NAMESPACE::int32;
+using GFLAGS_NAMESPACE::uint32;
+using GFLAGS_NAMESPACE::int64;
+using GFLAGS_NAMESPACE::uint64;
+
+using GFLAGS_NAMESPACE::RegisterFlagValidator;
+using GFLAGS_NAMESPACE::CommandLineFlagInfo;
+using GFLAGS_NAMESPACE::GetAllFlags;
+using GFLAGS_NAMESPACE::ShowUsageWithFlags;
+using GFLAGS_NAMESPACE::ShowUsageWithFlagsRestrict;
+using GFLAGS_NAMESPACE::DescribeOneFlag;
+using GFLAGS_NAMESPACE::SetArgv;
+using GFLAGS_NAMESPACE::GetArgvs;
+using GFLAGS_NAMESPACE::GetArgv;
+using GFLAGS_NAMESPACE::GetArgv0;
+using GFLAGS_NAMESPACE::GetArgvSum;
+using GFLAGS_NAMESPACE::ProgramInvocationName;
+using GFLAGS_NAMESPACE::ProgramInvocationShortName;
+using GFLAGS_NAMESPACE::ProgramUsage;
+using GFLAGS_NAMESPACE::VersionString;
+using GFLAGS_NAMESPACE::GetCommandLineOption;
+using GFLAGS_NAMESPACE::GetCommandLineFlagInfo;
+using GFLAGS_NAMESPACE::GetCommandLineFlagInfoOrDie;
+using GFLAGS_NAMESPACE::FlagSettingMode;
+using GFLAGS_NAMESPACE::SET_FLAGS_VALUE;
+using GFLAGS_NAMESPACE::SET_FLAG_IF_DEFAULT;
+using GFLAGS_NAMESPACE::SET_FLAGS_DEFAULT;
+using GFLAGS_NAMESPACE::SetCommandLineOption;
+using GFLAGS_NAMESPACE::SetCommandLineOptionWithMode;
+using GFLAGS_NAMESPACE::FlagSaver;
+using GFLAGS_NAMESPACE::CommandlineFlagsIntoString;
+using GFLAGS_NAMESPACE::ReadFlagsFromString;
+using GFLAGS_NAMESPACE::AppendFlagsIntoFile;
+using GFLAGS_NAMESPACE::ReadFromFlagsFile;
+using GFLAGS_NAMESPACE::BoolFromEnv;
+using GFLAGS_NAMESPACE::Int32FromEnv;
+using GFLAGS_NAMESPACE::Uint32FromEnv;
+using GFLAGS_NAMESPACE::Int64FromEnv;
+using GFLAGS_NAMESPACE::Uint64FromEnv;
+using GFLAGS_NAMESPACE::DoubleFromEnv;
+using GFLAGS_NAMESPACE::StringFromEnv;
+using GFLAGS_NAMESPACE::SetUsageMessage;
+using GFLAGS_NAMESPACE::SetVersionString;
+using GFLAGS_NAMESPACE::ParseCommandLineNonHelpFlags;
+using GFLAGS_NAMESPACE::HandleCommandLineHelpFlags;
+using GFLAGS_NAMESPACE::AllowCommandLineReparsing;
+using GFLAGS_NAMESPACE::ReparseCommandLineNonHelpFlags;
+using GFLAGS_NAMESPACE::ShutDownCommandLineFlags;
+using GFLAGS_NAMESPACE::FlagRegisterer;
+
+#ifndef SWIG
+using GFLAGS_NAMESPACE::ParseCommandLineFlags;
+#endif
+
+} // namespace gflags
+#endif // GFLAGS_NS_GFLAGS_H_
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/infer_engine.h b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/infer_engine.h
new file mode 100644
index 0000000000000000000000000000000000000000..cc7c5e0aa74b720099c01808be36fd4586ceec12
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/infer_engine.h
@@ -0,0 +1,45 @@
+#ifndef BENCHMARK_INFER_ENGINE_H
+#define BENCHMARK_INFER_ENGINE_H
+#include "util.h"
+#include "acl/acl_base.h"
+#include "post_process.h"
+#include <string>
+#include <vector>
+#include <unordered_map>
+#include <mutex>
+#include <queue>
+#include <memory>
+#include "acl/acl_mdl.h"
+#include <thread>
+#include <condition_variable>
+
+aclError InitContext(const char* configPath = "");
+aclError UnInitContext();
+aclError LoadModel();
+aclError InitInput(std::vector<std::string> files);
+aclError Inference();
+aclError PostProcess();
+aclError DvppSetup();
+aclError DvppInitInput(std::vector<std::string> files);
+aclError UnloadModel();
+void getImgResizeShape();
+acldvppRoiConfig* InitCropRoiConfig(uint32_t width, uint32_t height);
+
+/*
+ * @brief : Initialize the center-crop configuration.
+ * @param [in] uint32_t newInputWidth : input image width (after proportional resizing)
+ * @param [in] uint32_t newInputHeight : input image height (after proportional resizing)
+ * @param [in] uint32_t modelInputWidth : width fed to the model after center cropping
+ * @param [in] uint32_t modelInputHeight : height fed to the model after center cropping
+ * @return : acldvppRoiConfig : center-crop configuration
+ */
+acldvppRoiConfig* InitCropCenterRoiConfig(uint32_t newInputWidth, uint32_t newInputHeight, uint32_t modelInputWidth, uint32_t modelInputHeight);
+
+/*
+ * @brief : Scale the shorter of width/height to RESIZE_MIN (256), and scale the longer side proportionally.
+ * @param [in] uint32_t width : input image width
+ * @param [in] uint32_t height : input image height
+ * @param [out] uint32_t &newInputWidth : proportionally resized width
+ * @param [out] uint32_t &newInputHeight : proportionally resized height
+ */
+void SmallSizeAtLeast(uint32_t width, uint32_t height, uint32_t& newInputWidth, uint32_t& newInputHeight);
+#endif
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/post_process.h b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/post_process.h
new file mode 100644
index 0000000000000000000000000000000000000000..d413c66567109cb8647cf7b385ad66478f217af8
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/post_process.h
@@ -0,0 +1,5 @@
+#ifndef BENCHMARK_POST_PROCESS_H
+#define BENCHMARK_POST_PROCESS_H
+#include "util.h"
+aclError SaveBinPostprocess();
+#endif
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/util.h b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/util.h
new file mode 100644
index 0000000000000000000000000000000000000000..734db74083d316e3c6fdc52c3f5caf0548789684
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/inc/util.h
@@ -0,0 +1,157 @@
+#ifndef BENCHMARK_UTIL_H
+#define BENCHMARK_UTIL_H
+#include <string>
+#include <vector>
+#include <iostream>
+#include "acl/acl_base.h"
+#include "acl/acl_mdl.h"
+#include "acl/acl_rt.h"
+#include "acl/ops/acl_dvpp.h"
+#include <unordered_map>
+#include <sys/time.h>
+#include <cstdio>
+#include <cstring>
+#include <memory>
+#include <map>
+
+// Self-defined error codes.
+const int ACL_ERROR_PATH_INVALID = 101;
+const int ACL_ERROR_CREATE_DATASET_FAILED = 102;
+const int ACL_ERROR_PARSE_PARAM_FAILED = 103;
+const int ACL_ERROR_DVPP_ERROR = 104;
+const int ACL_ERROR_OTHERS = 255;
+#define MODEL_INPUT_NUM_MAX (4)
+#define MODEL_INPUT_OUTPUT_NUM_MAX (16)
+
+#define LOG(fmt, args...)    \
+    do {                     \
+        printf(fmt, ##args); \
+    } while(0)
+
+
+#define START_PROC                  \
+    struct timeval start, end;      \
+    long long time_use;             \
+    do {                            \
+        gettimeofday(&start, NULL); \
+    } while (0);
+
+
+#define END_PROC                                                                  \
+    do {                                                                          \
+        gettimeofday(&end, NULL);                                                 \
+        time_use = (end.tv_sec-start.tv_sec)*1000000+(end.tv_usec-start.tv_usec); \
+        LOG("time use: %lld us\n", time_use);                                     \
+    } while (0);
+
+
+#define CHECK_ACL_RET(msg, ret)                          \
+    if (ret != ACL_ERROR_NONE) {                         \
+        std::cout << msg << ", ret "<< ret << std::endl; \
+        return ret;                                      \
+    }
+
+
+#define CHECK_WITH_RET(condition, ret, msg)              \
+    if(!(condition)) {                                   \
+        std::cout << msg << ", ret "<< ret << std::endl; \
+        return ret;                                      \
+    }
+
+
+#define CHECK_RET(ret)              \
+    if(ret != ACL_ERROR_NONE) {     \
+        return ret;                 \
+    }
+
+bool FolderExists(std::string foldname);
+
+bool FileExists(std::string filename);
+
+char* ReadBinFile(std::string fileName, uint32_t& fileSize);
+
+aclError GetFiles(std::string path, std::vector<std::string>& files);
+
+aclError FreeDevMemory(aclmdlDataset* dataset);
+
+aclError DestroyDatasetResurce(aclmdlDataset* dataset, uint32_t flag);
+
+void* ReadFile(std::string fileLocation, uint64_t &fileSize);
+
+struct DvppConfig {
+    uint32_t resizedWidth;
+    uint32_t resizedHeight;
+    std::unordered_map<std::string, std::pair<uint32_t, uint32_t>> imgSizes;
+};
+
+struct ModelInfo
+{
+    aclFormat Format;
+    const char* Name;
+    size_t size;
+    size_t dimCount;
+    int64_t dims[ACL_MAX_DIM_CNT];
+    aclDataType Type;
+};
+
+struct Config {
+    std::string om;
+    std::string dataDir;
+    std::string outDir;
+    DvppConfig dvppConfig;
+    bool useDvpp;
+    size_t batchSize;
+    ModelInfo inputInfo[MODEL_INPUT_OUTPUT_NUM_MAX];
+    ModelInfo outputInfo[MODEL_INPUT_OUTPUT_NUM_MAX];
+    size_t inputNum;
+    size_t outputNum;
+    aclmdlDesc* modelDesc;
+    uint32_t modelId;
+    aclrtContext context;
+    char* modelData_ptr;
+    void* devMem_ptr;
+    void* weightMem_ptr;
+    std::string imgType;
+    std::string modelType;
+    uint32_t deviceId;
+    uint32_t loopNum;
+    std::string framework;
+    int64_t curOutputSize[MODEL_INPUT_OUTPUT_NUM_MAX];
+    Config()
+    {
+        om = "";
+        dataDir = "";
+        batchSize = 0;
+        useDvpp = 0;
+        inputNum = 0;
+        outputNum = 0;
+        modelDesc = nullptr;
+        modelId = 0;
+        context = nullptr;
+        imgType = "";
+        modelType = "";
+        deviceId = 0;
+        loopNum = 1;
+        framework = "caffe";
+        outDir = "../../results";
+        modelData_ptr = nullptr;
+        devMem_ptr = nullptr;
+        weightMem_ptr = nullptr;
+    }
+};
+
+struct Resnet50Result {
+    int top1;
+    int top5;
+    int total;
+    std::unordered_map<std::string, int> cmp;
+    Resnet50Result(): top1(0), top5(0), total(0) {};
+};
+
+struct DataFrame {
+    std::vector<std::string> fileNames;
+    aclmdlDataset* dataset;
+};
+
+#endif
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/infer_engine.cpp b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/infer_engine.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..7f2b8507a510844689239d71443d8b4b4480be3d
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/infer_engine.cpp
@@ -0,0 +1,728 @@
+/* *
+* Copyright 2020 Huawei Technologies Co., Ltd
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and +* limitations under the License. +* */ + +#include "acl/acl.h" +#include "infer_engine.h" +#include "acl/acl_mdl.h" +#include "acl/acl_rt.h" +#include "acl/ops/acl_dvpp.h" +#include +#include +using namespace std; + +std::unordered_map dvppTime; +extern Resnet50Result resnet50Res; +extern Config cfg; +extern aclError ret; +extern int processedCnt; +extern long long inferTime; +aclrtContext context; +uint32_t modelId; +aclmdlDesc *modelDesc; +std::vector files; +DataFrame inputDataframe; +DataFrame outputDataframe; +aclDataBuffer *yoloImgInfo; +aclrtStream stream = nullptr; +acldvppChannelDesc *dvpp_channel_desc = nullptr; +std::unordered_map> imgSizes; + +#define RESIZE_MIN 256 +#define NUM_2 2 +#define NUM_3 3 +#define NUM_16 16 +#define NUM_128 128 + +uint32_t resizedWidth; +uint32_t resizedHeight; +uint32_t resizedWidthAligned; +uint32_t resizedHeightAligned; +uint32_t resizedOutputBufferSize; + +void getImgResizeShape() +{ + if (ACL_FORMAT_NCHW == cfg.inputInfo[0].Format) { + resizedHeight = cfg.inputInfo[0].dims[NUM_2]; + resizedWidth = cfg.inputInfo[0].dims[NUM_3]; + } else if (ACL_FORMAT_NHWC == cfg.inputInfo[0].Format) { + resizedHeight = cfg.inputInfo[0].dims[1]; + resizedWidth = cfg.inputInfo[0].dims[NUM_2]; + } + return; +} + +aclError InitContext(const char *configPath) +{ + LOG("context init start\n"); + ret = aclInit(configPath); + CHECK_ACL_RET("acl init failed", ret); + + ret = aclrtSetDevice(cfg.deviceId); + CHECK_ACL_RET("open device failed ret", ret); + + ret = aclrtCreateContext(&context, cfg.deviceId); + CHECK_ACL_RET("create context failed", ret); + + cfg.context = context; + LOG("context init done\n"); + return ACL_ERROR_NONE; +} + +aclError UnInitContext() +{ + ret = aclrtDestroyContext(context); + CHECK_ACL_RET("destory context failed", ret); + LOG("destory context done\n"); + + ret = aclrtResetDevice(cfg.deviceId); + CHECK_ACL_RET("reset device failed", ret); + + ret = aclFinalize(); + CHECK_ACL_RET("finalize failed", ret); + LOG("reset device done\n"); + + return ACL_ERROR_NONE; +} + +aclError LoadModel() +{ + LOG("load model start\n"); + size_t memSize; + size_t weightsize; + uint32_t modelSize = 0; + std::string modelPath = cfg.om; + + cfg.modelData_ptr = ReadBinFile(modelPath, modelSize); + CHECK_WITH_RET(cfg.modelData_ptr != nullptr, ACL_ERROR_READ_MODEL_FAILURE, "can't read model"); + + aclError ret = aclmdlQuerySizeFromMem(cfg.modelData_ptr, modelSize, &memSize, &weightsize); + CHECK_ACL_RET("query memory size failed", ret); + + ret = aclrtMalloc(&(cfg.devMem_ptr), memSize, ACL_MEM_MALLOC_HUGE_ONLY); + CHECK_ACL_RET("alloc dev_ptr failed", ret); + ret = aclrtMalloc(&(cfg.weightMem_ptr), weightsize, ACL_MEM_MALLOC_HUGE_ONLY); + CHECK_ACL_RET("alloc weight_ptr failed", ret); + + ret = aclmdlLoadFromMemWithMem(cfg.modelData_ptr, modelSize, &modelId, cfg.devMem_ptr, memSize, cfg.weightMem_ptr, + weightsize); + CHECK_ACL_RET("load model from memory failed", ret); + LOG("Load model success. 
memSize: %lu, weightSize: %lu.\n", memSize, weightsize); + + modelDesc = aclmdlCreateDesc(); + CHECK_WITH_RET(modelDesc != nullptr, ACL_ERROR_READ_MODEL_FAILURE, "create model desc failed"); + ret = aclmdlGetDesc(modelDesc, modelId); + CHECK_ACL_RET("get model desc failed", ret); + + cfg.modelDesc = modelDesc; + cfg.modelId = modelId; + + LOG("load model done\n"); + return ACL_ERROR_NONE; +} + +aclError DvppSetup() +{ + ret = aclrtSetCurrentContext(context); + if (ret != ACL_ERROR_NONE) { + LOG("Set context failed\n"); + return ret; + } + + ret = aclrtCreateStream(&stream); + if (ret != ACL_ERROR_NONE) { + LOG("create dvpp stream failed\n"); + return ret; + } + + dvpp_channel_desc = acldvppCreateChannelDesc(); + if (dvpp_channel_desc == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("create dvpp channel desc failed\n"); + return ret; + } + + ret = acldvppCreateChannel(dvpp_channel_desc); + if (ret != ACL_ERROR_NONE) { + LOG("create dvpp channel failed\n"); + return ret; + } + + imgSizes = cfg.dvppConfig.imgSizes; + + resizedWidthAligned = (resizedWidth + 15) / NUM_16 * NUM_16; + resizedHeightAligned = (resizedHeight + 1) / NUM_2 * NUM_2; + + resizedOutputBufferSize = resizedWidthAligned * resizedHeightAligned * NUM_3 / NUM_2; + LOG("resizedWidth %d resizedHeight %d resizedWidthAligned %d resizedHeightAligned %d resizedOutputBufferSize %d\n", + resizedWidth, resizedHeight, resizedWidthAligned, resizedHeightAligned, resizedOutputBufferSize); + + return ACL_ERROR_NONE; +} + +/* + * @brief : 生成dvpp图像描述信息 + * @param [in] void *dataDev : 码流buffer信息. + * @param [in] acldvppPixelFormat format: 图像格式 + * @param [in] uint32_t width : 宽度 + * @param [in] uint32_t height: 高度 + * @param [in] uint32_t widthStride : 宽度对齐. + * @param [in] uint32_t heightStride: 高度对齐. + * @param [in] uint32_t size: 码流大小. 
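+ * @note : dataDev is expected to be DVPP device memory; the callers in this file allocate it with acldvppMalloc.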
+ * @return : acldvppPicDesc:图像描述信息 + */ +acldvppPicDesc *createDvppPicDesc(void *dataDev, acldvppPixelFormat format, uint32_t width, uint32_t height, + uint32_t widthStride, uint32_t heightStride, uint32_t size) +{ + acldvppPicDesc *picDesc = acldvppCreatePicDesc(); + if (picDesc == nullptr) { + LOG("failed to create pic desc\n"); + return nullptr; + } + + ret = acldvppSetPicDescData(picDesc, dataDev); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc data\n"); + return nullptr; + } + ret = acldvppSetPicDescSize(picDesc, size); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc size\n"); + return nullptr; + } + + ret = acldvppSetPicDescFormat(picDesc, format); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc format\n"); + return nullptr; + } + + ret = acldvppSetPicDescWidth(picDesc, width); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc width\n"); + return nullptr; + } + + ret = acldvppSetPicDescHeight(picDesc, height); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc height\n"); + return nullptr; + } + + ret = acldvppSetPicDescWidthStride(picDesc, widthStride); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc widthStride\n"); + return nullptr; + } + + ret = acldvppSetPicDescHeightStride(picDesc, heightStride); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc heightStride\n"); + return nullptr; + } + return picDesc; +} + +aclError InitInput(std::vector files) +{ + LOG("init input batch %d start\n", processedCnt); + ret = aclrtSetCurrentContext(context); + if (ret != ACL_ERROR_NONE) { + LOG("Set context failed, ret[%d]\n", ret); + return ret; + } + + size_t modelInputSize = cfg.inputInfo[0].size; + size_t imgSize = modelInputSize / cfg.batchSize; + + void *dst; + ret = aclrtMalloc(&dst, modelInputSize, ACL_MEM_MALLOC_NORMAL_ONLY); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc device failed, ret[%d]\n", ret); + return ret; + } + LOG("dst = %p, size = %ld\n", dst, modelInputSize); + + char *ptr = (char *)dst; + inputDataframe.fileNames.clear(); + for (int i = 0; i < files.size(); i++) { + + std::string fileLocation = cfg.dataDir + "/" + files[i]; + FILE *pFile = fopen(fileLocation.c_str(), "r"); + + if (pFile == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("open file %s failed\n", fileLocation.c_str()); + return ret; + } + + fseek(pFile, 0, SEEK_END); + size_t fileSize = ftell(pFile); + + if (fileSize > imgSize) { + ret = ACL_ERROR_OTHERS; + LOG("%s fileSize %lu * batch %lu don't match with model inputSize %lu\n", fileLocation.c_str(), + fileSize, cfg.batchSize, modelInputSize); + return ret; + } + + void *buff = nullptr; + ret = aclrtMallocHost(&buff, fileSize); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc host buff failed[%d]\n", ret); + return ret; + } + + rewind(pFile); + fread(buff, sizeof(char), fileSize, pFile); + fclose(pFile); + + void *dstTmp = (void *)ptr; + ret = aclrtMemcpy(dstTmp, fileSize, buff, fileSize, ACL_MEMCPY_HOST_TO_DEVICE); + ptr += fileSize; + LOG("input file: %s, memory addr: %p, file size: %ld\n",files[i].c_str(), dstTmp, fileSize); + if (ret != ACL_ERROR_NONE) { + LOG("init input %d, Copy host to device failed, ret[%d]\n", i, ret); + LOG("input addr %p, len %ld\n", dstTmp, fileSize); + aclrtFreeHost(buff); + return ret; + } + + aclrtFreeHost(buff); + inputDataframe.fileNames.push_back(files[i]); + } + + aclDataBuffer *inputData = aclCreateDataBuffer((void *)dst, modelInputSize); + if (inputData == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("aclCreateDataBuffer failed\n"); + return ret; + } + + aclmdlDataset 
*input = aclmdlCreateDataset(); + ret = aclmdlAddDatasetBuffer(input, inputData); + if (ret != ACL_ERROR_NONE) { + LOG("ACL_ModelInputDataAdd failed, ret[%d]\n", ret); + aclmdlDestroyDataset(input); + return ret; + } + + inputDataframe.dataset = input; + LOG("init input batch %d done\n", processedCnt); + return ACL_ERROR_NONE; +} + +/* + * @brief : 获取图像宽高 + * @param [in] void* buff : 输入码流地址. + * @param [in] uint32_t fileSize : 输入码流长度 + * @param [in] std::string fileLocation : 输入文件路径. + * @param [in] uint32_t &W : 输入码流宽度. + * @param [in] uint32_t &H : 输入码流高度. + */ +void GetImageHW(void* buff, uint32_t fileSize, std::string fileLocation, uint32_t &W, uint32_t &H) +{ + int32_t components = 0; + ret = acldvppJpegGetImageInfo((void *)buff, fileSize, &W, &H, &components); + if (ret != ACL_ERROR_NONE) { + cout << "acldvppJpegGetImageInfo failed, ret " << ret << "filename: " << fileLocation.c_str() << endl; + } +} + +/* + * @brief : dvpp在推理中的预处理流程 + * @param [in] string fileLocation : 输入文件路径. + * @param [in] char *&ptr : 输出buffer指针. + * @return : ACL_ERROR_NONE:预处理成功, 其他:预处理失败 + */ +aclError DVPP_Resnet50(std::string fileLocation, char *&ptr) +{ + // 1 获取输入码流 + uint32_t W, H, W_Aligned, H_Aligned, outputBuffSize; + void *decodeInput = nullptr; + void *decodeOutput = nullptr; + acldvppPicDesc *decodeOutputDesc = nullptr; + uint64_t fileSize; + void *buff = ReadFile(fileLocation, fileSize); + if( buff == nullptr) { + LOG("read pic failed\n"); + return 1; + } + + ret = acldvppMalloc(&decodeInput, fileSize); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc dvpp in buff failed[%d]\n", ret); + return ret; + } + ret = aclrtMemcpy(decodeInput, fileSize, buff, fileSize, ACL_MEMCPY_HOST_TO_DEVICE); + if (ret != ACL_ERROR_NONE) { + LOG("copy host to device failed[%d]\n", ret); + return ret; + } + + // 2 获取解码输出描述信息 + GetImageHW(buff, fileSize, fileLocation, W, H); + W_Aligned = (W + 127) / NUM_128 * NUM_128; + H_Aligned = (H + 15) / NUM_16 * NUM_16; + outputBuffSize = W_Aligned * H_Aligned * NUM_3 / NUM_2; + ret = acldvppMalloc(&decodeOutput, outputBuffSize); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc decodeOutput buff failed[%d]\n", ret); + return ret; + } + decodeOutputDesc = createDvppPicDesc(decodeOutput, PIXEL_FORMAT_YUV_SEMIPLANAR_420, W, H, W_Aligned, H_Aligned, + outputBuffSize); + if (decodeOutputDesc == nullptr) { + LOG("create jpeg_output_desc failed\n"); + return 1; + } + LOG("file[%s] jpeg picDesc info: W=%d, H=%d, W_Aligned=%d, H_Aligned=%d, outBufSize=%d, format=%d\n", \ + fileLocation.c_str(),W, H, W_Aligned, H_Aligned, outputBuffSize, PIXEL_FORMAT_YUV_SEMIPLANAR_420); + + // 3 使用jpegd图像解码 + ret = acldvppJpegDecodeAsync(dvpp_channel_desc, decodeInput, fileSize, decodeOutputDesc, stream); + if (ret != ACL_ERROR_NONE) { + LOG(" dvppJpegDecodeAsync failed\n"); + return ret; + } + aclrtFreeHost(buff); + aclrtSynchronizeStream(stream); + + // 4 对jpegd解码的图片进行原分辨率抠图及短边256等比例缩放 + acldvppRoiConfig *cropConfig = nullptr; + acldvppPicDesc *cropOutputDesc = nullptr; + // 设置对解码后的图片进行原图裁剪,目的是为了减少因jpegd解码后对齐的无效数据对图像精度的影响 + cropConfig = InitCropRoiConfig(W, H); + + uint32_t newInputWidth = 0; + uint32_t newInputHeight = 0; + void *cropOutBufferDev = nullptr; + // 宽和高较短的一条边缩放至256,较长边做等比例缩放。对齐至256目的是为了给224x224中心抠图做准备,短边256对齐,获得对齐后的宽高 + SmallSizeAtLeast(W, H, newInputWidth, newInputHeight); + uint32_t cropOutputWidthStride = (newInputWidth + (NUM_16 - 1)) / NUM_16 * NUM_16; + uint32_t cropOutputHeightStride = (newInputHeight + (NUM_2 - 1)) / NUM_2 * NUM_2; + uint32_t cropOutBufferSize = cropOutputWidthStride * 
cropOutputHeightStride * NUM_3 / NUM_2; + ret = acldvppMalloc(&cropOutBufferDev, cropOutBufferSize); + if (ret != ACL_ERROR_NONE) { + std::cout << "[ERROR][Vision] AcldvppMalloc cropOutBufferDev_ failed, ret = " << ret << " cropOutBufferSize_ = " + << cropOutBufferSize << endl; + return ret; + } + cropOutputDesc = createDvppPicDesc(cropOutBufferDev, PIXEL_FORMAT_YUV_SEMIPLANAR_420, newInputWidth, newInputHeight, + cropOutputWidthStride, cropOutputHeightStride, cropOutBufferSize); + if (cropOutputDesc == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("create cropOutputDesc failed\n"); + return ret; + } + + ret = acldvppVpcCropAsync(dvpp_channel_desc, decodeOutputDesc, cropOutputDesc, cropConfig, stream); + if (ret != ACL_ERROR_NONE) { + std::cout << "[ERROR][Vision] acldvppVpcCropAsync failed, ret = " << ret << std::endl; + return ret; + } + aclrtSynchronizeStream(stream); + + // 5 对等比例缩放后的图片做224x224中心抠图,中心抠图后的数据会发送给aipp进行YUV转RGB格式转换。需要注意:中心抠图后的输出格式和aipp + // 的输入格式需要保持一致。 + acldvppRoiConfig *centerCropConfig = nullptr; + acldvppPicDesc *centerCropOutputDesc = nullptr; // resize output desc + centerCropConfig = InitCropCenterRoiConfig(newInputWidth, newInputHeight, resizedWidth, resizedHeight); + void *vpcOutBufferDev = nullptr; + uint32_t vpcOutBufferSize = resizedWidthAligned * resizedHeightAligned * NUM_3 / NUM_2; + + vpcOutBufferDev = (void *)ptr; + centerCropOutputDesc = createDvppPicDesc(vpcOutBufferDev, PIXEL_FORMAT_YUV_SEMIPLANAR_420, resizedWidth, + resizedHeight, resizedWidthAligned, resizedHeightAligned, + vpcOutBufferSize); + if (centerCropOutputDesc == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("create centerCropOutputDesc failed\n"); + return ret; + } + + ret = acldvppVpcCropAsync(dvpp_channel_desc, cropOutputDesc, centerCropOutputDesc, centerCropConfig, stream); + if (ret != ACL_ERROR_NONE) { + std::cout << "[ERROR][Vision] acldvppVpcCropAsync failed, ret = " << ret << "fileName: " << fileLocation.c_str() << std::endl; + return ret; + } + + ptr += vpcOutBufferSize; + aclrtSynchronizeStream(stream); + + // 6 释放资源 + acldvppFree(decodeInput); + acldvppFree(decodeOutput); + acldvppFree(cropOutBufferDev); + acldvppDestroyPicDesc(decodeOutputDesc); + acldvppDestroyPicDesc(cropOutputDesc); + acldvppDestroyPicDesc(centerCropOutputDesc); + acldvppDestroyRoiConfig(cropConfig); + acldvppDestroyRoiConfig(centerCropConfig); + return ret; +} + +aclError DvppInitInput(std::vector files) +{ + struct timeval process_start; + struct timeval process_end; + std::string funcName; + long long costTime; + funcName = "DvppTotalProcess"; + gettimeofday(&process_start, NULL); + + void *dst; + ret = acldvppMalloc(&dst, cfg.inputInfo[0].size); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc device failed, ret[%d]\n", ret); + return ret; + } + + char *ptr = (char *)dst; + inputDataframe.fileNames.clear(); + + for (int i = 0; i < files.size(); i++) { + std::string fileLocation = cfg.dataDir + "/" + files[i]; + ret = DVPP_Resnet50(fileLocation, ptr); + if(ret != ACL_ERROR_NONE) { + LOG("dvpp config failed"); + return ret; + } + inputDataframe.fileNames.push_back(files[i]); + } + + funcName = "DvppTotalProcess"; + gettimeofday(&process_end, NULL); + costTime = (process_end.tv_sec - process_start.tv_sec) * 1000000 + (process_end.tv_usec - process_start.tv_usec); + dvppTime[funcName] += costTime; + + aclmdlDataset *input = aclmdlCreateDataset(); + aclDataBuffer *inputData = aclCreateDataBuffer((void *)dst, cfg.inputInfo[0].size); + + if (inputData == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("aclCreateDataBuffer 
failed\n"); + return ret; + } + + ret = aclmdlAddDatasetBuffer(input, inputData); + + if (ret != ACL_ERROR_NONE) { + LOG("ACL_ModelInputDataAdd failed, ret[%d]\n", ret); + aclmdlDestroyDataset(input); + return ret; + } + + inputDataframe.dataset = input; + return ACL_ERROR_NONE; +} + +acldvppRoiConfig *InitCropRoiConfig(uint32_t width, uint32_t height) +{ + uint32_t right = 0; + uint32_t bottom = 0; + acldvppRoiConfig *cropConfig; + + if (width % NUM_2 == 0) { + right = width - 1; + } else { + right = width; + } + + if (height % NUM_2 == 0) { + bottom = height - 1; + } else { + bottom = height; + } + + cropConfig = acldvppCreateRoiConfig(0, right, 0, bottom); + if (cropConfig == nullptr) { + std::cout << "[ERROR][Vision] acldvppCreateRoiConfig failed " << std::endl; + return nullptr; + } + + return cropConfig; +} + +acldvppRoiConfig *InitCropCenterRoiConfig(uint32_t newInputWidth, uint32_t newInputHeight, uint32_t modelInputWidth, + uint32_t modelInputHeight) +{ + uint32_t left = 0; + uint32_t right = 0; + uint32_t top = 0; + uint32_t bottom = 0; + uint32_t amount_to_be_cropped_w = 0; + uint32_t amount_to_be_cropped_h = 0; + uint32_t left_half = 0; + uint32_t top_half = 0; + acldvppRoiConfig *centerCropConfig = nullptr; + + // 计算中心抠图起始点的坐标距离码流左边界和上边界的距离 + amount_to_be_cropped_w = newInputWidth - modelInputWidth; + left_half = amount_to_be_cropped_w / NUM_2; + amount_to_be_cropped_h = newInputHeight - modelInputHeight; + top_half = amount_to_be_cropped_h / NUM_2; + + // 保证起始点坐标为偶数 + left = (left_half % NUM_2 == 0) ? (amount_to_be_cropped_w / NUM_2) : (amount_to_be_cropped_w / NUM_2 + 1); + top = (top_half % NUM_2 == 0) ? (amount_to_be_cropped_h / NUM_2) : (amount_to_be_cropped_h / NUM_2 + 1); + + // 结束点为奇数 + right = left + modelInputWidth - 1; + bottom = top + modelInputHeight - 1; + + centerCropConfig = acldvppCreateRoiConfig(left, right, top, bottom); + if (centerCropConfig == nullptr) { + std::cout << "[ERROR][Vision] acldvppCreateRoiConfig failed " << std::endl; + return nullptr; + } + return centerCropConfig; +} + +void SmallSizeAtLeast(uint32_t width, uint32_t height, uint32_t &newInputWidth, uint32_t &newInputHeight) +{ + float scaleRatio = 0.0; + float inputWidth = 0.0; + float inputHeight = 0.0; + float resizeMin = 0.0; + bool minWidthFlag = false; + + inputWidth = (float)width; + inputHeight = (float)height; + resizeMin = (float)(RESIZE_MIN); + minWidthFlag = (width <= height) ? 
true : false; + + // 短边缩放为256,长边等比例缩放 + if (minWidthFlag == true) { + newInputWidth = resizeMin; + newInputHeight = (resizeMin / width) * inputHeight; + std::cout << "[INFO]scaleRatio: " << resizeMin / width << " inputWidth_: " << width << " newInputWidth: " << + newInputWidth << " inputHeight_: " << inputHeight << " newInputHeight_:" << newInputHeight << std::endl; + } else { + newInputWidth = (resizeMin / height) * width; + newInputHeight = resizeMin; + std::cout << "[INFO]scaleRatio: " << resizeMin / height << " width: " << width << " newInputWidth: " << + newInputWidth << " height: " << height << " newInputHeight:" << newInputHeight << std::endl; + } +} + +aclError Inference() +{ + LOG("inference batch %d start\n", processedCnt); + ret = aclrtSetCurrentContext(context); + + if (ret != ACL_ERROR_NONE) { + LOG("Set infer context failed\n"); + return ret; + } + + struct timeval startTmp, endTmp; + long long timeUse; + + if (inputDataframe.fileNames.size() == 0) { + ret = ACL_ERROR_OTHERS; + LOG("No file found\n"); + return ret; + } + + aclmdlDataset *output = aclmdlCreateDataset(); + if (output == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("Create Output Dataset failed\n"); + return ret; + } + + std::vector outputDevPtrs; + + for (size_t i = 0; i < cfg.outputNum; ++i) { + size_t buffer_size = cfg.outputInfo[i].size; + void *outputBuffer = nullptr; + ret = aclrtMalloc(&outputBuffer, (size_t)buffer_size, ACL_MEM_MALLOC_NORMAL_ONLY); + + if (ret != ACL_ERROR_NONE) { + LOG("Malloc output host failed, ret[%d]\n", ret); + return ret; + } + + outputDevPtrs.push_back(outputBuffer); + aclDataBuffer *outputData = aclCreateDataBuffer(outputBuffer, buffer_size); + + if (outputData == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("Create output data buffer failed\n"); + return ret; + } + + ret = aclmdlAddDatasetBuffer(output, outputData); + + if (ret != ACL_ERROR_NONE) { + LOG("Add output model dataset failed, ret[%d]\n", ret); + return ret; + } + } + + gettimeofday(&startTmp, NULL); + ret = aclmdlExecute(modelId, inputDataframe.dataset, output); + gettimeofday(&endTmp, NULL); + timeUse = (endTmp.tv_sec - startTmp.tv_sec) * 1000000 + (endTmp.tv_usec - startTmp.tv_usec); + LOG("inference time cost: %lld us\n", timeUse); + inferTime += timeUse; + + if (ret != ACL_ERROR_NONE) { + LOG("%s inference failed.\n", inputDataframe.fileNames[0].c_str()); + FreeDevMemory(inputDataframe.dataset); + aclmdlDestroyDataset(inputDataframe.dataset); + return ret; + } + + outputDataframe.fileNames = inputDataframe.fileNames; + outputDataframe.dataset = output; + + uint32_t dvppFlag = (cfg.useDvpp) ? 
1 : 0; + ret = DestroyDatasetResurce(inputDataframe.dataset, dvppFlag); + if (ret != ACL_ERROR_NONE) { + LOG("DestroyDatasetResurce failed\n"); + return ret; + } + + LOG("inference batch %d done\n", processedCnt); + return ACL_ERROR_NONE; +} + +aclError UnloadModel() +{ + LOG("unload model start\n"); + ret = aclmdlUnload(modelId); + CHECK_ACL_RET("unload model failed", ret); + LOG("unload model done\n"); + + aclmdlDestroyDesc(cfg.modelDesc); + + if (cfg.devMem_ptr != nullptr) { + aclrtFree(cfg.devMem_ptr); + cfg.devMem_ptr = nullptr; + } + + if (cfg.weightMem_ptr != nullptr) { + aclrtFree(cfg.weightMem_ptr); + cfg.weightMem_ptr = nullptr; + } + + if (cfg.modelData_ptr != nullptr) { + delete[] cfg.modelData_ptr; + cfg.modelData_ptr = nullptr; + } + return ACL_ERROR_NONE; +} diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/main.cpp b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/main.cpp new file mode 100644 index 0000000000000000000000000000000000000000..99a8d46ab89f9c616048a09f0f995e68502c4330 --- /dev/null +++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/main.cpp @@ -0,0 +1,493 @@ +/* * +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +* */ + +#include "util.h" +#include "infer_engine.h" +#include "acl/acl_base.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include "acl/acl.h" +#include "acl/acl_mdl.h" +#include "acl/acl_rt.h" +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace std; +using std::cout; +using std::endl; +Resnet50Result resnet50Res; +Config cfg; +aclError ret; +int processedCnt; +long long dataProcTime = 0; +long long inferTime = 0; +float avgTime = 0; +float avgPreTime = 0; + +extern std::unordered_map dvppTime; +extern DataFrame outputDataframe; + +void getCommandLineParam(int argc, char** argv, Config& config) +{ + while (1) { + int option_index = 0; + struct option long_options[] = + { + {"om", 1, 0, 'a'}, + {"dataDir", 1, 0, 'b'}, + {"outDir", 1, 0, 'c'}, + {"batchSize", 1, 0, 'd'}, + {"deviceId", 1, 0, 'e'}, + {"loopNum", 1, 0, 'f'}, + {"modelType", 1, 0, 'g'}, + {"imgType", 1, 0, 'h'}, + {"framework", 1, 0, 'i'}, + {"useDvpp", 1 , 0 , 'j'}, + {0, 0, 0, 0} + }; + + int c; + c = getopt_long(argc, argv, "a:b:c:e:f:j:k:l:m:n:u:t:", long_options, &option_index); + if (c == -1) { + break; + } + + switch (c) { + case 'a': + config.om = std::string(optarg); + printf("[INFO]om = %s\n", config.om.c_str()); + break; + case 'b': + config.dataDir = std::string(optarg); + printf("[INFO]dataDir = %s\n", config.dataDir.c_str()); + break; + case 'c': + config.outDir = std::string(optarg); + printf("[INFO]outDir = %s\n", config.outDir.c_str()); + break; + case 'd': + config.batchSize = atoi(optarg); + printf("[INFO]batchSize = %d\n", config.batchSize); + break; + case 'e': + config.deviceId = atoi(optarg); + printf("[INFO]deviceId = 
%d\n", config.deviceId); + break; + case 'f': + config.loopNum = atoi(optarg); + printf("[INFO]loopNum = %d\n", config.loopNum); + break; + case 'g': + config.modelType = std::string(optarg); + printf("[INFO]modelType = %s\n", config.modelType.c_str()); + break; + case 'h': + config.imgType = std::string(optarg); + printf("[INFO]imgType = %s\n", config.imgType.c_str()); + break; + case 'i': + config.framework = std::string(optarg); + printf("[INFO]framework = %s\n", config.framework.c_str()); + break; + case 'j': + config.useDvpp = atoi(optarg); + printf("[INFO]useDvpp = %d\n", config.useDvpp); + break; + default: + break; + } + } +} + +// 只校验必须的参数 +aclError ParseParams(int argc, char** argv, Config& config, std::string& errorMsg) +{ + getCommandLineParam(argc, argv, config); + + LOG("parase params start\n"); + + if (config.om.empty() || !FileExists(config.om)) { + LOG("om is empty\n"); + errorMsg = "om path is invalid"; + return ACL_ERROR_PARSE_PARAM_FAILED; + } + + if (config.dataDir.empty() || !FolderExists(config.dataDir)) { + errorMsg = "data Dir is invalid"; + return ACL_ERROR_PARSE_PARAM_FAILED; + } + LOG("dataDir %s \n", config.dataDir.c_str()); + + if (!config.outDir.empty() && !FolderExists(config.outDir)) { + LOG("output dir %s not exists, try to make dir.\n", config.outDir.c_str()); + mkdir(config.outDir.c_str(), 0755); + LOG("outDir %s \n", config.outDir.c_str()); + } + + if(config.batchSize <= 0){ + errorMsg = "batch Size should be > 0"; + return ACL_ERROR_PARSE_PARAM_FAILED; + } + LOG("batchSize %zd \n", config.batchSize); + + if (config.modelType.empty()) + { + LOG("FLAGS_modelType is empty\n"); + errorMsg = "modelType is invalid"; + return ACL_ERROR_PARSE_PARAM_FAILED; + } + LOG("modelType %s \n", config.modelType.c_str()); + + if (config.imgType.empty()) + { + LOG("imgType is empty\n"); + errorMsg = "imgType is invalid"; + return ACL_ERROR_PARSE_PARAM_FAILED; + } + LOG("imgType %s \n", config.imgType.c_str()); + LOG("useDvpp is %d \n", config.useDvpp); + LOG("parase params done\n"); + return ACL_ERROR_NONE; +} + +aclError Process() +{ + std::vector fileNames; + ret = GetFiles(cfg.dataDir, fileNames); + CHECK_RET(ret); + size_t fileNum = fileNames.size(); + LOG("fileNum:%zd\n",fileNames.size()); + struct timeval startTmp, endTmp; + + // 获取模型输入所需要的宽高 + getImgResizeShape(); + + if(cfg.useDvpp) { + ret = DvppSetup(); + CHECK_RET(ret); + } + + size_t inferCnt = 0; + size_t loopCnt = 0; + while(loopCnt < cfg.loopNum) { + LOG("loopCnt %d, loopNum %d\n", loopCnt, cfg.loopNum); + for(size_t i = 0; i < fileNum / cfg.batchSize; i++) { + gettimeofday(&startTmp, NULL); + std::vector batchFileNames; + for (int j = 0; j < cfg.batchSize; j++) { + batchFileNames.push_back(fileNames[i*cfg.batchSize+j]); + } + processedCnt++; + + if(cfg.useDvpp) { + ret = DvppInitInput(batchFileNames); + } else { + ret = InitInput(batchFileNames); + } + gettimeofday(&endTmp, NULL); + dataProcTime += (endTmp.tv_sec-startTmp.tv_sec)*1000000+(endTmp.tv_usec-startTmp.tv_usec); + CHECK_RET(ret); + + ret = Inference(); + CHECK_RET(ret); + + ret = SaveBinPostprocess(); + CHECK_RET(ret); + } + + if (fileNum % cfg.batchSize != 0) { + std::vector batchFileNames; + for(size_t i = (fileNum - fileNum % cfg.batchSize); i < fileNum; i++) { + batchFileNames.push_back(fileNames[i]); + } + + gettimeofday(&startTmp, NULL); + processedCnt++; + + if(cfg.useDvpp) { + ret = DvppInitInput(batchFileNames); + } else { + ret = InitInput(batchFileNames); + } + gettimeofday(&endTmp, NULL); + dataProcTime += 
(endTmp.tv_sec-startTmp.tv_sec) * 1000000 + (endTmp.tv_usec - startTmp.tv_usec); + CHECK_RET(ret); + + ret = Inference(); + CHECK_RET(ret); + + ret = SaveBinPostprocess(); + CHECK_RET(ret); + } + loopCnt++; + } + return ACL_ERROR_NONE; +} + +void SaveResult() +{ + ofstream outfile("test_perform_static.txt"); +#if 0 + std::string model_name; + int dex = (cfg.om).find_last_of("/"); + model_name = cfg.om.substr(dex+1); + + std:: string title = "model_name total batch top1 top5 pre_avg/ms pre_imgs/s infer_avg/ms infer_imgs/s mAP"; + outfile << title << endl; + + outfile << model_name << " "; + outfile << processedCnt*cfg.batchSize << " "; + outfile << cfg.batchSize << " "; + if (cfg.postprocessType == "resnet") { + outfile << 1.0*resnet50Res.top1/resnet50Res.total << " " << 1.0*resnet50Res.top5/resnet50Res.total << " "; + } else { + outfile << "NA" << " " << "NA" << " "; + } + + outfile << avgPreTime << " " << 1.0*1000/avgPreTime << " "; + outfile << avgTime << " " << 1.0*1000/avgTime << " "; + outfile << endl; +#endif + char tmpCh[256]; + memset(tmpCh, 0, sizeof(tmpCh)); + snprintf(tmpCh, sizeof(tmpCh), "NN inference cost average time: %4.3f ms %4.3f fps/s\n", + avgTime, (1.0 * 1000 / avgTime)); + outfile << tmpCh; + outfile.close(); +} + +aclError GetModelInputOutputInfo(Config& cfg) +{ + aclError ret; + std::ofstream outFile("modelInputOutputInfo", std::ios::trunc); + char tmpChr[256] = {0}; + + // 获取模型输入信息 + size_t inputNum = aclmdlGetNumInputs(cfg.modelDesc); + LOG("model input num %zd\n", inputNum); + snprintf(tmpChr, sizeof(tmpChr), "model input num %zd\n", inputNum); + outFile << tmpChr; + + cfg.inputNum = inputNum; + for (size_t i = 0; i < inputNum && i < MODEL_INPUT_OUTPUT_NUM_MAX; i++) { + size_t size = aclmdlGetInputSizeByIndex(cfg.modelDesc, i); + cfg.inputInfo[i].size = size; + LOG("model input[%zd] size %zd\n", i, cfg.inputInfo[i].size); + snprintf(tmpChr, sizeof(tmpChr), "model input[%zd] size %zd\n", i, cfg.inputInfo[i].size); + outFile << tmpChr; + + aclmdlIODims dims; + ret = aclmdlGetInputDims(cfg.modelDesc, i, &dims); + if (ret != ACL_ERROR_NONE) { + LOG("aclmdlGetInputDims fail ret %d\n", ret); + return 1; + } + + cfg.inputInfo[i].dimCount = dims.dimCount; + ret = aclrtMemcpy(cfg.inputInfo[i].dims , cfg.inputInfo[i].dimCount * sizeof(int64_t), dims.dims, + cfg.inputInfo[i].dimCount * sizeof(int64_t), ACL_MEMCPY_HOST_TO_HOST); + if (ret != ACL_ERROR_NONE) { + LOG("aclrtMemcpy fail ret %d line %d\n", ret, __LINE__); + return 1; + } + + LOG("model input[%zd] dimCount %zd\n", i, cfg.inputInfo[i].dimCount); + snprintf(tmpChr, sizeof(tmpChr), "model input[%zd] dimCount %zd\n", i, cfg.inputInfo[i].dimCount); + outFile << tmpChr; + for (size_t dimIdx = 0; dimIdx < cfg.inputInfo[i].dimCount; dimIdx++) { + LOG("model input[%zd] dim[%zd] info %ld\n", i, dimIdx, cfg.inputInfo[i].dims[dimIdx]); + snprintf(tmpChr, sizeof(tmpChr), "model input[%zd] dim[%zd] info %ld\n", + i, dimIdx, cfg.inputInfo[i].dims[dimIdx]); + outFile << tmpChr; + } + + cfg.inputInfo[i].Format = aclmdlGetInputFormat(cfg.modelDesc, i); + cfg.inputInfo[i].Type = aclmdlGetInputDataType(cfg.modelDesc, i); + + LOG("model input[%zd] format %d inputType %d\n", i, cfg.inputInfo[i].Format, cfg.inputInfo[i].Type); + snprintf(tmpChr, sizeof(tmpChr), "model input[%zd] format %d inputType %d\n", i, cfg.inputInfo[i].Format, + cfg.inputInfo[i].Type); + outFile << tmpChr; + + cfg.inputInfo[i].Name = aclmdlGetInputNameByIndex(cfg.modelDesc, i); + LOG("model input[%zd] name %s\n", i, cfg.inputInfo[i].Name); + snprintf(tmpChr, 
sizeof(tmpChr), "model input[%zd] name %s\n", i, cfg.inputInfo[i].Name); + outFile << tmpChr; + + size_t index; + ret = aclmdlGetInputIndexByName(cfg.modelDesc, cfg.inputInfo[i].Name, &index); + if (ret != ACL_ERROR_NONE) { + LOG("aclmdlGetInputIndexByName fail ret %d line %d\n", ret, __LINE__); + return 1; + } + + if (i != index) { + LOG("aclmdlGetInputNameByIndex not equal aclmdlGetInputIndexByName\n"); + return 1; + } else { + LOG("model input name %s is belone to input %zd\n", cfg.inputInfo[i].Name, index); + } + } + + // 获取模型输出信息 + size_t outputNum = aclmdlGetNumOutputs(cfg.modelDesc); + LOG("model output num %zd\n", outputNum); + snprintf(tmpChr, sizeof(tmpChr), "model output num %zd\n", outputNum); + outFile << tmpChr; + + cfg.outputNum = outputNum; + for (size_t i = 0; i < outputNum && i < MODEL_INPUT_OUTPUT_NUM_MAX; i++) { + size_t size = aclmdlGetOutputSizeByIndex(cfg.modelDesc, i); + cfg.outputInfo[i].size = size; + LOG("model output[%zd] size %zd\n", i, cfg.outputInfo[i].size); + snprintf(tmpChr, sizeof(tmpChr), "model output[%zd] size %zd\n", i, cfg.outputInfo[i].size); + outFile << tmpChr; + + aclmdlIODims dims; + ret = aclmdlGetOutputDims(cfg.modelDesc, i, &dims); + if (ret != ACL_ERROR_NONE) { + LOG("aclmdlGetOutputDims fail ret %d\n", ret); + return 1; + } + + cfg.outputInfo[i].dimCount = dims.dimCount; + ret = aclrtMemcpy(cfg.outputInfo[i].dims, cfg.outputInfo[i].dimCount * sizeof(int64_t), dims.dims, + cfg.outputInfo[i].dimCount * sizeof(int64_t), ACL_MEMCPY_HOST_TO_HOST); + if (ret != ACL_ERROR_NONE) { + LOG("aclrtMemcpy fail ret %d line %d\n", ret, __LINE__); + return 1; + } + + LOG("model output[%zd] dimCount %zd\n", i, cfg.outputInfo[i].dimCount); + snprintf(tmpChr, sizeof(tmpChr), "model output[%zd] dimCount %zd\n", i, cfg.outputInfo[i].dimCount); + outFile << tmpChr; + + for (size_t dimIdx = 0; dimIdx < cfg.outputInfo[i].dimCount; dimIdx++) { + LOG("model output[%zd] dim[%zd] info %ld\n", i, dimIdx, cfg.outputInfo[i].dims[dimIdx]); + snprintf(tmpChr, sizeof(tmpChr), "model output[%zd] dim[%zd] info %ld\n", + i, dimIdx, cfg.outputInfo[i].dims[dimIdx]); + outFile << tmpChr; + } + + cfg.outputInfo[i].Format = aclmdlGetOutputFormat(cfg.modelDesc, i); + cfg.outputInfo[i].Type = aclmdlGetOutputDataType(cfg.modelDesc, i); + LOG("model output[%zd] format %d outputType %d\n", i, cfg.outputInfo[i].Format, cfg.outputInfo[i].Type); + snprintf(tmpChr, sizeof(tmpChr), "model output[%zd] format %d outputType %d\n", i, cfg.outputInfo[i].Format, + cfg.outputInfo[i].Type); + outFile << tmpChr; + + cfg.outputInfo[i].Name = aclmdlGetOutputNameByIndex(cfg.modelDesc, i); + LOG("model output[%zd] name %s\n", i, cfg.outputInfo[i].Name); + snprintf(tmpChr, sizeof(tmpChr), "model output[%zd] name %s\n", i, cfg.outputInfo[i].Name); + outFile << tmpChr; + + size_t index; + ret = aclmdlGetOutputIndexByName(cfg.modelDesc, cfg.outputInfo[i].Name, &index); + if (ret != ACL_ERROR_NONE) { + LOG("aclmdlGetOutputIndexByName fail ret %d line %d\n", ret, __LINE__); + return 1; + } + + if (i != index) { + LOG("aclmdlGetOutputNameByIndex not equal aclmdlGetOutputIndexByName\n"); + return 1; + } else { + LOG("model output name %s is belone to output %d\n", cfg.outputInfo[i].Name, index); + } + } + + outFile.close(); + return ACL_ERROR_NONE; +} + +int main(int argc, char** argv) +{ + processedCnt = 0; + inferTime = 0; + + std::string errorMsg; + ret = ParseParams(argc, argv, cfg, errorMsg); + CHECK_ACL_RET(errorMsg, ret); + + ret = InitContext(); + CHECK_RET(ret); + + ret = LoadModel(); + CHECK_RET(ret); + 
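+    // Record each input/output tensor's size, dims, format and name from the
+    // model description; InitInput/DvppInitInput and Inference size their
+    // buffers from this information.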
+ ret = GetModelInputOutputInfo(cfg); + CHECK_RET(ret); + + ret = Process(); + CHECK_RET(ret); + + ret = UnloadModel(); + CHECK_RET(ret); + + ret = UnInitContext(); + CHECK_RET(ret); + LOG("\n"); + + avgTime = 1.0 * inferTime / processedCnt /cfg.batchSize / 1000; + avgPreTime = 1.0 * dataProcTime / processedCnt / cfg.batchSize / 1000; + + if (cfg.useDvpp) { + LOG("\n"); + LOG("DVPP performance details:\n"); + LOG("#############################################\n"); + std::unordered_map::iterator iter; + for (iter = dvppTime.begin(); iter != dvppTime.end(); iter++) { + LOG("%s using avg time %0.2f ms\n",iter->first.c_str(),1.0*iter->second/processedCnt/cfg.batchSize/1000); + } + LOG("\n"); + } + + LOG("performance summary:\n"); + LOG("#############################################\n"); + LOG("total %ld imgs processed and batch size %ld\n", processedCnt*cfg.batchSize, cfg.batchSize); +#if 0 + if(cfg.postprocessType == "resnet") { + LOG("top1 ratio %0.3f top5 ratio %0.3f\n", + 1.0*resnet50Res.top1/resnet50Res.total, 1.0*resnet50Res.top5/resnet50Res.total); + } +#endif + + LOG("avg preprocess time %0.2f ms, %0.2f imgs/s\n", avgPreTime, 1.0 * 1000 / avgPreTime); + LOG("avg inference time %0.2f ms, %0.2f imgs/s\n", avgTime, 1.0 * 1000 / avgTime); + + SaveResult(); +} diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/post_process.cpp b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/post_process.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c66491757b8bb1388a8ac9210619b55b5a2dcb9e --- /dev/null +++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/post_process.cpp @@ -0,0 +1,127 @@ +/* * +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+* */ + +#include "post_process.h" +#include "util.h" +#include +#include +#include +#include +#include "stdio.h" +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +extern int processedCnt; + +extern Config cfg; +extern DataFrame outputDataframe; +extern aclError ret; +int topNum = 5; + +extern int processedCnt; + +aclError SaveBinPostprocess() +{ + aclError retVal; + + LOG("save batch %d start\n", processedCnt); + DataFrame dataframe = outputDataframe; + std::vector& inferFile_vec = outputDataframe.fileNames; + aclmdlDataset* output = dataframe.dataset; + + std::string resultFolder = cfg.outDir + "/" + cfg.modelType; + DIR* op = opendir(resultFolder.c_str()); + if (NULL == op) { + mkdir(resultFolder.c_str(), 00775); + } else { + closedir(op); + } + + for (size_t i = 0; i < cfg.outputNum; ++i) { + aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(output, i); + void* data = aclGetDataBufferAddr(dataBuffer); + uint32_t len; + len = cfg.outputInfo[i].size; + + void* outHostData = NULL; + ret = aclrtMallocHost(&outHostData, len); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc host failed.\n"); + return 1; + } + + ret = aclrtMemcpy(outHostData, len, data, len, ACL_MEMCPY_DEVICE_TO_HOST); + if (ret != ACL_ERROR_NONE) { + LOG("Copy device to host failed.\n"); + aclrtFreeHost(outHostData); + return 1; + } + + uint32_t eachSize = len / cfg.batchSize; + for (size_t j = 0; j < inferFile_vec.size(); j++) { + FILE* outputFile; + std::string framename = inferFile_vec[j]; + std::size_t dex = (framename).find_first_of("."); + std::string inputFileName = (framename).erase(dex); + + if (cfg.modelType.compare(0, 6, "resnet") == 0) { + outputFile = fopen((resultFolder + "/" + "davinci_" + inputFileName + "_" + "output" + ".bin").c_str(), "wb"); + } else { + outputFile = fopen((resultFolder + "/" + "davinci_" + inputFileName + "_" + "output" + std::to_string(i) + ".bin").c_str(), "wb"); + } + + if (outputFile == nullptr) { + aclrtFreeHost(outHostData); + return 1; + } + + fwrite((uint8_t *)outHostData + (j * eachSize), eachSize, sizeof(char), outputFile); + fclose(outputFile); + } + + ret = aclrtFreeHost(outHostData); + if (ret != ACL_ERROR_NONE) { + LOG("Free output host failed.\n"); + } + } + + (void)DestroyDatasetResurce(outputDataframe.dataset, 0); + + LOG("save batch %d done\n", processedCnt); + return ACL_ERROR_NONE; +} diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/util.cpp b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/util.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ec437321c75c4c883ca35d56a0c5ab218f16efa7 --- /dev/null +++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/Benchmark/util.cpp @@ -0,0 +1,230 @@ +/* * +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+* */ + +#include "util.h" +#include +#include +#include +#if 0 +static std::unordered_map errorMap = { + {ACL_ERROR_NONE, "success"}, + {ACL_ERROR_INVALID_PARAM, "params may not valid"}, + {ACL_ERROR_BAD_ALLOC, "alloc memory failed"}, + {ACL_ERROR_RT_FAILURE, "runtime failure"}, + {ACL_ERROR_GE_FAILURE, "GE failure"}, + {ACL_ERROR_OP_NOT_FOUND, "OP not find"}, + {ACL_ERROR_OP_LOAD_FAILED, "OP loads failed"}, + {ACL_ERROR_READ_MODEL_FAILURE, "load model failed"}, + {ACL_ERROR_PARSE_MODEL, "parse model failed"}, + {ACL_ERROR_MODEL_MISSING_ATTR, "model misssing attr"}, + {ACL_ERROR_DESERIALIZE_MODEL, "deserilize model failed"}, + // {ACL_ERROR_MULTIPLE_MODEL_MATCHED, "multiple model matched"}, + //{ACL_ERROR_EVENT_NOT_READY, "event not ready"}, + //{ACL_ERROR_EVENT_COMPLETE, "event not complete"}, + {ACL_ERROR_UNSUPPORTED_DATA_TYPE, "unsupport datatype"}, + {ACL_ERROR_REPEAT_INITIALIZE, "initial repeated"}, + //{ACL_ERROR_COMPILER_NOT_REGISTERED, "compilter not registered"}, + {ACL_ERROR_PATH_INVALID, "path invalid"}, + {ACL_ERROR_PARSE_PARAM_FAILED, "parse params failed"}, + {ACL_ERROR_DVPP_ERROR, "dvpp errors"} +}; + + +std::string CausedBy(aclError error) +{ + return errorMap[error]; +} +#endif + +bool FolderExists(std::string foldname) +{ + DIR* dir; + if ((dir = opendir(foldname.c_str())) == NULL) { + return false; + } + closedir(dir); + return true; +} + +void* ReadFile(std::string fileLocation, uint64_t &fileSize) +{ + aclError ret; + FILE *pFile = fopen(fileLocation.c_str(), "r"); + if (pFile == nullptr) { + LOG("open file %s failed\n", fileLocation.c_str()); + return nullptr; + } + + fseek(pFile, 0, SEEK_END); + fileSize = ftell(pFile); + + void *buff = nullptr; + ret = aclrtMallocHost(&buff, fileSize); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc host buff failed[%d]\n", ret); + return nullptr; + } + + rewind(pFile); + fread(buff, sizeof(char), fileSize, pFile); + fclose(pFile); + return buff; +} + +bool FileExists(std::string filename) +{ + std::fstream file; + file.open(filename, std::ios::in); + if (!file) { + return false; + } + + file.close(); + return true; +} + +char* ReadBinFile(std::string fileName, uint32_t& fileSize) +{ + std::ifstream binFile(fileName, std::ifstream::binary); + + if (binFile.is_open() == false) { + LOG("open file[%s] failed\n", fileName.c_str()); + return nullptr; + } + + binFile.seekg(0, binFile.end); + uint32_t binFileBufferLen = binFile.tellg(); + + if (binFileBufferLen == 0) { + LOG("binfile is empty, filename: %s", fileName.c_str()); + binFile.close(); + return nullptr; + } + + binFile.seekg(0, binFile.beg); + char* binFileBufferData = new(std::nothrow) char[binFileBufferLen]; + LOG("binFileBufferData:%p\n", binFileBufferData); + + if (binFileBufferData == nullptr) { + LOG("malloc binFileBufferData failed\n"); + binFile.close(); + return nullptr; + } + + binFile.read(binFileBufferData, binFileBufferLen); + binFile.close(); + fileSize = binFileBufferLen; + return binFileBufferData; +} + +aclError GetFiles(std::string path, std::vector& files) +{ + DIR* dir; + struct dirent* ptr; + char base[1000]; + + if ((dir = opendir(path.c_str())) == NULL) { + LOG("Open dir %s error.\n", path.c_str()); + return ACL_ERROR_PATH_INVALID; + } + + while ((ptr = readdir(dir)) != NULL) { + if (strcmp(ptr->d_name, ".") == 0 || strcmp(ptr->d_name, "..") == 0) { + //current dir OR parrent dir + continue; + } else if (ptr->d_type == 8) { + //file + files.push_back(ptr->d_name); + } else if (ptr->d_type == 10) { + //link file + continue; + } else if (ptr->d_type == 4) { + 
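+            // sub-directories are skipped rather than recursed into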
//dir + continue; + } + } + + closedir(dir); + std::sort(files.begin(), files.end()); + return ACL_ERROR_NONE; +} + +aclError FreeDevMemory(aclmdlDataset* dataset) +{ + aclError ret; + for (size_t i = 0; i < aclmdlGetDatasetNumBuffers(dataset); ++i) { + aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(dataset, i); + void* data = aclGetDataBufferAddr(dataBuffer); + aclrtFree(data); + aclDestroyDataBuffer(dataBuffer); + } + + return ACL_ERROR_NONE; +} + +aclError DestroyDatasetResurce(aclmdlDataset* dataset, uint32_t flag) +{ + aclError ret = ACL_ERROR_NONE; + + if (nullptr == dataset) { + LOG("dataset == null\n"); + return 1; + } + + for (size_t i = 0; i < aclmdlGetDatasetNumBuffers(dataset); ++i) { + aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(dataset, i); + if (nullptr == dataBuffer) { + LOG("dataBuffer == null\n"); + continue; + } + + void* data = aclGetDataBufferAddr(dataBuffer); + if (nullptr != data) { + if (1 == flag) { + if (i > 0) { + ret = aclrtFree(data); + if (ret != ACL_ERROR_NONE) { + LOG("aclrtFree data failed, ret %d\n", ret); + } + } else { + ret = acldvppFree(data); + if (ret != ACL_ERROR_NONE) { + LOG("acldvppFree data failed, ret %d\n", ret); + } + } + } else { + ret = aclrtFree(data); + if (ret != ACL_ERROR_NONE) { + LOG("aclrtFree data failed, ret %d\n", ret); + } + } + } + + ret = aclDestroyDataBuffer(dataBuffer); + if (ret != ACL_ERROR_NONE) { + LOG("Destroy dataBuffer failed, ret %d\n", ret); + } + } + + ret = aclmdlDestroyDataset(dataset); + if (ret != ACL_ERROR_NONE) { + LOG("aclrtFree dataset failed, ret %d\n", ret); + } + + return ret; +} + + diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/LICENSE b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..57bc88a15a0ee8266c259b2667e64608d3f7e292 --- /dev/null +++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/LICENSE @@ -0,0 +1,202 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
+      In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/README.md b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..27979521a469f5308d4510a76ccb517ae6aabe3f
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/README.md
@@ -0,0 +1,86 @@
+
+
+# InceptionV2 Inference for Tensorflow
+
+This repository provides a script and recipe for inference of the InceptionV2 model.
+
+## Notice
+**This sample only provides reference for you to learn the Ascend software stack and is not for commercial purposes.**
+
+Before starting, please pay attention to the following adaptation conditions. If they do not match, the inference may fail.
+
+| Conditions | Need |
+| --- | --- |
+| CANN Version | >=5.0.3 |
+| Chip Platform | Ascend310/Ascend710 |
+| 3rd Party Requirements | Please follow the 'requirements.txt' |
+
+## Quick Start Guide
+
+### 1. Clone the repository
+
+```shell
+git clone https://gitee.com/ascend/ModelZoo-TensorFlow.git
+cd Modelzoo-TensorFlow/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL
+```
+
+### 2. Download and preprocess the dataset
+
+1. Download the ImageNet2012 validation dataset by yourself. It consists of the validation pictures (50000 JPEGs) and the ILSVRC2012val-label-index.txt label file.
+
+2. Put the JPEGs into **'scripts/ILSVRC2012val'** and the label text into **'scripts/'**.
+
+3. Preprocess the images:
+```
+cd scripts
+mkdir input_bins
+python3 inception_preprocessing.py ./ILSVRC2012val/ ./input_bins/
+```
+The JPEG pictures will be preprocessed into bin files.
+
+### 3. Offline Inference
+
+**Convert pb to om.**
+
+- Configure the environment
+
+  ```
+  export install_path=/usr/local/Ascend
+  export PATH=/usr/local/python3.7.5/bin:${install_path}/atc/ccec_compiler/bin:${install_path}/atc/bin:$PATH
+  export PYTHONPATH=${install_path}/atc/python/site-packages:${install_path}/atc/python/site-packages/auto_tune.egg/auto_tune:${install_path}/atc/python/site-packages/schedule_search.egg:$PYTHONPATH
+  export LD_LIBRARY_PATH=${install_path}/atc/lib64:${install_path}/acllib/lib64:$LD_LIBRARY_PATH
+  export ASCEND_OPP_PATH=${install_path}/opp
+  ```
+
+- Convert pb to om
+
+  [pb download link](https://modelzoo-train-atc.obs.cn-north-4.myhuaweicloud.com/003_Atc_Models/modelzoo/Official/cv/Inceptionv2_for_ACL/inceptionv2_tf.pb)
+
+  ```
+  atc --model=inceptionv2_tf.pb --framework=3 --output=inceptionv2_tf_1batch --output_type=FP32 --soc_version=Ascend310 --input_shape="input:1,224,224,3" --insert_op_conf=inceptionv2_aipp.cfg --enable_small_channel=1 --log=info
+  ```
+
+- Build the program
+
+  ```
+  bash build.sh
+  ```
+
+- Run the program:
+
+  ```
+  cd scripts
+  bash benchmark_tf.sh
+  ```
+
+## Performance
+
+### Result
+
+Our result was obtained by running the applicable inference script. To achieve the same results, follow the steps in the Quick Start Guide.
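+The Top1/Top5 numbers below are produced by `scripts/imagenet_accuarcy_cal.py`, which reads every `davinci_*_output.bin` back as logits and checks them against the ground-truth index. A minimal sketch of that per-image check (the function name and variables here are illustrative, not part of the script):
+
+```python
+import numpy as np
+
+def top1_top5_hit(pred, label):
+    # pred: 1-D float32 logits read from one output bin via np.fromfile
+    # label: ground-truth class index, after the --offset adjustment
+    top1_hit = int(np.argmax(pred)) == label    # highest logit matches the label
+    top5_hit = label in np.argsort(-pred)[0:5]  # label among the five highest logits
+    return top1_hit, top5_hit
+```
+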
+
+#### Inference accuracy results
+
+| model | **data** | Top1/Top5 |
+| :---------------: | :-------: | :-------------: |
+| offline Inference | 50000 images | 74.0% / 91.8% |
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/build.sh b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/build.sh
new file mode 100644
index 0000000000000000000000000000000000000000..dae86211d2691b82ecfd8c2637d1276092e476ed
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/build.sh
@@ -0,0 +1,9 @@
+rm -rf ./Benchmark/build
+
+mkdir -p Benchmark/build/intermediates/host
+cd Benchmark/build/intermediates/host
+cmake ../../../../Benchmark/ -DCMAKE_CXX_COMPILER=g++
+make clean
+make install
+cd -
+cd Benchmark/out/
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/inceptionv2_aipp.cfg b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/inceptionv2_aipp.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..484f80d3d1da6bc802e3f0aced5f914fd53b6e25
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/inceptionv2_aipp.cfg
@@ -0,0 +1,13 @@
+aipp_op {
+    aipp_mode: static
+    input_format : RGB888_U8
+    src_image_size_w : 224
+    src_image_size_h : 224
+    mean_chn_0 : 128
+    mean_chn_1 : 128
+    mean_chn_2 : 128
+    var_reci_chn_0 : 0.00781
+    var_reci_chn_1 : 0.00781
+    var_reci_chn_2 : 0.00781
+}
+
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/modelzoo_level.txt b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/modelzoo_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..0934af7691acabd7981d82342b3a2310fe606d3d
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/modelzoo_level.txt
@@ -0,0 +1,6 @@
+ModelCovert:OK
+QuantStatus:OK
+FuncStatus:OK
+PrecisionStatus:OK
+AutoTune:OK
+PerfStatus:OK
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/requirements.txt b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..6f66bb9f75c74849c47871a646493af6c2eb83d3
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/requirements.txt
@@ -0,0 +1,3 @@
+tensorflow==1.15
+numpy==1.16
+Pillow==7.1.2
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/scripts/benchmark_tf.sh b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/scripts/benchmark_tf.sh
new file mode 100644
index 0000000000000000000000000000000000000000..e269edf1c0a43a2d1bef3a9495aa66a3c04f8eeb
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/scripts/benchmark_tf.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+#set -x
+cur_dir=`pwd`
+benchmark_dir=$cur_dir/../Benchmark/out
+om_name=$cur_dir/../inceptionv2_tf_1batch.om
+batchsize=1
+model_name=inceptionv2
+output_dir='results'
+rm -rf $cur_dir/$output_dir/*
+
+#start offline inference
+$benchmark_dir/benchmark --om $om_name --dataDir $cur_dir/input_bins/ --modelType $model_name --outDir $cur_dir/$output_dir --batchSize $batchsize --imgType bin --useDvpp 0
+
+#post process
+python3 $cur_dir/imagenet_accuarcy_cal.py --infer_result $cur_dir/$output_dir/$model_name --label $cur_dir/ILSVRC2012val-label-index.txt --offset 1
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/scripts/imagenet_accuarcy_cal.py b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/scripts/imagenet_accuarcy_cal.py
new file mode 100644
index 0000000000000000000000000000000000000000..38f0d91170e48644f8bafffa9e0a098881c52b48
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/scripts/imagenet_accuarcy_cal.py
@@ -0,0 +1,75 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import numpy as np
+import os
+import time
+import argparse
+
+if __name__=='__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--infer_result", type=str, default="../../result_Files")
+    parser.add_argument("--label", type=str, default="../data/input_50000.csv")
+    parser.add_argument("--output_index", type=int, default=0)
+    parser.add_argument("--offset", type=int, default=0)
+    parser.add_argument("--dtype", type=str, default='float32') # data type of the result bin files
+    args = parser.parse_args()
+
+    image_cnt = 0
+    top1_cnt = 0
+    top5_cnt = 0
+    ground_truth={}
+    if args.label.endswith(".csv"):
+        with open(args.label, 'r') as cs:
+            rs_list = cs.readlines()
+        for line in rs_list:
+            image_name = line.split(',')[0].split('.JPEG')[0]
+            label = int(line.split(',')[1])
+            label += args.offset
+            ground_truth[image_name]=label
+    elif args.label.endswith(".txt"):
+        with open(args.label, 'r') as cs:
+            rs_list = cs.readlines()
+        for line in rs_list:
+            image_name = line.split(' ')[0].split('.JPEG')[0]
+            label = int(line.split(' ')[1].replace("\n",""))
+            label += args.offset
+            ground_truth[image_name]=label
+
+    for i in sorted(ground_truth):
+        try:
+            image_name = i
+            label = ground_truth[i]
+            # Locate the inference result file; several naming patterns are possible
+            if os.path.exists(os.path.join(args.infer_result,'davinci_{}_output{}.bin'.format(image_name,args.output_index))):
+                bin_path = os.path.join(args.infer_result,'davinci_{}_output{}.bin'.format(image_name, args.output_index))
+                pred = np.fromfile(bin_path, dtype=args.dtype)
+            elif os.path.exists(os.path.join(args.infer_result,'davinci_{}.JPEG_output{}.bin'.format(image_name, args.output_index))):
+                bin_path = os.path.join(args.infer_result,'davinci_{}.JPEG_output{}.bin'.format(image_name, args.output_index))
+                pred = np.fromfile(bin_path, dtype=args.dtype)
+            elif os.path.exists(os.path.join(args.infer_result,'{}_output_{}.bin'.format(image_name,args.output_index))):
+                bin_path = os.path.join(args.infer_result,'{}_output_{}.bin'.format(image_name, args.output_index))
+                pred = np.fromfile(bin_path, dtype=args.dtype)
+            else:
+                continue
+            top1=np.argmax(pred)
+            if label == top1:
+                top1_cnt += 1
+            if label in np.argsort(-pred)[0:5]:
+                top5_cnt += 1
+            image_cnt+=1
+            print("{}, gt label:{: >4}, predict results:{}".format(image_name,label,str(np.argsort(-pred)[0:5])))
+        except Exception as e:
+            print("Failed to process {}: {}".format(image_name, e))
+    print('image_count %d, top1_accuracy %.3f, top5_accuracy %.3f'%(image_cnt,top1_cnt/image_cnt,top5_cnt/image_cnt))
\ No newline at end of file
diff --git a/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/scripts/inception_preprocessing.py b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/scripts/inception_preprocessing.py
new file mode 100644
index 0000000000000000000000000000000000000000..0a2b3d578a4f401a4b5ebdd222cb2488d2ae3c66
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Inceptionv2_for_ACL/scripts/inception_preprocessing.py
@@ -0,0 +1,411 @@
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Provides utilities to preprocess images for the Inception networks."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import tensorflow.compat.v1 as tf
+import os
+import sys
+import shutil
+import numpy as np
+from PIL import Image
+from tensorflow.python.ops import control_flow_ops
+
+
+def apply_with_random_selector(x, func, num_cases):
+  """Computes func(x, sel), with sel sampled from [0...num_cases-1].
+
+  Args:
+    x: input Tensor.
+    func: Python function to apply.
+    num_cases: Python int32, number of cases to sample sel from.
+
+  Returns:
+    The result of func(x, sel), where func receives the value of the
+    selector as a python integer, but sel is sampled dynamically.
+  """
+  sel = tf.random_uniform([], maxval=num_cases, dtype=tf.int32)
+  # Pass the real x only to one of the func calls.
+  return control_flow_ops.merge([
+      func(control_flow_ops.switch(x, tf.equal(sel, case))[1], case)
+      for case in range(num_cases)])[0]
+
+
+def distort_color(image, color_ordering=0, fast_mode=True, scope=None):
+  """Distort the color of a Tensor image.
+
+  Each color distortion is non-commutative and thus ordering of the color ops
+  matters. Ideally we would randomly permute the ordering of the color ops.
+  Rather than adding that level of complication, we select a distinct ordering
+  of color ops for each preprocessing thread.
+
+  Args:
+    image: 3-D Tensor containing single image in [0, 1].
+    color_ordering: Python int, a type of distortion (valid values: 0-3).
+    fast_mode: Avoids slower ops (random_hue and random_contrast)
+    scope: Optional scope for name_scope.
+  Returns:
+    3-D Tensor color-distorted image on range [0, 1]
+  Raises:
+    ValueError: if color_ordering not in [0, 3]
+  """
+  with tf.name_scope(scope, 'distort_color', [image]):
+    if fast_mode:
+      if color_ordering == 0:
+        image = tf.image.random_brightness(image, max_delta=32. / 255.)
+        image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
+      else:
+        image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
+        image = tf.image.random_brightness(image, max_delta=32. / 255.)
+    else:
+      if color_ordering == 0:
+        image = tf.image.random_brightness(image, max_delta=32. / 255.)
+        image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
+        image = tf.image.random_hue(image, max_delta=0.2)
+        image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
+      elif color_ordering == 1:
+        image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
+        image = tf.image.random_brightness(image, max_delta=32. / 255.)
+        image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
+        image = tf.image.random_hue(image, max_delta=0.2)
+      elif color_ordering == 2:
+        image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
+        image = tf.image.random_hue(image, max_delta=0.2)
+        image = tf.image.random_brightness(image, max_delta=32. / 255.)
+        image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
+      elif color_ordering == 3:
+        image = tf.image.random_hue(image, max_delta=0.2)
+        image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
+        image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
+        image = tf.image.random_brightness(image, max_delta=32. / 255.)
+      else:
+        raise ValueError('color_ordering must be in [0, 3]')
+
+    # The random_* ops do not necessarily clamp.
+    return tf.clip_by_value(image, 0.0, 1.0)
+
+
+def distorted_bounding_box_crop(image,
+                                bbox,
+                                min_object_covered=0.1,
+                                aspect_ratio_range=(0.75, 1.33),
+                                area_range=(0.05, 1.0),
+                                max_attempts=100,
+                                scope=None):
+  """Generates cropped_image using one of the bboxes randomly distorted.
+
+  See `tf.image.sample_distorted_bounding_box` for more documentation.
+
+  Args:
+    image: 3-D Tensor of image (it will be converted to floats in [0, 1]).
+    bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
+      where each coordinate is [0, 1) and the coordinates are arranged
+      as [ymin, xmin, ymax, xmax]. If num_boxes is 0 then it would use the whole
+      image.
+    min_object_covered: An optional `float`. Defaults to `0.1`. The cropped
+      area of the image must contain at least this fraction of any bounding box
+      supplied.
+    aspect_ratio_range: An optional list of `floats`. The cropped area of the
+      image must have an aspect ratio = width / height within this range.
+    area_range: An optional list of `floats`. The cropped area of the image
+      must contain a fraction of the supplied image within this range.
+    max_attempts: An optional `int`. Number of attempts at generating a cropped
+      region of the image of the specified constraints. After `max_attempts`
+      failures, return the entire image.
+    scope: Optional scope for name_scope.
+  Returns:
+    A tuple, a 3-D Tensor cropped_image and the distorted bbox
+  """
+  with tf.name_scope(scope, 'distorted_bounding_box_crop', [image, bbox]):
+    # Each bounding box has shape [1, num_boxes, box coords] and
+    # the coordinates are ordered [ymin, xmin, ymax, xmax].
+
+    # A large fraction of image datasets contain a human-annotated bounding
+    # box delineating the region of the image containing the object of interest.
+    # We choose to create a new bounding box for the object which is a randomly
+    # distorted version of the human-annotated bounding box that obeys an
+    # allowed range of aspect ratios, sizes and overlap with the human-annotated
+    # bounding box. If no box is supplied, then we assume the bounding box is
+    # the entire image.
+    sample_distorted_bounding_box = tf.image.sample_distorted_bounding_box(
+        tf.shape(image),
+        bounding_boxes=bbox,
+        min_object_covered=min_object_covered,
+        aspect_ratio_range=aspect_ratio_range,
+        area_range=area_range,
+        max_attempts=max_attempts,
+        use_image_if_no_bounding_boxes=True)
+    bbox_begin, bbox_size, distort_bbox = sample_distorted_bounding_box
+
+    # Crop the image to the specified bounding box.
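+    # bbox_begin and bbox_size returned above are 1-D offset and size tensors
+    # in exactly the form tf.slice expects for its begin/size arguments, so
+    # the slice below keeps precisely the sampled crop region.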
+    cropped_image = tf.slice(image, bbox_begin, bbox_size)
+    return cropped_image, distort_bbox
+
+
+def preprocess_for_train(image,
+                         height,
+                         width,
+                         bbox,
+                         fast_mode=True,
+                         scope=None,
+                         add_image_summaries=True,
+                         random_crop=True,
+                         use_grayscale=False):
+  """Distort one image for training a network.
+
+  Distorting images provides a useful technique for augmenting the data
+  set during training in order to make the network invariant to aspects
+  of the image that do not affect the label.
+
+  Additionally it would create image_summaries to display the different
+  transformations applied to the image.
+
+  Args:
+    image: 3-D Tensor of image. If dtype is tf.float32 then the range should be
+      [0, 1], otherwise it would be converted to tf.float32 assuming that the
+      range is [0, MAX], where MAX is the largest positive representable number
+      for int(8/16/32) data type (see `tf.image.convert_image_dtype` for details).
+    height: integer
+    width: integer
+    bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
+      where each coordinate is [0, 1) and the coordinates are arranged
+      as [ymin, xmin, ymax, xmax].
+    fast_mode: Optional boolean, if True avoids slower transformations (i.e.
+      bi-cubic resizing, random_hue or random_contrast).
+    scope: Optional scope for name_scope.
+    add_image_summaries: Enable image summaries.
+    random_crop: Enable random cropping of images during preprocessing for
+      training.
+    use_grayscale: Whether to convert the image from RGB to grayscale.
+  Returns:
+    3-D float Tensor of distorted image used for training with range [-1, 1].
+  """
+  with tf.name_scope(scope, 'distort_image', [image, height, width, bbox]):
+    if bbox is None:
+      bbox = tf.constant([0.0, 0.0, 1.0, 1.0],
+                         dtype=tf.float32,
+                         shape=[1, 1, 4])
+    if image.dtype != tf.float32:
+      image = tf.image.convert_image_dtype(image, dtype=tf.float32)
+    # Each bounding box has shape [1, num_boxes, box coords] and
+    # the coordinates are ordered [ymin, xmin, ymax, xmax].
+    image_with_box = tf.image.draw_bounding_boxes(tf.expand_dims(image, 0),
+                                                  bbox)
+    if add_image_summaries:
+      tf.summary.image('image_with_bounding_boxes', image_with_box)
+
+    if not random_crop:
+      distorted_image = image
+    else:
+      distorted_image, distorted_bbox = distorted_bounding_box_crop(image, bbox)
+      # Restore the shape since the dynamic slice based upon the bbox_size loses
+      # the third dimension.
+      distorted_image.set_shape([None, None, 3])
+      image_with_distorted_box = tf.image.draw_bounding_boxes(
+          tf.expand_dims(image, 0), distorted_bbox)
+      if add_image_summaries:
+        tf.summary.image('images_with_distorted_bounding_box',
+                         image_with_distorted_box)
+
+    # This resizing operation may distort the images because the aspect
+    # ratio is not respected. We select a resize method in a round robin
+    # fashion based on the thread number.
+    # Note that ResizeMethod contains 4 enumerated resizing methods.
+
+    # We select only 1 case for fast_mode bilinear.
+    num_resize_cases = 1 if fast_mode else 4
+    distorted_image = apply_with_random_selector(
+        distorted_image,
+        lambda x, method: tf.image.resize_images(x, [height, width], method),
+        num_cases=num_resize_cases)
+
+    if add_image_summaries:
+      tf.summary.image(('cropped_' if random_crop else '') + 'resized_image',
+                       tf.expand_dims(distorted_image, 0))
+
+    # Randomly flip the image horizontally.
+    distorted_image = tf.image.random_flip_left_right(distorted_image)
+
+    # Randomly distort the colors. There are 1 or 4 ways to do it.
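+    # apply_with_random_selector samples the ordering uniformly from
+    # [0, num_distort_cases), so fast_mode always uses ordering 0 while the
+    # full mode picks one of the four orderings in distort_color per image.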
+    num_distort_cases = 1 if fast_mode else 4
+    distorted_image = apply_with_random_selector(
+        distorted_image,
+        lambda x, ordering: distort_color(x, ordering, fast_mode),
+        num_cases=num_distort_cases)
+
+    if use_grayscale:
+      distorted_image = tf.image.rgb_to_grayscale(distorted_image)
+
+    if add_image_summaries:
+      tf.summary.image('final_distorted_image',
+                       tf.expand_dims(distorted_image, 0))
+    distorted_image = tf.subtract(distorted_image, 0.5)
+    distorted_image = tf.multiply(distorted_image, 2.0)
+    return distorted_image
+
+
+def preprocess_for_eval(image,
+                        height,
+                        width,
+                        central_fraction=0.875,
+                        scope=None,
+                        central_crop=True,
+                        use_grayscale=False):
+  """Prepare one image for evaluation.
+
+  If height and width are specified it would output an image with that size by
+  applying resize_bilinear.
+
+  If central_fraction is specified it would crop the central fraction of the
+  input image.
+
+  Args:
+    image: 3-D Tensor of image. If dtype is tf.float32 then the range should be
+      [0, 1], otherwise it would be converted to tf.float32 assuming that the
+      range is [0, MAX], where MAX is the largest positive representable number
+      for int(8/16/32) data type (see `tf.image.convert_image_dtype` for details).
+    height: integer
+    width: integer
+    central_fraction: Optional Float, fraction of the image to crop.
+    scope: Optional scope for name_scope.
+    central_crop: Enable central cropping of images during preprocessing for
+      evaluation.
+    use_grayscale: Whether to convert the image from RGB to grayscale.
+  Returns:
+    3-D float Tensor of prepared image.
+  """
+  with tf.name_scope(scope, 'eval_image', [image, height, width]):
+    if image.dtype != tf.float32:
+      image = tf.image.convert_image_dtype(image, dtype=tf.float32)
+    if use_grayscale:
+      image = tf.image.rgb_to_grayscale(image)
+    # Crop the central region of the image with an area containing 87.5% of
+    # the original image.
+    if central_crop and central_fraction:
+      image = tf.image.central_crop(image, central_fraction=central_fraction)
+
+    if height and width:
+      # Resize the image to the specified height and width.
+      image = tf.expand_dims(image, 0)
+      image = tf.image.resize_bilinear(image, [height, width],
+                                       align_corners=False)
+      image = tf.squeeze(image, [0])
+    image = tf.subtract(image, 0.5)
+    image = tf.multiply(image, 2.0)
+    return image
+
+
+def preprocess_image(image,
+                     height,
+                     width,
+                     is_training=False,
+                     bbox=None,
+                     fast_mode=True,
+                     add_image_summaries=True,
+                     crop_image=True,
+                     use_grayscale=False):
+  """Pre-process one image for training or evaluation.
+
+  Args:
+    image: 3-D Tensor [height, width, channels] with the image. If dtype is
+      tf.float32 then the range should be [0, 1], otherwise it would be
+      converted to tf.float32 assuming that the range is [0, MAX], where MAX is
+      the largest positive representable number for int(8/16/32) data type (see
+      `tf.image.convert_image_dtype` for details).
+    height: integer, image expected height.
+    width: integer, image expected width.
+    is_training: Boolean. If true it would transform an image for training,
+      otherwise it would transform it for evaluation.
+    bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
+      where each coordinate is [0, 1) and the coordinates are arranged as
+      [ymin, xmin, ymax, xmax].
+    fast_mode: Optional boolean, if True avoids slower transformations.
+    add_image_summaries: Enable image summaries.
+    crop_image: Whether to enable cropping of images during preprocessing for
+      both training and evaluation.
+    use_grayscale: Whether to convert the image from RGB to grayscale.
+
+  Returns:
+    3-D float Tensor containing an appropriately scaled image
+
+  Raises:
+    ValueError: if user does not provide bounding box
+  """
+  if is_training:
+    return preprocess_for_train(
+        image,
+        height,
+        width,
+        bbox,
+        fast_mode,
+        add_image_summaries=add_image_summaries,
+        random_crop=crop_image,
+        use_grayscale=use_grayscale)
+  else:
+    return preprocess_for_eval(
+        image,
+        height,
+        width,
+        central_crop=crop_image,
+        use_grayscale=use_grayscale)
+
+def convert_RGB(img_name):
+    image = Image.open(img_name).convert('RGB')
+    return image
+
+def preprocess(src_path, save_path):
+    in_files = os.listdir(src_path)
+    in_files.sort()
+    resize_shape = [224, 224, 3]
+    sqz_mean = np.array([127.5, 127.5, 127.5], np.float32)
+    img_std = np.array([[0.5*255, 0.5*255, 0.5*255]], np.float32)
+    if os.path.isdir(save_path):
+        shutil.rmtree(save_path)
+    os.makedirs(save_path)
+    for file in in_files:
+        with tf.Session().as_default():
+            if not os.path.isdir(os.path.join(src_path, file)):
+                print(file)
+                img = convert_RGB(os.path.join(src_path, file))
+                img = np.array(img)
+                img = tf.convert_to_tensor(img)
+                img = preprocess_image(img,
+                                       224,
+                                       224,
+                                       is_training=False,
+                                       use_grayscale=False)
+                img = img.eval()
+                img = img * img_std
+                img = img + sqz_mean
+                img = img.astype(np.uint8, copy=False)
+                img.tofile(os.path.join(save_path, file.split('.')[0]+".bin"))
+        tf.reset_default_graph()
+
+if __name__ == "__main__":
+    if len(sys.argv) < 3:
+        raise Exception("usage: python3 xxx.py [src_path] [save_path]")
+
+    src_path = sys.argv[1]
+    save_path = sys.argv[2]
+    preprocess(src_path, save_path)
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/CMakeLists.txt b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..17dc38b8a933d0b8fd84e5579426f7933ca45c99
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/CMakeLists.txt
@@ -0,0 +1,43 @@
+# Copyright (c) Huawei Technologies Co., Ltd. 2019. All rights reserved.
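+# NOTE: $ENV{install_path} used below is assumed to point to the Ascend
+# toolkit install root (e.g. /usr/local/Ascend); export it before running cmake.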
+# CMake lowest version requirement
+cmake_minimum_required(VERSION 3.5.1)
+# project information
+project(ascendcl)
+# Compile options
+add_compile_options(-std=c++11)
+# Specify target generation path
+set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "../outputs")
+set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "../outputs")
+set(CMAKE_INSTALL_PREFIX "../../../")
+set(CMAKE_OUTPUT_DIR "out")
+set(CMAKE_CXX_FLAGS_RELEASE "-fPIC -O2 -g -Wall")
+
+ADD_DEFINITIONS("-DENABLE_DVPP_INTERFACE -D_GLIBCXX_USE_CXX11_ABI=0")
+
+# Header path
+include_directories(
+inc
+#/usr/include/gflags
+$ENV{install_path}/acllib/include
+$ENV{install_path}/driver/kernel/libc_sec/include
+/usr/include
+)
+
+# add host lib path
+link_directories($ENV{install_path}/acllib/lib64/stub)
+#link_directories(/usr/local/Ascend/driver/lib64)
+#link_directories(/usr/local/Ascend/atc/lib64)
+#link_directories(/usr/local/lib)
+link_directories(../thirdpart_lib)
+
+# Source files to compile
+add_executable(benchmark main.cpp util.cpp post_process.cpp infer_engine.cpp)
+
+# Shared libraries to link (RC is the offline model to be extended)
+#target_link_libraries(benchmark acl_dvpp ascendcl pthread protobuf cryptopp)
+target_link_libraries(benchmark acl_dvpp ascendcl pthread)
+
+install(TARGETS benchmark DESTINATION ${CMAKE_OUTPUT_DIR})
+
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/defines.h b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/defines.h
new file mode 100644
index 0000000000000000000000000000000000000000..f0be3dcb485269718125445537ce3616c3078d34
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/defines.h
@@ -0,0 +1,48 @@
+/* Generated from defines.h.in during build configuration using CMake. */
+
+// Note: This header file is only used internally. It is not part of public interface!
+//       Any cmakedefine is defined using the -D flag instead when Bazel is used.
+//       For Bazel, this file is thus not used to avoid a private file in $(GENDIR).
+
+#ifndef GFLAGS_DEFINES_H_
+#define GFLAGS_DEFINES_H_
+
+
+// Define if you build this library for a MS Windows OS.
+/* #undef OS_WINDOWS */
+
+// Define if you have the <stdint.h> header file.
+#define HAVE_STDINT_H
+
+// Define if you have the <sys/types.h> header file.
+#define HAVE_SYS_TYPES_H
+
+// Define if you have the <inttypes.h> header file.
+#define HAVE_INTTYPES_H
+
+// Define if you have the <sys/stat.h> header file.
+#define HAVE_SYS_STAT_H
+
+// Define if you have the <unistd.h> header file.
+#define HAVE_UNISTD_H
+
+// Define if you have the <fnmatch.h> header file.
+#define HAVE_FNMATCH_H
+
+// Define if you have the <shlwapi.h> header file (Windows 2000/XP).
+/* #undef HAVE_SHLWAPI_H */
+
+// Define if you have the strtoll function.
+#define HAVE_STRTOLL
+
+// Define if you have the strtoq function.
+/* #undef HAVE_STRTOQ */
+
+// Define if you have the <pthread.h> header file.
+#define HAVE_PTHREAD
+
+// Define if your pthread library defines the type pthread_rwlock_t
+#define HAVE_RWLOCK
+
+
+#endif // GFLAGS_DEFINES_H_
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/gflags.h b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/gflags.h
new file mode 100644
index 0000000000000000000000000000000000000000..4f3168a03d878a16ac0e05d1d240396b9d422c63
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/gflags.h
@@ -0,0 +1,624 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// --- +// Revamped and reorganized by Craig Silverstein +// +// This is the file that should be included by any file which declares +// or defines a command line flag or wants to parse command line flags +// or print a program usage message (which will include information about +// flags). Executive summary, in the form of an example foo.cc file: +// +// #include "foo.h" // foo.h has a line "DECLARE_int32(start);" +// #include "validators.h" // hypothetical file defining ValidateIsFile() +// +// DEFINE_int32(end, 1000, "The last record to read"); +// +// DEFINE_string(filename, "my_file.txt", "The file to read"); +// // Crash if the specified file does not exist. +// static bool dummy = RegisterFlagValidator(&FLAGS_filename, +// &ValidateIsFile); +// +// DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...) +// +// void MyFunc() { +// if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end); +// } +// +// Then, at the command-line: +// ./foo --noverbose --start=5 --end=100 +// +// For more details, see +// doc/gflags.html +// +// --- A note about thread-safety: +// +// We describe many functions in this routine as being thread-hostile, +// thread-compatible, or thread-safe. Here are the meanings we use: +// +// thread-safe: it is safe for multiple threads to call this routine +// (or, when referring to a class, methods of this class) +// concurrently. +// thread-hostile: it is not safe for multiple threads to call this +// routine (or methods of this class) concurrently. In gflags, +// most thread-hostile routines are intended to be called early in, +// or even before, main() -- that is, before threads are spawned. +// thread-compatible: it is safe for multiple threads to read from +// this variable (when applied to variables), or to call const +// methods of this class (when applied to classes), as long as no +// other thread is writing to the variable or calling non-const +// methods of this class. 
+
+#ifndef GFLAGS_GFLAGS_H_
+#define GFLAGS_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+#include "gflags/gflags_declare.h" // IWYU pragma: export
+
+
+// We always want to export variables defined in user code
+#ifndef GFLAGS_DLL_DEFINE_FLAG
+#  if GFLAGS_IS_A_DLL && defined(_MSC_VER)
+#    define GFLAGS_DLL_DEFINE_FLAG __declspec(dllexport)
+#  else
+#    define GFLAGS_DLL_DEFINE_FLAG
+#  endif
+#endif
+
+
+namespace GFLAGS_NAMESPACE {
+
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file. You may also find
+// it useful to register a validator with the flag. This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const bool* flag, bool (*validate_fn)(const char*, bool));
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const int32* flag, bool (*validate_fn)(const char*, int32));
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const uint32* flag, bool (*validate_fn)(const char*, uint32));
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const int64* flag, bool (*validate_fn)(const char*, int64));
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const uint64* flag, bool (*validate_fn)(const char*, uint64));
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const double* flag, bool (*validate_fn)(const char*, double));
+extern GFLAGS_DLL_DECL bool RegisterFlagValidator(const std::string* flag, bool (*validate_fn)(const char*, const std::string&));
+
+// Convenience macro for the registration of a flag validator
+#define DEFINE_validator(name, validator) \
+    static const bool name##_validator_registered = \
+        GFLAGS_NAMESPACE::RegisterFlagValidator(&FLAGS_##name, validator)
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags. Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+//
+// No need to export this data only structure from DLL, avoiding VS warning 4251.
+struct CommandLineFlagInfo {
+  std::string name;          // the name of the flag
+  std::string type;          // the type of the flag: int32, etc
+  std::string description;   // the "help text" associated with the flag
+  std::string current_value; // the current value, as a string
+  std::string default_value; // the default value, as a string
+  std::string filename;      // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;     // true if RegisterFlagValidator called on this flag
+  bool is_default;           // true if the flag has the default value and
+                             // has not been set explicitly from the cmdline
+                             // or via SetCommandLineOption
+  const void* flag_ptr;      // pointer to the flag's current value (i.e. FLAGS_foo)
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(user) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// gflags_unittest.sh
+extern GFLAGS_DLL_DECL void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in gflags_reporting.cc.
+extern GFLAGS_DLL_DECL void ShowUsageWithFlags(const char *argv0); // what --help does
+extern GFLAGS_DLL_DECL void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern GFLAGS_DLL_DECL std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetArgv(int argc, const char** argv);
+
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern GFLAGS_DLL_DECL const std::vector<std::string>& GetArgvs();
+extern GFLAGS_DLL_DECL const char* GetArgv();                    // all of argv as a string
+extern GFLAGS_DLL_DECL const char* GetArgv0();                   // only argv0
+extern GFLAGS_DLL_DECL uint32 GetArgvSum();                      // simple checksum of argv
+extern GFLAGS_DLL_DECL const char* ProgramInvocationName();      // argv0, or "UNKNOWN" if not set
+extern GFLAGS_DLL_DECL const char* ProgramInvocationShortName(); // basename(argv0)
+
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern GFLAGS_DLL_DECL const char* ProgramUsage();               // string set by SetUsageMessage()
+
+// VersionString() is thread-safe as long as SetVersionString() is only
+// called before any threads start.
+extern GFLAGS_DLL_DECL const char* VersionString();              // string set by SetVersionString()
+
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro). But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern GFLAGS_DLL_DECL bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern GFLAGS_DLL_DECL bool GetCommandLineFlagInfo(const char* name, CommandLineFlagInfo* OUTPUT); + +// Return the CommandLineFlagInfo of the flagname. exit() if name not found. +// Example usage, to check if a flag's value is currently the default value: +// if (GetCommandLineFlagInfoOrDie("foo").is_default) ... +extern GFLAGS_DLL_DECL CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name); + +enum GFLAGS_DLL_DECL FlagSettingMode { + // update the flag's value (can call this multiple times). + SET_FLAGS_VALUE, + // update the flag's value, but *only if* it has not yet been updated + // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef". + SET_FLAG_IF_DEFAULT, + // set the flag's default value to this. If the flag has not yet updated + // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef") + // change the flag's current value to the new default value as well. + SET_FLAGS_DEFAULT +}; + +// Set a particular flag ("command line option"). Returns a string +// describing the new value that the option has been set to. The +// return value API is not well-specified, so basically just depend on +// it to be empty if the setting failed for some reason -- the name is +// not a valid flag name, or the value is not a valid value -- and +// non-empty else. + +// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case) +extern GFLAGS_DLL_DECL std::string SetCommandLineOption (const char* name, const char* value); +extern GFLAGS_DLL_DECL std::string SetCommandLineOptionWithMode(const char* name, const char* value, FlagSettingMode set_mode); + + +// -------------------------------------------------------------------- +// Saves the states (value, default value, whether the user has set +// the flag, registered validators, etc) of all flags, and restores +// them when the FlagSaver is destroyed. This is very useful in +// tests, say, when you want to let your tests change the flags, but +// make sure that they get reverted to the original states when your +// test is complete. +// +// Example usage: +// void TestFoo() { +// FlagSaver s1; +// FLAG_foo = false; +// FLAG_bar = "some value"; +// +// // test happens here. You can return at any time +// // without worrying about restoring the FLAG values. +// } +// +// Note: This class is marked with GFLAGS_ATTRIBUTE_UNUSED because all +// the work is done in the constructor and destructor, so in the standard +// usage example above, the compiler would complain that it's an +// unused variable. +// +// This class is thread-safe. However, its destructor writes to +// exactly the set of flags that have changed value during its +// lifetime, so concurrent _direct_ access to those flags +// (i.e. FLAGS_foo instead of {Get,Set}CommandLineOption()) is unsafe. + +class GFLAGS_DLL_DECL FlagSaver { + public: + FlagSaver(); + ~FlagSaver(); + + private: + class FlagSaverImpl* impl_; // we use pimpl here to keep API steady + + FlagSaver(const FlagSaver&); // no copying! + void operator=(const FlagSaver&); +}__attribute((unused)); + +// -------------------------------------------------------------------- +// Some deprecated or hopefully-soon-to-be-deprecated functions. + +// This is often used for logging. TODO(csilvers): figure out a better way +extern GFLAGS_DLL_DECL std::string CommandlineFlagsIntoString(); +// Usually where this is used, a FlagSaver should be used instead. 
+extern GFLAGS_DLL_DECL
+bool ReadFlagsFromString(const std::string& flagfilecontents,
+                         const char* prog_name,
+                         bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern GFLAGS_DLL_DECL bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern GFLAGS_DLL_DECL bool ReadFromFlagsFile(const std::string& filename, const char* prog_name, bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval. If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value. NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern GFLAGS_DLL_DECL bool BoolFromEnv(const char *varname, bool defval);
+extern GFLAGS_DLL_DECL int32 Int32FromEnv(const char *varname, int32 defval);
+extern GFLAGS_DLL_DECL uint32 Uint32FromEnv(const char *varname, uint32 defval);
+extern GFLAGS_DLL_DECL int64 Int64FromEnv(const char *varname, int64 defval);
+extern GFLAGS_DLL_DECL uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern GFLAGS_DLL_DECL double DoubleFromEnv(const char *varname, double defval);
+extern GFLAGS_DLL_DECL const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse gflags from main():
+
+// Set the "usage" message for this program. For example:
+//   string usage("This program does nothing. Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetUsageMessage(const std::string& usage);
+
+// Sets the version string, which is emitted with --version.
+// For instance: SetVersionString("1.3");
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetVersionString(const std::string& version);
+
+
+// Looks for flags in argv and parses them. Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used. Returns the index (into argv)
+// of the first non-flag argument.
+// See top-of-file for more details on this function.
+#ifndef SWIG // In swig, use ParseCommandLineFlagsScript() instead.
+extern GFLAGS_DLL_DECL uint32 ParseCommandLineFlags(int *argc, char*** argv, bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing. If a flag is
+// defined more than once in the command line or flag file, the last
+// definition is used. Returns the index (into argv) of the first
+// non-flag argument. (If remove_flags is true, will always return 1.)
+extern GFLAGS_DLL_DECL uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv, bool remove_flags);
+
+// This is actually defined in gflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern GFLAGS_DLL_DECL void HandleCommandLineHelpFlags(); // in gflags_reporting.cc
+
+// Allow command line reparsing. Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse. Thread-hostile; meant to be called before any threads
+// are spawned.
+extern GFLAGS_DLL_DECL void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized. Only flags
+// registered since the last parse will be recognized. Any flag value
+// must be provided as part of the argument using "=", not as a
+// separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+extern GFLAGS_DLL_DECL void ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags. This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind. It is not required for normal
+// operation, or for the google perftools heap-checker. It must only
+// be called when the process is about to exit, and all threads that
+// might access flags are quiescent. Referencing flags after this is
+// called will have unexpected consequences. This is not safe to run
+// when multiple threads might be running: the function is
+// thread-hostile.
+extern GFLAGS_DLL_DECL void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used. They're kind of hairy. A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value. In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>. This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging". We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace. It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly. The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace. The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying
+//   extern GFLAGS_DLL_DECL bool FLAGS_whatever;
+// or some such instead. We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class GFLAGS_DLL_DECL FlagRegisterer {
+ public:
+  // We instantiate this template ctor for all supported types,
+  // so it is possible to place implementation of the FlagRegisterer ctor in
+  // .cc file.
+  // Calling this constructor with unsupported type will produce linker error.
+  template <typename FlagType>
+  FlagRegisterer(const char* name,
+                 const char* help, const char* filename,
+                 FlagType* current_storage, FlagType* defvalue_storage);
+};
+
+// Force compiler to not generate code for the given template specialization.
+#if defined(_MSC_VER) && _MSC_VER < 1800 // Visual Studio 2013 version 12.0
+  #define GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(type)
+#else
+  #define GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(type) \
+    extern template GFLAGS_DLL_DECL FlagRegisterer::FlagRegisterer( \
+        const char* name, const char* help, const char* filename, \
+        type* current_storage, type* defvalue_storage)
+#endif
+
+// Do this for all supported flag types.
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(bool);
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(int32);
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(uint32);
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(int64);
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(uint64);
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(double);
+GFLAGS_DECLARE_FLAG_REGISTERER_CTOR(std::string);
+
+#undef GFLAGS_DECLARE_FLAG_REGISTERER_CTOR
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern GFLAGS_DLL_DECL const char kStrippedFlagHelp[];
+
+
+} // namespace GFLAGS_NAMESPACE
+
+
+#ifndef SWIG // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) \
+  (false ? (txt) : GFLAGS_NAMESPACE::kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value. However,
+// we have a third variable, which is where value is assigned; it's a
+// constant. This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant. We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>. This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype { \
+    static const type FLAGS_nono##name = value; \
+    /* We always want to export defined variables, dll or no */ \
+    GFLAGS_DLL_DEFINE_FLAG type FLAGS_##name = FLAGS_nono##name; \
+    static type FLAGS_no##name = FLAGS_nono##name; \
+    static GFLAGS_NAMESPACE::FlagRegisterer o_##name( \
+      #name, MAYBE_STRIPPED_HELP(help), __FILE__, \
+      &FLAGS_##name, &FLAGS_no##name); \
+  } \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool. These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// COMPILE_ASSERT.
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+    (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template <typename From> double GFLAGS_DLL_DECL IsBoolFlag(const From& from);
+GFLAGS_DLL_DECL bool IsBoolFlag(bool from);
+} // namespace fLB
+
+// Here are the actual DEFINE_*-macros. The respective DECLARE_*-macros
+// are in a separate include, gflags_declare.h, for reducing
+// the physical transitive size for DECLARE use.
+#define DEFINE_bool(name, val, txt) \
+  namespace fLB { \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[ \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double))? 1: -1]; \
+  } \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DEFINE_int32(name, val, txt) \
+   DEFINE_VARIABLE(GFLAGS_NAMESPACE::int32, I, \
+                   name, val, txt)
+
+#define DEFINE_uint32(name,val, txt) \
+   DEFINE_VARIABLE(GFLAGS_NAMESPACE::uint32, U, \
+                   name, val, txt)
+
+#define DEFINE_int64(name, val, txt) \
+   DEFINE_VARIABLE(GFLAGS_NAMESPACE::int64, I64, \
+                   name, val, txt)
+
+#define DEFINE_uint64(name,val, txt) \
+   DEFINE_VARIABLE(GFLAGS_NAMESPACE::uint64, U64, \
+                   name, val, txt)
+
+#define DEFINE_double(name, val, txt) \
+   DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later). To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later. It's not perfect, but the best we can do.
+
+namespace fLS {
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+
+// Auxiliary class used to explicitly call destructor of string objects
+// allocated using placement new during static program deinitialization.
+// The destructor MUST be an inline function such that the explicit
+// destruction occurs in the same compilation unit as the placement new.
+class StringFlagDestructor {
+  void *current_storage_;
+  void *defvalue_storage_;
+
+public:
+
+  StringFlagDestructor(void *current, void *defvalue)
+  : current_storage_(current), defvalue_storage_(defvalue) {}
+
+  ~StringFlagDestructor() {
+    reinterpret_cast<clstring*>(current_storage_ )->~clstring();
+    reinterpret_cast<clstring*>(defvalue_storage_)->~clstring();
+  }
+};
+
+} // namespace fLS
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring. And we need a temporary place to put val
+// so we don't have to evaluate it twice. Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10. See
+//    http://code.google.com/p/google-gflags/issues/detail?id=20
+#define DEFINE_string(name, val, txt) \
+  namespace fLS { \
+    using ::fLS::clstring; \
+    using ::fLS::StringFlagDestructor; \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2]; \
+    clstring* const FLAGS_no##name = ::fLS:: \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val); \
+    static GFLAGS_NAMESPACE::FlagRegisterer o_##name( \
+        #name, MAYBE_STRIPPED_HELP(txt), __FILE__, \
+        FLAGS_no##name, new (s_##name[1].s) clstring(*FLAGS_no##name)); \
+    static StringFlagDestructor d_##name(s_##name[0].s, s_##name[1].s); \
+    extern GFLAGS_DLL_DEFINE_FLAG clstring& FLAGS_##name; \
+    using fLS::FLAGS_##name; \
+    clstring& FLAGS_##name = *FLAGS_no##name; \
+  } \
+  using fLS::FLAGS_##name
+
+#endif // SWIG
+
+// Import gflags library symbols into alternative/deprecated namespace(s)
+#include "gflags_gflags.h"
+#endif // GFLAGS_GFLAGS_H_
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/gflags_completions.h b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/gflags_completions.h
new file mode 100644
index 0000000000000000000000000000000000000000..15637eb3de853a79eccb9e7ba71771c607767a2e
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/gflags_completions.h
@@ -0,0 +1,119 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word. If this
+// flag is empty, nothing happens here. If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion. Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programmatically determine completions for the
+// current 'cursor word'. It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any). Bash then expects a sequence of output lines to be
+// printed to stdout. If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing. If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that. Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns). Furthermore, we'll try to
+// make bash order the output such that the most useful or relevant
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented. Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching. Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output. If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for. Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C \
+ '/home/build/eng/bash/bash_completions.sh --tab_completion_columns $COLUMNS' \
+  time env binary_name another_binary [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands. That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GFLAGS_COMPLETIONS_H_
+#define GFLAGS_COMPLETIONS_H_
+
+namespace google {
+extern void HandleCommandLineCompletions(void);
+}
+
+#endif // GFLAGS_COMPLETIONS_H_
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/gflags_declare.h b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/gflags_declare.h
new file mode 100644
index 0000000000000000000000000000000000000000..a9c6759707846f63ab97a66c13cb446975364448
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/gflags_declare.h
@@ -0,0 +1,155 @@
+// Copyright (c) 1999, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+//
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// command line flags.
+
+#ifndef GFLAGS_DECLARE_H_
+#define GFLAGS_DECLARE_H_
+
+
+// ---------------------------------------------------------------------------
+// Namespace of gflags library symbols.
+#define GFLAGS_NAMESPACE google
+
+// ---------------------------------------------------------------------------
+// Windows DLL import/export.
+
+// Whether gflags library is a DLL.
+//
+// Set to 1 by default when the shared gflags library was built on Windows.
+// Must be overwritten when this header file is used with the optionally also
+// built static library instead; set by CMake's INTERFACE_COMPILE_DEFINITIONS.
+#ifndef GFLAGS_IS_A_DLL
+# define GFLAGS_IS_A_DLL 1
+#endif
+
+// We always want to import the symbols of the gflags library.
+#ifndef GFLAGS_DLL_DECL
+# if GFLAGS_IS_A_DLL && defined(_MSC_VER)
+#  define GFLAGS_DLL_DECL __declspec(dllimport)
+# elif defined(__GNUC__) && __GNUC__ >= 4
+#  define GFLAGS_DLL_DECL __attribute__((visibility("default")))
+# else
+#  define GFLAGS_DLL_DECL
+# endif
+#endif
+
+// We always want to import variables declared in user code.
+#ifndef GFLAGS_DLL_DECLARE_FLAG
+# if GFLAGS_IS_A_DLL && defined(_MSC_VER)
+#  define GFLAGS_DLL_DECLARE_FLAG __declspec(dllimport)
+# elif defined(__GNUC__) && __GNUC__ >= 4
+#  define GFLAGS_DLL_DECLARE_FLAG __attribute__((visibility("default")))
+# else
+#  define GFLAGS_DLL_DECLARE_FLAG
+# endif
+#endif
+
+// ---------------------------------------------------------------------------
+// Flag types
+#include <string>
+#if 1
+# include <stdint.h>    // the normal place uint32_t is defined
+#elif 1
+# include <sys/types.h> // the normal place u_int32_t is defined
+#elif 1
+# include <inttypes.h>  // a third place for uint32_t or u_int32_t
+#endif
+
+namespace GFLAGS_NAMESPACE {
+
+#if 1 // C99
+typedef int32_t          int32;
+typedef uint32_t         uint32;
+typedef int64_t          int64;
+typedef uint64_t         uint64;
+#elif 0 // BSD
+typedef int32_t          int32;
+typedef u_int32_t        uint32;
+typedef int64_t          int64;
+typedef u_int64_t        uint64;
+#elif 0 // Windows
+typedef __int32          int32;
+typedef unsigned __int32 uint32;
+typedef __int64          int64;
+typedef unsigned __int64 uint64;
+#else
+# error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+} // namespace GFLAGS_NAMESPACE
+
+
+namespace fLS {
+
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included). Save the current meaning now and use it in the macros.
+typedef std::string clstring; + +} // namespace fLS + + +#define DECLARE_VARIABLE(type, shorttype, name) \ + /* We always want to import declared variables, dll or no */ \ + namespace fL##shorttype { extern GFLAGS_DLL_DECLARE_FLAG type FLAGS_##name; } \ + using fL##shorttype::FLAGS_##name + +#define DECLARE_bool(name) \ + DECLARE_VARIABLE(bool, B, name) + +#define DECLARE_int32(name) \ + DECLARE_VARIABLE(::GFLAGS_NAMESPACE::int32, I, name) + +#define DECLARE_uint32(name) \ + DECLARE_VARIABLE(::GFLAGS_NAMESPACE::uint32, U, name) + +#define DECLARE_int64(name) \ + DECLARE_VARIABLE(::GFLAGS_NAMESPACE::int64, I64, name) + +#define DECLARE_uint64(name) \ + DECLARE_VARIABLE(::GFLAGS_NAMESPACE::uint64, U64, name) + +#define DECLARE_double(name) \ + DECLARE_VARIABLE(double, D, name) + +#define DECLARE_string(name) \ + /* We always want to import declared variables, dll or no */ \ + namespace fLS { \ + extern GFLAGS_DLL_DECLARE_FLAG ::fLS::clstring& FLAGS_##name; \ + } \ + using fLS::FLAGS_##name + +#endif // GFLAGS_DECLARE_H_ diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/gflags_gflags.h b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/gflags_gflags.h new file mode 100644 index 0000000000000000000000000000000000000000..3780704e1caa005e3102c189291b73a1972fa080 --- /dev/null +++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/gflags/gflags_gflags.h @@ -0,0 +1,99 @@ +// Copyright (c) 2014, Andreas Schuh +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// ----------------------------------------------------------------------------- +// Imports the gflags library symbols into an alternative/deprecated namespace. 
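As an aside on the DECLARE_*/DEFINE_* pairing these macros implement: a flag is defined in exactly one translation unit and declared anywhere else it is read. A minimal sketch (the file and flag names are illustrative, not taken from this benchmark):

```cpp
// flags_def.cc -- the one translation unit that defines the flag.
#include <gflags/gflags.h>
DEFINE_int32(batch_size, 1, "Number of images per inference batch");

// reader.cc -- any other file only declares it.
#include <gflags/gflags_declare.h>
DECLARE_int32(batch_size);  // expands to an extern declaration in namespace fLI
                            // plus: using fLI::FLAGS_batch_size;

int CurrentBatchSize() {
    return FLAGS_batch_size;  // reads the variable defined in flags_def.cc
}
```

The per-type namespaces (fLB, fLI, fLS, ...) exist so that a mismatched DECLARE/DEFINE type becomes a link-time error rather than silent undefined behavior.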
+
+#ifndef GFLAGS_GFLAGS_H_
+# error The internal header gflags_gflags.h may only be included by gflags.h
+#endif
+
+#ifndef GFLAGS_NS_GFLAGS_H_
+#define GFLAGS_NS_GFLAGS_H_
+
+
+namespace gflags {
+
+
+using GFLAGS_NAMESPACE::int32;
+using GFLAGS_NAMESPACE::uint32;
+using GFLAGS_NAMESPACE::int64;
+using GFLAGS_NAMESPACE::uint64;
+
+using GFLAGS_NAMESPACE::RegisterFlagValidator;
+using GFLAGS_NAMESPACE::CommandLineFlagInfo;
+using GFLAGS_NAMESPACE::GetAllFlags;
+using GFLAGS_NAMESPACE::ShowUsageWithFlags;
+using GFLAGS_NAMESPACE::ShowUsageWithFlagsRestrict;
+using GFLAGS_NAMESPACE::DescribeOneFlag;
+using GFLAGS_NAMESPACE::SetArgv;
+using GFLAGS_NAMESPACE::GetArgvs;
+using GFLAGS_NAMESPACE::GetArgv;
+using GFLAGS_NAMESPACE::GetArgv0;
+using GFLAGS_NAMESPACE::GetArgvSum;
+using GFLAGS_NAMESPACE::ProgramInvocationName;
+using GFLAGS_NAMESPACE::ProgramInvocationShortName;
+using GFLAGS_NAMESPACE::ProgramUsage;
+using GFLAGS_NAMESPACE::VersionString;
+using GFLAGS_NAMESPACE::GetCommandLineOption;
+using GFLAGS_NAMESPACE::GetCommandLineFlagInfo;
+using GFLAGS_NAMESPACE::GetCommandLineFlagInfoOrDie;
+using GFLAGS_NAMESPACE::FlagSettingMode;
+using GFLAGS_NAMESPACE::SET_FLAGS_VALUE;
+using GFLAGS_NAMESPACE::SET_FLAG_IF_DEFAULT;
+using GFLAGS_NAMESPACE::SET_FLAGS_DEFAULT;
+using GFLAGS_NAMESPACE::SetCommandLineOption;
+using GFLAGS_NAMESPACE::SetCommandLineOptionWithMode;
+using GFLAGS_NAMESPACE::FlagSaver;
+using GFLAGS_NAMESPACE::CommandlineFlagsIntoString;
+using GFLAGS_NAMESPACE::ReadFlagsFromString;
+using GFLAGS_NAMESPACE::AppendFlagsIntoFile;
+using GFLAGS_NAMESPACE::ReadFromFlagsFile;
+using GFLAGS_NAMESPACE::BoolFromEnv;
+using GFLAGS_NAMESPACE::Int32FromEnv;
+using GFLAGS_NAMESPACE::Uint32FromEnv;
+using GFLAGS_NAMESPACE::Int64FromEnv;
+using GFLAGS_NAMESPACE::Uint64FromEnv;
+using GFLAGS_NAMESPACE::DoubleFromEnv;
+using GFLAGS_NAMESPACE::StringFromEnv;
+using GFLAGS_NAMESPACE::SetUsageMessage;
+using GFLAGS_NAMESPACE::SetVersionString;
+using GFLAGS_NAMESPACE::ParseCommandLineNonHelpFlags;
+using GFLAGS_NAMESPACE::HandleCommandLineHelpFlags;
+using GFLAGS_NAMESPACE::AllowCommandLineReparsing;
+using GFLAGS_NAMESPACE::ReparseCommandLineNonHelpFlags;
+using GFLAGS_NAMESPACE::ShutDownCommandLineFlags;
+using GFLAGS_NAMESPACE::FlagRegisterer;
+
+#ifndef SWIG
+using GFLAGS_NAMESPACE::ParseCommandLineFlags;
+#endif
+
+} // namespace gflags
+#endif // GFLAGS_NS_GFLAGS_H_
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/infer_engine.h b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/infer_engine.h
new file mode 100644
index 0000000000000000000000000000000000000000..cc7c5e0aa74b720099c01808be36fd4586ceec12
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/infer_engine.h
@@ -0,0 +1,45 @@
+#ifndef BENCHMARK_INFER_ENGINE_H
+#define BENCHMARK_INFER_ENGINE_H
+#include "util.h"
+#include "acl/acl_base.h"
+#include "post_process.h"
+#include <vector>     // NOTE: the original bracketed header names are missing
+#include <string>     // from this diff; the standard headers named here are
+#include <memory>     // assumed from usage, not the verbatim originals.
+#include <thread>
+#include <mutex>
+#include "acl/acl_mdl.h"
+#include <queue>
+#include <functional>
+
+aclError InitContext(const char* configPath = "");
+aclError UnInitContext();
+aclError LoadModel();
+aclError InitInput(std::vector<std::string> files);
+aclError Inference();
+aclError PostProcess();
+aclError DvppSetup();
+aclError DvppInitInput(std::vector<std::string> files);
+aclError UnloadModel();
+void getImgResizeShape();
+acldvppRoiConfig* InitCropRoiConfig(uint32_t width, uint32_t height);
+
+/*
+ * @brief : Initialize the center-crop configuration.
+ * @param [in] uint32_t newInputWidth : input image width (after the aspect-ratio-preserving resize)
+ * @param [in] uint32_t newInputHeight :
input image height (after the aspect-ratio-preserving resize)
+ * @param [in] uint32_t modelInputWidth : width fed to the model after the center crop
+ * @param [in] uint32_t modelInputHeight : height fed to the model after the center crop
+ * @return : acldvppRoiConfig: the center-crop configuration
+ */
+acldvppRoiConfig* InitCropCenterRoiConfig(uint32_t newInputWidth, uint32_t newInputHeight, uint32_t modelInputWidth, uint32_t modelInputHeight);
+
+/*
+ * @brief : Scale the shorter of width/height to RESIZE_MIN (256) and scale the longer side by the same ratio.
+ * @param [in] uint32_t width : input picture width
+ * @param [in] uint32_t height : input picture height
+ * @param [in] uint32_t &newInputWidth : width after the proportional resize
+ * @param [in] uint32_t &newInputHeight : height after the proportional resize
+ */
+void SmallSizeAtLeast(uint32_t width, uint32_t height, uint32_t& newInputWidth, uint32_t& newInputHeight);
+#endif
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/post_process.h b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/post_process.h
new file mode 100644
index 0000000000000000000000000000000000000000..d413c66567109cb8647cf7b385ad66478f217af8
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/post_process.h
@@ -0,0 +1,5 @@
+#ifndef BENCHMARK_POST_PROCESS_H
+#define BENCHMARK_POST_PROCESS_H
+#include "util.h"
+aclError SaveBinPostprocess();
+#endif
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/util.h b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/util.h
new file mode 100644
index 0000000000000000000000000000000000000000..734db74083d316e3c6fdc52c3f5caf0548789684
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/inc/util.h
@@ -0,0 +1,157 @@
+#ifndef BENCHMARK_UTIL_H
+#define BENCHMARK_UTIL_H
+#include <string>        // NOTE: the original bracketed header names are missing
+#include <vector>        // from this diff; the ones given here are assumed from
+#include <unordered_map> // how the declarations below use the standard library.
+#include "acl/acl_base.h"
+#include "acl/acl_mdl.h"
+#include "acl/acl_rt.h"
+#include "acl/ops/acl_dvpp.h"
+#include <iostream>
+#include <fstream>
+#include <cstdio>
+#include <cstring>
+#include <utility>
+#include <sys/time.h>
+
+// self defined problem code.
+const int ACL_ERROR_PATH_INVALID = 101;
+const int ACL_ERROR_CREATE_DATASET_FAILED = 102;
+const int ACL_ERROR_PARSE_PARAM_FAILED = 103;
+const int ACL_ERROR_DVPP_ERROR = 104;
+const int ACL_ERROR_OTHERS = 255;
+#define MODEL_INPUT_NUM_MAX (4)
+#define MODEL_INPUT_OUTPUT_NUM_MAX (16)
+
+#define LOG(fmt, args...) \
+    do { \
+        printf(fmt, ##args); \
+    } while(0)
+
+
+#define START_PROC \
+    struct timeval start, end; \
+    long long time_use; \
+    do { \
+        gettimeofday(&start, NULL); \
+    } while (0);
+
+
+#define END_PROC \
+    do { \
+        gettimeofday(&end, NULL); \
+        time_use = (end.tv_sec-start.tv_sec)*1000000+(end.tv_usec-start.tv_usec); \
+        LOG("time use: %lld us\n", time_use); \
+    } while (0);
+
+
+#define CHECK_ACL_RET(msg, ret) \
+    if (ret != ACL_ERROR_NONE) { \
+        std::cout << msg << ", ret "<< ret << std::endl; \
+        return ret; \
+    }
+
+
+#define CHECK_WITH_RET(condition, ret, msg) \
+    if(!(condition)) { \
+        std::cout << msg << ", ret "<< ret << std::endl; \
+        return ret; \
+    }
+
+
+#define CHECK_RET(ret) \
+    if(ret != ACL_ERROR_NONE) { \
+        return ret; \
+    }
+
+bool FolderExists(std::string foldname);
+
+bool FileExists(std::string filename);
+
+char* ReadBinFile(std::string fileName, uint32_t& fileSize);
+
+aclError GetFiles(std::string path, std::vector<std::string>& files);
+
+aclError FreeDevMemory(aclmdlDataset* dataset);
+
+aclError DestroyDatasetResurce(aclmdlDataset* dataset, uint32_t flag);
+
+void* ReadFile(std::string fileLocation, uint64_t &fileSize);
+
+struct DvppConfig {
+    uint32_t resizedWidth;
+    uint32_t resizedHeight;
+    // NOTE: the map's template arguments were lost from this diff; mapping a
+    // file name to its (width, height) pair matches how imgSizes is used and
+    // is assumed here.
+    std::unordered_map<std::string, std::pair<uint32_t, uint32_t>> imgSizes;
+};
+
+struct ModelInfo
+{
+    aclFormat Format;
+    const char* Name;
+    size_t size;
+    size_t dimCount;
+    int64_t dims[ACL_MAX_DIM_CNT];
+    aclDataType Type;
+};
+
+struct Config {
+    std::string om;
+    std::string dataDir;
+    std::string outDir;
+    DvppConfig dvppConfig;
+    bool useDvpp;
+    size_t batchSize;
+    ModelInfo inputInfo[MODEL_INPUT_OUTPUT_NUM_MAX];
+    ModelInfo outputInfo[MODEL_INPUT_OUTPUT_NUM_MAX];
+    size_t inputNum;
+    size_t outputNum;
+    aclmdlDesc* modelDesc;
+    uint32_t modelId;
+    aclrtContext context;
+    char* modelData_ptr;
+    void* devMem_ptr;
+    void* weightMem_ptr;
+    std::string imgType;
+    std::string modelType;
+    uint32_t deviceId;
+    uint32_t loopNum;
+    std::string framework;
+    int64_t curOutputSize[MODEL_INPUT_OUTPUT_NUM_MAX];
+    Config()
+    {
+        om = "";
+        dataDir = "";
+        batchSize = 0;
+        useDvpp = 0;
+        inputNum = 0;
+        outputNum = 0;
+        modelDesc = nullptr;
+        modelId = 0;
+        context = nullptr;
+        imgType = "";
+        modelType = "";
+        deviceId = 0;
+        loopNum = 1;
+        framework = "caffe";
+        outDir = "../../results";
+        modelData_ptr = nullptr;
+        devMem_ptr = nullptr;
+        weightMem_ptr = nullptr;
+    }
+};
+
+struct Resnet50Result {
+    int top1;
+    int top5;
+    int total;
+    // NOTE: template arguments assumed (lost from this diff); a map from file
+    // name to label fits how the comparison results are accumulated.
+    std::unordered_map<std::string, int> cmp;
+    Resnet50Result(): top1(0), top5(0), total(0) {};
+};
+
+struct DataFrame {
+    std::vector<std::string> fileNames;
+    aclmdlDataset* dataset;
+};
+
+#endif
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/infer_engine.cpp b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/infer_engine.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..7f2b8507a510844689239d71443d8b4b4480be3d
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/infer_engine.cpp
@@ -0,0 +1,728 @@
+/* *
+* Copyright 2020 Huawei Technologies Co., Ltd
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* */
+
+#include "acl/acl.h"
+#include "infer_engine.h"
+#include "acl/acl_mdl.h"
+#include "acl/acl_rt.h"
+#include "acl/ops/acl_dvpp.h"
+#include <sys/time.h>   // NOTE: the original bracketed header names are missing
+#include <iostream>     // from this diff; these two are assumed from usage.
+using namespace std;
+
+// NOTE: the template arguments below were lost from this diff and are
+// reconstructed from how the variables are used in this file.
+std::unordered_map<std::string, long long> dvppTime;
+extern Resnet50Result resnet50Res;
+extern Config cfg;
+extern aclError ret;
+extern int processedCnt;
+extern long long inferTime;
+aclrtContext context;
+uint32_t modelId;
+aclmdlDesc *modelDesc;
+std::vector<std::string> files;
+DataFrame inputDataframe;
+DataFrame outputDataframe;
+aclDataBuffer *yoloImgInfo;
+aclrtStream stream = nullptr;
+acldvppChannelDesc *dvpp_channel_desc = nullptr;
+std::unordered_map<std::string, std::pair<uint32_t, uint32_t>> imgSizes;
+
+#define RESIZE_MIN 256
+#define NUM_2 2
+#define NUM_3 3
+#define NUM_16 16
+#define NUM_128 128
+
+uint32_t resizedWidth;
+uint32_t resizedHeight;
+uint32_t resizedWidthAligned;
+uint32_t resizedHeightAligned;
+uint32_t resizedOutputBufferSize;
+
+void getImgResizeShape()
+{
+    if (ACL_FORMAT_NCHW == cfg.inputInfo[0].Format) {
+        resizedHeight = cfg.inputInfo[0].dims[NUM_2];
+        resizedWidth = cfg.inputInfo[0].dims[NUM_3];
+    } else if (ACL_FORMAT_NHWC == cfg.inputInfo[0].Format) {
+        resizedHeight = cfg.inputInfo[0].dims[1];
+        resizedWidth = cfg.inputInfo[0].dims[NUM_2];
+    }
+    return;
+}
+
+aclError InitContext(const char *configPath)
+{
+    LOG("context init start\n");
+    ret = aclInit(configPath);
+    CHECK_ACL_RET("acl init failed", ret);
+
+    ret = aclrtSetDevice(cfg.deviceId);
+    CHECK_ACL_RET("open device failed ret", ret);
+
+    ret = aclrtCreateContext(&context, cfg.deviceId);
+    CHECK_ACL_RET("create context failed", ret);
+
+    cfg.context = context;
+    LOG("context init done\n");
+    return ACL_ERROR_NONE;
+}
+
+aclError UnInitContext()
+{
+    ret = aclrtDestroyContext(context);
+    CHECK_ACL_RET("destroy context failed", ret);
+    LOG("destroy context done\n");
+
+    ret = aclrtResetDevice(cfg.deviceId);
+    CHECK_ACL_RET("reset device failed", ret);
+
+    ret = aclFinalize();
+    CHECK_ACL_RET("finalize failed", ret);
+    LOG("reset device done\n");
+
+    return ACL_ERROR_NONE;
+}
+
+aclError LoadModel()
+{
+    LOG("load model start\n");
+    size_t memSize;
+    size_t weightsize;
+    uint32_t modelSize = 0;
+    std::string modelPath = cfg.om;
+
+    cfg.modelData_ptr = ReadBinFile(modelPath, modelSize);
+    CHECK_WITH_RET(cfg.modelData_ptr != nullptr, ACL_ERROR_READ_MODEL_FAILURE, "can't read model");
+
+    aclError ret = aclmdlQuerySizeFromMem(cfg.modelData_ptr, modelSize, &memSize, &weightsize);
+    CHECK_ACL_RET("query memory size failed", ret);
+
+    ret = aclrtMalloc(&(cfg.devMem_ptr), memSize, ACL_MEM_MALLOC_HUGE_ONLY);
+    CHECK_ACL_RET("alloc dev_ptr failed", ret);
+    ret = aclrtMalloc(&(cfg.weightMem_ptr), weightsize, ACL_MEM_MALLOC_HUGE_ONLY);
+    CHECK_ACL_RET("alloc weight_ptr failed", ret);
+
+    ret = aclmdlLoadFromMemWithMem(cfg.modelData_ptr, modelSize, &modelId, cfg.devMem_ptr, memSize, cfg.weightMem_ptr,
+                                   weightsize);
+    CHECK_ACL_RET("load model from memory failed", ret);
+    LOG("Load model success.
memSize: %lu, weightSize: %lu.\n", memSize, weightsize); + + modelDesc = aclmdlCreateDesc(); + CHECK_WITH_RET(modelDesc != nullptr, ACL_ERROR_READ_MODEL_FAILURE, "create model desc failed"); + ret = aclmdlGetDesc(modelDesc, modelId); + CHECK_ACL_RET("get model desc failed", ret); + + cfg.modelDesc = modelDesc; + cfg.modelId = modelId; + + LOG("load model done\n"); + return ACL_ERROR_NONE; +} + +aclError DvppSetup() +{ + ret = aclrtSetCurrentContext(context); + if (ret != ACL_ERROR_NONE) { + LOG("Set context failed\n"); + return ret; + } + + ret = aclrtCreateStream(&stream); + if (ret != ACL_ERROR_NONE) { + LOG("create dvpp stream failed\n"); + return ret; + } + + dvpp_channel_desc = acldvppCreateChannelDesc(); + if (dvpp_channel_desc == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("create dvpp channel desc failed\n"); + return ret; + } + + ret = acldvppCreateChannel(dvpp_channel_desc); + if (ret != ACL_ERROR_NONE) { + LOG("create dvpp channel failed\n"); + return ret; + } + + imgSizes = cfg.dvppConfig.imgSizes; + + resizedWidthAligned = (resizedWidth + 15) / NUM_16 * NUM_16; + resizedHeightAligned = (resizedHeight + 1) / NUM_2 * NUM_2; + + resizedOutputBufferSize = resizedWidthAligned * resizedHeightAligned * NUM_3 / NUM_2; + LOG("resizedWidth %d resizedHeight %d resizedWidthAligned %d resizedHeightAligned %d resizedOutputBufferSize %d\n", + resizedWidth, resizedHeight, resizedWidthAligned, resizedHeightAligned, resizedOutputBufferSize); + + return ACL_ERROR_NONE; +} + +/* + * @brief : 生成dvpp图像描述信息 + * @param [in] void *dataDev : 码流buffer信息. + * @param [in] acldvppPixelFormat format: 图像格式 + * @param [in] uint32_t width : 宽度 + * @param [in] uint32_t height: 高度 + * @param [in] uint32_t widthStride : 宽度对齐. + * @param [in] uint32_t heightStride: 高度对齐. + * @param [in] uint32_t size: 码流大小. 
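+ * (English translation of the parameters above: dataDev is the device buffer
+ * holding the encoded bitstream; format is the DVPP pixel format; width and
+ * height are the picture dimensions; widthStride and heightStride are the
+ * aligned strides; size is the bitstream buffer size in bytes.)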
+ * @return : acldvppPicDesc:图像描述信息 + */ +acldvppPicDesc *createDvppPicDesc(void *dataDev, acldvppPixelFormat format, uint32_t width, uint32_t height, + uint32_t widthStride, uint32_t heightStride, uint32_t size) +{ + acldvppPicDesc *picDesc = acldvppCreatePicDesc(); + if (picDesc == nullptr) { + LOG("failed to create pic desc\n"); + return nullptr; + } + + ret = acldvppSetPicDescData(picDesc, dataDev); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc data\n"); + return nullptr; + } + ret = acldvppSetPicDescSize(picDesc, size); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc size\n"); + return nullptr; + } + + ret = acldvppSetPicDescFormat(picDesc, format); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc format\n"); + return nullptr; + } + + ret = acldvppSetPicDescWidth(picDesc, width); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc width\n"); + return nullptr; + } + + ret = acldvppSetPicDescHeight(picDesc, height); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc height\n"); + return nullptr; + } + + ret = acldvppSetPicDescWidthStride(picDesc, widthStride); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc widthStride\n"); + return nullptr; + } + + ret = acldvppSetPicDescHeightStride(picDesc, heightStride); + if (ret != ACL_ERROR_NONE) { + LOG("failed to set desc heightStride\n"); + return nullptr; + } + return picDesc; +} + +aclError InitInput(std::vector files) +{ + LOG("init input batch %d start\n", processedCnt); + ret = aclrtSetCurrentContext(context); + if (ret != ACL_ERROR_NONE) { + LOG("Set context failed, ret[%d]\n", ret); + return ret; + } + + size_t modelInputSize = cfg.inputInfo[0].size; + size_t imgSize = modelInputSize / cfg.batchSize; + + void *dst; + ret = aclrtMalloc(&dst, modelInputSize, ACL_MEM_MALLOC_NORMAL_ONLY); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc device failed, ret[%d]\n", ret); + return ret; + } + LOG("dst = %p, size = %ld\n", dst, modelInputSize); + + char *ptr = (char *)dst; + inputDataframe.fileNames.clear(); + for (int i = 0; i < files.size(); i++) { + + std::string fileLocation = cfg.dataDir + "/" + files[i]; + FILE *pFile = fopen(fileLocation.c_str(), "r"); + + if (pFile == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("open file %s failed\n", fileLocation.c_str()); + return ret; + } + + fseek(pFile, 0, SEEK_END); + size_t fileSize = ftell(pFile); + + if (fileSize > imgSize) { + ret = ACL_ERROR_OTHERS; + LOG("%s fileSize %lu * batch %lu don't match with model inputSize %lu\n", fileLocation.c_str(), + fileSize, cfg.batchSize, modelInputSize); + return ret; + } + + void *buff = nullptr; + ret = aclrtMallocHost(&buff, fileSize); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc host buff failed[%d]\n", ret); + return ret; + } + + rewind(pFile); + fread(buff, sizeof(char), fileSize, pFile); + fclose(pFile); + + void *dstTmp = (void *)ptr; + ret = aclrtMemcpy(dstTmp, fileSize, buff, fileSize, ACL_MEMCPY_HOST_TO_DEVICE); + ptr += fileSize; + LOG("input file: %s, memory addr: %p, file size: %ld\n",files[i].c_str(), dstTmp, fileSize); + if (ret != ACL_ERROR_NONE) { + LOG("init input %d, Copy host to device failed, ret[%d]\n", i, ret); + LOG("input addr %p, len %ld\n", dstTmp, fileSize); + aclrtFreeHost(buff); + return ret; + } + + aclrtFreeHost(buff); + inputDataframe.fileNames.push_back(files[i]); + } + + aclDataBuffer *inputData = aclCreateDataBuffer((void *)dst, modelInputSize); + if (inputData == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("aclCreateDataBuffer failed\n"); + return ret; + } + + aclmdlDataset 
*input = aclmdlCreateDataset(); + ret = aclmdlAddDatasetBuffer(input, inputData); + if (ret != ACL_ERROR_NONE) { + LOG("ACL_ModelInputDataAdd failed, ret[%d]\n", ret); + aclmdlDestroyDataset(input); + return ret; + } + + inputDataframe.dataset = input; + LOG("init input batch %d done\n", processedCnt); + return ACL_ERROR_NONE; +} + +/* + * @brief : 获取图像宽高 + * @param [in] void* buff : 输入码流地址. + * @param [in] uint32_t fileSize : 输入码流长度 + * @param [in] std::string fileLocation : 输入文件路径. + * @param [in] uint32_t &W : 输入码流宽度. + * @param [in] uint32_t &H : 输入码流高度. + */ +void GetImageHW(void* buff, uint32_t fileSize, std::string fileLocation, uint32_t &W, uint32_t &H) +{ + int32_t components = 0; + ret = acldvppJpegGetImageInfo((void *)buff, fileSize, &W, &H, &components); + if (ret != ACL_ERROR_NONE) { + cout << "acldvppJpegGetImageInfo failed, ret " << ret << "filename: " << fileLocation.c_str() << endl; + } +} + +/* + * @brief : dvpp在推理中的预处理流程 + * @param [in] string fileLocation : 输入文件路径. + * @param [in] char *&ptr : 输出buffer指针. + * @return : ACL_ERROR_NONE:预处理成功, 其他:预处理失败 + */ +aclError DVPP_Resnet50(std::string fileLocation, char *&ptr) +{ + // 1 获取输入码流 + uint32_t W, H, W_Aligned, H_Aligned, outputBuffSize; + void *decodeInput = nullptr; + void *decodeOutput = nullptr; + acldvppPicDesc *decodeOutputDesc = nullptr; + uint64_t fileSize; + void *buff = ReadFile(fileLocation, fileSize); + if( buff == nullptr) { + LOG("read pic failed\n"); + return 1; + } + + ret = acldvppMalloc(&decodeInput, fileSize); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc dvpp in buff failed[%d]\n", ret); + return ret; + } + ret = aclrtMemcpy(decodeInput, fileSize, buff, fileSize, ACL_MEMCPY_HOST_TO_DEVICE); + if (ret != ACL_ERROR_NONE) { + LOG("copy host to device failed[%d]\n", ret); + return ret; + } + + // 2 获取解码输出描述信息 + GetImageHW(buff, fileSize, fileLocation, W, H); + W_Aligned = (W + 127) / NUM_128 * NUM_128; + H_Aligned = (H + 15) / NUM_16 * NUM_16; + outputBuffSize = W_Aligned * H_Aligned * NUM_3 / NUM_2; + ret = acldvppMalloc(&decodeOutput, outputBuffSize); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc decodeOutput buff failed[%d]\n", ret); + return ret; + } + decodeOutputDesc = createDvppPicDesc(decodeOutput, PIXEL_FORMAT_YUV_SEMIPLANAR_420, W, H, W_Aligned, H_Aligned, + outputBuffSize); + if (decodeOutputDesc == nullptr) { + LOG("create jpeg_output_desc failed\n"); + return 1; + } + LOG("file[%s] jpeg picDesc info: W=%d, H=%d, W_Aligned=%d, H_Aligned=%d, outBufSize=%d, format=%d\n", \ + fileLocation.c_str(),W, H, W_Aligned, H_Aligned, outputBuffSize, PIXEL_FORMAT_YUV_SEMIPLANAR_420); + + // 3 使用jpegd图像解码 + ret = acldvppJpegDecodeAsync(dvpp_channel_desc, decodeInput, fileSize, decodeOutputDesc, stream); + if (ret != ACL_ERROR_NONE) { + LOG(" dvppJpegDecodeAsync failed\n"); + return ret; + } + aclrtFreeHost(buff); + aclrtSynchronizeStream(stream); + + // 4 对jpegd解码的图片进行原分辨率抠图及短边256等比例缩放 + acldvppRoiConfig *cropConfig = nullptr; + acldvppPicDesc *cropOutputDesc = nullptr; + // 设置对解码后的图片进行原图裁剪,目的是为了减少因jpegd解码后对齐的无效数据对图像精度的影响 + cropConfig = InitCropRoiConfig(W, H); + + uint32_t newInputWidth = 0; + uint32_t newInputHeight = 0; + void *cropOutBufferDev = nullptr; + // 宽和高较短的一条边缩放至256,较长边做等比例缩放。对齐至256目的是为了给224x224中心抠图做准备,短边256对齐,获得对齐后的宽高 + SmallSizeAtLeast(W, H, newInputWidth, newInputHeight); + uint32_t cropOutputWidthStride = (newInputWidth + (NUM_16 - 1)) / NUM_16 * NUM_16; + uint32_t cropOutputHeightStride = (newInputHeight + (NUM_2 - 1)) / NUM_2 * NUM_2; + uint32_t cropOutBufferSize = cropOutputWidthStride * 
cropOutputHeightStride * NUM_3 / NUM_2; + ret = acldvppMalloc(&cropOutBufferDev, cropOutBufferSize); + if (ret != ACL_ERROR_NONE) { + std::cout << "[ERROR][Vision] AcldvppMalloc cropOutBufferDev_ failed, ret = " << ret << " cropOutBufferSize_ = " + << cropOutBufferSize << endl; + return ret; + } + cropOutputDesc = createDvppPicDesc(cropOutBufferDev, PIXEL_FORMAT_YUV_SEMIPLANAR_420, newInputWidth, newInputHeight, + cropOutputWidthStride, cropOutputHeightStride, cropOutBufferSize); + if (cropOutputDesc == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("create cropOutputDesc failed\n"); + return ret; + } + + ret = acldvppVpcCropAsync(dvpp_channel_desc, decodeOutputDesc, cropOutputDesc, cropConfig, stream); + if (ret != ACL_ERROR_NONE) { + std::cout << "[ERROR][Vision] acldvppVpcCropAsync failed, ret = " << ret << std::endl; + return ret; + } + aclrtSynchronizeStream(stream); + + // 5 对等比例缩放后的图片做224x224中心抠图,中心抠图后的数据会发送给aipp进行YUV转RGB格式转换。需要注意:中心抠图后的输出格式和aipp + // 的输入格式需要保持一致。 + acldvppRoiConfig *centerCropConfig = nullptr; + acldvppPicDesc *centerCropOutputDesc = nullptr; // resize output desc + centerCropConfig = InitCropCenterRoiConfig(newInputWidth, newInputHeight, resizedWidth, resizedHeight); + void *vpcOutBufferDev = nullptr; + uint32_t vpcOutBufferSize = resizedWidthAligned * resizedHeightAligned * NUM_3 / NUM_2; + + vpcOutBufferDev = (void *)ptr; + centerCropOutputDesc = createDvppPicDesc(vpcOutBufferDev, PIXEL_FORMAT_YUV_SEMIPLANAR_420, resizedWidth, + resizedHeight, resizedWidthAligned, resizedHeightAligned, + vpcOutBufferSize); + if (centerCropOutputDesc == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("create centerCropOutputDesc failed\n"); + return ret; + } + + ret = acldvppVpcCropAsync(dvpp_channel_desc, cropOutputDesc, centerCropOutputDesc, centerCropConfig, stream); + if (ret != ACL_ERROR_NONE) { + std::cout << "[ERROR][Vision] acldvppVpcCropAsync failed, ret = " << ret << "fileName: " << fileLocation.c_str() << std::endl; + return ret; + } + + ptr += vpcOutBufferSize; + aclrtSynchronizeStream(stream); + + // 6 释放资源 + acldvppFree(decodeInput); + acldvppFree(decodeOutput); + acldvppFree(cropOutBufferDev); + acldvppDestroyPicDesc(decodeOutputDesc); + acldvppDestroyPicDesc(cropOutputDesc); + acldvppDestroyPicDesc(centerCropOutputDesc); + acldvppDestroyRoiConfig(cropConfig); + acldvppDestroyRoiConfig(centerCropConfig); + return ret; +} + +aclError DvppInitInput(std::vector files) +{ + struct timeval process_start; + struct timeval process_end; + std::string funcName; + long long costTime; + funcName = "DvppTotalProcess"; + gettimeofday(&process_start, NULL); + + void *dst; + ret = acldvppMalloc(&dst, cfg.inputInfo[0].size); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc device failed, ret[%d]\n", ret); + return ret; + } + + char *ptr = (char *)dst; + inputDataframe.fileNames.clear(); + + for (int i = 0; i < files.size(); i++) { + std::string fileLocation = cfg.dataDir + "/" + files[i]; + ret = DVPP_Resnet50(fileLocation, ptr); + if(ret != ACL_ERROR_NONE) { + LOG("dvpp config failed"); + return ret; + } + inputDataframe.fileNames.push_back(files[i]); + } + + funcName = "DvppTotalProcess"; + gettimeofday(&process_end, NULL); + costTime = (process_end.tv_sec - process_start.tv_sec) * 1000000 + (process_end.tv_usec - process_start.tv_usec); + dvppTime[funcName] += costTime; + + aclmdlDataset *input = aclmdlCreateDataset(); + aclDataBuffer *inputData = aclCreateDataBuffer((void *)dst, cfg.inputInfo[0].size); + + if (inputData == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("aclCreateDataBuffer 
failed\n"); + return ret; + } + + ret = aclmdlAddDatasetBuffer(input, inputData); + + if (ret != ACL_ERROR_NONE) { + LOG("ACL_ModelInputDataAdd failed, ret[%d]\n", ret); + aclmdlDestroyDataset(input); + return ret; + } + + inputDataframe.dataset = input; + return ACL_ERROR_NONE; +} + +acldvppRoiConfig *InitCropRoiConfig(uint32_t width, uint32_t height) +{ + uint32_t right = 0; + uint32_t bottom = 0; + acldvppRoiConfig *cropConfig; + + if (width % NUM_2 == 0) { + right = width - 1; + } else { + right = width; + } + + if (height % NUM_2 == 0) { + bottom = height - 1; + } else { + bottom = height; + } + + cropConfig = acldvppCreateRoiConfig(0, right, 0, bottom); + if (cropConfig == nullptr) { + std::cout << "[ERROR][Vision] acldvppCreateRoiConfig failed " << std::endl; + return nullptr; + } + + return cropConfig; +} + +acldvppRoiConfig *InitCropCenterRoiConfig(uint32_t newInputWidth, uint32_t newInputHeight, uint32_t modelInputWidth, + uint32_t modelInputHeight) +{ + uint32_t left = 0; + uint32_t right = 0; + uint32_t top = 0; + uint32_t bottom = 0; + uint32_t amount_to_be_cropped_w = 0; + uint32_t amount_to_be_cropped_h = 0; + uint32_t left_half = 0; + uint32_t top_half = 0; + acldvppRoiConfig *centerCropConfig = nullptr; + + // 计算中心抠图起始点的坐标距离码流左边界和上边界的距离 + amount_to_be_cropped_w = newInputWidth - modelInputWidth; + left_half = amount_to_be_cropped_w / NUM_2; + amount_to_be_cropped_h = newInputHeight - modelInputHeight; + top_half = amount_to_be_cropped_h / NUM_2; + + // 保证起始点坐标为偶数 + left = (left_half % NUM_2 == 0) ? (amount_to_be_cropped_w / NUM_2) : (amount_to_be_cropped_w / NUM_2 + 1); + top = (top_half % NUM_2 == 0) ? (amount_to_be_cropped_h / NUM_2) : (amount_to_be_cropped_h / NUM_2 + 1); + + // 结束点为奇数 + right = left + modelInputWidth - 1; + bottom = top + modelInputHeight - 1; + + centerCropConfig = acldvppCreateRoiConfig(left, right, top, bottom); + if (centerCropConfig == nullptr) { + std::cout << "[ERROR][Vision] acldvppCreateRoiConfig failed " << std::endl; + return nullptr; + } + return centerCropConfig; +} + +void SmallSizeAtLeast(uint32_t width, uint32_t height, uint32_t &newInputWidth, uint32_t &newInputHeight) +{ + float scaleRatio = 0.0; + float inputWidth = 0.0; + float inputHeight = 0.0; + float resizeMin = 0.0; + bool minWidthFlag = false; + + inputWidth = (float)width; + inputHeight = (float)height; + resizeMin = (float)(RESIZE_MIN); + minWidthFlag = (width <= height) ? 
true : false; + + // 短边缩放为256,长边等比例缩放 + if (minWidthFlag == true) { + newInputWidth = resizeMin; + newInputHeight = (resizeMin / width) * inputHeight; + std::cout << "[INFO]scaleRatio: " << resizeMin / width << " inputWidth_: " << width << " newInputWidth: " << + newInputWidth << " inputHeight_: " << inputHeight << " newInputHeight_:" << newInputHeight << std::endl; + } else { + newInputWidth = (resizeMin / height) * width; + newInputHeight = resizeMin; + std::cout << "[INFO]scaleRatio: " << resizeMin / height << " width: " << width << " newInputWidth: " << + newInputWidth << " height: " << height << " newInputHeight:" << newInputHeight << std::endl; + } +} + +aclError Inference() +{ + LOG("inference batch %d start\n", processedCnt); + ret = aclrtSetCurrentContext(context); + + if (ret != ACL_ERROR_NONE) { + LOG("Set infer context failed\n"); + return ret; + } + + struct timeval startTmp, endTmp; + long long timeUse; + + if (inputDataframe.fileNames.size() == 0) { + ret = ACL_ERROR_OTHERS; + LOG("No file found\n"); + return ret; + } + + aclmdlDataset *output = aclmdlCreateDataset(); + if (output == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("Create Output Dataset failed\n"); + return ret; + } + + std::vector outputDevPtrs; + + for (size_t i = 0; i < cfg.outputNum; ++i) { + size_t buffer_size = cfg.outputInfo[i].size; + void *outputBuffer = nullptr; + ret = aclrtMalloc(&outputBuffer, (size_t)buffer_size, ACL_MEM_MALLOC_NORMAL_ONLY); + + if (ret != ACL_ERROR_NONE) { + LOG("Malloc output host failed, ret[%d]\n", ret); + return ret; + } + + outputDevPtrs.push_back(outputBuffer); + aclDataBuffer *outputData = aclCreateDataBuffer(outputBuffer, buffer_size); + + if (outputData == nullptr) { + ret = ACL_ERROR_OTHERS; + LOG("Create output data buffer failed\n"); + return ret; + } + + ret = aclmdlAddDatasetBuffer(output, outputData); + + if (ret != ACL_ERROR_NONE) { + LOG("Add output model dataset failed, ret[%d]\n", ret); + return ret; + } + } + + gettimeofday(&startTmp, NULL); + ret = aclmdlExecute(modelId, inputDataframe.dataset, output); + gettimeofday(&endTmp, NULL); + timeUse = (endTmp.tv_sec - startTmp.tv_sec) * 1000000 + (endTmp.tv_usec - startTmp.tv_usec); + LOG("inference time cost: %lld us\n", timeUse); + inferTime += timeUse; + + if (ret != ACL_ERROR_NONE) { + LOG("%s inference failed.\n", inputDataframe.fileNames[0].c_str()); + FreeDevMemory(inputDataframe.dataset); + aclmdlDestroyDataset(inputDataframe.dataset); + return ret; + } + + outputDataframe.fileNames = inputDataframe.fileNames; + outputDataframe.dataset = output; + + uint32_t dvppFlag = (cfg.useDvpp) ? 
1 : 0; + ret = DestroyDatasetResurce(inputDataframe.dataset, dvppFlag); + if (ret != ACL_ERROR_NONE) { + LOG("DestroyDatasetResurce failed\n"); + return ret; + } + + LOG("inference batch %d done\n", processedCnt); + return ACL_ERROR_NONE; +} + +aclError UnloadModel() +{ + LOG("unload model start\n"); + ret = aclmdlUnload(modelId); + CHECK_ACL_RET("unload model failed", ret); + LOG("unload model done\n"); + + aclmdlDestroyDesc(cfg.modelDesc); + + if (cfg.devMem_ptr != nullptr) { + aclrtFree(cfg.devMem_ptr); + cfg.devMem_ptr = nullptr; + } + + if (cfg.weightMem_ptr != nullptr) { + aclrtFree(cfg.weightMem_ptr); + cfg.weightMem_ptr = nullptr; + } + + if (cfg.modelData_ptr != nullptr) { + delete[] cfg.modelData_ptr; + cfg.modelData_ptr = nullptr; + } + return ACL_ERROR_NONE; +} diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/main.cpp b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/main.cpp new file mode 100644 index 0000000000000000000000000000000000000000..99a8d46ab89f9c616048a09f0f995e68502c4330 --- /dev/null +++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/main.cpp @@ -0,0 +1,493 @@ +/* * +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +* */ + +#include "util.h" +#include "infer_engine.h" +#include "acl/acl_base.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include "acl/acl.h" +#include "acl/acl_mdl.h" +#include "acl/acl_rt.h" +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace std; +using std::cout; +using std::endl; +Resnet50Result resnet50Res; +Config cfg; +aclError ret; +int processedCnt; +long long dataProcTime = 0; +long long inferTime = 0; +float avgTime = 0; +float avgPreTime = 0; + +extern std::unordered_map dvppTime; +extern DataFrame outputDataframe; + +void getCommandLineParam(int argc, char** argv, Config& config) +{ + while (1) { + int option_index = 0; + struct option long_options[] = + { + {"om", 1, 0, 'a'}, + {"dataDir", 1, 0, 'b'}, + {"outDir", 1, 0, 'c'}, + {"batchSize", 1, 0, 'd'}, + {"deviceId", 1, 0, 'e'}, + {"loopNum", 1, 0, 'f'}, + {"modelType", 1, 0, 'g'}, + {"imgType", 1, 0, 'h'}, + {"framework", 1, 0, 'i'}, + {"useDvpp", 1 , 0 , 'j'}, + {0, 0, 0, 0} + }; + + int c; + c = getopt_long(argc, argv, "a:b:c:e:f:j:k:l:m:n:u:t:", long_options, &option_index); + if (c == -1) { + break; + } + + switch (c) { + case 'a': + config.om = std::string(optarg); + printf("[INFO]om = %s\n", config.om.c_str()); + break; + case 'b': + config.dataDir = std::string(optarg); + printf("[INFO]dataDir = %s\n", config.dataDir.c_str()); + break; + case 'c': + config.outDir = std::string(optarg); + printf("[INFO]outDir = %s\n", config.outDir.c_str()); + break; + case 'd': + config.batchSize = atoi(optarg); + printf("[INFO]batchSize = %d\n", config.batchSize); + break; + case 'e': + config.deviceId = atoi(optarg); + printf("[INFO]deviceId = 
%d\n", config.deviceId); + break; + case 'f': + config.loopNum = atoi(optarg); + printf("[INFO]loopNum = %d\n", config.loopNum); + break; + case 'g': + config.modelType = std::string(optarg); + printf("[INFO]modelType = %s\n", config.modelType.c_str()); + break; + case 'h': + config.imgType = std::string(optarg); + printf("[INFO]imgType = %s\n", config.imgType.c_str()); + break; + case 'i': + config.framework = std::string(optarg); + printf("[INFO]framework = %s\n", config.framework.c_str()); + break; + case 'j': + config.useDvpp = atoi(optarg); + printf("[INFO]useDvpp = %d\n", config.useDvpp); + break; + default: + break; + } + } +} + +// 只校验必须的参数 +aclError ParseParams(int argc, char** argv, Config& config, std::string& errorMsg) +{ + getCommandLineParam(argc, argv, config); + + LOG("parase params start\n"); + + if (config.om.empty() || !FileExists(config.om)) { + LOG("om is empty\n"); + errorMsg = "om path is invalid"; + return ACL_ERROR_PARSE_PARAM_FAILED; + } + + if (config.dataDir.empty() || !FolderExists(config.dataDir)) { + errorMsg = "data Dir is invalid"; + return ACL_ERROR_PARSE_PARAM_FAILED; + } + LOG("dataDir %s \n", config.dataDir.c_str()); + + if (!config.outDir.empty() && !FolderExists(config.outDir)) { + LOG("output dir %s not exists, try to make dir.\n", config.outDir.c_str()); + mkdir(config.outDir.c_str(), 0755); + LOG("outDir %s \n", config.outDir.c_str()); + } + + if(config.batchSize <= 0){ + errorMsg = "batch Size should be > 0"; + return ACL_ERROR_PARSE_PARAM_FAILED; + } + LOG("batchSize %zd \n", config.batchSize); + + if (config.modelType.empty()) + { + LOG("FLAGS_modelType is empty\n"); + errorMsg = "modelType is invalid"; + return ACL_ERROR_PARSE_PARAM_FAILED; + } + LOG("modelType %s \n", config.modelType.c_str()); + + if (config.imgType.empty()) + { + LOG("imgType is empty\n"); + errorMsg = "imgType is invalid"; + return ACL_ERROR_PARSE_PARAM_FAILED; + } + LOG("imgType %s \n", config.imgType.c_str()); + LOG("useDvpp is %d \n", config.useDvpp); + LOG("parase params done\n"); + return ACL_ERROR_NONE; +} + +aclError Process() +{ + std::vector fileNames; + ret = GetFiles(cfg.dataDir, fileNames); + CHECK_RET(ret); + size_t fileNum = fileNames.size(); + LOG("fileNum:%zd\n",fileNames.size()); + struct timeval startTmp, endTmp; + + // 获取模型输入所需要的宽高 + getImgResizeShape(); + + if(cfg.useDvpp) { + ret = DvppSetup(); + CHECK_RET(ret); + } + + size_t inferCnt = 0; + size_t loopCnt = 0; + while(loopCnt < cfg.loopNum) { + LOG("loopCnt %d, loopNum %d\n", loopCnt, cfg.loopNum); + for(size_t i = 0; i < fileNum / cfg.batchSize; i++) { + gettimeofday(&startTmp, NULL); + std::vector batchFileNames; + for (int j = 0; j < cfg.batchSize; j++) { + batchFileNames.push_back(fileNames[i*cfg.batchSize+j]); + } + processedCnt++; + + if(cfg.useDvpp) { + ret = DvppInitInput(batchFileNames); + } else { + ret = InitInput(batchFileNames); + } + gettimeofday(&endTmp, NULL); + dataProcTime += (endTmp.tv_sec-startTmp.tv_sec)*1000000+(endTmp.tv_usec-startTmp.tv_usec); + CHECK_RET(ret); + + ret = Inference(); + CHECK_RET(ret); + + ret = SaveBinPostprocess(); + CHECK_RET(ret); + } + + if (fileNum % cfg.batchSize != 0) { + std::vector batchFileNames; + for(size_t i = (fileNum - fileNum % cfg.batchSize); i < fileNum; i++) { + batchFileNames.push_back(fileNames[i]); + } + + gettimeofday(&startTmp, NULL); + processedCnt++; + + if(cfg.useDvpp) { + ret = DvppInitInput(batchFileNames); + } else { + ret = InitInput(batchFileNames); + } + gettimeofday(&endTmp, NULL); + dataProcTime += 
(endTmp.tv_sec-startTmp.tv_sec) * 1000000 + (endTmp.tv_usec - startTmp.tv_usec); + CHECK_RET(ret); + + ret = Inference(); + CHECK_RET(ret); + + ret = SaveBinPostprocess(); + CHECK_RET(ret); + } + loopCnt++; + } + return ACL_ERROR_NONE; +} + +void SaveResult() +{ + ofstream outfile("test_perform_static.txt"); +#if 0 + std::string model_name; + int dex = (cfg.om).find_last_of("/"); + model_name = cfg.om.substr(dex+1); + + std:: string title = "model_name total batch top1 top5 pre_avg/ms pre_imgs/s infer_avg/ms infer_imgs/s mAP"; + outfile << title << endl; + + outfile << model_name << " "; + outfile << processedCnt*cfg.batchSize << " "; + outfile << cfg.batchSize << " "; + if (cfg.postprocessType == "resnet") { + outfile << 1.0*resnet50Res.top1/resnet50Res.total << " " << 1.0*resnet50Res.top5/resnet50Res.total << " "; + } else { + outfile << "NA" << " " << "NA" << " "; + } + + outfile << avgPreTime << " " << 1.0*1000/avgPreTime << " "; + outfile << avgTime << " " << 1.0*1000/avgTime << " "; + outfile << endl; +#endif + char tmpCh[256]; + memset(tmpCh, 0, sizeof(tmpCh)); + snprintf(tmpCh, sizeof(tmpCh), "NN inference cost average time: %4.3f ms %4.3f fps/s\n", + avgTime, (1.0 * 1000 / avgTime)); + outfile << tmpCh; + outfile.close(); +} + +aclError GetModelInputOutputInfo(Config& cfg) +{ + aclError ret; + std::ofstream outFile("modelInputOutputInfo", std::ios::trunc); + char tmpChr[256] = {0}; + + // 获取模型输入信息 + size_t inputNum = aclmdlGetNumInputs(cfg.modelDesc); + LOG("model input num %zd\n", inputNum); + snprintf(tmpChr, sizeof(tmpChr), "model input num %zd\n", inputNum); + outFile << tmpChr; + + cfg.inputNum = inputNum; + for (size_t i = 0; i < inputNum && i < MODEL_INPUT_OUTPUT_NUM_MAX; i++) { + size_t size = aclmdlGetInputSizeByIndex(cfg.modelDesc, i); + cfg.inputInfo[i].size = size; + LOG("model input[%zd] size %zd\n", i, cfg.inputInfo[i].size); + snprintf(tmpChr, sizeof(tmpChr), "model input[%zd] size %zd\n", i, cfg.inputInfo[i].size); + outFile << tmpChr; + + aclmdlIODims dims; + ret = aclmdlGetInputDims(cfg.modelDesc, i, &dims); + if (ret != ACL_ERROR_NONE) { + LOG("aclmdlGetInputDims fail ret %d\n", ret); + return 1; + } + + cfg.inputInfo[i].dimCount = dims.dimCount; + ret = aclrtMemcpy(cfg.inputInfo[i].dims , cfg.inputInfo[i].dimCount * sizeof(int64_t), dims.dims, + cfg.inputInfo[i].dimCount * sizeof(int64_t), ACL_MEMCPY_HOST_TO_HOST); + if (ret != ACL_ERROR_NONE) { + LOG("aclrtMemcpy fail ret %d line %d\n", ret, __LINE__); + return 1; + } + + LOG("model input[%zd] dimCount %zd\n", i, cfg.inputInfo[i].dimCount); + snprintf(tmpChr, sizeof(tmpChr), "model input[%zd] dimCount %zd\n", i, cfg.inputInfo[i].dimCount); + outFile << tmpChr; + for (size_t dimIdx = 0; dimIdx < cfg.inputInfo[i].dimCount; dimIdx++) { + LOG("model input[%zd] dim[%zd] info %ld\n", i, dimIdx, cfg.inputInfo[i].dims[dimIdx]); + snprintf(tmpChr, sizeof(tmpChr), "model input[%zd] dim[%zd] info %ld\n", + i, dimIdx, cfg.inputInfo[i].dims[dimIdx]); + outFile << tmpChr; + } + + cfg.inputInfo[i].Format = aclmdlGetInputFormat(cfg.modelDesc, i); + cfg.inputInfo[i].Type = aclmdlGetInputDataType(cfg.modelDesc, i); + + LOG("model input[%zd] format %d inputType %d\n", i, cfg.inputInfo[i].Format, cfg.inputInfo[i].Type); + snprintf(tmpChr, sizeof(tmpChr), "model input[%zd] format %d inputType %d\n", i, cfg.inputInfo[i].Format, + cfg.inputInfo[i].Type); + outFile << tmpChr; + + cfg.inputInfo[i].Name = aclmdlGetInputNameByIndex(cfg.modelDesc, i); + LOG("model input[%zd] name %s\n", i, cfg.inputInfo[i].Name); + snprintf(tmpChr, 
sizeof(tmpChr), "model input[%zd] name %s\n", i, cfg.inputInfo[i].Name); + outFile << tmpChr; + + size_t index; + ret = aclmdlGetInputIndexByName(cfg.modelDesc, cfg.inputInfo[i].Name, &index); + if (ret != ACL_ERROR_NONE) { + LOG("aclmdlGetInputIndexByName fail ret %d line %d\n", ret, __LINE__); + return 1; + } + + if (i != index) { + LOG("aclmdlGetInputNameByIndex not equal aclmdlGetInputIndexByName\n"); + return 1; + } else { + LOG("model input name %s is belone to input %zd\n", cfg.inputInfo[i].Name, index); + } + } + + // 获取模型输出信息 + size_t outputNum = aclmdlGetNumOutputs(cfg.modelDesc); + LOG("model output num %zd\n", outputNum); + snprintf(tmpChr, sizeof(tmpChr), "model output num %zd\n", outputNum); + outFile << tmpChr; + + cfg.outputNum = outputNum; + for (size_t i = 0; i < outputNum && i < MODEL_INPUT_OUTPUT_NUM_MAX; i++) { + size_t size = aclmdlGetOutputSizeByIndex(cfg.modelDesc, i); + cfg.outputInfo[i].size = size; + LOG("model output[%zd] size %zd\n", i, cfg.outputInfo[i].size); + snprintf(tmpChr, sizeof(tmpChr), "model output[%zd] size %zd\n", i, cfg.outputInfo[i].size); + outFile << tmpChr; + + aclmdlIODims dims; + ret = aclmdlGetOutputDims(cfg.modelDesc, i, &dims); + if (ret != ACL_ERROR_NONE) { + LOG("aclmdlGetOutputDims fail ret %d\n", ret); + return 1; + } + + cfg.outputInfo[i].dimCount = dims.dimCount; + ret = aclrtMemcpy(cfg.outputInfo[i].dims, cfg.outputInfo[i].dimCount * sizeof(int64_t), dims.dims, + cfg.outputInfo[i].dimCount * sizeof(int64_t), ACL_MEMCPY_HOST_TO_HOST); + if (ret != ACL_ERROR_NONE) { + LOG("aclrtMemcpy fail ret %d line %d\n", ret, __LINE__); + return 1; + } + + LOG("model output[%zd] dimCount %zd\n", i, cfg.outputInfo[i].dimCount); + snprintf(tmpChr, sizeof(tmpChr), "model output[%zd] dimCount %zd\n", i, cfg.outputInfo[i].dimCount); + outFile << tmpChr; + + for (size_t dimIdx = 0; dimIdx < cfg.outputInfo[i].dimCount; dimIdx++) { + LOG("model output[%zd] dim[%zd] info %ld\n", i, dimIdx, cfg.outputInfo[i].dims[dimIdx]); + snprintf(tmpChr, sizeof(tmpChr), "model output[%zd] dim[%zd] info %ld\n", + i, dimIdx, cfg.outputInfo[i].dims[dimIdx]); + outFile << tmpChr; + } + + cfg.outputInfo[i].Format = aclmdlGetOutputFormat(cfg.modelDesc, i); + cfg.outputInfo[i].Type = aclmdlGetOutputDataType(cfg.modelDesc, i); + LOG("model output[%zd] format %d outputType %d\n", i, cfg.outputInfo[i].Format, cfg.outputInfo[i].Type); + snprintf(tmpChr, sizeof(tmpChr), "model output[%zd] format %d outputType %d\n", i, cfg.outputInfo[i].Format, + cfg.outputInfo[i].Type); + outFile << tmpChr; + + cfg.outputInfo[i].Name = aclmdlGetOutputNameByIndex(cfg.modelDesc, i); + LOG("model output[%zd] name %s\n", i, cfg.outputInfo[i].Name); + snprintf(tmpChr, sizeof(tmpChr), "model output[%zd] name %s\n", i, cfg.outputInfo[i].Name); + outFile << tmpChr; + + size_t index; + ret = aclmdlGetOutputIndexByName(cfg.modelDesc, cfg.outputInfo[i].Name, &index); + if (ret != ACL_ERROR_NONE) { + LOG("aclmdlGetOutputIndexByName fail ret %d line %d\n", ret, __LINE__); + return 1; + } + + if (i != index) { + LOG("aclmdlGetOutputNameByIndex not equal aclmdlGetOutputIndexByName\n"); + return 1; + } else { + LOG("model output name %s is belone to output %d\n", cfg.outputInfo[i].Name, index); + } + } + + outFile.close(); + return ACL_ERROR_NONE; +} + +int main(int argc, char** argv) +{ + processedCnt = 0; + inferTime = 0; + + std::string errorMsg; + ret = ParseParams(argc, argv, cfg, errorMsg); + CHECK_ACL_RET(errorMsg, ret); + + ret = InitContext(); + CHECK_RET(ret); + + ret = LoadModel(); + CHECK_RET(ret); + 
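+    // Typical invocation of this benchmark binary (argument values are
+    // illustrative only):
+    //   ./benchmark --om=model.om --dataDir=./data --outDir=./results \
+    //               --batchSize=4 --modelType=resnet50 --imgType=yuv --useDvpp=1
+    // From here the flow is: query the model's input/output metadata, run the
+    // whole dataset through preprocessing and inference, then unload the model
+    // and release the device and context.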
+ ret = GetModelInputOutputInfo(cfg); + CHECK_RET(ret); + + ret = Process(); + CHECK_RET(ret); + + ret = UnloadModel(); + CHECK_RET(ret); + + ret = UnInitContext(); + CHECK_RET(ret); + LOG("\n"); + + avgTime = 1.0 * inferTime / processedCnt /cfg.batchSize / 1000; + avgPreTime = 1.0 * dataProcTime / processedCnt / cfg.batchSize / 1000; + + if (cfg.useDvpp) { + LOG("\n"); + LOG("DVPP performance details:\n"); + LOG("#############################################\n"); + std::unordered_map::iterator iter; + for (iter = dvppTime.begin(); iter != dvppTime.end(); iter++) { + LOG("%s using avg time %0.2f ms\n",iter->first.c_str(),1.0*iter->second/processedCnt/cfg.batchSize/1000); + } + LOG("\n"); + } + + LOG("performance summary:\n"); + LOG("#############################################\n"); + LOG("total %ld imgs processed and batch size %ld\n", processedCnt*cfg.batchSize, cfg.batchSize); +#if 0 + if(cfg.postprocessType == "resnet") { + LOG("top1 ratio %0.3f top5 ratio %0.3f\n", + 1.0*resnet50Res.top1/resnet50Res.total, 1.0*resnet50Res.top5/resnet50Res.total); + } +#endif + + LOG("avg preprocess time %0.2f ms, %0.2f imgs/s\n", avgPreTime, 1.0 * 1000 / avgPreTime); + LOG("avg inference time %0.2f ms, %0.2f imgs/s\n", avgTime, 1.0 * 1000 / avgTime); + + SaveResult(); +} diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/post_process.cpp b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/post_process.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f2dc4997af21b868ac1944276fe98354e76c93f4 --- /dev/null +++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/post_process.cpp @@ -0,0 +1,123 @@ +/* * +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+* */ + +#include "post_process.h" +#include "util.h" +#include +#include +#include +#include +#include "stdio.h" +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +extern int processedCnt; + +extern Config cfg; +extern DataFrame outputDataframe; +extern aclError ret; +int topNum = 5; + +extern int processedCnt; + +aclError SaveBinPostprocess() +{ + aclError retVal; + + LOG("save batch %d start\n", processedCnt); + DataFrame dataframe = outputDataframe; + std::vector& inferFile_vec = outputDataframe.fileNames; + aclmdlDataset* output = dataframe.dataset; + + std::string resultFolder = cfg.outDir + "/" + cfg.modelType; + DIR* op = opendir(resultFolder.c_str()); + if (NULL == op) { + mkdir(resultFolder.c_str(), 00775); + } else { + closedir(op); + } + + for (size_t i = 0; i < cfg.outputNum; ++i) { + aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(output, i); + void* data = aclGetDataBufferAddr(dataBuffer); + uint32_t len; + len = cfg.outputInfo[i].size; + + void* outHostData = NULL; + ret = aclrtMallocHost(&outHostData, len); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc host failed.\n"); + return 1; + } + + ret = aclrtMemcpy(outHostData, len, data, len, ACL_MEMCPY_DEVICE_TO_HOST); + if (ret != ACL_ERROR_NONE) { + LOG("Copy device to host failed.\n"); + aclrtFreeHost(outHostData); + return 1; + } + + uint32_t eachSize = len / cfg.batchSize; + for (size_t j = 0; j < inferFile_vec.size(); j++) { + FILE* outputFile; + std::string framename = inferFile_vec[j]; + std::size_t dex = (framename).find_first_of("."); + std::string inputFileName = (framename).erase(dex); + + outputFile = fopen((resultFolder + "/" + "davinci_" + inputFileName + "_" + "output" + std::to_string(i) + ".bin").c_str(), "wb"); + + if (outputFile == nullptr) { + aclrtFreeHost(outHostData); + return 1; + } + + fwrite((uint8_t *)outHostData + (j * eachSize), eachSize, sizeof(char), outputFile); + fclose(outputFile); + } + + ret = aclrtFreeHost(outHostData); + if (ret != ACL_ERROR_NONE) { + LOG("Free output host failed.\n"); + } + } + + (void)DestroyDatasetResurce(outputDataframe.dataset, 0); + + LOG("save batch %d done\n", processedCnt); + return ACL_ERROR_NONE; +} diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/util.cpp b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/util.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ec437321c75c4c883ca35d56a0c5ab218f16efa7 --- /dev/null +++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/Benchmark/util.cpp @@ -0,0 +1,230 @@ +/* * +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+* */ + +#include "util.h" +#include +#include +#include +#if 0 +static std::unordered_map errorMap = { + {ACL_ERROR_NONE, "success"}, + {ACL_ERROR_INVALID_PARAM, "params may not valid"}, + {ACL_ERROR_BAD_ALLOC, "alloc memory failed"}, + {ACL_ERROR_RT_FAILURE, "runtime failure"}, + {ACL_ERROR_GE_FAILURE, "GE failure"}, + {ACL_ERROR_OP_NOT_FOUND, "OP not find"}, + {ACL_ERROR_OP_LOAD_FAILED, "OP loads failed"}, + {ACL_ERROR_READ_MODEL_FAILURE, "load model failed"}, + {ACL_ERROR_PARSE_MODEL, "parse model failed"}, + {ACL_ERROR_MODEL_MISSING_ATTR, "model misssing attr"}, + {ACL_ERROR_DESERIALIZE_MODEL, "deserilize model failed"}, + // {ACL_ERROR_MULTIPLE_MODEL_MATCHED, "multiple model matched"}, + //{ACL_ERROR_EVENT_NOT_READY, "event not ready"}, + //{ACL_ERROR_EVENT_COMPLETE, "event not complete"}, + {ACL_ERROR_UNSUPPORTED_DATA_TYPE, "unsupport datatype"}, + {ACL_ERROR_REPEAT_INITIALIZE, "initial repeated"}, + //{ACL_ERROR_COMPILER_NOT_REGISTERED, "compilter not registered"}, + {ACL_ERROR_PATH_INVALID, "path invalid"}, + {ACL_ERROR_PARSE_PARAM_FAILED, "parse params failed"}, + {ACL_ERROR_DVPP_ERROR, "dvpp errors"} +}; + + +std::string CausedBy(aclError error) +{ + return errorMap[error]; +} +#endif + +bool FolderExists(std::string foldname) +{ + DIR* dir; + if ((dir = opendir(foldname.c_str())) == NULL) { + return false; + } + closedir(dir); + return true; +} + +void* ReadFile(std::string fileLocation, uint64_t &fileSize) +{ + aclError ret; + FILE *pFile = fopen(fileLocation.c_str(), "r"); + if (pFile == nullptr) { + LOG("open file %s failed\n", fileLocation.c_str()); + return nullptr; + } + + fseek(pFile, 0, SEEK_END); + fileSize = ftell(pFile); + + void *buff = nullptr; + ret = aclrtMallocHost(&buff, fileSize); + if (ret != ACL_ERROR_NONE) { + LOG("Malloc host buff failed[%d]\n", ret); + return nullptr; + } + + rewind(pFile); + fread(buff, sizeof(char), fileSize, pFile); + fclose(pFile); + return buff; +} + +bool FileExists(std::string filename) +{ + std::fstream file; + file.open(filename, std::ios::in); + if (!file) { + return false; + } + + file.close(); + return true; +} + +char* ReadBinFile(std::string fileName, uint32_t& fileSize) +{ + std::ifstream binFile(fileName, std::ifstream::binary); + + if (binFile.is_open() == false) { + LOG("open file[%s] failed\n", fileName.c_str()); + return nullptr; + } + + binFile.seekg(0, binFile.end); + uint32_t binFileBufferLen = binFile.tellg(); + + if (binFileBufferLen == 0) { + LOG("binfile is empty, filename: %s", fileName.c_str()); + binFile.close(); + return nullptr; + } + + binFile.seekg(0, binFile.beg); + char* binFileBufferData = new(std::nothrow) char[binFileBufferLen]; + LOG("binFileBufferData:%p\n", binFileBufferData); + + if (binFileBufferData == nullptr) { + LOG("malloc binFileBufferData failed\n"); + binFile.close(); + return nullptr; + } + + binFile.read(binFileBufferData, binFileBufferLen); + binFile.close(); + fileSize = binFileBufferLen; + return binFileBufferData; +} + +aclError GetFiles(std::string path, std::vector& files) +{ + DIR* dir; + struct dirent* ptr; + char base[1000]; + + if ((dir = opendir(path.c_str())) == NULL) { + LOG("Open dir %s error.\n", path.c_str()); + return ACL_ERROR_PATH_INVALID; + } + + while ((ptr = readdir(dir)) != NULL) { + if (strcmp(ptr->d_name, ".") == 0 || strcmp(ptr->d_name, "..") == 0) { + //current dir OR parrent dir + continue; + } else if (ptr->d_type == 8) { + //file + files.push_back(ptr->d_name); + } else if (ptr->d_type == 10) { + //link file + continue; + } else if (ptr->d_type == 4) { + 
+            // directory
+            continue;
+        }
+    }
+
+    closedir(dir);
+    std::sort(files.begin(), files.end());
+    return ACL_ERROR_NONE;
+}
+
+aclError FreeDevMemory(aclmdlDataset* dataset)
+{
+    for (size_t i = 0; i < aclmdlGetDatasetNumBuffers(dataset); ++i) {
+        aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(dataset, i);
+        void* data = aclGetDataBufferAddr(dataBuffer);
+        aclrtFree(data);
+        aclDestroyDataBuffer(dataBuffer);
+    }
+
+    return ACL_ERROR_NONE;
+}
+
+// Free every data buffer of the dataset and the dataset itself.
+// When flag is 1, buffer 0 holds DVPP memory and is released with
+// acldvppFree; all other buffers are released with aclrtFree.
+aclError DestroyDatasetResurce(aclmdlDataset* dataset, uint32_t flag)
+{
+    aclError ret = ACL_ERROR_NONE;
+
+    if (nullptr == dataset) {
+        LOG("dataset == null\n");
+        return 1;
+    }
+
+    for (size_t i = 0; i < aclmdlGetDatasetNumBuffers(dataset); ++i) {
+        aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(dataset, i);
+        if (nullptr == dataBuffer) {
+            LOG("dataBuffer == null\n");
+            continue;
+        }
+
+        void* data = aclGetDataBufferAddr(dataBuffer);
+        if (nullptr != data) {
+            if (1 == flag) {
+                if (i > 0) {
+                    ret = aclrtFree(data);
+                    if (ret != ACL_ERROR_NONE) {
+                        LOG("aclrtFree data failed, ret %d\n", ret);
+                    }
+                } else {
+                    ret = acldvppFree(data);
+                    if (ret != ACL_ERROR_NONE) {
+                        LOG("acldvppFree data failed, ret %d\n", ret);
+                    }
+                }
+            } else {
+                ret = aclrtFree(data);
+                if (ret != ACL_ERROR_NONE) {
+                    LOG("aclrtFree data failed, ret %d\n", ret);
+                }
+            }
+        }
+
+        ret = aclDestroyDataBuffer(dataBuffer);
+        if (ret != ACL_ERROR_NONE) {
+            LOG("Destroy dataBuffer failed, ret %d\n", ret);
+        }
+    }
+
+    ret = aclmdlDestroyDataset(dataset);
+    if (ret != ACL_ERROR_NONE) {
+        LOG("aclmdlDestroyDataset failed, ret %d\n", ret);
+    }
+
+    return ret;
+}
+
+
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/LICENSE b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..57bc88a15a0ee8266c259b2667e64608d3f7e292
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/LICENSE
@@ -0,0 +1,202 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/README.md b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..82ee516308152708dfc6c793a2fea1723cdab8d9
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/README.md
@@ -0,0 +1,90 @@
+
+
+# Resnet50v1.5 Inference for Tensorflow
+
+This repository provides a script and recipe for inference of the Resnet50v1.5 model.
+
+## Notice
+**This sample only provides reference for you to learn the Ascend software stack and is not for commercial purposes.**
+
+Before starting, please pay attention to the following adaptation conditions. If they do not match, the inference may fail.
+
+| Conditions | Need |
+| --- | --- |
+| CANN Version | >=5.0.3 |
+| Chip Platform| Ascend310/Ascend710 |
+| 3rd Party Requirements| Please follow the 'requirements.txt' |
+
+## Quick Start Guide
+
+### 1. Clone the repository
+
+```shell
+git clone https://gitee.com/ascend/ModelZoo-TensorFlow.git
+cd ModelZoo-TensorFlow/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL
+```
+
+### 2. Download and preprocess the dataset
+
+1. Download the ImageNet2012 validation dataset by yourself. You will need the validation pictures (50000 JPEGs) and the ILSVRC2012val-label-index.txt label file.
+
+2. Put the JPEGs into **'scripts/ILSVRC2012val'** and the label text into **'scripts/'**.
+
+3. Preprocess the images:
+```
+cd scripts
+mkdir input_bins
+python3 resnet50v15_preprocessing.py ./ILSVRC2012val/ ./input_bins/
+```
+The JPEG pictures will be preprocessed into bin files.
+
+### 3. Offline Inference
+
+**Convert pb to om.**
+
+- Configure the environment
+
+  ```
+  export install_path=/usr/local/Ascend
+  export PATH=/usr/local/python3.7.5/bin:${install_path}/atc/ccec_compiler/bin:${install_path}/atc/bin:$PATH
+  export PYTHONPATH=${install_path}/atc/python/site-packages:${install_path}/atc/python/site-packages/auto_tune.egg/auto_tune:${install_path}/atc/python/site-packages/schedule_search.egg:$PYTHONPATH
+  export LD_LIBRARY_PATH=${install_path}/atc/lib64:${install_path}/acllib/lib64:$LD_LIBRARY_PATH
+  export ASCEND_OPP_PATH=${install_path}/opp
+  ```
+
+- Convert pb to om
+
+  [pb download link](https://modelzoo-train-atc.obs.cn-north-4.myhuaweicloud.com/003_Atc_Models/modelzoo/Official/cv/Resnet50v1.5_for_ACL/resnet50v15_tf.pb)
+
+  ```
+  atc --model=resnet50v15_tf.pb --framework=3 --output=resnet50v15_tf_1batch --output_type=FP32 --soc_version=Ascend310 --input_shape="input_tensor:1,224,224,3" --insert_op_conf=resnet50v15_aipp.cfg --enable_small_channel=1 --log=info
+  ```
+
+- Build the program
+
+  ```
+  bash build.sh
+  ```
+
+- Run the program:
+
+  ```
+  cd scripts
+  bash benchmark_tf.sh
+  ```
+
+## Performance
+
+### Result
+
+Our result was obtained by running the applicable inference script. To achieve the same results, follow the steps in the Quick Start Guide.
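+
+As a quick sanity check before running the full accuracy script, you can inspect a single raw output. A minimal sketch (the file name below is a hypothetical example; the benchmark writes FP32 logits as davinci_<image>_output0.bin under results/resnet50v15):
+
+```python
+import numpy as np
+
+# hypothetical example file produced by the benchmark
+pred = np.fromfile("results/resnet50v15/davinci_ILSVRC2012_val_00000001_output0.bin",
+                   dtype=np.float32)
+print("top-1 index:", int(np.argmax(pred)))
+print("top-5 indices:", np.argsort(-pred)[:5])
+```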
+
+#### Inference accuracy results
+
+| model | **data** | Top1/Top5 |
+| :---------------: | :-------: | :-------------: |
+| offline Inference | 50000 images | 76.5% / 93.1% |
+
+## Reference
+
+[1] https://github.com/IntelAI/models
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/build.sh b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/build.sh
new file mode 100644
index 0000000000000000000000000000000000000000..dae86211d2691b82ecfd8c2637d1276092e476ed
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/build.sh
@@ -0,0 +1,9 @@
+rm -rf ./Benchmark/build
+
+mkdir -p Benchmark/build/intermediates/host
+cd Benchmark/build/intermediates/host
+cmake ../../../../Benchmark/ -DCMAKE_CXX_COMPILER=g++
+make clean
+make install
+cd -
+cd Benchmark/out/
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/modelzoo_level.txt b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/modelzoo_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..0934af7691acabd7981d82342b3a2310fe606d3d
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/modelzoo_level.txt
@@ -0,0 +1,6 @@
+ModelConvert:OK
+QuantStatus:OK
+FuncStatus:OK
+PrecisionStatus:OK
+AutoTune:OK
+PerfStatus:OK
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/requirements.txt b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..6f66bb9f75c74849c47871a646493af6c2eb83d3
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/requirements.txt
@@ -0,0 +1,3 @@
+tensorflow==1.15
+numpy==1.16
+Pillow==7.1.2
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/resnet50v15_aipp.cfg b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/resnet50v15_aipp.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..ee696ee994f923f97017fa1aadba1318a37e6c30
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/resnet50v15_aipp.cfg
@@ -0,0 +1,13 @@
+aipp_op {
+    aipp_mode: static
+    input_format : RGB888_U8
+    src_image_size_w : 224
+    src_image_size_h : 224
+    mean_chn_0 : 124
+    mean_chn_1 : 117
+    mean_chn_2 : 104
+    var_reci_chn_0 : 1
+    var_reci_chn_1 : 1
+    var_reci_chn_2 : 1
+}
+
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/scripts/benchmark_tf.sh b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/scripts/benchmark_tf.sh
new file mode 100644
index 0000000000000000000000000000000000000000..4d8ffccae6d4f1d1ae0e25a8951d21f0380f9c7d
--- /dev/null
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/scripts/benchmark_tf.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+#set -x
+cur_dir=`pwd`
+benchmark_dir=$cur_dir/../Benchmark/out
+om_name=$cur_dir/../resnet50v15_tf_1batch.om
+batchsize=1
+model_name=resnet50v15
+output_dir='results'
+rm -rf $cur_dir/$output_dir/*
+
+#start offline inference
+$benchmark_dir/benchmark --om $om_name --dataDir $cur_dir/input_bins/ --modelType $model_name --outDir $cur_dir/$output_dir --batchSize $batchsize --imgType bin --useDvpp 0
+
+#post process
+python3 $cur_dir/imagenet_accuarcy_cal.py --infer_result $cur_dir/$output_dir/$model_name --label $cur_dir/ILSVRC2012val-label-index.txt --offset 1
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/scripts/imagenet_accuarcy_cal.py b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/scripts/imagenet_accuarcy_cal.py
new file mode 100644
index 0000000000000000000000000000000000000000..38f0d91170e48644f8bafffa9e0a098881c52b48
--- /dev/null
+++ 
+++ b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/scripts/imagenet_accuarcy_cal.py
@@ -0,0 +1,75 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import numpy as np
+import os
+import time
+import argparse
+
+if __name__=='__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--infer_result", type=str, default="../../result_Files")
+    parser.add_argument("--label", type=str, default="../data/input_50000.csv")
+    parser.add_argument("--output_index", type=int, default=0)
+    parser.add_argument("--offset", type=int, default=0)
+    parser.add_argument("--dtype", type=str, default='float32')  # datatype of bin files
+    args = parser.parse_args()
+
+    image_cnt = 0
+    top1_cnt = 0
+    top5_cnt = 0
+    ground_truth = {}
+    if args.label.endswith(".csv"):
+        with open(args.label, 'r') as cs:
+            rs_list = cs.readlines()
+            for line in rs_list:
+                image_name = line.split(',')[0].split('.JPEG')[0]
+                label = int(line.split(',')[1])
+                label += args.offset
+                ground_truth[image_name] = label
+    elif args.label.endswith(".txt"):
+        with open(args.label, 'r') as cs:
+            rs_list = cs.readlines()
+            for line in rs_list:
+                image_name = line.split(' ')[0].split('.JPEG')[0]
+                label = int(line.split(' ')[1].replace("\n", ""))
+                label += args.offset
+                ground_truth[image_name] = label
+
+    for i in sorted(ground_truth):
+        try:
+            image_name = i
+            label = ground_truth[i]
+            # look for the matching output file
+            if os.path.exists(os.path.join(args.infer_result, 'davinci_{}_output{}.bin'.format(image_name, args.output_index))):
+                bin_path = os.path.join(args.infer_result, 'davinci_{}_output{}.bin'.format(image_name, args.output_index))
+                pred = np.fromfile(bin_path, dtype=args.dtype)
+            elif os.path.exists(os.path.join(args.infer_result, 'davinci_{}.JPEG_output{}.bin'.format(image_name, args.output_index))):
+                bin_path = os.path.join(args.infer_result, 'davinci_{}.JPEG_output{}.bin'.format(image_name, args.output_index))
+                pred = np.fromfile(bin_path, dtype=args.dtype)
+            elif os.path.exists(os.path.join(args.infer_result, '{}_output_{}.bin'.format(image_name, args.output_index))):
+                bin_path = os.path.join(args.infer_result, '{}_output_{}.bin'.format(image_name, args.output_index))
+                pred = np.fromfile(bin_path, dtype=args.dtype)
+            else:
+                continue
+            top1 = np.argmax(pred)
+            if label == top1:
+                top1_cnt += 1
+            if label in np.argsort(-pred)[0:5]:
+                top5_cnt += 1
+            image_cnt += 1
+            print("{}, gt label:{: >4}, predict results:{}".format(image_name, label, str(np.argsort(-pred)[0:5])))
+        except Exception as e:
+            print("Failed to process {}: {}".format(image_name, e))
+    print('image_count %d, top1_accuracy %.3f, top5_accuracy %.3f' % (image_cnt, top1_cnt / image_cnt, top5_cnt / image_cnt))
\ No newline at end of file
diff --git a/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/scripts/resnet50v15_preprocessing.py b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/scripts/resnet50v15_preprocessing.py
new file mode 100644
index 0000000000000000000000000000000000000000..44d63f9676ee2bdc2c206ee6e932d38ddd163ec7
--- /dev/null
+++ 
b/ACL_TensorFlow/built-in/cv/Resnet50v1.5_for_ACL/scripts/resnet50v15_preprocessing.py @@ -0,0 +1,100 @@ +# Copyright 2016 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Provides utilities to preprocess images for the Inception networks.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import tensorflow.compat.v1 as tf +import os +import sys +import shutil +import numpy as np +from PIL import Image +from tensorflow.python.ops import control_flow_ops + +def eval_image(image, height, width, resize_method, + central_fraction=0.875, scope=None): + + with tf.name_scope('eval_image'): + if resize_method == 'crop': + shape = tf.shape(input=image) + image = tf.cond(pred=tf.less(shape[0], shape[1]), + true_fn=lambda: tf.image.resize(image, + tf.convert_to_tensor(value=[256, 256 * shape[1] / shape[0]], + dtype=tf.int32)), + false_fn=lambda: tf.image.resize(image, + tf.convert_to_tensor(value=[256 * shape[0] / shape[1], 256], + dtype=tf.int32))) + + shape = tf.shape(input=image) + y0 = (shape[0] - height) // 2 + x0 = (shape[1] - width) // 2 + distorted_image = tf.image.crop_to_bounding_box(image, y0, x0, height, width) + distorted_image.set_shape([height, width, 3]) + means = tf.broadcast_to([123.68, 116.78, 103.94], tf.shape(input=distorted_image)) + return distorted_image - means + else: # bilinear + if image.dtype != tf.float32: + image = tf.image.convert_image_dtype(image, dtype=tf.float32) + # Crop the central region of the image with an area containing 87.5% of + # the original image. + if central_fraction: + image = tf.image.central_crop(image, central_fraction=central_fraction) + + if height and width: + # Resize the image to the specified height and width. 
+        image = tf.expand_dims(image, 0)
+        image = tf.image.resize(image, [height, width],
+                                method=tf.image.ResizeMethod.BILINEAR)
+        image = tf.squeeze(image, [0])
+        image = tf.subtract(image, 0.5)
+        image = tf.multiply(image, 2.0)
+    return image
+
+def preprocess(src_path, save_path):
+    in_files = os.listdir(src_path)
+    in_files.sort()
+    sqz_mean = np.array([123.68, 116.78, 103.94], np.float32)
+    if os.path.isdir(save_path):
+        shutil.rmtree(save_path)
+    os.makedirs(save_path)
+    for file in in_files:
+        with tf.Session().as_default():
+            if not os.path.isdir(os.path.join(src_path, file)):
+                print(file)
+                img_buffer = tf.io.gfile.GFile(os.path.join(src_path, file), 'rb').read()
+                img = tf.image.decode_jpeg(img_buffer, channels=3, fancy_upscaling=False, dct_method='INTEGER_FAST')
+                img = eval_image(img,
+                                 224,
+                                 224,
+                                 'crop')
+                img = img.eval()
+                # add the per-channel mean back and store uint8; the mean is
+                # subtracted again on the device by AIPP (see resnet50v15_aipp.cfg)
+                img = img + sqz_mean
+                img = img.astype(np.uint8, copy=False)
+                img.tofile(os.path.join(save_path, file.split('.')[0] + ".bin"))
+            tf.reset_default_graph()
+
+if __name__ == "__main__":
+    if len(sys.argv) < 3:
+        raise Exception("usage: python3 xxx.py [src_path] [save_path]")
+
+    src_path = sys.argv[1]
+    save_path = sys.argv[2]
+    preprocess(src_path, save_path)
diff --git a/ACL_TensorFlow/contrib/cv/3D-POSE-BASELINE_ID0795_for_ACL/author.txt b/ACL_TensorFlow/contrib/cv/3D-POSE-BASELINE_ID0795_for_ACL/author.txt
index 9975e8044d8a1b39f83004796b2bc714d108360d..44656b06ed50b5b77cf3f9be868b99f1c29185f6 100644
--- a/ACL_TensorFlow/contrib/cv/3D-POSE-BASELINE_ID0795_for_ACL/author.txt
+++ b/ACL_TensorFlow/contrib/cv/3D-POSE-BASELINE_ID0795_for_ACL/author.txt
@@ -1,4 +1,4 @@
-Li Yi, Lei Xie
+Yi Li, Lei Xie
 Nanjing University
 Nanjing, Jiangsu, China
 yili@smail.nju.edu.cn, lxie@nju.edu.cn
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/LICENSE b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..8389e23f5575d034f02543f0b7613cff48ae7bbc
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/LICENSE
@@ -0,0 +1,284 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +------------------ +Files: third_party/compute_library/... + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +------------------ +Files: ACKNOWLEDGEMENTS +LICENSE + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR + ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------------------ +Files: third_party/hexagon + +Copyright (c) 2016-2019, The Linux Foundation. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the +disclaimer below) provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of The Linux Foundation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE +GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT +HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/README.md b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..c07aedecfdf53cb439f04796e4a0b2ce1a501c34
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/README.md
@@ -0,0 +1,70 @@
+
+## Inference Process
+Environment
+- Tensorflow 1.15
+- python 3.7
+
+1. Checkpoint file
+
+- Checkpoint download link:
+
+  https://sharegua.obs.cn-north-4.myhuaweicloud.com:443/checkpoint65.zip?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1667698491&Signature=Ltfv5%2B5VbaFSklW3pI6W6oTh73A%3D
+
+  Convert the checkpoint into the pb file bliznet_tf_310.pb with freeze_graph.py.
+
+- PB file download link:
+
+  https://sharegua.obs.myhuaweicloud.com:443/bliznet_tf_310.pb?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1667656586&Signature=JhBRfk5dpeDFE%2BPy1jQg6Q4mvHY%3D
+
+2. OM model
+
+- OM model download link:
+
+  https://sharegua.obs.myhuaweicloud.com:443/bliznet_tf_310.om?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1667656644&Signature=Z7DyzKRGPd27pYipfD2Ke/KSGAo%3D
+
+  You can refer to the following command when converting the model with the ATC tool:
+
+```
+atc --model=/home/HwHiAiUser/atc/bliznet_tf_310.pb --framework=3 --output=/home/HwHiAiUser/atc/bliznet_tf_310 --soc_version=Ascend310 \
+    --input_shape="input:1,300,300,3" \
+    --log=info \
+    --out_nodes="concat_1:0;concat_2:0;ssd_2/Conv_7/BiasAdd:0"
+```
+
+3. Inference with the msame tool
+
+   See https://gitee.com/ascend/tools/tree/master/msame to obtain the msame inference tool and its usage.
+
+   After obtaining the msame executable, put the om file to be tested into the model folder, then run the test.
+
+   msame inference can refer to the following command:
+```
+./msame --model "/home/HwHiAiUser/msame/bliznet_tf_310.om" --input "/home/HwHiAiUser/msame/data" --output "/home/HwHiAiUser/msame/out/" --outfmt TXT
+```
+- Convert the test set data to bin files:
+```
+  imageToBin.py
+```
+
+- Test data (bin files) download link:
+
+  https://sharegua.obs.cn-north-4.myhuaweicloud.com:443/img.zip?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1667698452&Signature=f3aLaUdPnodF8PKtCaI5Ox4wb6c%3D
+
+
+4. Evaluation
+
+   Use testBliznetPb_OM_Data.py to evaluate the txt files obtained after inference.
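+
+   For reference, a minimal sketch of loading one sample's msame TXT outputs for evaluation (the output directory name is hypothetical; the reshape targets follow the three output nodes listed above):
+
+```python
+import numpy as np
+
+out_dir = "./binFile/test/<msame_output_dir>"  # hypothetical msame output folder
+i = 0  # sample index
+confidence = np.loadtxt("{}/{:05d}_output_0.txt".format(out_dir, i)).reshape(1, 45390, 21)
+location = np.loadtxt("{}/{:05d}_output_1.txt".format(out_dir, i)).reshape(1, 45390, 4)
+seg_logits = np.loadtxt("{}/{:05d}_output_2.txt".format(out_dir, i)).reshape(1, 75, 75, 21)
+```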

Accuracy

+
+Training set: VOC12 train-seg-aug
+
+Test set: VOC12 val
+
+| | mIoU | mAP |
+| ---------- | -------- | -------- |
+| Paper accuracy | 72.8 | 80.0 |
+| GPU accuracy (FP32) | 72.8 | 80.0 |
+| GPU accuracy (FP16) | 72.0 | 78.3 |
+| NPU accuracy | 70.1 | 77.6 |
+| Inference accuracy | 70.1 | 77.6 |
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/freeze_graph.py b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/freeze_graph.py
new file mode 100644
index 0000000000000000000000000000000000000000..775d0e09f523b9fd28f7e965d696ff8d98d6fc62
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/freeze_graph.py
@@ -0,0 +1,90 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
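+# Freeze the BlitzNet checkpoint (model.ckpt-65000) into a single inference
+# graph bliznet_tf_310.pb with outputs concat_1, concat_2 and
+# ssd_2/Conv_7/BiasAdd.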
+from npu_bridge.npu_init import *
+import tensorflow as tf
+from tensorflow.python.tools import freeze_graph
+import os
+from Train.config import args
+from help_modelarts import modelarts_result2obs
+
+from Train.resnet import ResNet
+from Train.config import config as net_config
+
+INIT_CKPT_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'checkpoint65')
+ckpt_path = os.path.join(INIT_CKPT_DIR, 'model.ckpt-65000')
+
+def main():
+    print("start ckpt to pb")
+    print("ckpt_path = ", ckpt_path)
+    tf.reset_default_graph()
+    img_ph = tf.placeholder(tf.float32, shape=[1, 300, 300, 3], name="input")
+    dataset_num_classes = 21
+
+    net = ResNet
+    depth = 50
+    net = net(config=net_config, depth=depth, training=False)
+
+    net.create_trunk(img_ph)
+
+    if args.detect:
+        net.create_multibox_head(dataset_num_classes)
+        confidence = net.outputs['confidence']
+        location = net.outputs['location']
+    else:
+        location, confidence = None, None
+
+    if args.segment:
+        net.create_segmentation_head(dataset_num_classes)
+        seg_logits = net.outputs['segmentation']
+    else:
+        seg_logits = None
+
+    print("confidence = ", confidence)
+    print("location = ", location)
+    print("seg_logits = ", seg_logits)
+
+    with tf.Session() as sess:
+        tf.train.write_graph(sess.graph_def, args.result_dir, 'model.pb')
+        modelarts_result2obs(args)
+        freeze_graph.freeze_graph(
+            input_graph=os.path.join(args.result_dir, 'model.pb'),
+            input_saver='',
+            input_binary=False,
+            input_checkpoint=ckpt_path,
+            output_node_names="concat_1,concat_2,ssd_2/Conv_7/BiasAdd",  # graph output nodes
+            restore_op_name='save/restore_all',
+            filename_tensor_name='save/Const:0',
+            output_graph=os.path.join(args.result_dir, 'bliznet_tf_310.pb'),  # output graph name
+            clear_devices=False,
+            initializer_nodes="")
+    print("done")
+
+    modelarts_result2obs(args)
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/imageToBin.py b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/imageToBin.py
new file mode 100644
index 0000000000000000000000000000000000000000..23635dd95f92c9a96ee56505a4f017200762d98c
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/imageToBin.py
@@ -0,0 +1,70 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
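+# Convert the VOC validation images and their annotations to raw .bin files
+# under ./binFile/ so they can be fed to msame for offline inference.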
+import tensorflow as tf
+from config import args
+from getData.voc_loader import VOCLoader
+
+import progressbar
+import logging
+log = logging.getLogger()
+import numpy as np
+
+def main(argv=None):
+    if args.dataset == 'voc07' or args.dataset == 'voc07+12':
+        loader = VOCLoader('07', 'test')
+    if args.dataset == 'voc12-val':
+        loader = VOCLoader('12', 'val', segmentation=args.segment)
+
+    filenames = loader.get_filenames()
+    image_list = []
+
+    inputs = tf.placeholder(tf.float32, shape=[None, None, 3], name="input")
+    img_ph = tf.image.resize_bilinear(tf.expand_dims(inputs, 0), (300, 300))  # add a batch dim and resize to 300x300
+
+    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)) as sess0:
+        bar = progressbar.ProgressBar()  # progress bar
+        for i in bar(range(len(filenames))):
+            name = filenames[i]
+            img = loader.load_image(name)  # load the image
+            image = sess0.run(img_ph, feed_dict={inputs: img})
+
+            image_list.append(image)
+            gt_bboxes, seg_gt, gt_cats, w, h, difficulty = loader.read_annotations(name)  # read the annotations
+            image.tofile("./binFile/img/{0:05d}.bin".format(i))
+            # im = np.fromfile("./binFile/img/{0:05d}.bin".format(i), dtype=np.float32)
+            # print(im)
+            gt_bboxes.tofile("./binFile/gt_bboxes/{0:05d}.bin".format(i))
+            seg_gt.tofile("./binFile/seg_gt/{0:05d}.bin".format(i))
+            gt_cats.tofile("./binFile/gt_cats/{0:05d}.bin".format(i))
+            # w.tofile("./binFile/w/{0:05d}.bin".format(i))
+            # h.tofile("./binFile/h/{0:05d}.bin".format(i))
+            difficulty.tofile("./binFile/difficulty/{0:05d}.bin".format(i))
+
+
+if __name__ == '__main__':
+    tf.app.run()
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/modelzoo_level.txt b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/modelzoo_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..1badb843c0738a566804f03237972aee0ba2299e
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/modelzoo_level.txt
@@ -0,0 +1,6 @@
+FuncStatus:OK
+PrecisionStatus:POK
+AutoTune:POK
+PerfStatus:POK
+ModelConvert:OK
+QuantStatus:OK
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/maps/.keep b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/requirements.txt
similarity index 100%
rename from TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/maps/.keep
rename to ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/requirements.txt
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/testBliznetPb_OM_Data.py b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/testBliznetPb_OM_Data.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b9564af69a2a3d5cb8a83dc74fe53a79d99a885
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/testBliznetPb_OM_Data.py
@@ -0,0 +1,233 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import tensorflow as tf +from config import args +from getData.voc_loader import VOCLoader + +from tensorflow.python.ops.metrics_impl import mean_iou as streaming_mean_iou +from utils import decode_bboxes +from getData.boxer import PriorBoxGrid +from config import config as net_config +from detector import Detector +from tabulate import tabulate +import progressbar +import numpy as np +import logging +log = logging.getLogger() + +def eval_category(gt, dets, cid): + """Computes average precision for one category""" + cgt = gt[cid] + cdets = np.array(dets[cid]) + if (cdets.shape == (0, )): + return None, None + scores = cdets[:, 1] + sorted_inds = np.argsort(-scores) + image_ids = cdets[sorted_inds, 0].astype(int) + BB = cdets[sorted_inds] + + npos = 0 + for img_gt in cgt.values(): + img_gt['ignored'] = np.array(img_gt['difficult']) + img_gt['det'] = np.zeros(len(img_gt['difficult']), dtype=np.bool) + npos += np.sum(~img_gt['ignored']) + + nd = len(image_ids) + tp = np.zeros(nd) + fp = np.zeros(nd) + for d in range(nd): + ovmax = -np.inf + if image_ids[d] in cgt: + R = cgt[image_ids[d]] + bb = BB[d, 2:].astype(float) + + BBGT = R['bbox'].astype(float) + + # compute overlaps + # intersection + ixmin = np.maximum(BBGT[:, 0], bb[0]) + iymin = np.maximum(BBGT[:, 1], bb[1]) + ixmax = np.minimum(BBGT[:, 0] + BBGT[:, 2], bb[0] + bb[2]) + iymax = np.minimum(BBGT[:, 1] + BBGT[:, 3], bb[1] + bb[3]) + iw = np.maximum(ixmax - ixmin, 0.) + ih = np.maximum(iymax - iymin, 0.) + inters = iw * ih + + # union + uni = (bb[2] * bb[3] + BBGT[:, 2] * BBGT[:, 3] - inters) + + overlaps = inters / uni + ovmax = np.max(overlaps) + jmax = np.argmax(overlaps) + + if ovmax > args.voc_iou_thresh: + if not R['ignored'][jmax]: + if not R['det'][jmax]: + tp[d] = 1. + R['det'][jmax] = True + else: + fp[d] = 1. + else: + fp[d] = 1. + + # compute precision recall + fp = np.cumsum(fp) + tp = np.cumsum(tp) + rec = tp / float(npos) + N = float(npos) + # avoid divide by zero in case the first detection matches a difficult + # ground truth + prec = rec * N / np.maximum(rec * N + fp, np.finfo(np.float32).eps) + return rec, prec + +def voc_ap(rec, prec, use_07_metric=False): + """ ap = voc_ap(rec, prec, [use_07_metric]) + Compute VOC AP given precision and recall. + If use_07_metric is true, uses the + VOC 07 11 point method (default:False). + """ + if use_07_metric: + # 11 point metric + ap = 0. + for t in np.arange(0., 1.1, 0.1): + p = 0 if np.sum(rec >= t) == 0 else np.max(prec[rec >= t]) + ap = ap + p / 11. 
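+        # 11-point interpolation (VOC07): each recall threshold t in
+        # {0.0, 0.1, ..., 1.0} contributes the maximum precision observed at
+        # recall >= t, averaged over the 11 thresholds. E.g. with rec=[0.5]
+        # and prec=[1.0], thresholds 0.0..0.5 each contribute 1.0, giving
+        # AP = 6/11.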
+    else:
+        # correct AP calculation
+        # first append sentinel values at the end
+        mrec = np.concatenate(([0.], rec, [1.]))
+        mpre = np.concatenate(([0.], prec, [0.]))
+
+        # compute the precision envelope
+        for i in range(mpre.size - 1, 0, -1):
+            mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
+
+        # to calculate area under PR curve, look for points
+        # where X axis (recall) changes value
+        i = np.where(mrec[1:] != mrec[:-1])[0]
+
+        # and sum (\Delta recall) * prec
+        ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
+    return ap
+
+def compute_ap(gt, dets, loader):
+    """computes average precision for all categories"""
+    aps = {}
+    for cid in range(1, loader.num_classes):
+        cat_name = loader.ids_to_cats[cid]
+        rec, prec = eval_category(gt, dets, cid)
+        ap = voc_ap(rec, prec, loader.year == '07')
+        aps[loader.ids_to_cats[cid]] = ap
+    return aps
+
+def make_detection_table(gt, dets, loader):
+    """creates a table with AP per category and mean AP"""
+    aps = compute_ap(gt, dets, loader)
+    print("ap = ", aps)
+    eval_cache = [aps]
+
+    table = []
+    for cid in range(1, loader.num_classes):
+        cat_name = loader.ids_to_cats[cid]
+        table.append((cat_name, ) + tuple(aps.get(cat_name, 'N/A') for aps in eval_cache))
+    mean_ap = np.mean([a for a in list(aps.values()) if a >= 0])
+    table.append(("AVERAGE", ) + tuple(np.mean(list(aps.values())) for aps in eval_cache))
+    x = tabulate(table, headers=(["Category", "mAP (all)"]),
+                 tablefmt='orgtbl', floatfmt=".3f")
+    log.info("Eval results:\n%s", x)
+    return table
+
+def compute_mean_iou(detector):
+    iou = detector.get_mean_iou()
+    print(iou)
+    log.info("\n Mean IoU is %f", iou)
+    return iou
+
+def main(argv=None):
+    if args.dataset == 'voc07' or args.dataset == 'voc07+12':
+        loader = VOCLoader('07', 'test')
+    if args.dataset == 'voc12-val':
+        loader = VOCLoader('12', 'val', segmentation=args.segment)
+
+    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True,
+                                          log_device_placement=False)) as sess:
+        detector = Detector(sess, loader, net_config, no_gt=args.no_seg_gt)
+
+        filenames = loader.get_filenames()
+        gt = {cid: {} for cid in range(1, loader.num_classes)}
+        dets = {cid: [] for cid in range(1, loader.num_classes)}
+
+        bar = progressbar.ProgressBar()  # progress bar
+        # print("filenames = ", filenames)
+
+        init_op = tf.group(tf.local_variables_initializer(), tf.global_variables_initializer())
+        sess.run(init_op)
+        for i in bar(range(len(filenames))):
+            name = filenames[i]
+            # print("name = ", name)
+            img_id = i
+            img = loader.load_image(name)  # load the image
+            # img = np.fromfile("./binFile/img/{0:05d}.bin".format(i), dtype=np.float32)
+            # img.shape = 1, 300, 300, 3
+            gt_bboxes, seg_gt, gt_cats, w, h, difficulty = loader.read_annotations(name)  # read the annotations
+
+            confidence = np.loadtxt("./binFile/test/2021118_18_51_25_234650/{0:05d}_output_0.txt".format(i))
+            location = np.loadtxt("./binFile/test/2021118_18_51_25_234650/{0:05d}_output_1.txt".format(i))
+            seg_logits = np.loadtxt("./binFile/test/2021118_18_51_25_234650/{0:05d}_output_2.txt".format(i))
+            confidence.shape = 1, 45390, 21
+            location.shape = 1, 45390, 4
+            seg_logits.shape = 1, 75, 75, 21
+
+            for cid in np.unique(gt_cats):
+                mask = (gt_cats == cid)
+                bbox = gt_bboxes[mask]
+                diff = difficulty[mask]
+                det = np.zeros(len(diff), dtype=np.bool)
+                gt[cid][img_id] = {'bbox': bbox, 'difficult': diff, 'det': det}
+
+            confidence1 = confidence
+            location1 = location
+            seg_logits1 = seg_logits
+            output = detector.feed_forward(img, seg_gt, confidence1, location1, seg_logits1,
+                                           w, h, name, gt_bboxes, gt_cats)  # result
+
+            if args.detect:
args.detect: + det_bboxes, det_probs, det_cats = output[:3] + for i in range(len(det_cats)): + dets[det_cats[i]].append((img_id, det_probs[i]) + tuple(det_bboxes[i])) + + # print("gt = ", gt) + # print("dets = ", dets) + print("table result:") + table = make_detection_table(gt, dets, loader) if args.detect else None + print("iou result:") + iou = compute_mean_iou(detector) if args.segment else None + + +if __name__ == '__main__': + tf.app.run() \ No newline at end of file diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/test/.keep b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/.keep similarity index 100% rename from TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/test/.keep rename to ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/.keep diff --git a/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/LICENSE.txt b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..eca7123693d7333d8d391b74a469fee175244ac7 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/LICENSE.txt @@ -0,0 +1,190 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + Copyright 2017 Guillaume Genthial + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/README.md b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..39d3503ff386dcec9336cc520f12957206fec3b1
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/README.md
@@ -0,0 +1,68 @@
+# COMPARE_GAN Offline Inference
+
+## Requirements
+
+| Environment | Version |
+| --- | --- |
+| CANN | <5.0.3 |
+| Processor | Ascend310/Ascend910 |
+| Others | see 'requirements.txt' |
+
+## Dataset (bin) preparation
+    Generate 64*128 random noise:
+    python noise_bin_data.py
+    Dataset link: [OBS](obs://cann-id2103/inference/bin_data/)
+## Scripts and sample code
+
+```text
+├── README.md                //documentation
+├── noise_bin_data.py        //bin data generation
+├── ckpt_to_pb.py            //model freezing
+├── requirements.txt         //dependencies
+├── LICENSE.txt              //license
+├── scripts
+│   ├──pb_to_om.sh           //pb-to-om conversion
+├── ckpt                     //ckpt model directory
+├── pb                       //pb model directory
+├── bin_data                 //bin file directory
+```
+
+## Model files
+
+Includes the ckpt, pb and om model files.
+
+Download link: [OBS](obs://cann-id2103/inference/)
+
+## Converting the ckpt file to a pb model
+
+```bash
+python ckpt_to_pb.py
+```
+
+## Converting the pb model to an om model
+
+Check the ATC tool environment variables; once they are set, update the pb and om paths PB_PATH and OM_PATH, then run pb_to_om.sh:
+
+```bash
+export PATH=/usr/local/python3.7.5/bin:$PATH
+export PYTHONPATH=/usr/local/Ascend/ascend-toolkit/latest/atc/python/site-packages/te:$PYTHONPATH
+export LD_LIBRARY_PATH=/usr/local/Ascend/ascend-toolkit/latest/atc/lib64:${LD_LIBRARY_PATH}
+
+PB_PATH=/root
+OM_PATH=/root
+
+/usr/local/Ascend/ascend-toolkit/latest/atc/bin/atc --model=$PB_PATH/gan.pb --framework=3 \
+    --input_shape="split_1:64,128" \
+    --output=$OM_PATH/ganom --soc_version=Ascend310 \
+    --out_nodes="generator/Sigmoid:0" \
+    --log=debug
+
+```
+## Inference with the msame tool
+
+Install [msame](https://gitee.com/ascend/tools/tree/master/msame), then run:
+
+```bash
+./msame --model /root/ganom.om --input /root/infer_bin_data.bin --output /root/bin_data --outfmt TXT
+```
+Note: msame names its output directory after the inference timestamp (e.g. 20220429_170719), so check the actual path yourself.
diff --git a/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/bin_data/.keep b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/bin_data/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/ckpt/.keep b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/ckpt/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/ckpt_to_pb.py b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/ckpt_to_pb.py
new file mode 100644
index 0000000000000000000000000000000000000000..90cf96adbfb39741cca6be366d4a9f3e000a7025
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/ckpt_to_pb.py
@@ -0,0 +1,15 @@
+from tensorflow.python.tools import freeze_graph
+
+
+# freeze the trained checkpoint (graph taken from the meta file) into pb/gan.pb
+freeze_graph.freeze_graph(
+    input_checkpoint='ckpt/model.ckpt-45000',
+    output_node_names='generator/Sigmoid',
+    output_graph='pb/gan.pb',
+    initializer_nodes='',
+    input_graph=None,
+    input_saver=False,
+    input_binary=True,
+    restore_op_name=None,
+    filename_tensor_name=None,
+    clear_devices=False,
+    input_meta_graph='ckpt/model.ckpt-45000.meta')
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/noise_bin_data.py b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/noise_bin_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..b00eb6fbb3eb2fec573d4faf63f64e65f7454c7e
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/noise_bin_data.py
@@ -0,0 +1,7 @@
+import numpy as np
+
+# generate 64*128 random noise data of type float32
+data = np.random.uniform(size=(64, 128))
+data = data.astype(np.float32)
+data.tofile("bin_data/infer_bin_data.bin")
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/pb/.keep b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/pb/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/requirements b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/requirements
new file mode 100644
index 0000000000000000000000000000000000000000..55b27f53e1bbe53fa5dd3697f0e0a00ccdbb5a8c
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/requirements
@@ -0,0 +1,3 @@
+python==3.6
+tensorflow==1.15.0
+numpy
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/scripts/.keep b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/scripts/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/scripts/pb_to_om.sh b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/scripts/pb_to_om.sh
new file mode 100644
index 0000000000000000000000000000000000000000..be5a626424b90a882164104cacaf7f0ff51bd3ed
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/COMPARE_GAN_ID2103_for_ALC/scripts/pb_to_om.sh
@@ -0,0 +1,13 @@
+
+export PATH=/usr/local/python3.7.5/bin:$PATH
+export PYTHONPATH=/usr/local/Ascend/ascend-toolkit/latest/atc/python/site-packages/te:$PYTHONPATH
+export LD_LIBRARY_PATH=/usr/local/Ascend/ascend-toolkit/latest/atc/lib64:${LD_LIBRARY_PATH}
+
+PB_PATH=/root
+OM_PATH=/root
+
+/usr/local/Ascend/ascend-toolkit/latest/atc/bin/atc --model=$PB_PATH/gan.pb --framework=3 \
+    --input_shape="split_1:64,128" \
+    --output=$OM_PATH/ganom --soc_version=Ascend310 \
+    --out_nodes="generator/Sigmoid:0" \
+    --log=debug
diff --git a/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/LICENCE b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/LICENCE
new file mode 100644
index 0000000000000000000000000000000000000000..68138bbe199d050af8296991a843bf1a5bcf86fd
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/LICENCE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 Fan Yang
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/README.md b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..c7ae99eb778208480730ff53d38aff28e9c13619
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/README.md
@@ -0,0 +1,30 @@
+# DD-NET
+
+### About the project
+
+This project reproduces the algorithm from the paper "Make Skeleton-based Action Recognition Model Smaller, Faster and Better".
+
+Paper: [paper](https://arxiv.org/pdf/1907.09658.pdf)
+
+Original author's open-source code: [code](https://github.com/fandulu/DD-Net)
+
+Converted code that runs on Ascend 910: [code](https://gitee.com/ascend/ModelZoo-TensorFlow/tree/master/TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow)
+
+### Model conversion
+
+#### h5 to pb
+
+Save the model as an h5 file with 'tf.keras.models.save_model'; then run the h5topb.py script to obtain the frozen pb file.
+
+
+#### pb to om
+
+Convert the model with the ATC conversion tool:
+`atc --model=./model.pb --framework=3 --output=./out --soc_version=Ascend310 --input_shape="Input:1,32,105; Input_1:1,32,15,2" --log=info --out_nodes="model_1/dense_2/Softmax:0"`
+
+### Inference
+
+Use the msame tool; reference command:
+`/home/HwHiAiUser/AscendProjects/tools/msame/out/msame --model ./out.om --output ./output --outfmt BIN --loop 1
+`
+
diff --git a/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/h5topb.py b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/h5topb.py
new file mode 100644
index 0000000000000000000000000000000000000000..aa146867d486056052074f45961a4ab287574723
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/h5topb.py
@@ -0,0 +1,68 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
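+
+# Sketch of how this script fits the flow above (paths are this repo's
+# defaults and may need adjusting): the DD-NET Keras model is loaded from
+# an .h5 file with its custom poses_diff layer registered, wrapped in a
+# ConcreteFunction over its two inputs (the "Input:1,32,105" and
+# "Input_1:1,32,15,2" tensors seen in the README's ATC call), and frozen
+# to ./pb/model.pb for the atc conversion step.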
+
+import tensorflow as tf
+from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2
+
+def poses_diff(x):
+    H, W = x.get_shape()[1], x.get_shape()[2]
+    x = tf.subtract(x[:, 1:, ...], x[:, :-1, ...])
+    x = tf.image.resize(x, size=[H, W])
+    return x
+
+def h5_to_pb(h5_save_path):
+    model = tf.keras.models.load_model(h5_save_path, compile=False, custom_objects={'poses_diff': poses_diff})
+    model.summary()
+    full_model = tf.function(lambda Input: model(Input))
+    full_model = full_model.get_concrete_function([tf.TensorSpec(model.inputs[0].shape, model.inputs[0].dtype),
+                                                   tf.TensorSpec(model.inputs[1].shape, model.inputs[1].dtype)])
+
+    # Get frozen ConcreteFunction
+    frozen_func = convert_variables_to_constants_v2(full_model)
+    frozen_func.graph.as_graph_def()
+
+    layers = [op.name for op in frozen_func.graph.get_operations()]
+    print("-" * 50)
+    print("Frozen model layers: ")
+    for layer in layers:
+        print(layer)
+
+    print("-" * 50)
+    print("Frozen model inputs: ")
+    print(frozen_func.inputs)
+    print("Frozen model outputs: ")
+    print(frozen_func.outputs)
+
+    # Save frozen graph from frozen ConcreteFunction to hard drive
+    tf.io.write_graph(graph_or_graph_def=frozen_func.graph,
+                      logdir="./pb",
+                      name="model.pb",
+                      as_text=False)  # output directory and name of the .pb file
+
+
+h5_to_pb('weights/ddnet.h5')  # path to the .h5 weights
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/model.pb b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/model.pb
new file mode 100644
index 0000000000000000000000000000000000000000..f94a31de84a01d661bccc9d66afaa7576c61c88a
Binary files /dev/null and b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/model.pb differ
diff --git a/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/modelzoo_level.txt b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/modelzoo_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2c2153f3493a1d4051230cf3a5f6b72ac3cad795
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/modelzoo_level.txt
@@ -0,0 +1,6 @@
+FuncStatus:OK
+PrecisionStatus:POK
+GPUStatus:OK
+NPUMigrationStatus:POK
+AutoTune:NOK
+PerfStatus:OK
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/out.om b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/out.om
new file mode 100644
index 0000000000000000000000000000000000000000..d69b298435be2fda31fdec8b16e99557da27c0b9
Binary files /dev/null and b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/out.om differ
diff --git a/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/test/X0.bin b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/test/X0.bin
new file mode 100644
index 0000000000000000000000000000000000000000..f7fcfa375449dab1c691deee031ecfc204531499
Binary files /dev/null and b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/test/X0.bin differ
diff --git a/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/test/X1.bin b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/test/X1.bin
new file mode 100644
index 0000000000000000000000000000000000000000..55d4bda72a1ebbe086a5b6c163bc9648a3aa3b07
Binary files /dev/null and b/ACL_TensorFlow/contrib/cv/DD-NET_ID1088_for_TensorFlow/test/X1.bin differ
diff --git a/ACL_TensorFlow/contrib/cv/DeepSort_ID0505_for_ACL/.keep b/ACL_TensorFlow/contrib/cv/DeepSort_ID0505_for_ACL/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ACL_TensorFlow/contrib/cv/DeepSort_ID0505_for_ACL/README.md b/ACL_TensorFlow/contrib/cv/DeepSort_ID0505_for_ACL/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..50b9a505a73ef6a3bc6fd09d5ad20950b2afa52c
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/DeepSort_ID0505_for_ACL/README.md
@@ -0,0 +1,96 @@
+## Model function
+
+Object tracking
+
+## Original model
+
+Reference:
+
+
+Original implementation:
+
+https://gitee.com/ascend/ModelZoo-TensorFlow/tree/master/TensorFlow/contrib/cv/DeepSort_ID0505_for_TensorFlow
+
+pb file download:
+
+Link: https://pan.baidu.com/s/1v7Fe_YYT-hZUGCf2u7TI5g
+Extraction code: v41g
+
+## om model
+
+The pb model is produced by freeze_pb.py (see that script for the ckpt-to-pb step).
+
+
+om model download:
+
+Link: https://pan.baidu.com/s/1GCuGdEUiniYZlTdFn2sknw
+Extraction code: keil
+
+A reference ATC command for converting the pb model to an om model:
+
+```
+atc --model=/root/deepsort/deep_sort.pb --framework=3 --output=/root/deepsort/deep_sort --soc_version=Ascend310 --input_shape="input:1,128,64,3"
+```
+
+## Dataset preparation
+
+Images from the Market-1501 test set converted to a bin dataset; download:
+
+
+Link: https://pan.baidu.com/s/1h9bWDVEW7-voFHFi7TaTuw  Extraction code: nwyj
+
+
+## Inference with the msame tool
+
+
+See https://gitee.com/ascend/tools/tree/master/msame for the msame inference tool and how to use it.
+
+
+After obtaining the msame executable, run inference:
+
+./msame --model "/root/deepsort/deep_sort.om" --input "/root/osnet/query" --output "/root/deepsort/out/" --outfmt TXT
+
+
+## Performance test
+
+Launch an inference performance test with msame, for example:
+
+```
+
+```
+
+```
+...
+Inference average time : 10.66 ms
+Inference average time without first time: 10.66 ms
+[INFO] unload model success, model Id is 1
+[INFO] Execute sample success
+[INFO] end to destroy stream
+[INFO] end to destroy context
+[INFO] end to reset device is 0
+[INFO] end to finalize acl
+
+...
+```
+
+Average inference time: 10.66 ms
+
+## Accuracy test
+
+
+```
+
+```
+
+Final accuracy: (not yet available)
+
+```
+Ascend310 inference result:
+ GPU result:
+ NPU result:
+```
+
+
+
+
+
diff --git a/ACL_TensorFlow/contrib/cv/DeepSort_ID0505_for_ACL/freeze_pb.py b/ACL_TensorFlow/contrib/cv/DeepSort_ID0505_for_ACL/freeze_pb.py
new file mode 100644
index 0000000000000000000000000000000000000000..a8f8cc678115f9fba953826715467bd34d9ad6d6
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/DeepSort_ID0505_for_ACL/freeze_pb.py
@@ -0,0 +1,66 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
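+#
+# What the script below does (a sketch of the intended use): it rebuilds
+# the DeepSort appearance network with a fixed [1, 128, 64, 3] float32
+# placeholder named "input", restores the (hard-coded) checkpoint, adds
+# an argmax node named "output", and freezes everything into deep_sort.pb,
+# the pb expected by the ATC command in the README. Adapt the checkpoint
+# path to your own environment.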
+# ============================================================================ +import tensorflow as tf +from tensorflow.python.tools import freeze_graph +from tensorflow.python.framework import graph_util +import os, sys +import argparse + +base_path=os.path.split(os.path.realpath(__file__))[0] +sys.path.append(base_path + "/../") + +from nets.deep_sort.network_definition import create_network + + +def main(): + + tf.reset_default_graph() + + # set inputs node + inputs = tf.placeholder(tf.float32, shape=[1, 128, 64, 3], name="input") + + features, logits = create_network(inputs, \ + num_classes=1502, \ + add_logits=True, \ + reuse=None, \ + create_summaries=False, \ + weight_decay=1e-8) + + prediction = tf.argmax(input=logits, axis=-1, output_type=tf.dtypes.int32, name="output") + + graph = tf.get_default_graph() + input_graph_def = graph.as_graph_def() + + output_graph="deep_sort.pb" + + with tf.Session() as sess: + sess.run(tf.global_variables_initializer()) + + saver = tf.train.Saver() + saver.restore(sess, "/home/HwHiAiUser/deep/lognckpt_bak/KOOKKJ/model.ckpt-98077") + + output_graph_def = graph_util.convert_variables_to_constants( + sess=sess, + input_graph_def=input_graph_def, + output_node_names=["output"]) + + with tf.gfile.GFile(output_graph, "wb") as f: + f.write(output_graph_def.SerializeToString()) + + print("done") + +if __name__ == '__main__': + main() + diff --git a/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/.keep b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/2bin.py b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/2bin.py new file mode 100644 index 0000000000000000000000000000000000000000..5f723e0ae2b7b111c99701c1836de2c49e2f209c --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/2bin.py @@ -0,0 +1,37 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
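+
+# What the code below does: it draws one random sinusoid meta-batch
+# (DataGenerator(20, 4) = 20 samples per task, meta-batch of 4 tasks) and
+# dumps the first 5 support samples per task -- two [4, 5, 1] arrays -- as
+# raw .bin files for msame. Note that np.random.uniform yields float64
+# arrays; if your om model was converted with float32 inputs, cast with
+# .astype(np.float32) before calling tofile so the byte sizes match.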
+from data_generator import DataGenerator + +data_generator = DataGenerator(20, 4) +batch_x, batch_y, amp, phase = data_generator.generate() + +inputa = batch_x[:, :5, :] +labela = batch_y[:, :5, :] + +inputa.tofile("inputa.bin") +labela.tofile("labela.bin") diff --git a/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/LICENSE b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..5ea8a5f7b6ae91ebb12b7f2fa71a5432bb89de63 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/LICENSE @@ -0,0 +1,284 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +------------------ +Files: third_party/compute_library/... + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +------------------ +Files: ACKNOWLEDGEMENTS +LICENSE + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+  ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+  (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+  ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------
+Files: third_party/hexagon
+
+Copyright (c) 2016-2019, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted (subject to the limitations in the
+disclaimer below) provided that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
+GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
+HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
+GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
+IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/README.md b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..351606f5a2281f3b40c76522c798d234a4f235a4
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/README.md
@@ -0,0 +1,63 @@
+# Basic information
+
+**Publisher: Huawei**
+
+**Application Domain: Image Classification**
+
+**Version: 1.0**
+
+**Framework: TensorFlow 1.15.0**
+
+**Model Format: ckpt/pb/om**
+
+**Precision: FP32**
+
+**Processor: Ascend 910**
+
+**Categories: Research**
+
+# Overview
+
+MT-net is a neural network architecture and task-specific learning procedure based on [Gradient-Based Meta-Learning with Learned Layerwise Metric and Subspace](https://arxiv.org/abs/1801.05558). Abstract of the original paper:
+
+Gradient-based meta-learning methods leverage gradient descent to learn the commonalities among various tasks. While previous such methods have been successful in meta-learning tasks, they resort to simple gradient descent during meta-testing. Our primary contribution is the **MT-net**, which enables the meta-learner to learn on each layer's activation space a subspace that the task-specific learner performs gradient descent on. Additionally, a task-specific learner of an MT-net performs gradient descent with respect to a meta-learned distance metric, which warps the activation space to be more sensitive to task identity. We demonstrate that the dimension of this learned subspace reflects the complexity of the task-specific learner's adaptation task, and also that our model is less sensitive to the choice of initial learning rates than previous gradient-based meta-learning methods. Our method achieves state-of-the-art or comparable performance on few-shot classification and regression tasks.
+
+Open-source code for the paper's experiments: https://github.com/yoonholee/MT-net
+
+The experiment code covers three tasks: few-shot sine wave regression, and Omniglot and miniImagenet few-shot classification. Here we choose few-shot sine wave regression.
+
+# Dataset preparation
+
+The sinusoid dataset is used (the source code generates it randomly with numpy, so no extra download is needed).
+
+# Inference procedure
+
+* step 1: ckpt to pb
+
+  Download the ckpt model from the link provided below and run ckpt2pb.py to obtain mt-net.pb.
+
+
+* step 2: pb to om
+
+  On the Huawei Cloud image server, convert mt-net.pb to mt-net.om:
+
+```
+atc --model=./mt-net.pb --framework=3 --output=./mt-net --soc_version=Ascend310 --input_shape="inputa:4,5,1;inputc:4,5,1" --log=info --out_nodes="output:0"
+```
+
+
+* step 3: model inference
+
+  Run 2bin.py to produce the data files inputa.bin and labela.bin.
+
+  Use the msame tool to run offline inference on inputa.bin and labela.bin; the inference results are written to the output folder in the current directory.
+
+```
+./msame --model /root/mt-net/mt-net.om --input /root/mt-net/inputa.bin,/root/mt-net/labela.bin --output /root/mt-net/output --outfmt TXT --loop 2 --debug=true
+```
+
+# Model downloads
+
+OBS address (contains the ckpt, pb and om models plus the data files inputa.bin and labela.bin):
+
+obs://cann-id1283/
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/ckpt2pb.py b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/ckpt2pb.py
new file mode 100644
index 0000000000000000000000000000000000000000..43b2378d3358b58741b842641094a3586359083a
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/ckpt2pb.py
@@ -0,0 +1,68 @@
+# coding=utf-8
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
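+
+# What the script below does: it rebuilds the MAML/MT-net graph with fixed
+# [4, 5, 1] placeholders (4 tasks x 5 samples x 1 dimension), exposes the
+# meta-validation loss as a node named "output", and freezes the checkpoint
+# under ./model59999 into ./pb_model/mt-net.pb. Only inputa and inputc are
+# wired as om inputs, which matches --input_shape="inputa:4,5,1;inputc:4,5,1"
+# in the ATC call shown in the README.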
+ +from tensorflow.python.tools import freeze_graph +import tensorflow as tf + +from maml import MAML + + +def run(ckpt_path): + tf.reset_default_graph() + + inputa = tf.placeholder(tf.float32, shape=(4, 5, 1), name="inputa") + inputb = tf.placeholder(tf.float32, shape=(4, 5, 1), name="inputb") + labela = tf.placeholder(tf.float32, shape=(4, 5, 1), name="inputc") + labelb = tf.placeholder(tf.float32, shape=(4, 5, 1), name="inputd") + metaval_input_tensors = {'inputa': inputa, 'inputb': inputb, 'labela': labela, 'labelb': labelb} + + model = MAML(dim_input=1, dim_output=1, test_num_updates=1) + model.construct_model(input_tensors=metaval_input_tensors, prefix='metaval_') + + logits = model.metaval_total_loss1 + tf.identity(logits, name="output") + + with tf.Session() as sess: + tf.train.write_graph(sess.graph_def, './pb_model', 'output.pb') # save pb file with output node + freeze_graph.freeze_graph( + input_graph='./pb_model/output.pb', # the pb file with output node + input_saver='', + input_binary=False, + input_checkpoint=ckpt_path, # input checkpoint file path + output_node_names='output', # the name of output node in pb file + restore_op_name='save/restore_all', + filename_tensor_name='save/Const:0', + output_graph='./pb_model/mt-net.pb', # path of output graph + clear_devices=False, + initializer_nodes='') + + +if __name__ == "__main__": + ckpt_path = "./model59999" + run(ckpt_path) diff --git a/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/data_generator.py b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/data_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..36a980838b65e6d901719db1e4486cc0146797ed --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/data_generator.py @@ -0,0 +1,210 @@ +# +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +""" Code for loading data. """ +import numpy as np +import os +import random +import tensorflow as tf + +from tensorflow.python.platform import flags +from utils import get_images + +FLAGS = flags.FLAGS +flags.DEFINE_string('datasource', 'sinusoid', 'sinusoid or omniglot or miniimagenet') + +class DataGenerator(object): + """ + Data Generator capable of generating batches of sinusoid or Omniglot data. 
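+    For the sinusoid source, generate_sinusoid_batch returns
+    (init_inputs, outputs, amp, phase), where init_inputs and outputs have
+    shape [batch_size, num_samples_per_class, 1] and amp/phase are the
+    per-task amplitudes and phases used to generate the samples.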
+ A "class" is considered a class of omniglot digits or a particular sinusoid function. + """ + def __init__(self, num_samples_per_class, batch_size, config={}): + """ + Args: + num_samples_per_class: num samples to generate per class in one batch + batch_size: size of meta batch size (e.g. number of functions) + """ + self.batch_size = batch_size + self.num_samples_per_class = num_samples_per_class + self.num_classes = 1 # by default 1 (only relevant for classification problems) + + if FLAGS.datasource == 'sinusoid': + self.generate = self.generate_sinusoid_batch + self.amp_range = config.get('amp_range', [0.1, 5.0]) + self.phase_range = config.get('phase_range', [0, np.pi]) + self.input_range = config.get('input_range', [-5.0, 5.0]) + self.freq_range = config.get('freq_range', [0.8, 1.2]) + self.dim_input = 1 + self.dim_output = 1 + elif 'omniglot' in FLAGS.datasource: + self.num_classes = config.get('num_classes', FLAGS.num_classes) + self.img_size = config.get('img_size', (28, 28)) + self.dim_input = np.prod(self.img_size) + self.dim_output = self.num_classes + # data that is pre-resized using PIL with lanczos filter + data_folder = config.get('data_folder', './data/omniglot_resized') + + character_folders = [os.path.join(data_folder, family, character) \ + for family in os.listdir(data_folder) \ + if os.path.isdir(os.path.join(data_folder, family)) \ + for character in os.listdir(os.path.join(data_folder, family))] + random.seed(1) + random.shuffle(character_folders) + num_val = 100 + num_train = config.get('num_train', 1200) - num_val + self.metatrain_character_folders = character_folders[:num_train] + if FLAGS.test_set: + self.metaval_character_folders = character_folders[num_train:num_train+num_val] + else: + self.metaval_character_folders = character_folders[num_train+num_val:] + self.rotations = config.get('rotations', [0, 90, 180, 270]) + elif FLAGS.datasource == 'miniimagenet': + self.num_classes = config.get('num_classes', FLAGS.num_classes) + self.img_size = config.get('img_size', (84, 84)) + self.dim_input = np.prod(self.img_size)*3 + self.dim_output = self.num_classes + metatrain_folder = config.get('metatrain_folder', './data/miniImagenet/train') + if FLAGS.test_set: + metaval_folder = config.get('metaval_folder', './data/miniImagenet/test') + else: + metaval_folder = config.get('metaval_folder', './data/miniImagenet/val') + + metatrain_folders = [os.path.join(metatrain_folder, label) \ + for label in os.listdir(metatrain_folder) \ + if os.path.isdir(os.path.join(metatrain_folder, label)) \ + ] + metaval_folders = [os.path.join(metaval_folder, label) \ + for label in os.listdir(metaval_folder) \ + if os.path.isdir(os.path.join(metaval_folder, label)) \ + ] + self.metatrain_character_folders = metatrain_folders + self.metaval_character_folders = metaval_folders + self.rotations = config.get('rotations', [0]) + else: + raise ValueError('Unrecognized data source') + + + def make_data_tensor(self, train=True): + if train: + folders = self.metatrain_character_folders + folders = folders[:FLAGS.num_train_classes] + # number of tasks, not number of meta-iterations. 
(divide by metabatch size to measure) + num_total_batches = 200000 if not FLAGS.debug else 32 + else: + folders = self.metaval_character_folders + num_total_batches = 600 if not FLAGS.debug else 32 + + # make list of files + print('Generating filenames') + all_filenames = [] + for _ in range(num_total_batches): + sampled_character_folders = random.sample(folders, self.num_classes) + random.shuffle(sampled_character_folders) + labels_and_images = get_images(sampled_character_folders, range(self.num_classes), nb_samples=self.num_samples_per_class, shuffle=False) + # make sure the above isn't randomized order + labels = [li[0] for li in labels_and_images] + filenames = [li[1] for li in labels_and_images] + all_filenames.extend(filenames) + + # make queue for tensorflow to read from + filename_queue = tf.train.string_input_producer(tf.convert_to_tensor(all_filenames), shuffle=False) + print('Generating image processing ops') + image_reader = tf.WholeFileReader() + _, image_file = image_reader.read(filename_queue) + if FLAGS.datasource == 'miniimagenet': + image = tf.image.decode_jpeg(image_file, channels=3) + image.set_shape((self.img_size[0], self.img_size[1], 3)) + image = tf.reshape(image, [self.dim_input]) + image = tf.cast(image, tf.float32) / 255.0 + else: + image = tf.image.decode_png(image_file) + image.set_shape((self.img_size[0],self.img_size[1],1)) + image = tf.reshape(image, [self.dim_input]) + image = tf.cast(image, tf.float32) / 255.0 + image = 1.0 - image # invert + num_preprocess_threads = 1 + # TODO: enable this to be set to >1 + min_queue_examples = 256 + examples_per_batch = self.num_classes * self.num_samples_per_class + batch_image_size = self.batch_size * examples_per_batch + print('Batching images') + images = tf.train.batch( + [image], + batch_size=batch_image_size, + num_threads=num_preprocess_threads, + capacity=min_queue_examples + 3 * batch_image_size, + ) + all_image_batches, all_label_batches = [], [] + print('Manipulating image data to be right shape') + for i in range(self.batch_size): + image_batch = images[i*examples_per_batch:(i+1)*examples_per_batch] + + if FLAGS.datasource == 'omniglot': + # omniglot augments the dataset by rotating digits to create new classes + # get rotation per class (e.g. 
0,1,2,0,0 if there are 5 classes)
+                rotations = tf.multinomial(tf.log([[1., 1., 1., 1.]]), self.num_classes)
+            label_batch = tf.convert_to_tensor(labels)
+            new_list, new_label_list = [], []
+            for k in range(self.num_samples_per_class):
+                class_idxs = tf.range(0, self.num_classes)
+                class_idxs = tf.random_shuffle(class_idxs)
+
+                true_idxs = class_idxs*self.num_samples_per_class + k
+                new_list.append(tf.gather(image_batch,true_idxs))
+                if FLAGS.datasource == 'omniglot': # and FLAGS.train:
+                    new_list[-1] = tf.stack([tf.reshape(tf.image.rot90(
+                        tf.reshape(new_list[-1][ind], [self.img_size[0],self.img_size[1],1]),
+                        k=tf.cast(rotations[0,class_idxs[ind]], tf.int32)), (self.dim_input,))
+                        for ind in range(self.num_classes)])
+                new_label_list.append(tf.gather(label_batch, true_idxs))
+            new_list = tf.concat(new_list, 0)  # has shape [self.num_classes*self.num_samples_per_class, self.dim_input]
+            new_label_list = tf.concat(new_label_list, 0)
+            all_image_batches.append(new_list)
+            all_label_batches.append(new_label_list)
+        all_image_batches = tf.stack(all_image_batches)
+        all_label_batches = tf.stack(all_label_batches)
+        all_label_batches = tf.one_hot(all_label_batches, self.num_classes)
+        return all_image_batches, all_label_batches
+
+    def generate_sinusoid_batch(self, train=True, input_idx=None):
+        # Note: the train arg is not used here (but it is used by the omniglot method).
+        # input_idx is used during qualitative testing -- the number of examples used for the grad update
+        amp = np.random.uniform(self.amp_range[0], self.amp_range[1], [self.batch_size])
+        phase = np.random.uniform(self.phase_range[0], self.phase_range[1], [self.batch_size])
+        freq = np.random.uniform(self.freq_range[0], self.freq_range[1], [self.batch_size])
+        outputs = np.zeros([self.batch_size, self.num_samples_per_class, self.dim_output])
+        init_inputs = np.zeros([self.batch_size, self.num_samples_per_class, self.dim_input])
+        for func in range(self.batch_size):
+            init_inputs[func] = np.random.uniform(self.input_range[0], self.input_range[1], [self.num_samples_per_class, 1])
+            if input_idx is not None:
+                init_inputs[:, input_idx:, 0] = np.linspace(
+                    self.input_range[0], self.input_range[1],
+                    num=self.num_samples_per_class-input_idx, retstep=False)
+            outputs[func] = amp[func] * np.sin(freq[func] * init_inputs[func]-phase[func])
+        return init_inputs, outputs, amp, phase
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/maml.py b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/maml.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9473b4784a0e1d1f10e86dd7d126b0a9d6a2cd6
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/maml.py
@@ -0,0 +1,529 @@
+#
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+""" Code for the MAML algorithm and network definitions. """
+import numpy as np
+
+try:
+    import special_grads
+except KeyError as e:
+    print ('WARNING: Cannot define MaxPoolGrad, likely already defined for this version of TensorFlow:', e)
+import tensorflow as tf
+
+from tensorflow.python.platform import flags
+from utils import mse, xent, conv_block, normalize
+
+FLAGS = flags.FLAGS
+
+## chip options
+flags.DEFINE_string('chip', 'npu', "run on which chip (npu, gpu, or cpu)")
+flags.DEFINE_string('platform', 'linux', 'runtime platform, linux or modelarts')
+flags.DEFINE_string("obs_dir", '', 'OBS result path; not needed on GPU or Apulis platforms')
+flags.DEFINE_boolean("profiling", False, "profiling for performance or not")
+
+## Dataset/method options
+
+flags.DEFINE_string('datasource', 'sinusoid', 'sinusoid or omniglot or miniimagenet')
+flags.DEFINE_integer('num_classes', 5, 'number of classes used in classification (e.g. 5-way classification).')
+flags.DEFINE_integer('num_train_classes', -1, 'number of classes to train on (-1 for all).')
+# oracle means task id is input (only suitable for sinusoid)
+flags.DEFINE_string('baseline', None, 'oracle, or None')
+
+## Training options
+flags.DEFINE_integer('pretrain_iterations', 0, 'number of pre-training iterations.')
+flags.DEFINE_integer('metatrain_iterations', 40000, 'number of metatraining iterations.') # 15k for omniglot, 50k for sinusoid
+flags.DEFINE_integer('meta_batch_size', 1, 'number of tasks sampled per meta-update')
+flags.DEFINE_float('meta_lr', 0.001, 'the base learning rate of the meta-optimizer')
+flags.DEFINE_integer('update_batch_size', 1, 'number of examples used for inner gradient update (K for K-shot learning).')
+flags.DEFINE_float('update_lr', .01, 'step size alpha for inner gradient update.') # 0.1 for omniglot
+flags.DEFINE_integer('num_updates', 1, 'number of inner gradient updates during training.')
+flags.DEFINE_integer('poly_order', 1, 'order of polynomial to generate')
+
+## Model options
+#flags.DEFINE_string('mod', '', 'modifications to original paper. None, split, both')
+flags.DEFINE_bool('use_T', True, 'whether or not to use transformation matrix T')
+flags.DEFINE_bool('use_M', True, 'whether or not to use mask M')
+flags.DEFINE_bool('share_M', True, 'only effective if use_M is true, whether or not to '
+                                   'share masks between weights '
+                                   'that contribute to the same activation')
+flags.DEFINE_float('temp', 1, 'temperature for gumbel-softmax')
+flags.DEFINE_float('logit_init', 0, 'initial logit')
+flags.DEFINE_string('norm', 'None', 'batch_norm, layer_norm, or None')
+flags.DEFINE_integer('dim_hidden', 40, 'dimension of fc layer')
+flags.DEFINE_integer('num_filters', 64, 'number of filters for conv nets -- use 32 for '
+                                        'miniimagenet, 64 for omniglot.')
+flags.DEFINE_bool('conv', True, 'whether or not to use a convolutional network, only applicable in some cases')
+flags.DEFINE_bool('max_pool', True, 'Whether or not to use max pooling rather than strided convolutions')
+flags.DEFINE_bool('stop_grad', False, 'if True, do not use second derivatives in meta-optimization (for speed)')
+
+## Logging, saving, and testing options
+flags.DEFINE_bool('log', True, 'if false, do not log summaries, for debugging code.')
+flags.DEFINE_string('logdir', 'logs/omniglot20way', 'directory for summaries and checkpoints.')
+flags.DEFINE_bool('debug', False, 'debug mode; uses less data for fast evaluation.')
+flags.DEFINE_bool('resume', True, 'resume training if there is a model available')
+flags.DEFINE_bool('train', False, 'True to train, False to test.')
+flags.DEFINE_integer('test_iter', -1, 'iteration to load model (-1 for latest model)')
+flags.DEFINE_bool('test_set', False, 'Set to true to test on the test set, False for the validation set.')
+flags.DEFINE_integer('train_update_batch_size', -1, 'number of examples used for gradient update during training (use if you want to test with a different number).')
+flags.DEFINE_float('train_update_lr', -1, 'value of the inner gradient step size during training (use if you want to test with a different value).') # 0.1 for omniglot
+
+
+class MAML:
+    def __init__(self, dim_input=1, dim_output=1, test_num_updates=5):
+        """ must call construct_model() after initializing MAML!
""" + self.dim_input = dim_input + self.dim_output = dim_output + self.update_lr = FLAGS.update_lr + self.meta_lr = tf.placeholder_with_default(FLAGS.meta_lr, ()) + self.classification = False + self.test_num_updates = test_num_updates + if FLAGS.datasource in ['sinusoid', 'polynomial']: + self.dim_hidden = [FLAGS.dim_hidden, FLAGS.dim_hidden] + if FLAGS.use_T: + self.forward = self.forward_fc_withT + else: + self.forward = self.forward_fc + self.construct_weights = self.construct_fc_weights + self.loss_func = mse + elif FLAGS.datasource == 'omniglot' or FLAGS.datasource == 'miniimagenet': + self.loss_func = xent + self.classification = True + if FLAGS.conv: + self.dim_hidden = FLAGS.num_filters + if FLAGS.use_T: + self.forward = self.forward_conv_withT + else: + self.forward = self.forward_conv + self.construct_weights = self.construct_conv_weights + else: + self.dim_hidden = [256, 128, 64, 64] + self.forward = self.forward_fc + self.construct_weights = self.construct_fc_weights + if FLAGS.datasource == 'miniimagenet': + self.channels = 3 + else: + self.channels = 1 + self.img_size = int(np.sqrt(self.dim_input / self.channels)) + else: + raise ValueError('Unrecognized data source.') + + def construct_model(self, input_tensors=None, prefix='metatrain_'): + # a: training data for inner gradient, b: test data for meta gradient + self.inputa = input_tensors['inputa'] + self.inputb = input_tensors['inputb'] + self.labela = input_tensors['labela'] + self.labelb = input_tensors['labelb'] + + with tf.variable_scope('model', reuse=None) as training_scope: + self.dropout_probs = {} + if 'weights' in dir(self): + training_scope.reuse_variables() + weights = self.weights + else: + # Define the weights + self.weights = weights = self.construct_weights() + + # outputbs[i] and lossesb[i] is the output and loss after i+1 gradient updates + lossesa, outputas, lossesb, outputbs = [], [], [], [] + accuraciesa, accuraciesb = [], [] + num_updates = max(self.test_num_updates, FLAGS.num_updates) + outputbs = [[]] * num_updates + lossesb = [[]] * num_updates + accuraciesb = [[]] * num_updates + + def task_metalearn(inp, reuse=True): + """ Perform gradient descent for one task in the meta-batch. 
""" + inputa, inputb, labela, labelb = inp + task_outputbs, task_lossesb = [], [] + mse_lossesb = [] + + if self.classification: + task_accuraciesb = [] + + train_keys = list(weights.keys()) + if FLAGS.use_M and FLAGS.share_M: + def make_shared_mask(key): + temperature = FLAGS.temp + logits = weights[key+'_prob'] + logits = tf.stack([logits, tf.zeros(logits.shape)], 1) + U = tf.random_uniform(logits.shape, minval=0, maxval=1) + gumbel = -tf.log(-tf.log(U + 1e-20) + 1e-20) + y = logits + gumbel + gumbel_softmax = tf.nn.softmax(y / temperature) + gumbel_hard = tf.cast(tf.equal(gumbel_softmax, tf.reduce_max(gumbel_softmax, 1, keep_dims=True)), tf.float32) + mask = tf.stop_gradient(gumbel_hard - gumbel_softmax) + gumbel_softmax + return mask[:, 0] + + def get_mask(masks, name): + mask = masks[[k for k in masks.keys() if name[-1] in k][0]] + if 'conv' in name: # Conv + mask = tf.reshape(mask, [1, 1, 1, -1]) + tile_size = weights[name].shape.as_list()[:3] + [1] + mask = tf.tile(mask, tile_size) + elif 'w' in name: # FC + mask = tf.reshape(mask, [1, -1]) + tile_size = weights[name].shape.as_list()[:1] + [1] + mask = tf.tile(mask, tile_size) + elif 'b' in name: # Bias + mask = tf.reshape(mask, [-1]) + return mask + if self.classification: + masks = {k: make_shared_mask(k) for k in ['conv1', 'conv2', 'conv3', 'conv4', 'w5']} + else: + masks = {k: make_shared_mask(k) for k in ['w1', 'w2', 'w3']} + + if FLAGS.use_M and not FLAGS.share_M: + def get_mask_noshare(key): + temperature = FLAGS.temp + logits = weights[key + '_prob'] + logits = tf.stack([logits, tf.zeros(logits.shape)], 1) + U = tf.random_uniform(logits.shape, minval=0, maxval=1) + gumbel = -tf.log(-tf.log(U + 1e-20) + 1e-20) + y = logits + gumbel + gumbel_softmax = tf.nn.softmax(y / temperature) + gumbel_hard = tf.cast(tf.equal(gumbel_softmax, tf.reduce_max(gumbel_softmax, 1, keep_dims=True)), tf.float32) + out = tf.stop_gradient(gumbel_hard - gumbel_softmax) + gumbel_softmax + return tf.reshape(out[:, 0], weights[key].shape) + + train_keys = [k for k in weights.keys() if 'prob' not in k and 'f' not in k] + train_weights = [weights[k] for k in train_keys] + task_outputa = self.forward(inputa, weights, reuse=reuse) # only reuse on the first iter + self.task_outputa = task_outputa + task_lossa = self.loss_func(task_outputa, labela) + grads = tf.gradients(task_lossa, train_weights) + if FLAGS.stop_grad: + grads = [tf.stop_gradient(grad) for grad in grads] + gradients = dict(zip(train_keys, grads)) + + fast_weights = dict(zip(weights.keys(), [weights[key] for key in weights.keys()])) + + def compute_weights(key): + prev_weights = fast_weights[key] + if key not in train_keys: + return prev_weights + if FLAGS.use_M and FLAGS.share_M: + mask = get_mask(masks, key) + new_weights = prev_weights - self.update_lr * mask * gradients[key] + elif FLAGS.use_M and not FLAGS.share_M: + mask = get_mask_noshare(key) + new_weights = prev_weights - self.update_lr * mask * gradients[key] + else: + new_weights = prev_weights - self.update_lr * gradients[key] + return new_weights + + fast_weights = dict(zip( + weights.keys(), [compute_weights(key) for key in weights.keys()])) + + output = self.forward(inputb, fast_weights, reuse=True) + task_outputbs.append(output) + loss = self.loss_func(output, labelb) + task_lossesb.append(loss) + + for j in range(num_updates - 1): + output = self.forward(inputa, fast_weights, reuse=True) + loss = self.loss_func(output, labela) + train_weights = [fast_weights[k] for k in train_keys] + grads = tf.gradients(loss, train_weights) + 
if FLAGS.stop_grad:
+                    grads = [tf.stop_gradient(grad) for grad in grads]
+                gradients = dict(zip(train_keys, grads))
+
+                fast_weights = dict(zip(
+                    weights.keys(), [compute_weights(key) for key in weights.keys()]))
+
+                output = self.forward(inputb, fast_weights, reuse=True)
+                task_outputbs.append(output)
+                loss = self.loss_func(output, labelb)
+                task_lossesb.append(loss)
+
+            task_output = [task_outputa, task_outputbs, task_lossa, task_lossesb]
+
+            if self.classification:
+                task_accuracya = tf.contrib.metrics.accuracy(tf.argmax(tf.nn.softmax(task_outputa), 1),
+                                                             tf.argmax(labela, 1))
+                for j in range(num_updates):
+                    task_accuraciesb.append(
+                        tf.contrib.metrics.accuracy(tf.argmax(tf.nn.softmax(task_outputbs[j]), 1),
+                                                    tf.argmax(labelb, 1)))
+                task_output.extend([task_accuracya, task_accuraciesb])
+
+            return task_output
+
+        out_dtype = [tf.float32, [tf.float32] * num_updates, tf.float32, [tf.float32] * num_updates]
+        if self.classification:
+            out_dtype.extend([tf.float32, [tf.float32] * num_updates])
+
+        if FLAGS.chip == 'npu':
+            if self.classification:
+                outputas, outputbs, lossesa, lossesb, accuraciesa, accuraciesb = [], [], [], [], [], []
+                for i in range(FLAGS.meta_batch_size):
+                    each_input = self.inputa[i], self.inputb[i], self.labela[i], self.labelb[i]
+                    each_outputas, each_outputbs, each_lossesa, each_lossesb, each_accuraciesa, each_accuraciesb = task_metalearn(
+                        each_input)
+                    outputas.append(each_outputas)
+                    outputbs.append(each_outputbs)
+                    lossesa.append(each_lossesa)
+                    lossesb.append(each_lossesb)
+                    accuraciesa.append(each_accuraciesa)
+                    accuraciesb.append(each_accuraciesb)
+                outputas = tf.stack(outputas)
+                outputbs = tf.unstack(tf.stack(outputbs), axis=1)
+                lossesa = tf.stack(lossesa)
+                lossesb = tf.unstack(tf.stack(lossesb), axis=1)
+                accuraciesa = tf.stack(accuraciesa)
+                accuraciesb = tf.unstack(tf.stack(accuraciesb), axis=1)
+            else:
+                outputas, outputbs, lossesa, lossesb = [], [], [], []
+                for i in range(FLAGS.meta_batch_size):
+                    each_input = self.inputa[i], self.inputb[i], self.labela[i], self.labelb[i]
+                    each_outputas, each_outputbs, each_lossesa, each_lossesb = task_metalearn(
+                        each_input)
+                    outputas.append(each_outputas)
+                    outputbs.append(each_outputbs)
+                    lossesa.append(each_lossesa)
+                    lossesb.append(each_lossesb)
+                outputas = tf.stack(outputas)
+                # regroup the per-update outputs across the meta-batch,
+                # mirroring the classification branch above
+                outputbs = tf.unstack(tf.stack(outputbs), axis=1)
+                lossesa = tf.stack(lossesa)
+                lossesb = tf.unstack(tf.stack(lossesb), axis=1)
+
+        logit_keys = sorted([k for k in weights.keys() if 'prob' in k])
+        logit_weights = [-weights[k] for k in logit_keys]
+        probs = [tf.exp(w) / (1 + tf.exp(w)) for w in logit_weights]
+        self.total_probs = [tf.reduce_mean(p) for p in probs]
+
+        ## Performance & Optimization
+        self.metaval_total_loss1 = total_loss1 = tf.reduce_sum(lossesa) / tf.to_float(FLAGS.meta_batch_size)
+        self.metaval_total_losses2 = total_losses2 = [tf.reduce_sum(lossesb[j]) / tf.to_float(FLAGS.meta_batch_size)
+                                                      for j in range(num_updates)]
+        if self.classification:
+            self.metaval_total_accuracy1 = total_accuracy1 = tf.reduce_sum(accuraciesa) / tf.to_float(
+                FLAGS.meta_batch_size)
+            self.metaval_total_accuracies2 = total_accuracies2 = [
+                tf.reduce_sum(accuraciesb[j]) / tf.to_float(FLAGS.meta_batch_size) for j in range(num_updates)]
+
+        ## Summaries
+        tf.summary.scalar(prefix + 'change probs', tf.reduce_mean(self.total_probs))
+        tf.summary.scalar(prefix + 'Pre-update loss', total_loss1)
+        if self.classification:
+            tf.summary.scalar(prefix + 'Pre-update accuracy', total_accuracy1)
+
+        for j in
range(num_updates): + tf.summary.scalar(prefix + 'Post-update loss, step ' + str(j + 1), total_losses2[j]) + if self.classification: + tf.summary.scalar(prefix + 'Post-update accuracy, step ' + str(j + 1), total_accuracies2[j]) + + for k, v in weights.items(): + tf.summary.histogram(k, v) + if 'prob' in k: + tf.summary.histogram('prob_'+k, tf.nn.softmax(tf.stack([v, tf.zeros(v.shape)], 1))[:, 0]) + + ### Network construction functions (fc networks and conv networks) + def construct_fc_weights(self): + weights = {} + weights['w1'] = tf.Variable(tf.truncated_normal([self.dim_input, self.dim_hidden[0]], stddev=0.01)) + weights['b1'] = tf.Variable(tf.zeros([self.dim_hidden[0]])) + for i in range(1, len(self.dim_hidden)): + weights['w' + str(i + 1)] = tf.Variable( + tf.truncated_normal([self.dim_hidden[i - 1], self.dim_hidden[i]], stddev=0.01)) + weights['b' + str(i + 1)] = tf.Variable(tf.zeros([self.dim_hidden[i]])) + weights['w' + str(len(self.dim_hidden) + 1)] = tf.Variable( + tf.truncated_normal([self.dim_hidden[-1], self.dim_output], stddev=0.01)) + weights['b' + str(len(self.dim_hidden) + 1)] = tf.Variable(tf.zeros([self.dim_output])) + + if FLAGS.use_M and not FLAGS.share_M: + weights['w1_prob'] = tf.Variable(tf.truncated_normal([self.dim_input * self.dim_hidden[0]], stddev=.1)) + weights['b1_prob'] = tf.Variable(tf.truncated_normal([self.dim_hidden[0]], stddev=.1)) + for i in range(1, len(self.dim_hidden)): + weights['w' + str(i + 1) + '_prob'] = tf.Variable( + tf.truncated_normal([self.dim_hidden[i - 1] * self.dim_hidden[i]], stddev=.1)) + weights['b' + str(i + 1) + '_prob'] = tf.Variable( + tf.truncated_normal([self.dim_hidden[i]], stddev=.1)) + weights['w' + str(len(self.dim_hidden) + 1) + '_prob'] = tf.Variable( + tf.truncated_normal([self.dim_hidden[-1] * self.dim_output], stddev=0.1)) + weights['b' + str(len(self.dim_hidden) + 1) + '_prob'] = tf.Variable( + tf.truncated_normal([self.dim_output], stddev=.1)) + elif FLAGS.use_M and FLAGS.share_M: + weights['w1_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden[0]])) + for i in range(1, len(self.dim_hidden)): + weights['w' + str(i + 1) + '_prob'] = tf.Variable( + FLAGS.logit_init * tf.ones([self.dim_hidden[i]])) + weights['w' + str(len(self.dim_hidden) + 1) + '_prob'] = tf.Variable( + FLAGS.logit_init * tf.ones([self.dim_output])) + + if FLAGS.use_T: + weights['w1_f'] = tf.Variable(tf.eye(self.dim_hidden[0])) + weights['w2_f'] = tf.Variable(tf.eye(self.dim_hidden[1])) + weights['w3_f'] = tf.Variable(tf.eye(self.dim_output)) + return weights + + def forward_fc(self, inp, weights, reuse=False): + hidden = normalize(tf.matmul(inp, weights['w1']) + weights['b1'], + activation=tf.nn.relu, reuse=reuse, scope='0') + for i in range(1, len(self.dim_hidden)): + hidden = normalize(tf.matmul(hidden, weights['w' + str(i + 1)]) + weights['b' + str(i + 1)], + activation=tf.nn.relu, reuse=reuse, scope=str(i + 1)) + return tf.matmul(hidden, weights['w' + str(len(self.dim_hidden) + 1)]) + \ + weights['b' + str(len(self.dim_hidden) + 1)] + + def forward_fc_withT(self, inp, weights, reuse=False): + hidden = tf.matmul(tf.matmul(inp, weights['w1']) + weights['b1'], weights['w1_f']) + hidden = normalize(hidden, activation=tf.nn.relu, reuse=reuse, scope='1') + hidden = tf.matmul(tf.matmul(hidden, weights['w2']) + weights['b2'], weights['w2_f']) + hidden = normalize(hidden, activation=tf.nn.relu, reuse=reuse, scope='2') + hidden = tf.matmul(tf.matmul(hidden, weights['w3']) + weights['b3'], weights['w3_f']) + return hidden + + def 
construct_conv_weights(self): + weights = {} + dtype = tf.float32 + conv_initializer = tf.contrib.layers.xavier_initializer_conv2d(dtype=dtype) + fc_initializer = tf.contrib.layers.xavier_initializer(dtype=dtype) + k = 3 + channels = self.channels + dim_hidden = self.dim_hidden + + def get_conv(name, shape): + return tf.get_variable(name, shape, initializer=conv_initializer, dtype=dtype) + + def get_identity(dim, conv=True): + return tf.Variable(tf.eye(dim, batch_shape=[1,1])) if conv \ + else tf.Variable(tf.eye(dim)) + + weights['conv1'] = get_conv('conv1', [k, k, channels, self.dim_hidden]) + weights['b1'] = tf.Variable(tf.zeros([self.dim_hidden])) + weights['conv2'] = get_conv('conv2', [k, k, dim_hidden, self.dim_hidden]) + weights['b2'] = tf.Variable(tf.zeros([self.dim_hidden])) + weights['conv3'] = get_conv('conv3', [k, k, dim_hidden, self.dim_hidden]) + weights['b3'] = tf.Variable(tf.zeros([self.dim_hidden])) + weights['conv4'] = get_conv('conv4', [k, k, dim_hidden, self.dim_hidden]) + weights['b4'] = tf.Variable(tf.zeros([self.dim_hidden])) + if FLAGS.datasource == 'miniimagenet': + # assumes max pooling + assert FLAGS.max_pool + weights['w5'] = tf.get_variable('w5', [self.dim_hidden * 5 * 5, self.dim_output], + initializer=fc_initializer) + weights['b5'] = tf.Variable(tf.zeros([self.dim_output]), name='b5') + + if FLAGS.use_M and not FLAGS.share_M: + weights['conv1_prob'] = tf.Variable(tf.truncated_normal([k * k * channels * self.dim_hidden], stddev=.01)) + weights['b1_prob'] = tf.Variable(tf.truncated_normal([self.dim_hidden], stddev=.01)) + weights['conv2_prob'] = tf.Variable(tf.truncated_normal([k * k * dim_hidden * self.dim_hidden], stddev=.01)) + weights['b2_prob'] = tf.Variable(tf.truncated_normal([self.dim_hidden], stddev=.01)) + weights['conv3_prob'] = tf.Variable(tf.truncated_normal([k * k * dim_hidden * self.dim_hidden], stddev=.01)) + weights['b3_prob'] = tf.Variable(tf.truncated_normal([self.dim_hidden], stddev=.01)) + weights['conv4_prob'] = tf.Variable(tf.truncated_normal([k * k * dim_hidden * self.dim_hidden], stddev=.01)) + weights['b4_prob'] = tf.Variable(tf.truncated_normal([self.dim_hidden], stddev=.01)) + weights['w5_prob'] = tf.Variable(tf.truncated_normal([dim_hidden *5*5* self.dim_output], stddev=.01)) + weights['b5_prob'] = tf.Variable(tf.truncated_normal([self.dim_output], stddev=.01)) + if FLAGS.use_M and FLAGS.share_M: + weights['conv1_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['conv2_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['conv3_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['conv4_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['w5_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_output])) + + if FLAGS.use_T: + weights['conv1_f'] = get_identity(self.dim_hidden, conv=True) + weights['conv2_f'] = get_identity(self.dim_hidden, conv=True) + weights['conv3_f'] = get_identity(self.dim_hidden, conv=True) + weights['conv4_f'] = get_identity(self.dim_hidden, conv=True) + weights['w5_f'] = get_identity(self.dim_output, conv=False) + else: + weights['w5'] = tf.Variable(tf.random_normal([dim_hidden, self.dim_output]), name='w5') + weights['b5'] = tf.Variable(tf.zeros([self.dim_output]), name='b5') + if FLAGS.use_M and not FLAGS.share_M: + weights['conv1_prob'] = tf.Variable(tf.truncated_normal([k * k * channels * self.dim_hidden], stddev=.01)) + weights['conv2_prob'] = tf.Variable(tf.truncated_normal([k * k * dim_hidden * 
self.dim_hidden], stddev=.01)) + weights['conv3_prob'] = tf.Variable(tf.truncated_normal([k * k * dim_hidden * self.dim_hidden], stddev=.01)) + weights['conv4_prob'] = tf.Variable(tf.truncated_normal([k * k * dim_hidden * self.dim_hidden], stddev=.01)) + weights['w5_prob'] = tf.Variable(tf.truncated_normal([dim_hidden * self.dim_output], stddev=.01)) + if FLAGS.use_M and FLAGS.share_M: + weights['conv1_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['conv2_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['conv3_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['conv4_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['w5_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_output])) + + if FLAGS.use_T: + weights['conv1_f'] = get_identity(self.dim_hidden, conv=True) + weights['conv2_f'] = get_identity(self.dim_hidden, conv=True) + weights['conv3_f'] = get_identity(self.dim_hidden, conv=True) + weights['conv4_f'] = get_identity(self.dim_hidden, conv=True) + weights['w5_f'] = get_identity(self.dim_output, conv=False) + return weights + + def forward_conv(self, inp, weights, reuse=False, scope=''): + # reuse is for the normalization parameters. + channels = self.channels + inp = tf.reshape(inp, [-1, self.img_size, self.img_size, channels]) + hidden1 = conv_block(inp, weights['conv1'], weights['b1'], reuse, scope + '0') + hidden2 = conv_block(hidden1, weights['conv2'], weights['b2'], reuse, scope + '1') + hidden3 = conv_block(hidden2, weights['conv3'], weights['b3'], reuse, scope + '2') + hidden4 = conv_block(hidden3, weights['conv4'], weights['b4'], reuse, scope + '3') + + if FLAGS.datasource == 'miniimagenet': + # last hidden layer is 6x6x64-ish, reshape to a vector + hidden4 = tf.reshape(hidden4, [-1, np.prod([int(dim) for dim in hidden4.get_shape()[1:]])]) + else: + hidden4 = tf.reduce_mean(hidden4, [1, 2]) + return tf.matmul(hidden4, weights['w5']) + weights['b5'] + + def forward_conv_withT(self, inp, weights, reuse=False, scope=''): + # reuse is for the normalization parameters. 
+ def conv_tout(inp, cweight, bweight, rweight, reuse, scope, activation=tf.nn.relu, max_pool_pad='VALID', + residual=False): + stride, no_stride = [1, 2, 2, 1], [1, 1, 1, 1] + if FLAGS.max_pool: + conv_output = tf.nn.conv2d(inp, cweight, no_stride, 'SAME') + bweight + else: + conv_output = tf.nn.conv2d(inp, cweight, stride, 'SAME') + bweight + conv_output = tf.nn.conv2d(conv_output, rweight, no_stride, 'SAME') + normed = normalize(conv_output, activation, reuse, scope) + if FLAGS.max_pool: + normed = tf.nn.max_pool(normed, stride, stride, max_pool_pad) + return normed + + channels = self.channels + inp = tf.reshape(inp, [-1, self.img_size, self.img_size, channels]) + hidden1 = conv_tout(inp, weights['conv1'], weights['b1'], weights['conv1_f'], reuse, scope + '0') + hidden2 = conv_tout(hidden1, weights['conv2'], weights['b2'], weights['conv2_f'], reuse, scope + '1') + hidden3 = conv_tout(hidden2, weights['conv3'], weights['b3'], weights['conv3_f'], reuse, scope + '2') + hidden4 = conv_tout(hidden3, weights['conv4'], weights['b4'], weights['conv4_f'], reuse, scope + '3') + + if FLAGS.datasource == 'miniimagenet': + # last hidden layer is 6x6x64-ish, reshape to a vector + hidden4 = tf.reshape(hidden4, [-1, np.prod([int(dim) for dim in hidden4.get_shape()[1:]])]) + else: + hidden4 = tf.reduce_mean(hidden4, [1, 2]) + hidden5 = tf.matmul(hidden4, weights['w5']) + weights['b5'] + return tf.matmul(hidden5, weights['w5_f']) \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/modelzoo_level.txt b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/modelzoo_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..39175166a4b66dd0ead8d95dcb7ea49c56a868e3 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/modelzoo_level.txt @@ -0,0 +1,2 @@ +ModelConvert:OK +QuantStatus:OK \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/requirements.txt b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/special_grads.py b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/special_grads.py new file mode 100644 index 0000000000000000000000000000000000000000..421e63967d5e8a8c3f56e373d7a48a05ed4ff555 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/special_grads.py @@ -0,0 +1,42 @@ +# +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +""" Code for second derivatives not implemented in TensorFlow library. """ +from tensorflow.python.framework import ops +from tensorflow.python.ops import array_ops +from tensorflow.python.ops import gen_nn_ops + +@ops.RegisterGradient("MaxPoolGrad") +def _MaxPoolGradGrad(op, grad): + gradient = gen_nn_ops._max_pool_grad(op.inputs[0], op.outputs[0], + grad, op.get_attr("ksize"), op.get_attr("strides"), + padding=op.get_attr("padding"), data_format=op.get_attr("data_format")) + gradgrad1 = array_ops.zeros(shape = array_ops.shape(op.inputs[1]), dtype=gradient.dtype) + gradgrad2 = array_ops.zeros(shape = array_ops.shape(op.inputs[2]), dtype=gradient.dtype) + return (gradient, gradgrad1, gradgrad2) \ No newline at end of file diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/load_data_npu.py b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/utils.py similarity index 30% rename from TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/load_data_npu.py rename to ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/utils.py index 08686cb8b9ee64b1106afaa5099cb134849070ff..9df0069afc567748404549912ebba9284dad2ab7 100644 --- a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/load_data_npu.py +++ b/ACL_TensorFlow/contrib/cv/MT-NET_ID1283_for_ACL/utils.py @@ -1,135 +1,84 @@ - -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import numpy as np -import os -import h5py -import glob -# import scipy.misc -# from scipy.misc import imread, imresize -from PIL import Image -# import moxing as mox -import matplotlib.image as mp - -imread = Image.open - - -def load_images(maps_dir): - train_all = glob.glob(maps_dir + "/train/*.jpg") - train_img_A = [] - train_img_B = [] - - for file in train_all: - full_image = np.array(imread(file).resize((512, 256), Image.ANTIALIAS)) - img_B = full_image[:, full_image.shape[1] // 2:, :] - img_A = full_image[:, :full_image.shape[1] // 2, :] - train_img_A.append(img_A) - train_img_B.append(img_B) - - train_A = np.asarray(train_img_A) - train_B = np.asarray(train_img_B) - print (train_A.shape) - print (train_B.shape) - - test_all = glob.glob(maps_dir + "/val/*.jpg") - test_img_A = [] - test_img_B = [] - - for file in test_all: - full_image = np.array(imread(file).resize((512, 256), Image.ANTIALIAS)) - img_B = full_image[:, full_image.shape[1] // 2:, :] - img_A = full_image[:, :full_image.shape[1] // 2, :] - test_img_A.append(img_A) - test_img_B.append(img_B) - - test_A = np.asarray(test_img_A) - test_B = np.asarray(test_img_B) - print (test_A.shape) - print (test_B.shape) - - return train_A, train_B, test_A, test_B - - -# train_all = glob.glob(maps_dir + "/train/*.jpg") -batch_size = 1 - - -def load_batch_image(idx, maps_dir): - train_all = glob.glob(maps_dir + "/train/*.jpg") - full_image = np.array(imread(train_all[idx]).resize((512,256), Image.ANTIALIAS)) - img_B = full_image[:, full_image.shape[1] // 2:, :] - img_A = full_image[:, :full_image.shape[1] // 2, :] - - return img_A, img_B - - -# test_all = glob.glob("maps/val/*.jpg") - - -def load_test_image(idx, maps_dir): - test_all = glob.glob(maps_dir + "/val/*.jpg") - full_image = np.array(imread(test_all[idx]).resize((512, 256), Image.ANTIALIAS)) - img_A = full_image[:, :full_image.shape[1] // 2, :] / 255. - - return img_A - - -def save_images(image, size, img_path): - return imsave(image, size, img_path) - - -def imsave(image, img_size, img_path): - # image = Image.fromarray(np.squeeze(image * 255.).astype(np.uint8)) - return Image.fromarray(np.squeeze(image * 255.).astype(np.uint8)).save(img_path) - # return mp.imsave(np.squeeze(img_path), image) - - -def inverse_transform(image): - return (image + 1.) / 2. - - -def merge(images, size): - h, w = images.shape[1], images.shape[2] - if (images.shape[3] in (3, 4)): - c = images.shape[3] - img = np.zeros((h * size[0], w * size[1], c)) - for idx, image in enumerate(images): - i = idx % size[1] - j = idx // size[1] - img[j * h:j * h + h, i * w:i * w + w, :] = image - return img - elif images.shape[3] == 1: - img = np.zeros((h * size[0], w * size[1])) - for idx, image in enumerate(images): - i = idx % size[1] - j = idx // size[1] - img[j * h:j * h + h, i * w:i * w + w] = image[:, :, 0] - return img - else: - raise ValueError('In merge function, the first argument must have dimensions: HxW or HxWx3 or HxWx4') +# +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+""" Utility functions. """
+import numpy as np
+import os
+import random
+import tensorflow as tf
+
+from tensorflow.contrib.layers.python import layers as tf_layers
+from tensorflow.python.platform import flags
+
+FLAGS = flags.FLAGS
+
+## Image helper
+def get_images(paths, labels, nb_samples=None, shuffle=True):
+    if nb_samples is not None:
+        sampler = lambda x: random.sample(x, nb_samples)
+    else:
+        sampler = lambda x: x
+    images = [(i, os.path.join(path, image)) \
+              for i, path in zip(labels, paths) \
+              for image in sampler(os.listdir(path))]
+    if shuffle:
+        random.shuffle(images)
+    return images
+
+## Network helpers
+def conv_block(inp, cweight, bweight, reuse, scope, activation=tf.nn.relu, max_pool_pad='VALID', residual=False):
+    """ Perform conv, batch norm, nonlinearity, and max pool """
+    stride, no_stride = [1,2,2,1], [1,1,1,1]
+
+    if FLAGS.max_pool:
+        conv_output = tf.nn.conv2d(inp, cweight, no_stride, 'SAME') + bweight
+    else:
+        conv_output = tf.nn.conv2d(inp, cweight, stride, 'SAME') + bweight
+    normed = normalize(conv_output, activation, reuse, scope)
+    if FLAGS.max_pool:
+        normed = tf.nn.max_pool(normed, stride, stride, max_pool_pad)
+    return normed
+
+def normalize(inp, activation, reuse, scope):
+    if FLAGS.norm == 'batch_norm':
+        return tf_layers.batch_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
+    elif FLAGS.norm == 'layer_norm':
+        return tf_layers.layer_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
+    elif FLAGS.norm == 'None':
+        return activation(inp)
+    else:
+        raise ValueError('Unrecognized norm: %s' % FLAGS.norm)
+
+## Loss functions
+def mse(pred, label):
+    pred = tf.reshape(pred, [-1])
+    label = tf.reshape(label, [-1])
+    return tf.reduce_mean(tf.square(pred-label))
+
+def xent(pred, label):
+    # Note - with tf version <=0.12, this loss has incorrect 2nd derivatives
+    return tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=label) / FLAGS.update_batch_size
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/LICENSE b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/README.md b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..a38b01aa6c616ac68211060b794f91d71b088881
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/README.md
@@ -0,0 +1,109 @@
+## Pix2pose_ID1164_for_TensorFlow_ACL
+
+### Basic Information
+
+**Application Domain:** 6D Pose Estimation
+
+**Modified:** 2022.4.17
+
+**Description:** Offline inference code for the Pix2Pose 6D pose estimation network, based on the TensorFlow framework
+
+### Overview
+
+Pix2Pose is a classic 6D pose estimation method. The model predicts the 3D coordinates of each object pixel without requiring a textured 3D model, addressing occlusion, symmetry, and the lack of texture; it estimates an object's 6D pose from RGB images alone and can also build a 3D model with accurate texture. Pix2Pose uses an autoencoder architecture to estimate the 3D coordinates and the expected error of each pixel, then uses these per-pixel predictions over multiple stages to form 2D-3D correspondences, from which the pose is computed directly with the PnP algorithm under RANSAC iteration. Generative adversarial training is used to precisely recover occluded parts, making the method robust to occlusion. Pix2Pose also proposes a new loss function, the transformer loss, which handles symmetric objects by guiding the predicted pose toward the closest symmetric pose.
+
+- Reference paper:
+
+  [Park K., Patten T., et al. "Pix2Pose: Pixel-Wise Coordinate Regression of Objects for 6D Pose Estimation." *2019 IEEE/CVF International Conference on Computer Vision (ICCV)*. IEEE, 2020.]
+(https://arxiv.org/pdf/1908.07433.pdf)
+
+- Reference implementation:
+
+  [Pix2Pose](https://github.com/kirumang/Pix2Pose)
+
+
+
+### Converting HDF5 to pb
+
+This model is implemented with Keras; the weights are saved in HDF5 format during training. Use hdf52pb.py to convert the model and weights to a pb file. We provide converted [HDF5 model files](obs://pix2pose/tless_inference/pix2pose_weights/).
+
+The main code of hdf52pb.py is as follows:
+```
+def h5_to_pb(h5_weight_path, output_dir, out_prefix="output_", log_tensorboard=True):
+    if not os.path.exists(output_dir):
+        os.mkdir(output_dir)
+
+    h5_model = build_model()
+    h5_model.load_weights(h5_weight_path)
+
+    out_nodes = []
+    for i in range(len(h5_model.outputs)):
+        out_nodes.append(out_prefix + str(i + 1))
+        tf.identity(h5_model.output[i], out_prefix + str(i + 1))
+
+    model_name = os.path.splitext(os.path.split(h5_weight_path)[-1])[0] + '.pb'
+
+    sess = K.get_session()
+    init_graph = sess.graph.as_graph_def()
+    main_graph = graph_util.convert_variables_to_constants(sess, init_graph, out_nodes)
+    graph_io.write_graph(main_graph, output_dir, name=model_name, as_text=False)
+    if log_tensorboard:
+        from tensorflow.python.tools import import_pb_to_tensorboard
+        import_pb_to_tensorboard.import_to_tensorboard(os.path.join(output_dir, model_name), output_dir)
+
+def build_model():
+    h5_model = load_model(inference_model_hdf5)
+    return h5_model
+
+if __name__ == '__main__':
+    output_dir = os.path.join(output_path)
+    h5_weight_path = os.path.join(inference_weight_hdf5)
+    h5_to_pb(h5_weight_path, output_dir)
+    print('finished')
+```
+We provide converted [pb model files](obs://pix2pose/tless_inference/pb/).
+
+### Converting pb to om
+The following command can be used as a reference when converting the model with the ATC model conversion tool:
+```
+atc --model=/home/HwHiAiUser/AscendProjects/path_to_file/file.pb --framework=3 --output=/home/HwHiAiUser/AscendProjects/path_to_file/filename_OM --soc_version=Ascend310 --input_shape="input_1:1,128,128,3" --log=info --out_nodes="output_1:0;output_2:0"
+```
+
+We provide converted [om model files](obs://pix2pose/tless_inference/OM/).
+
+![Output](picture/pb2om.png)
+
+### The msame tool
+We use the msame tool for offline inference. See the [msame introduction](https://gitee.com/ascend/tools/tree/master/msame) for how to obtain the msame inference tool and use it.
+
+After obtaining the msame executable, run the inference test.
+
+### Converting the dataset to bin
+Add the following code to the original training script 3_train_pix2pose.py, after the generator_train.predict() call, to dump the preprocessed images directly and store them in bin format:
+```
+    if not (os.path.exists(weight_dir + "/pb_input/")):
+        os.makedirs(weight_dir + "/pb_input/")
+
+    for i in range(n):
+        img_org = X_src[i]
+        inference = weight_dir + "/pb_input/" + str(i) + ".bin"
+        img_org.tofile(inference)
+```
+
+For testing, we provide a test dataset; these are our converted [bin files](obs://pix2pose/tless_inference/bin_input/).
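+
+Before running inference, you can sanity-check an exported input file. The sketch below is illustrative only: it assumes the images were dumped as float32 and match the 1,128,128,3 input shape passed to ATC above (adjust the dtype, file name, and shape if your preprocessing differs):
+```
+import numpy as np
+
+# Hypothetical round-trip check for one exported input bin file.
+arr = np.fromfile("pb_input/0.bin", dtype=np.float32)
+assert arr.size == 128 * 128 * 3, "unexpected element count; check dtype/shape"
+img = arr.reshape(128, 128, 3)
+print(img.shape, img.min(), img.max())
+```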
+
+### Inference test
+Using the msame inference tool, launch the inference test with a command like the following:
+```
+./msame --model "/home/HwHiAiUser/AscendProjects/path_to_file/filename_OM.om" --input "/home/HwHiAiUser/AscendProjects/path_to_file/bin_name.bin" --output "/home/HwHiAiUser/AscendProjects/Pix2pose/out" --outfmt TXT --loop 1
+```
+![Output](picture/om_output.png)
+
+
+
+Finally, after visualization you can obtain results similar to the following:
+![Visualization](picture/output_viz.png)
+
+
+
+
diff --git a/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/author.txt b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/author.txt
new file mode 100644
index 0000000000000000000000000000000000000000..833dc4c8131d782d9c3767d8ee46b75b54896cd8
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/author.txt
@@ -0,0 +1,4 @@
+Shiyuan Ma, Lei Xie
+Nanjing University
+Nanjing, Jiangsu, China
+mashiyuan@smail.nju.edu.cn, lxie@nju.edu.cn
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/hdf52pb.py b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/hdf52pb.py
new file mode 100644
index 0000000000000000000000000000000000000000..f8ed2e0d55625fe1d70b7e99a2f44cc10ac215e2
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/hdf52pb.py
@@ -0,0 +1,75 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ===========================
+# Author : Ma Shiyuan
+# Time : 2022/4
+# Language : Python
+# ===========================
+import os,sys
+os.system("pip install keras==2.2.4")
+print("-----------------------------------------")
+
+from keras.models import load_model
+import tensorflow as tf
+from keras import backend as K
+from tensorflow.python.framework import graph_util, graph_io
+
+def h5_to_pb(h5_weight_path, output_dir, out_prefix="output_", log_tensorboard=True):
+    if not os.path.exists(output_dir):
+        os.mkdir(output_dir)
+
+    h5_model = build_model()
+    h5_model.load_weights(h5_weight_path)
+
+    out_nodes = []
+    for i in range(len(h5_model.outputs)):
+        out_nodes.append(out_prefix + str(i + 1))
+        # Alias each model output as output_<i+1> so atc/msame can address it by name.
+        tf.identity(h5_model.outputs[i], out_prefix + str(i + 1))
+
+    model_name = os.path.splitext(os.path.split(h5_weight_path)[-1])[0] + '.pb'
+
+    sess = K.get_session()
+    init_graph = sess.graph.as_graph_def()
+    main_graph = graph_util.convert_variables_to_constants(sess, init_graph, out_nodes)
+    graph_io.write_graph(main_graph, output_dir, name=model_name, as_text=False)
+    if log_tensorboard:
+        from tensorflow.python.tools import import_pb_to_tensorboard
+        import_pb_to_tensorboard.import_to_tensorboard(os.path.join(output_dir, model_name), output_dir)
+
+
+def build_model():
+    inference_model_hdf5 = ''  # path of the trained HDF5 model; set before running
+    h5_model = load_model(inference_model_hdf5)
+    return h5_model
+
+
+if __name__ == '__main__':
+    output_dir = ''      # output directory for the pb file; set before running
+    h5_weight_path = ''  # path of the HDF5 weights; set before running
+    h5_to_pb(h5_weight_path, output_dir)
+    print('finished')
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/modelzoo_level.txt b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/modelzoo_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..4f4eeb699cb9bfd53ef6361efe5807c90ff799c8
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/modelzoo_level.txt
@@ -0,0 +1,6 @@
+ModelConvert:OK
+QuantStatus:NOK
+FuncStatus:OK
+PrecisionStatus:OK
+AutoTune:NOK
+PerfStatus:NOK
diff --git a/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/picture/om_output.png b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/picture/om_output.png
new file mode 100644
index 0000000000000000000000000000000000000000..a9f9785d98b344ed545d6990d02c44c41423cf75
Binary files /dev/null and b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/picture/om_output.png differ
diff --git a/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/picture/output_viz.png b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/picture/output_viz.png
new file mode 100644
index 0000000000000000000000000000000000000000..0e7cf5acc5cf0e2b49d3490ffd11153f79baaa48
Binary files /dev/null and b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/picture/output_viz.png differ
diff --git a/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/picture/pb2om.png b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/picture/pb2om.png
new file mode 100644
index 0000000000000000000000000000000000000000..dafc79aa354030f56c78496a9a53a87b7c04970e
Binary files /dev/null and b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/picture/pb2om.png differ
diff --git a/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/requirements.txt b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..ad87d4400008c9821a834ea4b30b0683c0f253ab
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/requirements.txt
@@ -0,0 +1,6 @@
+python==3.7.5
+numpy==1.16.4
+tensorflow-gpu==1.15
+tensorboard==1.14.0
+matplotlib==2.2.3
+keras==2.2.4
diff --git a/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/txt2png.py b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/txt2png.py
new file mode 100644
index 0000000000000000000000000000000000000000..79303da2505336f0ed87407e84cd3e57cc812158
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow_ACL/txt2png.py
@@ -0,0 +1,61 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import numpy as np
+import argparse
+import matplotlib.pyplot as plt
+
+
+def main():
+    parser = argparse.ArgumentParser(description='')
+    parser.add_argument('--atc_dir', dest='atc_dir', default='', help='directory for atc result')
+    parser.add_argument('--width', dest='width', type=int, default=128)
+    parser.add_argument('--height', dest='height', type=int, default=128)
+
+    args = parser.parse_args()
+
+    # Load one msame TXT output and restore the (width, height, 3) coordinate image.
+    result = np.loadtxt(args.atc_dir, dtype=np.float32)
+    print(result.shape)
+    result_1 = result.reshape(args.width, args.height, 3)
+    print(result_1)
+    # image = (result_1 + 1) / 2  # optional: map a [-1, 1] network output into [0, 1] for display
+    image = result_1
+    plt.figure()
+    plt.imshow(image)
+    plt.show()
+
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/LICENSE b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/README.md b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..8fa513bf5ffc81ab7fd82922a19231a62ccfbd13
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/README.md
@@ -0,0 +1,63 @@
+# SYMNET
+## 1. Model Overview
+SymNet combines the symmetry principle of attribute-object transformations with group theory axioms. It consists of two modules, a coupling network and a decoupling network, and proposes a recognition method based on Relative Moving Distance (RMD) that classifies an attribute by the change it causes rather than by the attribute itself. It achieves significant improvements on the attribute-object composition zero-shot learning task.
+
+- Reference paper:
+
+  [Li, Yong-Lu, et al. "Symmetry and group in attribute-object compositions." Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition. 2020.](https://arxiv.org/abs/2004.00587)
+
+- Official implementation:
+
+  [SymNet](https://github.com/DirtyHarryLYL/SymNet)
+## 2. Environment
+Inference runs on an Ascend 310 Ai1S environment; see [Quickly creating an offline inference Ai1S environment](https://gitee.com/ascend/modelzoo/wikis/%E7%A6%BB%E7%BA%BF%E6%8E%A8%E7%90%86%E6%A1%88%E4%BE%8B/%E5%BF%AB%E9%80%9F%E5%88%9B%E5%BB%BA%E7%A6%BB%E7%BA%BF%E6%8E%A8%E7%90%86Ai1S%E7%8E%AF%E5%A2%83) to set it up.
+## 3. Data Preparation
+Run download_data.sh to download the dataset and preprocess it. The preprocessed dataset has been uploaded to OBS, including the raw data and the ckpt, pb, om, and bin files.
+OBS path: obs://cann-id1292-symnet/data/data.tar.gz. Download and extract it into the project root (SYMNET_ID1292_for_ACL). Directory layout:
+```
+SYMNET_ID1292_for_ACL
+├── data                 # dataset
+├── data_bin.py          # builds the bin files
+├── evaluate_acc.py      # accuracy evaluation
+├── freeze_graph.py      # ckpt to pb conversion
+├── inference.sh         # msame inference
+├── modelarts_entry.py   # ModelArts training entry
+├── train_full_1p.sh     # runs freeze_graph.py
+├── LICENSE
+├── modelzoo_level.txt
+├── pb_om.sh             # pb to om conversion
+├── README.md
+├── requirement.txt      # dependencies
+└── utils
+```
+## 4. CKPT to PB
+On the ModelArts platform, `modelarts_entry.py` launches the job, which executes `train_full_1p.sh` and runs `freeze_graph.py` to convert the ckpt into a pb model.
+The pb model has been uploaded to OBS at obs://cann-id1292-symnet/data/pb/
+## 5. PB to OM
+Run `pb_om.sh` to convert the pb model into an om model with atc; see [ATC model conversion](https://support.huaweicloud.com/atctool-cann51RC1alpha2/atlasatc_16_0005.html) for details.
+The om model has been uploaded to OBS at obs://cann-id1292-symnet/data/om/
+```
+atc --model=./data/pb/symnet_new.pb --framework=3 --output=./data/om/symnet --soc_version=Ascend310 --input_shape="Placeholder_2:1,512;test_attr_id:116;test_obj_id:116;Placeholder_6:1,12" --out_nodes="Mul_18:0;Softmax_3:0;Placeholder_6:0"
+```
+## 6. Building the bin Files
+Run `data_bin.py` to convert the test set into bin files.
+The bin files have been uploaded to OBS at obs://cann-id1292-symnet/data/bin_file/
+```
+python3 data_bin.py --data_url=./data --obj_pred=UT_obj_lr1e-3_test_ep260.pkl --bin_path=./data/bin_file/
+```
+## 7. msame Offline Inference
+See [msame](https://gitee.com/ascend/tools/tree/master/msame) to set up the msame tool.
+Copy the built msame executable (under tools/msame/out) into this project directory, or change the msame path in `inference.sh` to the tool's location.
+Set the bin input path and the inference output path as needed, then run `inference.sh` to perform inference.
+## 8. Accuracy Evaluation
+Run `evaluate_acc.py` to evaluate the accuracy of the inference results.
+```
+python3 evaluate_acc.py --input=/path/to/msame/output/ --data_url=./data --obj_pred=UT_obj_lr1e-3_test_ep260.pkl
+```
+| | Dataset | Epoch | Accuracy |
+|-------|------|------|------|
+| Paper (%) | UT | <700 | T1: 52.1  T2: 67.8  T3: 76.0 |
+| GPU | UT | 574 | T1: 0.5116  T2: 0.6719  T3: 0.7616 |
+| NPU | UT | 636 | T1: 0.5007  T2: 0.6684  T3: 0.7571 |
+| NPU offline inference | UT | 636 | T1: 0.4991  T2: 0.6696  T3: 0.7561 |
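+
+Before running the full evaluation, a single msame result can be inspected by hand. The snippet below is a minimal sketch and not part of the repo; it mirrors the `"{idx:05d}_output_{n}.txt"` naming that `evaluate_acc.py` expects, where n = 0/1/2 correspond to the `Mul_18` (RMD pair scores), `Softmax_3` (attribute probabilities), and `Placeholder_6` (object predictions) outputs, and it assumes msame wrote each tensor as one line of space-separated values:
+```
+import numpy as np
+
+def load_output(result_dir, idx, n):
+    # e.g. ./data/output/00000_output_0.txt for sample 0, output node 0
+    path = "%s/%05d_output_%d.txt" % (result_dir, idx, n)
+    with open(path, "r") as f:
+        values = f.readline().strip().split(" ")
+    return np.asarray(values, dtype=np.float32).reshape(1, -1)
+
+pair_scores = load_output("./data/output", 0, 0)  # shape (1, number_of_pairs)
+print(pair_scores.shape, int(pair_scores.argmax()))
+```
+The argmax index identifies the highest-scoring attribute-object pair for that sample, which should match the T1 prediction reported by `evaluate_acc.py`.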
diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/data_bin.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/data_bin.py
new file mode 100644
index 0000000000000000000000000000000000000000..fbb4b533eeeb42bd1d95a6e57f76880da4e17471
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/data_bin.py
@@ -0,0 +1,76 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import numpy as np
+from utils import dataset
+import argparse, os
+
+
+def make_parser():
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument("--data_url", type=str, default="./data", help="Path to dataset")
+    parser.add_argument("--data", type=str, default='UT', choices=['MIT', 'UT', 'MITg', 'UTg'], help="Dataset name")
+    parser.add_argument("--test_bz", type=int, default=1024, help="Test batch size")
+    parser.add_argument("--obj_pred", type=str, default=None, help="Object prediction from pretrained model")
+    parser.add_argument("--bin_path", type=str, default='./data/bin_file/')
+    return parser
+
+
+def main():
+    parser = make_parser()
+    args = parser.parse_args()
+
+    test_dataloader = dataset.get_dataloader(args.data_url, args.data, 'test', batchsize=args.test_bz,
+                                             obj_pred=args.obj_pred)
+    # One sub-directory per om input node; msame pairs the files across directories by index.
+    input_node = ["Placeholder_2", "test_att_id", "test_obj_id", "Placeholder_6"]
+    if not os.path.exists(args.bin_path):
+        os.mkdir(args.bin_path)
+
+    for node in input_node:
+        if not os.path.exists(args.bin_path + node):
+            os.mkdir(args.bin_path + node + "/")
+
+    dset = test_dataloader.dataset
+    test_att_id = np.array([dset.attr2idx[attr] for attr, _ in dset.pairs], dtype=np.int32)
+    test_obj_id = np.array([dset.obj2idx[obj] for _, obj in dset.pairs], dtype=np.int32)
+
+    count = 0
+    for image_ind, batch in enumerate(test_dataloader):
+        placeholder_2 = np.array(batch[4])
+        placeholder_6 = np.array(batch[-1])
+
+        for i in range(0, len(placeholder_2)):
+            placeholder_2[i, :].tofile(args.bin_path + input_node[0] + "/{0:05d}.bin".format(count))
+            # The attr/obj pair-id vectors are identical for every sample, but one copy
+            # is written per sample so that each inference has all four input files.
+            test_att_id.tofile(args.bin_path + input_node[1] + "/{0:05d}.bin".format(count))
+            test_obj_id.tofile(args.bin_path + input_node[2] + "/{0:05d}.bin".format(count))
+            placeholder_6[i, :].tofile(args.bin_path + input_node[3] + "/{0:05d}.bin".format(count))
+            count += 1
+
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/download_data.sh b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/download_data.sh
new file mode 100644
index 0000000000000000000000000000000000000000..57bc362acd52039bd3ac34567b68f58b3c4433fd
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/download_data.sh
@@ -0,0 +1,73 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+mkdir tmp
+
+# Download everything
+wget --show-progress -O tmp/attr-ops-data.tar.gz https://www.cs.utexas.edu/~tushar/attribute-ops/attr-ops-data.tar.gz
+wget --show-progress -O tmp/mitstates.zip http://wednesday.csail.mit.edu/joseph_result/state_and_transformation/release_dataset.zip
+wget --show-progress -O tmp/utzap.zip http://vision.cs.utexas.edu/projects/finegrained/utzap50k/ut-zap50k-images.zip
+wget --show-progress -O tmp/natural.tar.gz http://www.cs.cmu.edu/~spurushw/publication/compositional/compositional_split_natural.tar.gz
+echo "Data downloaded. Extracting files..."
+
+
+# Dataset metadata and features
+tar -zxvf tmp/attr-ops-data.tar.gz --strip 1
+mv data/mit-states mit-states-original
+mv data/ut-zap50k ut-zap50k-original
+rm -r cv tensor-completion data
+
+
+
+# MIT-States
+unzip tmp/mitstates.zip 'release_dataset/images/*' -d mit-states-original/
+mv mit-states-original/release_dataset/images mit-states-original/images/
+rm -r mit-states-original/release_dataset
+rename "s/ /_/g" mit-states-original/images/*
+
+# UT-Zappos50k
+unzip tmp/utzap.zip -d ut-zap50k-original/
+mv ut-zap50k-original/ut-zap50k-images ut-zap50k-original/_images/
+python reorganize_utzap.py
+rm -r ut-zap50k-original/_images
+
+
+# Natural split
+tar -zxvf tmp/natural.tar.gz
+mv mit-states/metadata_compositional-split-natural.t7 mit-states/metadata.t7
+mv ut-zap50k/metadata_compositional-split-natural.t7 ut-zap50k/metadata.t7
+mv mit-states/compositional-split-natural mit-states/compositional-split
+mv ut-zap50k/compositional-split-natural ut-zap50k/compositional-split
+mv mit-states mit-states-natural
+mv ut-zap50k ut-zap50k-natural
+ln -s ../mit-states-original/images mit-states-natural/images
+ln -s ../ut-zap50k-original/images ut-zap50k-natural/images
+
+
+# remove all zip files and temporary files
+rm -r tmp
diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/evaluate_acc.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/evaluate_acc.py
new file mode 100644
index 0000000000000000000000000000000000000000..03c354e64228064eaca05e27bd2a0ce895eed592
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/evaluate_acc.py
@@ -0,0 +1,129 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import numpy as np
+import os, argparse
+import torch
+from utils import dataset
+from collections import defaultdict
+from utils.evaluator import CZSL_Evaluator
+
+
+def make_parser():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--input", type=str, required=True, help="Path to msame inference result")
+    parser.add_argument("--data_url", type=str, default="./data", help="Path to dataset")
+    parser.add_argument("--data", type=str, default='UT', choices=['MIT', 'UT', 'MITg', 'UTg'], help="Dataset name")
+    parser.add_argument("--test_bz", type=int, default=1, help="Test batch size")
+    parser.add_argument("--obj_pred", type=str, default=None, help="Object prediction from pretrained model")
+    return parser
+
+
+def load_txt(path, dtype=np.float32):
+    # msame writes each output tensor as one line of space-separated values.
+    with open(path, 'r') as f:
+        line = f.readline().strip().split(" ")
+
+    predict = np.asarray(line, dtype=dtype).reshape(1, -1)
+    return predict
+
+
+def formated_czsl_result(report):
+    fstr = 'rA:{real_attr_acc:.4f}|rO:{real_obj_acc:.4f}|Cl/T1:{top1_acc:.4f}|T2:{top2_acc:.4f}|T3:{top3_acc:.4f}'
+    return fstr.format(**report)
+
+
+def main():
+    parser = make_parser()
+    args = parser.parse_args()
+    test_dataloader = dataset.get_dataloader(args.data_url, args.data, 'test', batchsize=args.test_bz,
+                                             obj_pred=args.obj_pred)
+    dset = test_dataloader.dataset
+    test_att_id = np.array([dset.attr2idx[attr] for attr, _ in dset.pairs])
+    test_obj_id = np.array([dset.obj2idx[obj] for _, obj in dset.pairs])
+
+    evaluator = CZSL_Evaluator(test_dataloader.dataset, None)
+
+    accuracies_pair = defaultdict(list)
+    accuracies_attr = defaultdict(list)
+    accuracies_obj = defaultdict(list)
+
+    for idx, batch in enumerate(test_dataloader):
+        id = "{0:05d}_output_".format(idx)
+        prob_P_rmd = load_txt(os.path.join(args.input, id + "0.txt"))
+        prob_A_attr = load_txt(os.path.join(args.input, id + "1.txt"))
+        prob_O = load_txt(os.path.join(args.input, id + "2.txt"))
+        score = dict([
+            ("score_rmd", [prob_P_rmd, prob_A_attr, prob_O]),  # Mul_18, Softmax_3, Placeholder_6
+        ])
+
+        for key in score.keys():
+            score[key][0] = {
+                (a, o): torch.from_numpy(score[key][0][:, j])
+                for j, (a, o) in enumerate(zip(test_att_id, test_obj_id))
+            }
+
+        prediction = score
+        attr_truth, obj_truth = batch[1], batch[2]
+        attr_truth, obj_truth = torch.from_numpy(attr_truth), 
torch.from_numpy(obj_truth) + + for key in prediction.keys(): + p_pair, p_a, p_o = prediction[key] + pair_results = evaluator.score_model(p_pair, obj_truth) + match_stats = evaluator.evaluate_predictions(pair_results, attr_truth, obj_truth) + accuracies_pair[key].append(match_stats) # 0/1 sequence of t/f + + a_match, o_match = evaluator.evaluate_only_attr_obj(p_a, attr_truth, p_o, obj_truth) + + accuracies_attr[key].append(a_match) + accuracies_obj[key].append(o_match) + + for name in accuracies_pair.keys(): + accuracies = accuracies_pair[name] + accuracies = zip(*accuracies) + accuracies = map(torch.mean, map(torch.cat, accuracies)) + attr_acc, obj_acc, closed_1_acc, closed_2_acc, closed_3_acc, _, objoracle_acc = map(lambda x: x.item(), + accuracies) + + real_attr_acc = torch.mean(torch.cat(accuracies_attr[name])).item() + real_obj_acc = torch.mean(torch.cat(accuracies_obj[name])).item() + + report_dict = { + 'real_attr_acc': real_attr_acc, + 'real_obj_acc': real_obj_acc, + 'top1_acc': closed_1_acc, + 'top2_acc': closed_2_acc, + 'top3_acc': closed_3_acc, + } + + print(name + ": " + formated_czsl_result(report_dict)) + + pass + + +if __name__ == '__main__': + main() + diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/freeze_graph.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/freeze_graph.py new file mode 100644 index 0000000000000000000000000000000000000000..a6a231df1787241a89678b82bc25f8a443dbf91a --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/freeze_graph.py @@ -0,0 +1,193 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import tensorflow as tf +from tensorflow.python.framework import graph_util +from utils import dataset, utils +import argparse +import os, tqdm +import numpy as np +import torch +from utils.evaluator import CZSL_Evaluator +from collections import defaultdict +from npu_bridge.npu_init import * + + +def make_parser(): + parser = argparse.ArgumentParser() + parser.add_argument("--train_url", type=str, default="./output", required=True, + help="output path") + parser.add_argument("--data_url", type=str, default="./data", required=True, + help="input path") + parser.add_argument("--ckpt", type=str, required=True) + parser.add_argument("--data", type=str, default='UT', choices=['MIT', 'UT', 'MITg', 'UTg'], help="Dataset name") + parser.add_argument("--test_bz", type=int, default=1024, help="Test batch size") + parser.add_argument("--obj_pred", type=str, default=None, help="Object prediction from pretrained model") + return parser + + +def freeze_graph(input_checkpoint, output_graph): + """ + ckpt转pb + """ + # 输出节点 + output_node_names = "Mul_18,Softmax_3,Placeholder_6" + saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=True) + graph = tf.get_default_graph() + input_graph_def = graph.as_graph_def() + + with utils.create_session() as sess: + saver.restore(sess, input_checkpoint) + output_graph_def = graph_util.convert_variables_to_constants( + sess=sess, + input_graph_def=input_graph_def, + output_node_names=output_node_names.split(",")) # 多个输出节点,以逗号隔开 + + with tf.gfile.GFile(output_graph, "wb") as f: # 保存模型 + f.write(output_graph_def.SerializeToString()) # 序列化输出 + print("%d ops in the final graph." % len(output_graph_def.node)) + + +def test_pb(args, pb_path): + """ + 在线推理,测试pb模型 + """ + print("Loading test dataset") + test_dataloader = dataset.get_dataloader(args.data_url, args.data, 'test', batchsize=args.test_bz, + obj_pred=args.obj_pred) + + network = 1 + evaluator = CZSL_Evaluator(test_dataloader.dataset, network) + with tf.Graph().as_default(): + output_graph_def = tf.GraphDef() + with open(pb_path, "rb") as f: + output_graph_def.ParseFromString(f.read()) + tf.import_graph_def(output_graph_def, name="") + + with utils.create_session() as sess: + sess.run(tf.global_variables_initializer()) + + print("get input tensor") + # 输入张量 + pos_image_feat = sess.graph.get_tensor_by_name("Placeholder_2:0") + test_attr_id = sess.graph.get_tensor_by_name("test_attr_id:0") + test_obj_id = sess.graph.get_tensor_by_name("test_obj_id:0") + pos_obj_prediction = sess.graph.get_tensor_by_name("Placeholder_6:0") + + print("get output tensor") + # 输出张量 + prob_P_rmd = sess.graph.get_tensor_by_name("Mul_18:0") + prob_A_attr = sess.graph.get_tensor_by_name("Softmax_3:0") + prob_O = sess.graph.get_tensor_by_name("Placeholder_6:0") + score_op = dict([ + ("score_rmd", [prob_P_rmd, prob_A_attr, prob_O]), # Mul_18, Softmax_3, Placeholder_6 + ]) + + accuracies_pair = defaultdict(list) + accuracies_attr = defaultdict(list) + accuracies_obj = defaultdict(list) + + for image_ind, batch in tqdm.tqdm(enumerate(test_dataloader), total=len(test_dataloader), postfix='test'): + dset = test_dataloader.dataset + test_att = np.array([dset.attr2idx[attr] for attr, _ in dset.pairs]) + test_obj = np.array([dset.obj2idx[obj] for _, obj in dset.pairs]) + + feed_dict = { + pos_image_feat: batch[4], # Placeholder_2 + test_attr_id: test_att, # test_attr_id + test_obj_id: test_obj, # test_obj_id + pos_obj_prediction: batch[-1], # Placeholder_6 + } + score = sess.run(score_op, feed_dict=feed_dict) + for key in 
score_op.keys(): + score[key][0] = { + (a, o): torch.from_numpy(score[key][0][:, i]) + for i, (a, o) in enumerate(zip(test_att, test_obj)) + } + + attr_truth, obj_truth = batch[1], batch[2] + attr_truth, obj_truth = torch.from_numpy(attr_truth), torch.from_numpy(obj_truth) + + match_stats = [] + for key in score_op.keys(): + p_pair, p_a, p_o = score[key] + pair_results = evaluator.score_model(p_pair, obj_truth) + match_stats = evaluator.evaluate_predictions(pair_results, attr_truth, obj_truth) + accuracies_pair[key].append(match_stats) # 0/1 sequence of t/f + + a_match, o_match = evaluator.evaluate_only_attr_obj(p_a, attr_truth, p_o, obj_truth) + + accuracies_attr[key].append(a_match) + accuracies_obj[key].append(o_match) + + for name in accuracies_pair.keys(): + accuracies = accuracies_pair[name] + accuracies = zip(*accuracies) + accuracies = map(torch.mean, map(torch.cat, accuracies)) + attr_acc, obj_acc, closed_1_acc, closed_2_acc, closed_3_acc, _, objoracle_acc = map(lambda x: x.item(), + accuracies) + + real_attr_acc = torch.mean(torch.cat(accuracies_attr[name])).item() + real_obj_acc = torch.mean(torch.cat(accuracies_obj[name])).item() + + report_dict = { + 'real_attr_acc': real_attr_acc, + 'real_obj_acc': real_obj_acc, + 'top1_acc': closed_1_acc, + 'top2_acc': closed_2_acc, + 'top3_acc': closed_3_acc, + 'name': "symnet", + 'epoch': 636, + } + + print(name + ": " + utils.formated_czsl_result(report_dict)) + + pass + + +def main(): + parser = make_parser() + args = parser.parse_args() + + weight_dir = os.path.join(args.data_url, './weights') + ckpt_path = os.path.join(weight_dir, args.ckpt) + print("ckpt path => ", ckpt_path) + + pb_path = os.path.join(args.train_url, './pb/') + print("pb path => ", pb_path) + if not os.path.exists(pb_path): + os.mkdir(pb_path) + + saved_pb_path = os.path.join(args.data_url, './pb/symnet.pb') + print("saved pb path => ", saved_pb_path) + + freeze_graph(ckpt_path, pb_path + "symnet.pb") + # test_pb(args, pb_path=pb_path+"symnet.pb") + + +if __name__ == '__main__': + main() diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/inference.sh b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/inference.sh new file mode 100644 index 0000000000000000000000000000000000000000..24ee9a5aaff4bb7f7168fc11ea6d76a97a4d7758 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/inference.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +Placeholder_2=./data/bin_file/Placeholder_2 +test_att_id=./data/bin_file/test_att_id +test_obj_id=./data/bin_file/test_obj_id +Placeholder_6=./data/bin_file/Placeholder_6 + +om_path="/home/HwHiAiUser/AscendProjects/SYMNET_ID1292_for_ACL/data/om/symnet.om" +output_path=./data/output +ulimit -c 0 +./msame --model ${om_path} --input ${Placeholder_2},${test_att_id},${test_obj_id},${Placeholder_6} --output ${output_path} --outfmt TXT diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/modelarts_entry.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/modelarts_entry.py new file mode 100644 index 0000000000000000000000000000000000000000..28515c5d102a251ac55d7d9a0c0bd2d1512e4906 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/modelarts_entry.py @@ -0,0 +1,63 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import argparse +import sys + +# 解析输入参数data_url +parser = argparse.ArgumentParser() +parser.add_argument("--data_url", type=str, default="/home/ma-user/modelarts/inputs/data_url_0") +parser.add_argument("--train_url", type=str, default="/home/ma-user/modelarts/outputs/train_url_0/") +config = parser.parse_args() + +print("[CANN-Modelzoo] code_dir path is [%s]" % (sys.path[0])) +code_dir = sys.path[0] +os.chdir(code_dir) +print("[CANN-Modelzoo] work_dir path is [%s]" % (os.getcwd())) + +print("[CANN-Modelzoo] before train - list my run files:") +os.system("ls -al /usr/local/Ascend/ascend-toolkit/") + +print("[CANN-Modelzoo] before train - list my dataset files:") +os.system("ls -al %s" % config.data_url) + +print("[CANN-Modelzoo] start run train shell") +# 设置sh文件格式为linux可执行 +os.system("dos2unix ./test/*") + +# 执行train_full_1p.sh或者train_performance_1p.sh,需要用户自己指定 +# full和performance的差异,performance只需要执行很少的step,控制在15分钟以内,主要关注性能FPS +os.system("bash ./train_full_1p.sh --data_path=%s --output_path=%s " % (config.data_url, config.train_url)) + +print("[CANN-Modelzoo] finish run train shell") + +# 将当前执行目录所有文件拷贝到obs的output进行备份 +print("[CANN-Modelzoo] after train - list my output files:") +os.system("cp -r %s %s " % (code_dir, config.train_url)) +os.system("ls -al %s" % config.train_url) \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/modelzoo_level.txt b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/modelzoo_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..2a39f2221b8103c0ae90337cb4b6bd67c69f2d11 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/modelzoo_level.txt @@ -0,0 +1,2 @@ +FuncStatus:OK +PrecisionStatus:OK \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/pb_om.sh b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/pb_om.sh new file mode 100644 index 0000000000000000000000000000000000000000..8540599f94fcea2381368f9df6ec526a8364cad9 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/pb_om.sh @@ -0,0 +1,3 @@ +#!/bin/bash +atc --model=./data/pb/symnet_new.pb --framework=3 --output=./data/om/symnet --soc_version=Ascend310 --input_shape="Placeholder_2:1,512;test_attr_id:116;test_obj_id:116;Placeholder_6:1,12" --out_nodes="Mul_18:0;Softmax_3:0;Placeholder_6:0" + diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/requirement.txt 
b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/requirement.txt new file mode 100644 index 0000000000000000000000000000000000000000..5a7a6d70cba558a4ae408f9f1a5180a12e275ddd --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/requirement.txt @@ -0,0 +1,4 @@ +tensorflow==1.15.0 +torch==1.10.0 +torchvision +numpy==1.21.2 \ No newline at end of file diff --git a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/train_full_1p.sh b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/train_full_1p.sh similarity index 88% rename from TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/train_full_1p.sh rename to ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/train_full_1p.sh index 27cfdbd7a5c7316e9f82ea6d5e88abf661858bbb..d712ad6ee86d9cab42bb8b4ac25e2f4cc41155bd 100644 --- a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/train_full_1p.sh +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/train_full_1p.sh @@ -1,187 +1,185 @@ -#!/bin/bash - -########################################################## -#########第3行 至 100行,请一定不要、不要、不要修改########## -#########第3行 至 100行,请一定不要、不要、不要修改########## -#########第3行 至 100行,请一定不要、不要、不要修改########## -########################################################## -# shell脚本所在路径 -cur_path=`echo $(cd $(dirname $0);pwd)` - -# 判断当前shell是否是performance -perf_flag=`echo $0 | grep performance | wc -l` - -# 当前执行网络的名称 -Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` - -export RANK_SIZE=1 -export RANK_ID=0 -export JOB_ID=10087 - -# 路径参数初始化 -data_path='' -output_path='' - -# 帮助信息,不需要修改 -if [[ $1 == --help || $1 == -h ]];then - echo"usage:./train_performance_1P.sh " - echo " " - echo "parameter explain: - --data_path # dataset of training - --output_path # output of training - --train_steps # max_step for training - --train_epochs # max_epoch for training - --batch_size # batch size - -h/--help show help message - " - exit 1 -fi - -# 参数校验,不需要修改 -for para in $* -do - if [[ $para == --data_path* ]];then - data_path=`echo ${para#*=}` - elif [[ $para == --output_path* ]];then - output_path=`echo ${para#*=}` - elif [[ $para == --train_steps* ]];then - train_steps=`echo ${para#*=}` - elif [[ $para == --train_epochs* ]];then - train_epochs=`echo ${para#*=}` - elif [[ $para == --batch_size* ]];then - batch_size=`echo ${para#*=}` - fi -done - -# 校验是否传入data_path,不需要修改 -if [[ $data_path == "" ]];then - echo "[Error] para \"data_path\" must be config" - exit 1 -fi - -# 校验是否传入output_path,不需要修改 -if [[ $output_path == "" ]];then - output_path="./test/output/${ASCEND_DEVICE_ID}" -fi - -# 设置打屏日志文件名,请保留,文件名为${print_log} -print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" -modelarts_flag=${MODELARTS_MODEL_PATH} -if [ x"${modelarts_flag}" != x ]; -then - echo "running without etp..." 
- print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` - print_log="/home/ma-user/modelarts/log/${print_log_name}" -fi -echo "### get your log here : ${print_log}" - -CaseName="" -function get_casename() -{ - if [ x"${perf_flag}" = x1 ]; - then - CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' - else - CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' - fi -} - -# 跳转到code目录 -cd ${cur_path}/../ -rm -rf ./test/output/${ASCEND_DEVICE_ID} -mkdir -p ./test/output/${ASCEND_DEVICE_ID} - -# 训练开始时间记录,不需要修改 -start_time=$(date +%s) -########################################################## -#########第3行 至 100行,请一定不要、不要、不要修改########## -#########第3行 至 100行,请一定不要、不要、不要修改########## -#########第3行 至 100行,请一定不要、不要、不要修改########## -########################################################## - -#========================================================= -#========================================================= -#========训练执行命令,需要根据您的网络进行修改============== -#========================================================= -#========================================================= -# 基础参数,需要模型审视修改 -# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 -# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 -# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 -batch_size=128 - - -if [ x"${modelarts_flag}" != x ]; -then - python3.7 ./train.py - python3.7 ./get_map.py -else - python3.7 ./train.py 1>>${print_log} 2>&1 - python3.7 ./get_map.py 1>>${print_log} 2>&1 -fi - -# 性能相关数据计算 -StepTime=`grep "ms/step :" ${print_log} | tail -n 10 | awk '{print $NF}' | awk '{sum+=$1} END {print sum/NR}'` -FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}*1000'}'` - -# 精度相关数据计算 -train_accuracy=`grep "Final Accuracy accuracy" ${print_log} | awk '{print $NF}'` -# 提取所有loss打印信息 -grep "loss :" ${print_log} | awk -F ":" '{print $4}' | awk -F "-" '{print $1}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt - - -########################################################### -#########后面的所有内容请不要修改########################### -#########后面的所有内容请不要修改########################### -#########后面的所有内容请不要修改########################### -########################################################### - -# 判断本次执行是否正确使用Ascend NPU -use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` -if [ x"${use_npu_flag}" == x0 ]; -then - echo "------------------ ERROR NOTICE START ------------------" - echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." - echo "------------------ ERROR NOTICE END------------------" -else - echo "------------------ INFO NOTICE START------------------" - echo "INFO, your task have used Ascend NPU, please check your result." 
- echo "------------------ INFO NOTICE END------------------" -fi - -# 获取最终的casename,请保留,case文件名为${CaseName} -get_casename - -# 重命名loss文件 -if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; -then - mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt -fi - -# 训练端到端耗时 -end_time=$(date +%s) -e2e_time=$(( $end_time - $start_time )) - -echo "------------------ Final result ------------------" -# 输出性能FPS/单step耗时/端到端耗时 -echo "Final Performance images/sec : $FPS" -echo "Final Performance ms/step : $StepTime" -echo "E2E Training Duration sec : $e2e_time" - -# 输出训练精度 -echo "Final Train Accuracy : ${train_accuracy}" - -# 最后一个迭代loss值,不需要修改 -ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) - -#关键信息打印到${CaseName}.log中,不需要修改 -echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +modelarts_flag=${MODELARTS_MODEL_PATH} +if [ x"${modelarts_flag}" != x ]; +then + echo "running without etp..." 
+ print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` + print_log="/home/ma-user/modelarts/log/${print_log_name}" +fi +echo "### get your log here : ${print_log}" + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 基础参数,需要模型审视修改 +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +batch_size=256 +epochs=636 + +if [ x"${modelarts_flag}" != x ]; +then + python3.7 ./freeze_graph.py --data_url=${data_path} --train_url=${output_path} --ckpt snapshot_epoch_636.ckpt --data UT --obj_pred UT_obj_lr1e-3_test_ep260.pkl +else + python3.7 ./freeze_graph.py --data_url=${data_path} --train_url=${output_path} --ckpt snapshot_epoch_636.ckpt --data UT --obj_pred UT_obj_lr1e-3_test_ep260.pkl +fi + +# 性能相关数据计算 +StepTime=`grep "sec/step :" ${print_log} | tail -n 20 | awk -F ':' '{print $NF}' | awk '{sum+=$1} END {print sum/NR}'` +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` + +# 精度相关数据计算 +train_accuracy=`grep "Best score_rmd" ${print_log} | tail -n 1 | awk -F / '{print $NF}'` +# 提取所有loss打印信息 +grep "Current score_rmd" ${print_log} | awk -F "/" '{print NF}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + + +########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." 
+ echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/__init__.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/MIT_attrs.json b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/MIT_attrs.json new file mode 100644 index 0000000000000000000000000000000000000000..8b45cb23eb771598ffe18a75268dec69026450d4 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/MIT_attrs.json @@ -0,0 +1 @@ +{"crumpled": "crumpled", "upright": "upright", "bright": "bright", "rough": "rough", "shattered": "shattered", "cut": "cut", "torn": "torn", "folded": "folded", "young": "young", "wet": "wet", "cluttered": "cluttered", "verdant": "verdant", "sunny": "sunny", "runny": "runny", "thawed": "thawed", "dark": "dark", "windblown": "windblown", "burnt": "burnt", "molten": "molten", "eroded": "eroded", "frayed": "frayed", "blunt": "blunt", "cloudy": "cloudy", "large": "large", "whipped": "whipped", "small": "small", "engraved": "engraved", "heavy": "heavy", "old": "old", "thin": "thin", "diced": "diced", "rusty": "rusty", "inflated": "inflated", "ruffled": "ruffled", "steaming": "steaming", "unpainted": "unpainted", "moldy": "moldy", "closed": "closed", "new": "new", "filled": "filled", "dirty": "dirty", "ripped": "ripped", "full": "full", "squished": "squished", "peeled": "peeled", "broken": "broken", "mashed": "mashed", "pureed": "pureed", "dry": "dry", "chipped": "chipped", "spilled": "spilled", "coiled": "coiled", "wrinkled": "wrinkled", "narrow": "narrow", "fallen": "fallen", "muddy": "muddy", "sliced": "sliced", "sharp": "sharp", "unripe": 
"unripe", "thick": "thick", "open": "open", "standing": "standing", "ancient": "ancient", "toppled": "toppled", "weathered": "weathered", "murky": "murky", "damp": "damp", "tiny": "tiny", "grimy": "grimy", "viscous": "viscous", "empty": "empty", "scratched": "scratched", "painted": "painted", "pierced": "pierced", "draped": "draped", "loose": "loose", "browned": "browned", "foggy": "foggy", "brushed": "brushed", "dull": "dull", "wide": "wide", "winding": "winding", "frozen": "frozen", "straight": "straight", "smooth": "smooth", "worn": "worn", "melted": "melted", "pressed": "pressed", "cracked": "cracked", "bent": "bent", "ripe": "ripe", "mossy": "mossy", "modern": "modern", "raw": "raw", "lightweight": "lightweight", "creased": "creased", "curved": "curved", "huge": "huge", "tight": "tight", "crinkled": "crinkled", "wilted": "wilted", "dented": "dented", "crushed": "crushed", "tall": "tall", "short": "short", "shiny": "shiny", "clear": "clear", "splintered": "splintered", "cored": "cored", "cooked": "cooked", "clean": "clean", "deflated": "deflated", "barren": "barren", "fresh": "fresh", "caramelized": "caramelized"} \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/MIT_gamma.json b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/MIT_gamma.json new file mode 100644 index 0000000000000000000000000000000000000000..2801ae1e2654ea04e35420ab3e2248e978e44a13 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/MIT_gamma.json @@ -0,0 +1 @@ +{"attr_b": [0.8, 0.8, 0.8, 1.0, 1.2, 1.0, 1.0, 1.0, 1.0, 1.2, 0.8, 1.2, 0.8, 0.8, 1.0, 1.2, 1.0, 1.2, 0.8, 1.0, 0.8, 1.0, 0.8, 1.0, 1.2, 0.8, 0.8, 1.0, 0.8, 1.0, 1.0, 0.8, 1.0, 0.8, 1.0, 0.8, 1.2, 1.2, 1.0, 1.0, 1.2, 0.8, 0.8, 1.2, 0.8, 0.8, 0.8, 0.8, 1.2, 1.2, 0.8, 1.2, 0.8, 1.0, 0.8, 1.2, 1.2, 1.0, 0.8, 1.0, 1.2, 0.8, 1.0, 1.2, 0.8, 0.8, 1.2, 0.8, 0.8, 0.8, 1.2, 1.0, 0.8, 0.8, 1.2, 0.8, 1.0, 0.8, 0.8, 1.0, 1.0, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 1.0, 1.0, 1.0, 1.0, 1.0, 0.8, 0.8, 1.2, 0.8, 1.2, 0.8, 0.8, 1.0, 0.8, 1.0, 0.8, 0.8, 1.0, 1.2, 0.8, 1.0, 0.8, 0.8, 1.2, 1.2, 0.8, 0.8, 0.8], "attr_a": [0.8, 0.6000000000000001, 0.6000000000000001, 1.0, 1.2, 1.0, 1.0, 1.0, 1.0, 1.2, 0.6000000000000001, 1.2, 0.6000000000000001, 0.6000000000000001, 1.0, 1.2, 1.0, 1.2, 0.6000000000000001, 1.0, 0.6000000000000001, 1.0, 0.8, 1.0, 1.2, 0.6000000000000001, 0.6000000000000001, 1.0, 0.8, 1.0, 1.0, 0.8, 1.0, 0.6000000000000001, 1.0, 0.8, 1.2, 1.2, 1.0, 1.0, 1.2, 0.8, 0.6000000000000001, 1.2, 0.8, 0.6000000000000001, 0.6000000000000001, 0.6000000000000001, 1.2, 1.2, 0.6000000000000001, 1.2, 0.6000000000000001, 1.0, 0.8, 1.0, 1.2, 1.0, 0.8, 1.0, 1.2, 0.6000000000000001, 1.0, 1.0, 0.6000000000000001, 0.8, 1.2, 0.8, 0.6000000000000001, 0.6000000000000001, 1.0, 1.0, 0.6000000000000001, 0.6000000000000001, 1.2, 0.6000000000000001, 1.0, 0.8, 0.6000000000000001, 1.0, 1.0, 0.6000000000000001, 0.8, 0.8, 0.6000000000000001, 0.6000000000000001, 0.6000000000000001, 1.0, 1.0, 1.0, 1.0, 1.0, 0.6000000000000001, 0.8, 1.2, 0.8, 1.2, 0.6000000000000001, 0.6000000000000001, 1.0, 0.6000000000000001, 1.0, 0.6000000000000001, 0.6000000000000001, 1.0, 1.2, 0.6000000000000001, 1.0, 0.6000000000000001, 0.8, 1.2, 1.2, 0.6000000000000001, 0.8, 0.6000000000000001], "comp_b": [0.74, 0.14, 0.54, 1, 0.78, 0.84, 0.8, 1.06, 1.18, 1.28, 0.02, 0.02, 1.0, 0.0, 1.0, 1.62, 1.0, 0.58, 0.08, 1, 1.0, 1.26, 1.02, 1, 1.1400000000000001, 1, 0.0, 0.72, 0.74, 0.66, 1.04, 0.56, 0.56, 0.64, 1.34, 1.04, 1.08, 0.84, 1.0, 0.78, 1.0, 
0.66, 0.02, 0.54, 0.7000000000000001, 0.56, 1, 0.6, 0.66, 0.9, 0.84, 1.82, 0.9400000000000001, 1.46, 0.46, 0.62, 1.1, 1.7, 0.64, 0.6, 0.52, 0.38, 0.76, 1.0, 1.0, 0.64, 0.72, 0.28, 0.28, 0.48, 0.46, 0.52, 1.0, 1.0, 1.0, 1.0, 0.68, 1.18, 1.0, 0.52, 0.12, 1.0, 1.0, 0.84, 1.0, 0.0, 0.34, 0.08, 1.0, 1.06, 0.46, 1.36, 1.0, 1.1400000000000001, 1.0, 0.56, 0.88, 1.0, 1.0, 1.06, 0.04, 0.78, 1.0, 1.02, 0.0, 1.0, 0.08, 0.56, 0.56, 0.36, 0.24, 0.32, 0.06, 1.02, 0.14], "comp_a": [0.7800000000000002, 0.22, 0.6200000000000003, 1, 0.8800000000000003, 0.8600000000000001, 0.7800000000000002, 1.0600000000000003, 1.1400000000000001, 1.1600000000000001, 0.02, 0.06, 1.0, 0.12, 1.0, 1.4600000000000002, 0.9400000000000002, 0.6800000000000002, 0.08, 1, 1.0, 1.1600000000000001, 1.0000000000000002, 1, 1.1200000000000003, 1, 0.0, 0.8200000000000003, 0.7800000000000002, 0.7000000000000002, 1.0400000000000003, 0.6400000000000003, 0.7200000000000004, 0.7200000000000002, 1.4800000000000004, 1.0600000000000003, 1.0200000000000002, 0.8800000000000001, 1.0, 0.8200000000000003, 1.0, 0.5800000000000001, 0.0, 0.6200000000000003, 0.7600000000000002, 0.7400000000000004, 1, 0.7200000000000002, 0.7000000000000002, 0.9200000000000002, 0.8400000000000001, 1.6400000000000001, 0.8400000000000001, 1.3800000000000001, 0.5000000000000002, 0.7000000000000002, 1.1000000000000003, 1.6, 0.6400000000000001, 0.6800000000000002, 0.6000000000000003, 0.49999999999999983, 0.7800000000000002, 1.0, 1.0, 0.6200000000000001, 0.8400000000000003, 0.32000000000000006, 0.32000000000000006, 0.5200000000000002, 0.6200000000000003, 0.6400000000000003, 1.0, 1.0, 1.0, 1.0, 0.7799999999999995, 1.12, 1.0, 0.5800000000000003, 0.18, 1.0, 1.0, 0.8800000000000001, 1.0, 0.0, 0.2799999999999999, 0.08, 1.0, 1.0600000000000003, 0.5600000000000003, 1.2200000000000002, 1.0, 1.1200000000000003, 1.0, 0.6200000000000003, 0.9800000000000002, 1.0, 1.0, 1.0600000000000003, 0.0, 0.8400000000000003, 1.0, 0.9400000000000002, 0.1, 1.0, 0.16, 0.6400000000000003, 0.6200000000000003, 0.4799999999999998, 0.30000000000000004, 0.4199999999999998, 0.22, 1.0200000000000002, 0.2]} \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/MIT_objs.json b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/MIT_objs.json new file mode 100644 index 0000000000000000000000000000000000000000..c25a453353dec1fd265e136ebd46adcf259716e6 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/MIT_objs.json @@ -0,0 +1 @@ +{"lightbulb": "lightbulb", "shoes": "shoes", "deck": "deck", "laptop": "laptop", "ceramic": "ceramic", "paper": "paper", "vacuum": "vacuum", "keyboard": "keyboard", "chair": "chair", "milk": "milk", "roots": "roots", "carpet": "carpet", "sauce": "sauce", "tire": "tire", "mud": "mud", "sky": "sky", "lake": "lake", "sugar": "sugar", "bush": "bush", "bike": "bike", "fig": "fig", "garage": "garage", "orange": "orange", "furniture": "furniture", "hat": "hat", "persimmon": "persimmon", "boulder": "boulder", "plate": "plate", "coffee": "coffee", "handle": "handle", "branch": "branch", "wire": "wire", "bear": "bear", "coast": "coast", "vegetable": "vegetable", "bean": "bean", "tulip": "tulip", "fan": "fan", "snake": "snake", "desk": "desk", "silk": "silk", "bread": "bread", "aluminum": "aluminum", "cable": "cable", "gemstone": "gemstone", "bracelet": "bracelet", "candy": "candy", "bay": "bay", "bag": "bag", "sand": "sand", "steps": "steps", "knife": "knife", "clay": "clay", "iguana": "iguana", "tower": "tower", 
"river": "river", "garden": "garden", "clothes": "clothes", "copper": "copper", "creek": "creek", "fence": "fence", "house": "house", "fish": "fish", "library": "library", "chocolate": "chocolate", "computer": "computer", "palm": "palm", "roof": "roof", "sea": "sea", "mirror": "mirror", "candle": "candle", "lightning": "lightning", "chicken": "chicken", "ribbon": "ribbon", "redwood": "redwood", "shower": "shower", "flower": "flower", "leaf": "leaf", "jewelry": "jewelry", "lead": "lead", "clock": "clock", "armor": "armor", "tube": "tube", "ice": "ice", "granite": "granite", "sword": "sword", "cloud": "cloud", "ground": "ground", "chains": "chains", "nest": "nest", "highway": "highway", "pants": "pants", "cord": "cord", "hose": "hose", "dirt": "dirt", "salmon": "salmon", "rock": "rock", "horse": "horse", "water": "water", "newspaper": "newspaper", "cookie": "cookie", "key": "key", "pasta": "pasta", "paste": "paste", "trail": "trail", "card": "card", "kitchen": "kitchen", "box": "box", "stone": "stone", "drum": "drum", "thread": "thread", "column": "column", "island": "island", "tie": "tie", "berry": "berry", "smoke": "smoke", "castle": "castle", "glasses": "glasses", "road": "road", "cheese": "cheese", "apple": "apple", "wall": "wall", "pot": "pot", "canyon": "canyon", "tomato": "tomato", "frame": "frame", "church": "church", "table": "table", "ring": "ring", "brass": "brass", "boat": "boat", "belt": "belt", "city": "city", "bathroom": "bathroom", "toy": "toy", "fabric": "fabric", "beef": "beef", "window": "window", "tree": "tree", "plastic": "plastic", "paint": "paint", "camera": "camera", "bronze": "bronze", "tea": "tea", "valley": "valley", "bubble": "bubble", "banana": "banana", "building": "building", "ceiling": "ceiling", "diamond": "diamond", "door": "door", "gear": "gear", "shorts": "shorts", "fire": "fire", "bus": "bus", "wax": "wax", "envelope": "envelope", "oil": "oil", "cabinet": "cabinet", "tiger": "tiger", "glass": "glass", "nut": "nut", "potato": "potato", "steel": "steel", "wood": "wood", "wool": "wool", "room": "room", "salad": "salad", "car": "car", "blade": "blade", "bucket": "bucket", "bed": "bed", "cat": "cat", "rope": "rope", "soup": "soup", "street": "street", "flame": "flame", "cake": "cake", "bridge": "bridge", "stream": "stream", "well": "well", "penny": "penny", "pie": "pie", "shell": "shell", "pond": "pond", "ocean": "ocean", "dress": "dress", "cotton": "cotton", "mountain": "mountain", "sandwich": "sandwich", "lemon": "lemon", "shirt": "shirt", "concrete": "concrete", "town": "town", "balloon": "balloon", "cave": "cave", "bowl": "bowl", "snow": "snow", "rubber": "rubber", "field": "field", "book": "book", "forest": "forest", "animal": "animal", "elephant": "elephant", "tile": "tile", "gate": "gate", "beach": "beach", "pizza": "pizza", "wheel": "wheel", "wave": "wave", "plant": "plant", "ball": "ball", "mat": "mat", "screw": "screw", "farm": "farm", "eggs": "eggs", "foam": "foam", "pear": "pear", "necklace": "necklace", "fruit": "fruit", "garlic": "garlic", "log": "log", "moss": "moss", "dust": "dust", "velvet": "velvet", "basement": "basement", "coin": "coin", "desert": "desert", "pool": "pool", "cliff": "cliff", "butter": "butter", "phone": "phone", "coat": "coat", "seafood": "seafood", "floor": "floor", "metal": "metal", "dog": "dog", "meat": "meat", "jacket": "jacket", "coal": "coal", "shore": "shore", "truck": "truck", "jungle": "jungle", "bottle": "bottle", "basket": "basket"} \ No newline at end of file diff --git 
a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/MIT_weight.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/MIT_weight.py new file mode 100644 index 0000000000000000000000000000000000000000..dfa77c11445fbbc99fd3bbedc43d90221fc8d06e --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/MIT_weight.py @@ -0,0 +1,30 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +attr_weight = [4.561632133281056, 4.325826700719732, 5.5088676690046325, 7.118305581438733, 4.974046539682426, 3.6046302163498876, 4.8447080253179395, 4.474159182316003, 3.8754342510143167, 4.419824831352674, 4.471624326712815, 5.098651958137871, 4.88216558785284, 6.260855349587511, 7.192413553592455, 5.638325736241519, 3.8465662670134644, 5.560160963392184, 5.537855205877885, 4.041981300554439, 5.137304112572149, 4.417423869815136, 4.382084503369827, 4.048592894886752, 4.794518280794085, 4.454058002994916, 5.4070849746946905, 5.487665461354029, 5.606323005155346, 5.070612738073478, 4.474159182316003, 4.937081345448955, 4.85952311110308, 4.129741797363355, 7.231634266745736, 4.266361200676454, 3.769655412823722, 4.542427153445006, 7.192413553592455, 5.796549741456413, 5.187819902709052, 4.085759334762026, 4.8086030206758235, 4.094402430918046, 4.62156447400373, 6.761630637500001, 6.019693292770623, 5.362913756381553, 4.350191139593873, 5.274360359040108, 3.4760311805888917, 5.516036158483245, 5.768378864489717, 5.590697687252265, 4.520920948224043, 4.819298309792571, 5.098651958137871, 4.469095880359457, 3.81325206032948, 4.7043069010737835, 6.339636227440626, 5.070612738073478, 4.3592002095362385, 3.674003099241754, 5.8966332000133965, 4.053580436397791, 4.937081345448955, 5.108175839649126, 4.471624326712815, 4.235901993191745, 4.510338838893506, 4.53161623734079, 5.575312768412785, 6.081062239146916, 4.526254294199405, 6.146444998409767, 4.386724882926329, 4.94112175498596, 5.9072153093439335, 5.224763417900736, 6.6893099759203745, 6.323375706568846, 4.2319099719222075, 3.3806359631553646, 5.996162795360429, 4.909246546455511, 5.777681257152031, 6.173843972597882, 7.315015875684788, 4.4640580863295, 4.974046539682426, 5.157205266889445, 5.996162795360429, 4.784083403501505, 4.028887823807419, 4.089207614040942, 5.996162795360429, 
4.4222315713832385, 6.786948445484291, 5.13239009776972, 4.537007085975667, 4.982449950478806, 6.119776751327606, 4.630427161261576, 6.443176906381466, 3.9890419152602195, 4.885989684291244, 5.308846535111277, 4.87456098846762, 5.4070849746946905, 5.208763076554295, 5.7870709975018695, 5.759162209384793, 4.6790689684839215, 5.137304112572149] +obj_weight = [5.246503404537141, 6.216403587016678, 5.381605889393706, 6.98477418881421, 4.855798712012098, 6.579309080706047, 5.291454792399408, 5.750029725821521, 5.835389574772678, 6.133021978077626, 4.72366234402674, 6.133021978077626, 5.759162209384793, 5.750029725821521, 6.173843972597882, 6.202014849564578, 4.94925188106921, 5.297218497116158, 5.835389574772678, 5.638325736241519, 5.257553240723727, 5.654719546017196, 5.777681257152031, 4.92904917375169, 6.043790844349684, 5.369105726629474, 5.606323005155346, 4.88216558785284, 5.886161900146101, 4.8086030206758235, 4.852088132615562, 5.494683034012676, 5.103402560896468, 5.996162795360429, 6.579309080706047, 4.7737562893456555, 5.061438361797436, 5.917910598460681, 5.865542612943365, 4.9615723654572506, 5.996162795360429, 6.081062239146916, 5.3265461122106785, 5.530529165785812, 6.007858835123621, 5.530529165785812, 5.089217925904512, 5.473776349193362, 5.108175839649126, 4.999471638048236, 5.97317327713573, 5.4070849746946905, 6.173843972597882, 5.420072170221501, 5.8453399056258455, 5.246503404537141, 4.909246546455511, 5.003772719947627, 5.545235313175508, 5.381605889393706, 5.494683034012676, 5.122633922824356, 5.796549741456413, 5.297218497116158, 5.122633922824356, 5.38791505858697, 6.339636227440626, 5.654719546017196, 5.187819902709052, 5.3506436637897385, 5.344564617713356, 6.499266373032509, 5.750029725821521, 5.320611376690864, 5.112972011912619, 5.466903469905601, 6.425158400878788, 4.7771868244424445, 5.6302285260089, 6.518684458889611, 5.480696792037937, 5.38791505858697, 6.1600506504655455, 5.203486019453451, 5.2194014747593505, 6.031669483817339, 5.38791505858697, 8.504599942558624, 6.666320457695676, 4.957448648273388, 5.369105726629474, 5.6302285260089, 5.029975092341651, 5.501750201235769, 5.103402560896468, 5.688336156816181, 5.381605889393706, 5.6630183488318915, 6.425158400878788, 4.826492585426599, 9.064215730494046, 5.984601972959354, 5.865542612943365, 5.241023938772516, 5.025560074132534, 5.426629570767661, 4.905332647134375, 5.545235313175508, 5.6302285260089, 6.093801264924346, 5.996162795360429, 5.433230254799013, 6.407458823779387, 5.825537278329666, 5.235574334004951, 7.405987653890514, 5.224763417900736, 5.203486019453451, 5.523256406456732, 6.8669911531578265, 5.198236663567307, 5.917910598460681, 5.865542612943365, 5.021164462659496, 5.13239009776972, 5.487665461354029, 5.614228184662459, 5.381605889393706, 5.582975641158354, 5.815781103384301, 7.359467638255621, 6.323375706568846, 5.5088676690046325, 4.720410308640362, 4.92904917375169, 5.950700421283672, 6.291627008254265, 5.8966332000133965, 5.344564617713356, 5.567708169027566, 5.098651958137871, 7.454777818059946, 5.575312768412785, 5.996162795360429, 4.889828460598409, 5.241023938772516, 5.886161900146101, 5.638325736241519, 5.8757991131105545, 5.886161900146101, 4.905332647134375, 5.075231683929772, 6.6893099759203745, 6.499266373032509, 6.068483456940055, 5.622196354311636, 6.6893099759203745, 5.453297817849822, 5.552670291663025, 6.538487086185791, 5.0660150288248476, 5.241023938772516, 5.516036158483245, 5.038864039758897, 5.2194014747593505, 6.736938024909629, 6.291627008254265, 5.530529165785812, 
5.4006540843644, 5.079872063486274, 5.089217925904512, 5.413557489200308, 6.372972647708218, 5.193014719586156, 5.008092381092143, 5.7870709975018695, 5.1521927250659, 5.606323005155346, 5.056882545261575, 5.263124285773182, 5.732011220318842, 5.208763076554295, 5.413557489200308, 5.9072153093439335, 4.986678286588327, 6.323375706568846, 5.560160963392184, 5.545235313175508, 5.835389574772678, 5.5984798276943195, 5.4600775048352, 6.339636227440626, 6.106704669760253, 5.928721514564897, 6.621868695124842, 5.394264286265629, 6.202014849564578, 4.9615723654572506, 6.307375365222405, 5.714311643219442, 5.235574334004951, 5.961873721881797, 4.901434006718717, 4.937081345448955, 7.617296747557721, 5.886161900146101, 5.815781103384301, 4.990924577469778, 5.314711654563675, 5.606323005155346, 5.344564617713356, 5.433230254799013, 4.889828460598409, 5.466903469905601, 5.9072153093439335, 6.643847601843618, 6.307375365222405, 5.9072153093439335, 7.315015875684788, 5.996162795360429, 5.193014719586156, 5.679825467148272, 4.863261433213688, 5.338522303257394, 5.622196354311636, 5.075231683929772, 6.187830214572622, 4.965713158123282, 5.117791298348568, 5.439874797517681, 6.425158400878788, 7.9656034418259365, 5.098651958137871, 5.285724117690423, 5.252013060348111, 5.362913756381553, 5.654719546017196, 6.119776751327606, 5.5984798276943195, 6.1600506504655455, 6.007858835123621, 5.582975641158354, 6.146444998409767, 5.070612738073478, 4.833738993947366] +pair_weight = [6.643847601843618, 6.812923931887551, 6.666320457695676, 6.8669911531578265, -0.0, -0.0, 6.666320457695676, 6.8669911531578265, 8.253285514277717, -0.0, -0.0, 7.049312709951781, 7.8855607341524, 7.359467638255621, 6.666320457695676, -0.0, 7.272456261265991, 7.811452761998678, -0.0, 8.841072179179836, 7.405987653890514, 7.454777818059946, -0.0, 6.8669911531578265, 7.016522887128791, 6.6893099759203745, 6.8395921789697125, 7.454777818059946, 6.8669911531578265, -0.0, 6.736938024909629, 7.811452761998678, -0.0, -0.0, 6.954002530147457, -0.0, 6.6893099759203745, 6.786948445484291, -0.0, -0.0, 6.761630637500001, 6.8395921789697125, 7.506071112447496, 6.666320457695676, 7.118305581438733, -0.0, -0.0, 6.8395921789697125, -0.0, 9.351897802945828, 7.315015875684788, -0.0, 7.454777818059946, -0.0, -0.0, 7.154673225609608, -0.0, -0.0, 7.405987653890514, -0.0, 7.049312709951781, 7.742459890511727, 6.761630637500001, 7.315015875684788, 7.742459890511727, -0.0, 6.895162030124523, 6.924149566997776, 7.049312709951781, 7.617296747557721, -0.0, 6.643847601843618, 8.14792499861989, 6.761630637500001, 9.351897802945828, 7.154673225609608, 7.8855607341524, 7.8855607341524, 7.617296747557721, 7.811452761998678, -0.0, -0.0, 7.049312709951781, -0.0, -0.0, 7.192413553592455, 7.083214261627463, 7.272456261265991, 7.049312709951781, -0.0, -0.0, -0.0, -0.0, 6.643847601843618, 7.811452761998678, 7.083214261627463, 6.954002530147457, 6.761630637500001, 7.192413553592455, -0.0, 6.786948445484291, -0.0, 7.9656034418259365, -0.0, -0.0, 7.016522887128791, 7.560138333717772, 7.742459890511727, -0.0, 9.757362911053992, 6.643847601843618, 7.8855607341524, 7.016522887128791, 9.064215730494046, 8.052614818815567, 9.064215730494046, 7.272456261265991, 6.98477418881421, 7.454777818059946, -0.0, 6.712840473330568, 7.272456261265991, 6.924149566997776, -0.0, 6.812923931887551, 7.742459890511727, -0.0, 6.8669911531578265, -0.0, 6.643847601843618, 8.658750622385883, 7.016522887128791, 10.450510091613937, -0.0, -0.0, -0.0, 6.6893099759203745, 6.761630637500001, -0.0, -0.0, -0.0, 
8.841072179179836, -0.0, 6.6893099759203745, -0.0, -0.0, 10.450510091613937, 6.6893099759203745, 8.504599942558624, 6.761630637500001, 8.841072179179836, 6.666320457695676, 6.786948445484291, 6.6893099759203745, 6.736938024909629, 7.231634266745736, 8.14792499861989, 6.786948445484291, -0.0, 6.6893099759203745, 6.812923931887551, 6.736938024909629, 7.742459890511727, 7.677921369374156, -0.0, 7.506071112447496, 7.405987653890514, 7.231634266745736, 7.083214261627463, -0.0, -0.0, 7.677921369374156, -0.0, -0.0, 6.6893099759203745, 6.895162030124523, 7.8855607341524, -0.0, -0.0, 9.064215730494046, 7.016522887128791, -0.0, 9.757362911053992, 7.506071112447496, 7.016522887128791, 6.736938024909629, 6.924149566997776, -0.0, -0.0, -0.0, 8.14792499861989, -0.0, 9.351897802945828, 8.371068549934101, 6.812923931887551, 6.761630637500001, 7.118305581438733, -0.0, -0.0, 7.8855607341524, -0.0, 6.8395921789697125, -0.0, 6.812923931887551, 6.666320457695676, -0.0, 8.658750622385883, 6.895162030124523, -0.0, 6.6893099759203745, 6.712840473330568, 8.371068549934101, 6.954002530147457, -0.0, 6.761630637500001, -0.0, 7.049312709951781, 9.064215730494046, -0.0, -0.0, -0.0, -0.0, 6.6893099759203745, -0.0, 6.643847601843618, 9.757362911053992, 8.052614818815567, -0.0, 7.454777818059946, -0.0, 6.812923931887551, -0.0, -0.0, 7.359467638255621, 7.359467638255621, 7.617296747557721, 6.98477418881421, 8.504599942558624, -0.0, -0.0, 6.666320457695676, 7.8855607341524, 6.786948445484291, 6.786948445484291, 6.666320457695676, 6.98477418881421, -0.0, 6.8669911531578265, 7.272456261265991, -0.0, -0.0, 8.658750622385883, 7.016522887128791, -0.0, 7.9656034418259365, 8.841072179179836, 6.954002530147457, -0.0, -0.0, 7.560138333717772, 7.811452761998678, 6.736938024909629, 6.643847601843618, 6.786948445484291, 6.786948445484291, 6.55868979350331, 7.192413553592455, 7.315015875684788, 7.049312709951781, 6.8669911531578265, -0.0, 7.192413553592455, -0.0, -0.0, -0.0, -0.0, 7.118305581438733, -0.0, -0.0, 6.6893099759203745, -0.0, 9.351897802945828, 6.954002530147457, -0.0, 7.677921369374156, -0.0, 6.666320457695676, 6.55868979350331, 6.786948445484291, 6.924149566997776, 7.506071112447496, -0.0, 6.761630637500001, -0.0, -0.0, 6.579309080706047, -0.0, 7.272456261265991, -0.0, 9.064215730494046, 6.538487086185791, 6.643847601843618, -0.0, 8.841072179179836, -0.0, 7.811452761998678, 6.6893099759203745, 7.272456261265991, 6.600362489903878, -0.0, 6.600362489903878, -0.0, 7.118305581438733, 6.643847601843618, 7.359467638255621, -0.0, 6.895162030124523, 6.98477418881421, 6.666320457695676, -0.0, 6.666320457695676, 6.643847601843618, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, 7.049312709951781, 6.8669911531578265, 8.253285514277717, -0.0, 8.841072179179836, -0.0, -0.0, 10.450510091613937, -0.0, 10.450510091613937, -0.0, 7.083214261627463, -0.0, 9.351897802945828, 7.359467638255621, 7.192413553592455, -0.0, -0.0, -0.0, 6.579309080706047, 6.600362489903878, 7.742459890511727, -0.0, -0.0, 6.666320457695676, 8.052614818815567, 6.8395921789697125, 7.272456261265991, 6.666320457695676, 6.98477418881421, 7.118305581438733, 6.98477418881421, 6.712840473330568, -0.0, -0.0, 7.016522887128791, 7.315015875684788, 7.192413553592455, 7.359467638255621, 7.560138333717772, -0.0, -0.0, 7.049312709951781, 7.154673225609608, 7.016522887128791, 7.742459890511727, 6.8395921789697125, -0.0, 9.351897802945828, 8.052614818815567, 6.8669911531578265, 7.359467638255621, 6.579309080706047, 7.016522887128791, -0.0, 6.643847601843618, 6.712840473330568, -0.0, 
6.761630637500001, -0.0, 8.841072179179836, 6.761630637500001, 6.786948445484291, 8.658750622385883, 9.064215730494046, 9.757362911053992, -0.0, -0.0, 6.600362489903878, 7.560138333717772, 6.600362489903878, -0.0, -0.0, -0.0, 7.083214261627463, 7.359467638255621, -0.0, 6.895162030124523, -0.0, 7.742459890511727, 9.351897802945828, -0.0, 8.253285514277717, -0.0, -0.0, -0.0, 8.504599942558624, 6.761630637500001, 7.231634266745736, 7.016522887128791, 6.8395921789697125, 8.658750622385883, 7.8855607341524, 7.677921369374156, -0.0, -0.0, 7.617296747557721, -0.0, 6.8669911531578265, 7.315015875684788, 6.786948445484291, 9.064215730494046, 6.895162030124523, 9.064215730494046, 8.658750622385883, -0.0, 7.016522887128791, 9.757362911053992, 10.450510091613937, -0.0, -0.0, 7.231634266745736, 6.736938024909629, -0.0, -0.0, 6.736938024909629, 6.712840473330568, 6.643847601843618, 7.677921369374156, 8.371068549934101, 6.8669911531578265, -0.0, 6.786948445484291, 6.98477418881421, -0.0, 8.841072179179836, 7.016522887128791, 6.712840473330568, -0.0, 7.083214261627463, -0.0, 6.736938024909629, 6.761630637500001, 6.761630637500001, -0.0, 9.351897802945828, 6.666320457695676, 6.812923931887551, 6.643847601843618, -0.0, 7.272456261265991, 7.083214261627463, -0.0, 6.954002530147457, -0.0, 7.405987653890514, 7.677921369374156, -0.0, 6.954002530147457, 6.643847601843618, -0.0, 9.757362911053992, -0.0, -0.0, 6.643847601843618, 7.742459890511727, 7.049312709951781, 6.761630637500001, 6.621868695124842, -0.0, -0.0, 7.454777818059946, 6.712840473330568, -0.0, -0.0, 6.666320457695676, 6.736938024909629, 6.8395921789697125, 6.712840473330568, -0.0, -0.0, 8.658750622385883, 7.454777818059946, 7.454777818059946, 7.560138333717772, -0.0, 9.064215730494046, 6.895162030124523, 7.083214261627463, 7.315015875684788, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, 6.98477418881421, -0.0, -0.0, -0.0, -0.0, 7.677921369374156, -0.0, 6.8669911531578265, -0.0, 7.359467638255621, -0.0, 6.8395921789697125, 7.405987653890514, -0.0, 6.8395921789697125, 8.371068549934101, 7.742459890511727, 8.052614818815567, -0.0, -0.0, 7.083214261627463, 8.658750622385883, -0.0, 9.351897802945828, 10.450510091613937, 9.064215730494046, 7.8855607341524, 6.895162030124523, -0.0, 8.658750622385883, -0.0, 8.841072179179836, 8.658750622385883, 10.450510091613937, 9.757362911053992, -0.0, -0.0, 7.359467638255621, 6.579309080706047, -0.0, 8.253285514277717, 6.924149566997776, 7.118305581438733, -0.0, 9.757362911053992, 7.118305581438733, 6.98477418881421, -0.0, -0.0, 6.8395921789697125, 6.666320457695676, 6.712840473330568, 7.405987653890514, -0.0, 7.154673225609608, 6.761630637500001, -0.0, 6.761630637500001, 8.504599942558624, -0.0, 6.812923931887551, -0.0, 6.8669911531578265, 7.315015875684788, 7.016522887128791, -0.0, 7.231634266745736, 6.8669911531578265, 7.405987653890514, -0.0, 8.504599942558624, 6.8395921789697125, 6.924149566997776, -0.0, -0.0, 6.924149566997776, -0.0, 9.757362911053992, -0.0, 8.14792499861989, -0.0, 6.600362489903878, 8.841072179179836, -0.0, -0.0, 6.621868695124842, -0.0, 6.579309080706047, 7.742459890511727, 10.450510091613937, 6.666320457695676, 6.538487086185791, -0.0, 7.315015875684788, 7.231634266745736, 6.895162030124523, -0.0, -0.0, 7.154673225609608, 7.154673225609608, -0.0, 6.6893099759203745, 6.895162030124523, -0.0, 8.504599942558624, -0.0, -0.0, 6.621868695124842, 6.600362489903878, -0.0, 6.666320457695676, 6.736938024909629, 6.621868695124842, 7.118305581438733, -0.0, -0.0, 6.712840473330568, -0.0, 6.761630637500001, 
-0.0, 7.231634266745736, -0.0, -0.0, 7.154673225609608, 7.231634266745736, 6.8395921789697125, 6.812923931887551, -0.0, 7.083214261627463, 7.231634266745736, 7.083214261627463, 6.761630637500001, -0.0, 6.600362489903878, 7.315015875684788, 6.8669911531578265, 6.895162030124523, 7.454777818059946, 6.666320457695676, 8.052614818815567, 7.506071112447496, 6.8669911531578265, -0.0, 7.742459890511727, 6.8669911531578265, 7.231634266745736, 6.98477418881421, 6.924149566997776, 6.600362489903878, 6.712840473330568, -0.0, -0.0, 7.506071112447496, 7.016522887128791, 7.272456261265991, -0.0, -0.0, -0.0, -0.0, 6.8669911531578265, -0.0, -0.0, 7.454777818059946, 7.016522887128791, 6.98477418881421, -0.0, 10.450510091613937, 6.8395921789697125, 6.761630637500001, -0.0, 6.924149566997776, 9.757362911053992, -0.0, 7.506071112447496, 6.579309080706047, 7.811452761998678, -0.0, 8.658750622385883, 6.8669911531578265, 6.600362489903878, 8.841072179179836, 8.841072179179836, -0.0, -0.0, -0.0, -0.0, 6.6893099759203745, 10.450510091613937, -0.0, 6.761630637500001, 10.450510091613937, -0.0, -0.0, -0.0, 6.666320457695676, 6.8395921789697125, -0.0, -0.0, 6.924149566997776, 10.450510091613937, -0.0, -0.0, 6.895162030124523, 6.954002530147457, 6.600362489903878, -0.0, -0.0, 7.118305581438733, 7.811452761998678, 7.359467638255621, -0.0, 7.016522887128791, 7.192413553592455, 6.736938024909629, 7.192413553592455, -0.0, -0.0, 6.812923931887551, -0.0, 6.736938024909629, -0.0, 6.812923931887551, 6.812923931887551, -0.0, -0.0, 6.954002530147457, 6.6893099759203745, -0.0, -0.0, -0.0, -0.0, 6.895162030124523, -0.0, 7.049312709951781, -0.0, 6.895162030124523, -0.0, 6.600362489903878, 9.757362911053992, 6.579309080706047, 6.761630637500001, 9.351897802945828, 6.761630637500001, 6.579309080706047, 7.192413553592455, 6.786948445484291, 7.359467638255621, 8.253285514277717, -0.0, 6.895162030124523, 6.600362489903878, -0.0, -0.0, 6.666320457695676, 7.016522887128791, 9.757362911053992, 7.811452761998678, 7.231634266745736, -0.0, 6.812923931887551, 8.253285514277717, -0.0, 6.643847601843618, 8.841072179179836, 10.450510091613937, -0.0, -0.0, 6.8395921789697125, 8.841072179179836, 7.9656034418259365, -0.0, -0.0, 6.895162030124523, 6.621868695124842, -0.0, 9.064215730494046, 9.064215730494046, -0.0, 6.621868695124842, 8.371068549934101, 6.55868979350331, -0.0, 6.643847601843618, 6.786948445484291, 6.8669911531578265, 6.666320457695676, 6.786948445484291, 6.736938024909629, -0.0, 7.272456261265991, 7.016522887128791, -0.0, -0.0, 7.359467638255621, -0.0, 6.712840473330568, 6.8669911531578265, -0.0, 7.016522887128791, 6.6893099759203745, -0.0, 7.118305581438733, 6.666320457695676, 7.083214261627463, -0.0, -0.0, -0.0, -0.0, 6.8669911531578265, 6.895162030124523, 6.895162030124523, 8.253285514277717, -0.0, 6.98477418881421, 7.049312709951781, 6.8395921789697125, -0.0, 7.231634266745736, -0.0, 6.761630637500001, -0.0, -0.0, 9.757362911053992, 8.052614818815567, 10.450510091613937, 8.253285514277717, 8.658750622385883, -0.0, 7.454777818059946, 7.560138333717772, 7.742459890511727, 10.450510091613937, -0.0, 10.450510091613937, -0.0, -0.0, 8.14792499861989, -0.0, -0.0, -0.0, 8.658750622385883, 7.506071112447496, 6.761630637500001, 6.924149566997776, -0.0, 8.658750622385883, 8.841072179179836, -0.0, 6.712840473330568, 6.98477418881421, -0.0, 6.621868695124842, 7.359467638255621, 7.083214261627463, 7.016522887128791, -0.0, 6.98477418881421, 6.666320457695676, 6.761630637500001, 6.6893099759203745, -0.0, -0.0, -0.0, 7.118305581438733, -0.0, 
6.6893099759203745, -0.0, -0.0, 6.98477418881421, -0.0, 7.272456261265991, -0.0, 8.052614818815567, 7.192413553592455, 6.812923931887551, -0.0, 7.560138333717772, 7.231634266745736, -0.0, 6.924149566997776, 7.231634266745736, -0.0, -0.0, 6.812923931887551, 6.6893099759203745, -0.0, -0.0, -0.0, 6.8395921789697125, 7.811452761998678, 7.677921369374156, 6.666320457695676, 7.016522887128791, -0.0, -0.0, 8.052614818815567, 7.454777818059946, -0.0, 6.895162030124523, -0.0, -0.0, 7.154673225609608, 6.736938024909629, 7.118305581438733, 6.6893099759203745, 6.924149566997776, -0.0, -0.0, 6.761630637500001, 7.154673225609608, 6.761630637500001, 7.811452761998678, 6.954002530147457, 6.924149566997776, 6.812923931887551, -0.0, 7.9656034418259365, 6.8669911531578265, 6.6893099759203745, 7.016522887128791, 9.064215730494046, 7.742459890511727, 6.954002530147457, 6.8395921789697125, 6.666320457695676, 6.8669911531578265, 8.658750622385883, 6.954002530147457, -0.0, -0.0, -0.0, 8.841072179179836, 6.6893099759203745, -0.0, 9.351897802945828, 7.677921369374156, 6.8395921789697125, -0.0, 7.811452761998678, 7.617296747557721, -0.0, -0.0, -0.0, 8.371068549934101, -0.0, -0.0, 10.450510091613937, 6.621868695124842, 6.6893099759203745, -0.0, 7.359467638255621, 8.841072179179836, 6.812923931887551, -0.0, 6.924149566997776, 6.812923931887551, 8.14792499861989, -0.0, 6.600362489903878, 6.643847601843618, 7.192413553592455, 6.895162030124523, 7.083214261627463, -0.0, 6.895162030124523, 7.9656034418259365, 7.231634266745736, 6.8669911531578265, 7.9656034418259365, -0.0, -0.0, 7.016522887128791, 6.8395921789697125, 8.253285514277717, 10.450510091613937, 6.6893099759203745, 6.666320457695676, 6.6893099759203745, 6.736938024909629, -0.0, -0.0, 6.666320457695676, 7.315015875684788, 7.272456261265991, 6.579309080706047, -0.0, -0.0, -0.0, -0.0, 7.9656034418259365, -0.0, 8.841072179179836, -0.0, 6.6893099759203745, -0.0, -0.0, 7.8855607341524, -0.0, 6.786948445484291, -0.0, -0.0, 7.272456261265991, 7.192413553592455, 7.811452761998678, -0.0, 8.841072179179836, 7.9656034418259365, 7.231634266745736, 8.841072179179836, 7.016522887128791, 9.757362911053992, 10.450510091613937, 7.118305581438733, 6.895162030124523, -0.0, -0.0, -0.0, 6.6893099759203745, 6.786948445484291, 7.677921369374156, 9.064215730494046, 9.351897802945828, 8.052614818815567, 6.954002530147457, 7.118305581438733, -0.0, -0.0, -0.0, 6.895162030124523, 8.371068549934101, 6.736938024909629, -0.0, -0.0, -0.0, 6.812923931887551, -0.0, -0.0, 7.9656034418259365, -0.0, -0.0, 7.231634266745736, 9.351897802945828, 6.8669911531578265, 7.083214261627463, 6.786948445484291, -0.0, 7.405987653890514, 10.450510091613937, 6.600362489903878, 8.253285514277717, 6.786948445484291, -0.0, 7.016522887128791, 6.666320457695676, -0.0, 6.8395921789697125, -0.0, -0.0, 6.924149566997776, 6.895162030124523, 6.895162030124523, 6.712840473330568, 6.666320457695676, -0.0, 6.736938024909629, 6.643847601843618, 6.761630637500001, 7.016522887128791, 6.6893099759203745, -0.0, 7.506071112447496, -0.0, 8.504599942558624, 7.506071112447496, 6.954002530147457, 7.016522887128791, 7.016522887128791, -0.0, -0.0, 9.351897802945828, 7.677921369374156, 6.98477418881421, 7.016522887128791, -0.0, 6.736938024909629, -0.0, 7.405987653890514, 7.083214261627463, -0.0, 7.315015875684788, -0.0, -0.0, 10.450510091613937, 7.506071112447496, 7.560138333717772, -0.0, -0.0, -0.0, -0.0, -0.0, 6.712840473330568, -0.0, 7.617296747557721, -0.0, 6.8669911531578265, -0.0, 7.049312709951781, -0.0, 7.506071112447496, 
6.8669911531578265, 6.8395921789697125, 6.643847601843618, 7.083214261627463, -0.0, -0.0, -0.0, -0.0, 6.643847601843618, 6.600362489903878, 7.359467638255621, 7.405987653890514, -0.0, -0.0, 6.924149566997776, -0.0, 9.351897802945828, 6.8395921789697125, -0.0, 7.231634266745736, 7.359467638255621, 7.272456261265991, 8.14792499861989, 6.812923931887551, -0.0, 8.14792499861989, -0.0, 7.049312709951781, -0.0, -0.0, 8.504599942558624, -0.0, 6.6893099759203745, 6.8669911531578265, 6.786948445484291, 6.621868695124842, 6.666320457695676, 6.666320457695676, -0.0, 6.712840473330568, 6.643847601843618, -0.0, 6.786948445484291, -0.0, 6.812923931887551, 6.6893099759203745, 6.736938024909629, 7.405987653890514, -0.0, 6.786948445484291, -0.0, 7.154673225609608, 6.736938024909629, 7.359467638255621, 6.761630637500001, -0.0, 7.231634266745736, -0.0, 6.761630637500001, 6.666320457695676, 6.643847601843618, 7.192413553592455, 7.049312709951781, 7.154673225609608, 6.812923931887551, 7.506071112447496, -0.0, 6.579309080706047, 7.560138333717772, 6.666320457695676, 6.6893099759203745, 6.6893099759203745, 7.315015875684788, 6.895162030124523, 6.812923931887551, 6.666320457695676, 6.666320457695676, 7.016522887128791, -0.0, -0.0, -0.0, 6.8395921789697125, -0.0, -0.0, 7.9656034418259365, 6.666320457695676, -0.0, 6.736938024909629, 6.8669911531578265, -0.0, -0.0, -0.0, -0.0, -0.0, 6.666320457695676, -0.0, 7.016522887128791, 7.083214261627463, 6.8395921789697125, 7.118305581438733, -0.0, 7.405987653890514, 6.761630637500001, 7.560138333717772, -0.0, -0.0, 6.812923931887551, 10.450510091613937, 6.761630637500001, 7.083214261627463, 6.600362489903878, 8.052614818815567, -0.0, -0.0, 6.6893099759203745, -0.0, -0.0, 6.579309080706047, 7.016522887128791, 7.506071112447496, 6.736938024909629, -0.0, 6.98477418881421, 6.712840473330568, -0.0, -0.0, 6.600362489903878, 7.154673225609608, 7.742459890511727, -0.0, -0.0, 6.643847601843618, 6.579309080706047, 7.118305581438733, 7.8855607341524, -0.0, 7.315015875684788, 7.742459890511727, 7.617296747557721, 6.895162030124523, 6.924149566997776, 6.643847601843618, 8.504599942558624, 7.049312709951781, 8.504599942558624, 6.55868979350331, 7.192413553592455, 8.841072179179836, 9.351897802945828, -0.0, 6.954002530147457, 7.9656034418259365, 8.052614818815567, 7.154673225609608, 8.658750622385883, 6.895162030124523, 7.231634266745736, 7.811452761998678, 6.666320457695676, 6.666320457695676, 6.895162030124523, 6.643847601843618, 6.8669911531578265, 6.666320457695676, -0.0, 6.736938024909629, 6.643847601843618, 6.621868695124842, 6.761630637500001, -0.0, 6.98477418881421, 6.8669911531578265, 6.712840473330568, 6.786948445484291, -0.0, 6.8395921789697125, -0.0, 6.98477418881421, -0.0, 6.736938024909629, 6.924149566997776, 6.786948445484291, 7.454777818059946, 8.658750622385883, 7.192413553592455, 6.812923931887551, 6.736938024909629, 6.666320457695676, 6.895162030124523, -0.0, 6.600362489903878, 9.064215730494046, 9.757362911053992, -0.0, 7.192413553592455, 6.8669911531578265, -0.0, -0.0, -0.0, -0.0, 7.049312709951781, -0.0, 9.064215730494046, 6.712840473330568, -0.0, -0.0, 9.064215730494046, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, 6.643847601843618, -0.0, -0.0, 6.643847601843618, 10.450510091613937, -0.0, 8.052614818815567, 6.600362489903878, 8.371068549934101, 7.359467638255621, -0.0, 7.049312709951781, 6.736938024909629, 9.351897802945828, -0.0, -0.0, 6.812923931887551, 8.14792499861989, 6.666320457695676, -0.0, -0.0, -0.0, -0.0, 6.666320457695676, -0.0, -0.0, 
7.9656034418259365, 6.8669911531578265, 6.621868695124842, 6.924149566997776, -0.0, -0.0, -0.0, 6.8669911531578265, 6.579309080706047, 7.617296747557721, 9.351897802945828, 10.450510091613937, -0.0, -0.0, 6.621868695124842, -0.0, -0.0, 7.9656034418259365, 9.351897802945828, -0.0, 9.064215730494046, 7.359467638255621, 7.192413553592455, 6.761630637500001, 7.272456261265991, 7.315015875684788, 7.811452761998678, 9.757362911053992, -0.0, 7.049312709951781, -0.0, 6.712840473330568, 9.064215730494046, -0.0, 7.677921369374156, 6.8669911531578265, 6.8395921789697125, 7.405987653890514, 9.757362911053992, -0.0, 8.841072179179836, 7.677921369374156, -0.0, 7.506071112447496, -0.0, -0.0, 8.504599942558624, 9.351897802945828, 10.450510091613937, -0.0, 7.154673225609608, 9.351897802945828, 7.016522887128791, 7.118305581438733, 7.016522887128791, 7.8855607341524, -0.0, -0.0, -0.0, -0.0, -0.0, 6.98477418881421, 8.052614818815567, -0.0, 6.8395921789697125, 7.231634266745736, 6.812923931887551, 6.812923931887551, -0.0, -0.0, 6.643847601843618, 6.6893099759203745, 6.712840473330568, 6.8395921789697125, 6.98477418881421, -0.0, -0.0, 8.658750622385883, 6.579309080706047, 6.924149566997776, -0.0, 6.98477418881421, 6.736938024909629, -0.0, 8.841072179179836, 7.231634266745736, -0.0, 6.8669911531578265, 6.8395921789697125, 6.666320457695676, 6.712840473330568, -0.0, -0.0, 6.666320457695676, -0.0, -0.0, 8.504599942558624, -0.0, 6.8395921789697125, 6.954002530147457, 6.621868695124842, 7.315015875684788, 7.118305581438733, 7.049312709951781, 6.666320457695676, -0.0, 6.8395921789697125, 7.231634266745736, 6.761630637500001, 6.895162030124523, 6.736938024909629, -0.0, -0.0, -0.0, -0.0, 7.231634266745736, 6.812923931887551, 7.8855607341524, -0.0, 6.666320457695676, 6.55868979350331, 6.736938024909629, -0.0, 6.621868695124842, 6.812923931887551, 6.712840473330568, -0.0, 7.405987653890514, -0.0, -0.0, 6.8395921789697125, 6.666320457695676, -0.0, 8.14792499861989, -0.0, 9.351897802945828, 6.8669911531578265, -0.0, 6.895162030124523, 6.643847601843618, 7.677921369374156, 6.579309080706047, 6.812923931887551, 6.8669911531578265, 7.506071112447496, -0.0, -0.0, 6.8669911531578265, 7.315015875684788, 7.231634266745736, -0.0, 6.98477418881421, -0.0, 7.315015875684788, 7.454777818059946, 8.253285514277717, 6.98477418881421, -0.0, 10.450510091613937, -0.0, 10.450510091613937, 9.757362911053992, 9.351897802945828, 9.351897802945828, -0.0, 7.742459890511727, -0.0, -0.0, -0.0, 9.757362911053992, 6.895162030124523, 6.643847601843618, 7.9656034418259365, 9.757362911053992, 8.371068549934101, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, 7.154673225609608, 7.315015875684788, 7.315015875684788, 7.617296747557721, -0.0, 6.954002530147457, -0.0, 6.736938024909629, -0.0, 7.506071112447496, 6.643847601843618, 6.895162030124523, -0.0, 6.8669911531578265, 7.617296747557721, 7.154673225609608, 6.98477418881421, 6.712840473330568, -0.0, 7.016522887128791, 9.064215730494046, -0.0, -0.0, -0.0, 7.506071112447496, -0.0, -0.0, 8.371068549934101, 6.8395921789697125, -0.0, 8.052614818815567, 9.064215730494046, 6.954002530147457, -0.0, 6.736938024909629, 7.9656034418259365, -0.0, 8.371068549934101, 6.812923931887551, 6.812923931887551, -0.0, -0.0, 8.504599942558624, 9.757362911053992, 7.049312709951781, -0.0, 7.016522887128791, 7.359467638255621, -0.0, 7.742459890511727, 7.192413553592455, 6.6893099759203745, 6.98477418881421, -0.0, -0.0, 7.8855607341524, 6.8395921789697125, 6.8669911531578265, 6.954002530147457, 6.712840473330568, -0.0, 8.052614818815567, 
-0.0, -0.0, 7.742459890511727, 7.617296747557721, -0.0, -0.0, -0.0, 6.786948445484291, 6.98477418881421, 6.736938024909629, -0.0, 7.154673225609608, -0.0, 6.98477418881421, 7.016522887128791, 7.231634266745736, 6.761630637500001, -0.0, 6.621868695124842, 7.192413553592455, -0.0, -0.0, 7.359467638255621, 6.786948445484291, 6.812923931887551, 6.621868695124842, -0.0, 6.643847601843618, 7.083214261627463, -0.0, 6.6893099759203745, -0.0, -0.0, -0.0, 6.736938024909629, 9.351897802945828, -0.0, -0.0, 6.712840473330568, -0.0, -0.0, 8.253285514277717, -0.0, 8.14792499861989, 6.6893099759203745, 7.560138333717772, 8.14792499861989, 8.253285514277717, -0.0, -0.0, 8.504599942558624, 7.617296747557721, 8.052614818815567, 6.666320457695676, -0.0, 8.14792499861989, -0.0, 8.504599942558624, 7.083214261627463, -0.0, 6.643847601843618, 7.231634266745736, 6.6893099759203745, -0.0, 6.8669911531578265, -0.0, -0.0, 6.579309080706047, -0.0, -0.0, -0.0, -0.0, 6.954002530147457, 6.712840473330568, -0.0, 7.454777818059946, 9.064215730494046, -0.0, 7.617296747557721, 6.736938024909629, 7.742459890511727, 7.118305581438733, 8.253285514277717, 6.579309080706047, -0.0, 7.315015875684788, 7.016522887128791, -0.0, -0.0, 6.98477418881421, -0.0, 7.118305581438733, 6.666320457695676, 9.351897802945828, 6.600362489903878, 8.658750622385883, -0.0, -0.0, 6.895162030124523, -0.0, 7.454777818059946, 6.8395921789697125, 6.812923931887551, 8.052614818815567, -0.0, 6.6893099759203745, -0.0, 6.761630637500001, 9.351897802945828, 7.560138333717772, -0.0, 7.742459890511727, 7.154673225609608, 8.052614818815567, -0.0, 7.118305581438733, -0.0, 7.083214261627463, 7.9656034418259365, -0.0, 7.154673225609608, 8.658750622385883, 8.841072179179836, -0.0, -0.0, 7.272456261265991, 6.786948445484291, 6.954002530147457, -0.0, -0.0, -0.0, 9.757362911053992, 7.154673225609608, -0.0, 7.506071112447496, 7.617296747557721, 7.560138333717772, 6.666320457695676, -0.0, 9.757362911053992, 7.083214261627463, 7.677921369374156, 6.712840473330568, 7.049312709951781, -0.0, 8.371068549934101, 10.450510091613937, 7.8855607341524, -0.0, 6.712840473330568, 7.506071112447496, -0.0, 7.049312709951781, -0.0, 6.761630637500001, 7.617296747557721, -0.0, 6.8669911531578265, 8.504599942558624, -0.0, 6.736938024909629, 6.666320457695676, -0.0, 8.841072179179836, 7.506071112447496, 8.371068549934101, 7.154673225609608, -0.0, 6.954002530147457, 6.8669911531578265, -0.0, 8.841072179179836, 6.895162030124523, 7.454777818059946, 9.757362911053992, 7.405987653890514, 8.052614818815567, -0.0, 7.049312709951781, -0.0, 7.118305581438733, 7.049312709951781, 6.924149566997776, 8.052614818815567, 7.315015875684788, 7.560138333717772, 7.8855607341524, 7.405987653890514, -0.0, 7.811452761998678, 8.841072179179836, 9.351897802945828, 8.052614818815567, 6.712840473330568, -0.0, 7.118305581438733, -0.0, 7.154673225609608, -0.0, -0.0, -0.0, 6.643847601843618, 6.954002530147457, 6.8669911531578265, 7.454777818059946, 7.617296747557721, 6.600362489903878, 7.154673225609608, 7.083214261627463, 7.083214261627463, 8.658750622385883, 7.454777818059946, -0.0, 6.786948445484291, 6.8669911531578265, 6.8669911531578265, -0.0, -0.0, -0.0, -0.0, -0.0, 7.454777818059946, 6.643847601843618, -0.0, -0.0, -0.0, -0.0, 7.272456261265991, -0.0, 6.666320457695676, -0.0, -0.0, 7.9656034418259365, 8.052614818815567, 8.658750622385883, -0.0, 7.677921369374156, -0.0, 6.712840473330568, -0.0, 6.8395921789697125, -0.0, -0.0, -0.0, -0.0, -0.0, 6.666320457695676, -0.0, 6.895162030124523, -0.0, 7.049312709951781, 
-0.0, 6.643847601843618, -0.0, 6.812923931887551, 6.98477418881421, -0.0, 6.736938024909629, 6.954002530147457, 7.118305581438733, 10.450510091613937, -0.0, 6.924149566997776, -0.0, 6.924149566997776, -0.0, -0.0, -0.0, 6.8669911531578265, 7.049312709951781, 6.600362489903878, -0.0, 7.8855607341524, 7.049312709951781, 7.083214261627463, -0.0, 6.736938024909629, -0.0, 6.812923931887551, -0.0, -0.0, -0.0, 7.315015875684788, 6.6893099759203745, 6.736938024909629, 6.579309080706047, -0.0, 7.272456261265991, -0.0, 7.811452761998678, 9.351897802945828, -0.0, 8.14792499861989, 7.154673225609608, 7.8855607341524, -0.0, 9.351897802945828, 7.677921369374156, -0.0, -0.0, -0.0, 9.757362911053992, -0.0, 7.405987653890514, -0.0, -0.0, -0.0, -0.0, -0.0, 6.643847601843618, 9.757362911053992, -0.0, 8.371068549934101, 10.450510091613937, -0.0, 8.371068549934101, -0.0, 7.560138333717772, 6.8395921789697125, -0.0, 6.6893099759203745, -0.0, 7.016522887128791, -0.0, 6.954002530147457, 6.643847601843618, 7.8855607341524, -0.0, -0.0, -0.0, 7.016522887128791, 6.6893099759203745, -0.0, 7.560138333717772, 7.506071112447496, 6.643847601843618, -0.0, 8.253285514277717, -0.0, 6.55868979350331, 6.736938024909629, 8.253285514277717, -0.0, 6.666320457695676, -0.0, -0.0, 6.786948445484291, 6.6893099759203745, 6.712840473330568, 6.621868695124842, -0.0, 6.954002530147457] diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/UT_attrs.json b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/UT_attrs.json new file mode 100644 index 0000000000000000000000000000000000000000..6ad23d6ba6b5e8e14abe2d04c150b20ecbe95c10 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/UT_attrs.json @@ -0,0 +1 @@ +{"Synthetic": "synthetic", "Canvas": "canvas", "Sheepskin": "sheepskin", "Patent.Leather": "leather", "Wool": "wool", "Leather": "leather", "Satin": "satin", "Hair.Calf": "hair", "Full.grain.leather": "leather", "Rubber": "rubber", "Faux.Leather": "leather", "Suede": "suede", "Nylon": "nylon", "Nubuck": "leather", "Faux.Fur": "fur", "Cotton": "cotton"} \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/UT_gamma.json b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/UT_gamma.json new file mode 100644 index 0000000000000000000000000000000000000000..e7630ac336f349ede2925aeb3a7dd4fd772c661b --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/UT_gamma.json @@ -0,0 +1 @@ +{"attr_b": [1.0, 1.0, 0.8, 1.0, 1.2, 1.0, 1.0, 1.0, 1.2, 0.8, 1.0, 0.8, 1.2, 1.0, 0.8, 1.0], "attr_a": [1.0, 1.0, 0.8, 0.8, 1.2, 1.0, 0.8, 1.0, 1.0, 0.8, 0.8, 0.8, 1.2, 1.0, 0.8, 0.8], "comp_b": [0.84, 0.1, 1.0, 0.46, 0.5, 0.74, 0.62, 2.3000000000000003, 0.55, 0.56, 0.36, 0.6000000000000001, 0.68, 0.78, 0.54, 0.5], "comp_a": [0.8400000000000001, 0.0, 1.0, 0.44000000000000017, 0.48000000000000015, 0.7800000000000002, 0.6000000000000001, 2.3000000000000003, 0.45, 0.5600000000000003, 0.3199999999999999, 0.6000000000000001, 0.6599999999999997, 0.7800000000000002, 0.5400000000000003, 0.48000000000000015]} \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/UT_objs.json b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/UT_objs.json new file mode 100644 index 0000000000000000000000000000000000000000..13ff1ede97754a862c1a95e629bca8386ba08e1a --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/UT_objs.json @@ -0,0 +1 @@ 
+{"Shoes.Clogs.and.Mules": "clogs", "Shoes.Heels": "heels", "Boots.Mid-Calf": "midcalf", "Shoes.Flats": "flats", "Boots.Knee.High": "knee-high", "Shoes.Sneakers.and.Athletic.Shoes": "sneakers", "Shoes.Boat.Shoes": "shoes", "Shoes.Oxfords": "oxfords", "Boots.Ankle": "boots", "Sandals": "sandals", "Slippers": "slippers", "Shoes.Loafers": "loafers"} \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/UT_weight.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/UT_weight.py new file mode 100644 index 0000000000000000000000000000000000000000..d3315e704f21f9af935dc4cf260fb388db34902a --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/UT_weight.py @@ -0,0 +1,30 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+attr_weight = [3.3346977756321516, 6.045005314036012, 6.315880268171411, 4.128581330635162, 3.4429435720973482, 5.932888015915306, 0.5385720013774027, 4.785004678240413, 4.751904729814068, 3.435434150075217, 3.7892631298020407, 4.174507768761085, 5.468582407784208, 2.0151239462217565, 2.3481072476387737, 5.180900335332427] +obj_weight = [2.0469601212700113, 2.7404183922042185, 2.442367117505141, 2.230464336693612, 5.948155488046094, 3.5078171577379704, 2.256203834895188, 1.7291003739616688, 2.5755686404252045, 2.947818448105355, 1.840059754211169, 4.629481314601183] +pair_weight = [6.190717125217406, 7.637636108153731, 6.7903382477665275, -0.0, 7.078020320218308, -0.0, -0.0, 7.483485428326473, 3.4879094000800452, 7.414492556839521, -0.0, -0.0, 6.9444889275937856, 7.819957664947686, 7.819957664947686, 7.557593400480195, -0.0, 6.656806855142005, 6.01166889376842, -0.0, -0.0, 5.29422902063943, 6.539023819485621, 5.468582407784208, 7.232171000045566, 6.097191067206582, -0.0, 5.765833931252139, 4.8861007951117825, 5.449713923479825, 7.557593400480195, -0.0, 5.859862880900415, 6.152250844389609, -0.0, 5.173782867563563, 5.145809015521157, 7.289329413885515, 7.637636108153731, 7.078020320218308, -0.0, 7.34995403570195, 2.445142327426924, 3.2258484263190192, 2.9401906460564686, 2.412785893487567, -0.0, 3.8254334380077957, 2.78041102735701, 2.6231194673495026, 2.928105906841397, 3.1359762985353044, 2.7908277882152652, -0.0, 6.230722459831105, 7.724647485143361, -0.0, -0.0, 7.557593400480195, 6.864446219920249, 6.721345376279576, 7.232171000045566, -0.0, 6.01166889376842, -0.0, 7.232171000045566, 5.111907463845475, 6.987048542012581, 6.9444889275937856, -0.0, 7.178103778775291, 7.178103778775291, -0.0, 6.33835312402347, -0.0, 3.678411501241291, 6.045005314036012, 6.656806855142005, -0.0, 5.679891501451415, 4.16411806491195, -0.0, 7.34995403570195, 5.805054644405421, -0.0, 7.414492556839521, 4.2144598197728005, -0.0, 5.917850138550765, 6.484956598215345, 3.382023398335508, 4.5695831730201135, -0.0, 5.105262921126807, 6.171299039360304, -0.0, 4.258911582343634, 3.4181284029776244, 4.270340278167257, -0.0, 3.915966831216804, -0.0, -0.0, 4.935156952100976, -0.0, -0.0, 5.404043886646637, 3.855342209400369, 3.7492229683647187, 5.579247975671727, 6.511624845297507, 3.440434160491922, 6.293901361452637, 7.557593400480195, -0.0, 5.27835567148314] diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/__init__.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dc82d5136ef0402074d79102c4ee033e0e2a7b09 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/__init__.py @@ -0,0 +1,58 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import importlib
+
+def load_loss_weight(dataset_name):
+    """Load per-category loss weights that balance the classes.
+
+    Each weight is computed as -log(frequency) of the category.
+    """
+    # A trailing 'g' marks the generalized split (e.g. 'MITg'),
+    # which shares the auxiliary data of the base dataset.
+    if dataset_name[-1] == 'g':
+        dataset_name = dataset_name[:-1]
+
+    try:
+        Weight = importlib.import_module('utils.aux_data.%s_weight' % dataset_name)
+
+        # Return pair weights only when the weight module defines them;
+        # None lets callers distinguish the two cases.
+        if 'pair_weight' in Weight.__dict__:
+            return Weight.attr_weight, Weight.obj_weight, Weight.pair_weight
+        else:
+            return Weight.attr_weight, Weight.obj_weight, None
+
+    except ImportError:
+        raise NotImplementedError("Loss weight for %s is not implemented yet" % dataset_name)
+
+
+def load_wordvec_dict(dataset_name, vec_type):
+    """Load the (attrs_dict, objs_dict) word vectors, e.g. glove_MIT for ('MIT', 'glove')."""
+    if dataset_name[-1] == 'g':
+        dataset_name = dataset_name[:-1]
+
+    try:
+        Wordvec = importlib.import_module('utils.aux_data.%s_%s' % (vec_type, dataset_name))
+        return Wordvec.attrs_dict, Wordvec.objs_dict
+
+    except ImportError:
+        raise NotImplementedError("%s vector for %s is not ready yet" % (vec_type, dataset_name))
diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/glove_MIT.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/glove_MIT.py
new file mode 100644
index 0000000000000000000000000000000000000000..b7835eb22f93959f8ac23611745032e9f38b26e6
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/glove_MIT.py
@@ -0,0 +1,29 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
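+# NOTE (editorial addition, not part of the original SYMNET sources): a minimal
+# usage sketch for the loaders in utils/aux_data/__init__.py, which resolve
+# module names dynamically ('glove' + 'MIT' -> utils.aux_data.glove_MIT,
+# 'MIT' -> utils.aux_data.MIT_weight). It assumes the project root is on
+# sys.path, and runs only when this module is executed directly
+# (python -m utils.aux_data.glove_MIT), never on a normal import.
+if __name__ == '__main__':
+    from utils.aux_data import load_loss_weight, load_wordvec_dict
+
+    # A trailing 'g' (generalized split, e.g. 'MITg') is stripped first,
+    # so 'MITg' and 'MIT' load the same auxiliary data.
+    attr_w, obj_w, pair_w = load_loss_weight('MIT')    # each weight = -log(frequency)
+    attrs_vec, objs_vec = load_wordvec_dict('MIT', 'glove')
+
+    print(len(attr_w), len(obj_w), len(pair_w))        # one weight per attr / obj / pair
+    print(len(attrs_vec[u'crumpled']))                 # dimensionality of one GloVe vector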
+attrs_dict = {u'crumpled': [-0.34428998827934265, -0.041088998317718506, -0.2932800054550171, -0.4592599868774414, 0.11011999845504761, -0.04722899943590164, -0.19939999282360077, -0.6640099883079529, -0.5397099852561951, 0.3364099860191345, -0.33511999249458313, 0.2157599925994873, -0.007037200033664703, -0.18472999334335327, -0.07743799686431885, 0.45440998673439026, -0.1596899926662445, 0.13877999782562256, 0.07464399933815002, -0.42572999000549316, 0.5098099708557129, 0.1652500033378601, 0.19673000276088715, -0.053564999252557755, -0.6513400077819824, -0.5163699984550476, -0.027967000380158424, 0.33294999599456787, 0.39096999168395996, -0.5551900267601013, -0.3602299988269806, -0.08196599781513214, 0.24785000085830688, 0.0569549985229969, -0.275160014629364, 0.15851999819278717, -0.266400009393692, 0.2996000051498413, 0.22662000358104706, 0.6411899924278259, -0.12699000537395477, -0.04391000047326088, 0.20701000094413757, 0.10897000133991241, 0.7827600240707397, 0.09943299740552902, 0.6978899836540222, -0.17302000522613525, -0.4871099889278412, -0.16333000361919403, 0.32010000944137573, -0.24769000709056854, 0.3541199862957001, -0.49483001232147217, -0.006093599833548069, -0.11180000007152557, -0.030539000406861305, -0.3184199929237366, -0.33945998549461365, -0.07872900366783142, 0.24357999861240387, 0.2035900056362152, -0.3389100134372711, 0.23613999783992767, 0.05988499894738197, -0.06609100103378296, -0.02644599974155426, -0.03605800122022629, 0.06889799982309341, 0.2436700016260147, 0.16088999807834625, 0.1939300000667572, -0.4092499911785126, 0.15916000306606293, 0.9880099892616272, -0.04250999912619591, -0.36667001247406006, -0.4129599928855896, 0.03487100079655647, 0.3122299909591675, 0.3718099892139435, -0.6010400056838989, -0.29017001390457153, 0.03912999853491783, -0.594980001449585, -0.04801800101995468, -0.436489999294281, 0.13519999384880066, -0.27917999029159546, 0.6059499979019165, 0.8172399997711182, -0.11087000370025635, 0.5447499752044678, -0.39754000306129456, 0.05583000183105469, -0.1837799996137619, -0.32293999195098877, -0.09995999932289124, 0.009872100315988064, 0.21085000038146973, 0.05595400184392929, 0.14565999805927277, -0.13141000270843506, 0.4396800100803375, 0.14381000399589539, 0.2757200002670288, 0.41249001026153564, 0.2784000039100647, -0.02839300036430359, -0.5855100154876709, -0.5911200046539307, 0.43588998913764954, -0.11588999629020691, -0.27608999609947205, 0.03706200048327446, 0.5907300114631653, 0.03346800059080124, 0.475629985332489, -0.020478999242186546, -0.1550000011920929, 0.536109983921051, -1.205199956893921, 0.15033000707626343, 0.8438699841499329, -0.18791000545024872, -0.2899099886417389, -0.5521399974822998, -0.689329981803894, 0.06684300303459167, 0.21823999285697937, 0.006013500038534403, 0.5989599823951721, 0.29304999113082886, -0.06502699851989746, 0.08840800076723099, 0.3768700063228607, -0.46700000762939453, 0.04524900019168854, 0.448309987783432, -0.3065800070762634, 0.4081999957561493, 0.04364300146698952, -0.28336000442504883, 0.3684700131416321, -0.07883399724960327, -0.17778000235557556, 0.015665000304579735, 0.7240200042724609, -0.19422000646591187, -1.0414999723434448, -0.1702200025320053, 0.05504300072789192, -0.2831900119781494, -0.7008699774742126, -0.09893699735403061, -0.07893099635839462, 0.5222799777984619, -0.4513300061225891, 0.1266700029373169, 0.11151000112295151, 0.03763600066304207, -0.41819000244140625, -0.29497000575065613, 0.028251999989151955, 0.019744999706745148, -0.41113999485969543, 
-0.40887001156806946, 0.34825000166893005, 0.14228999614715576, -0.4296799898147583, -0.04471400007605553, -0.23906999826431274, 0.43237000703811646, -0.38782998919487, -0.12258999794721603, -0.3259899914264679, -0.06383299827575684, 0.2501299977302551, 0.10746999830007553, -1.233199954032898, 0.5087299942970276, -0.24829000234603882, 0.07919900119304657, -0.09284300357103348, 0.4571099877357483, -0.5929700136184692, 0.5177800059318542, 0.04627000167965889, 0.12443000078201294, 0.09904699772596359, 0.6386600136756897, -0.019416000694036484, 0.19471000134944916, 0.0838719978928566, -0.23116999864578247, 0.6228399872779846, -0.2694000005722046, 0.6305000185966492, -0.3929300010204315, 0.08373899757862091, -0.5704100131988525, -0.44053998589515686, 0.08286800235509872, -0.031539998948574066, -0.3713099956512451, -0.5761299729347229, 0.44119998812675476, -0.2601200044155121, -0.24048000574111938, -0.2669999897480011, -0.286980003118515, 0.19423000514507294, -0.3826900124549866, 0.030036000534892082, 0.5541599988937378, -0.11800999939441681, 0.05010500177741051, -0.7599999904632568, -0.23916999995708466, 0.4950000047683716, 0.11365000158548355, 0.20733000338077545, 0.0677570030093193, -0.36781999468803406, -0.19498999416828156, 0.38350000977516174, 0.019169000908732414, -0.17447000741958618, -0.21585999429225922, -0.11618000268936157, 0.1256999969482422, 0.13425999879837036, -0.7028800249099731, 0.5762799978256226, 0.12586000561714172, -0.12690000236034393, 0.4994199872016907, 0.34033000469207764, -0.5914599895477295, -0.4601399898529053, 0.18571999669075012, -0.33281001448631287, -0.31843000650405884, -0.20848000049591064, -0.5156400203704834, -0.09362000226974487, 0.3923400044441223, -0.3147999942302704, 0.12838999927043915, 0.6325399875640869, -0.1286199986934662, -0.20558999478816986, 0.6330999732017517, -1.0046000480651855, 0.6714699864387512, 0.060159001499414444, 0.2485000044107437, -0.02113799937069416, -0.2927500009536743, 0.0758339986205101, 0.2010899931192398, 0.17090000212192535, 0.028831999748945236, 0.26809000968933105, 0.8689000010490417, -0.4667699933052063, -0.10978999733924866, -0.19144000113010406, -0.46369001269340515, -0.09587299823760986, -0.375789999961853, -0.5230299830436707, 0.3259899914264679, 0.24085000157356262, -0.4560000002384186, -0.12951000034809113, 0.11146999895572662, -0.3040199875831604, -0.39621999859809875, 0.09526299685239792, 0.33945998549461365, -0.458050012588501, 0.18772999942302704, -0.7493000030517578, -0.2089100033044815, -0.03570299968123436, 0.10760000348091125, 0.0726109966635704, 0.14020000398159027, -0.20095999538898468, -0.029551999643445015, 0.731410026550293, -0.055135998874902725, 0.3801800012588501, 0.0646049976348877, -0.43393000960350037, 0.7250300049781799, 0.37494999170303345, 0.13006000220775604, 0.1651799976825714], u'upright': [-0.5957900285720825, 0.019175000488758087, -0.46136999130249023, -0.6815199851989746, -0.0498029999434948, -0.5041599869728088, 0.28248998522758484, 0.1809999942779541, 0.03614100068807602, -0.32572999596595764, -0.20324000716209412, 0.7910199761390686, -0.20457999408245087, -0.12008000165224075, -0.2233400046825409, 0.4557799994945526, 0.0034503000788390636, 0.11242999881505966, 0.25567999482154846, -0.17445999383926392, 0.11350999772548676, 0.3127000033855438, 0.1889999955892563, 0.34898000955581665, 0.47308000922203064, 0.2701900005340576, 0.2910900115966797, -0.14007000625133514, -0.2381500005722046, 0.7228699922561646, -0.31672999262809753, 0.6037300229072571, 0.3355799913406372, -0.4567300081253052, 
-0.21532000601291656, -0.05112899839878082, 0.809689998626709, -0.2983799874782562, -0.1879899948835373, 0.7912999987602234, 0.012590000405907631, 0.883400022983551, -0.11970999836921692, -0.3178899884223938, -0.17935000360012054, 0.16313999891281128, 1.038699984550476, -0.1629199981689453, 0.03324199840426445, 0.6893100142478943, -0.18007999658584595, -0.06562300026416779, 0.12466999888420105, -0.09863600134849548, -0.16638000309467316, -0.0067460001446306705, 0.577430009841919, 0.0704680010676384, -0.555679976940155, 0.7520400285720825, 0.31380999088287354, 0.05780100077390671, -0.19885000586509705, 0.3447999954223633, -0.6484699845314026, -0.32565999031066895, -0.20730000734329224, 0.48475000262260437, 0.006269299890846014, 0.09177099913358688, -0.19418999552726746, 0.5964000225067139, -0.1569100022315979, 0.08733999729156494, 0.19715000689029694, -0.023096999153494835, -0.032152000814676285, 0.5869699716567993, 0.4654200077056885, -0.16738000512123108, -0.6344799995422363, 0.43963998556137085, -0.43985000252723694, 0.2752699851989746, 0.32833999395370483, -0.17430000007152557, 0.41244998574256897, 0.6257299780845642, -0.4902699887752533, 0.3331100046634674, 1.401900053024292, 0.12342999875545502, 0.2446800023317337, 0.22960999608039856, 0.040727000683546066, -0.46292999386787415, 0.016090000048279762, -0.07835999876260757, 0.22327999770641327, 0.014204000122845173, -0.054464999586343765, 0.10524000227451324, 0.017705999314785004, -0.45350998640060425, -0.27562999725341797, 0.4726499915122986, 0.25900998711586, -0.5215799808502197, 0.06327900290489197, -0.4700799882411957, 0.23444999754428864, 0.7276700139045715, 0.4650000035762787, 0.016183000057935715, -0.40272998809814453, -0.001344699994660914, -0.13718000054359436, -0.3431299924850464, 0.12616999447345734, 0.07495500147342682, -0.9484000205993652, -0.3942500054836273, 0.7754799723625183, -0.39010998606681824, -0.4437600076198578, 0.3612000048160553, -0.48875001072883606, -0.18271000683307648, 0.03357899934053421, -0.3285999894142151, 0.11517000198364258, 0.3929699957370758, 0.1528400033712387, 0.4145500063896179, 0.0043704998679459095, 0.2974500060081482, 0.2367600053548813, -0.055309001356363297, 0.2550700008869171, 0.16780999302864075, 0.314300000667572, -0.03175299987196922, -0.21904000639915466, 0.017125999554991722, -0.34784001111984253, -0.15669000148773193, -0.31808000802993774, -0.503000020980835, -0.2831000089645386, 0.18693000078201294, -0.4714199900627136, -0.05297999829053879, 0.19471000134944916, -0.3226900100708008, -0.06025300174951553, 0.4862399995326996, 0.3199799954891205, -0.5942000150680542, 0.7472400069236755, 0.885200023651123, -0.10279999673366547, -0.19228999316692352, -0.294730007648468, 0.1357100009918213, -0.14381000399589539, 0.15031999349594116, -0.19708000123500824, 0.3357299864292145, 0.15855999290943146, -0.6714900135993958, 0.23125000298023224, 0.19543999433517456, 0.33932000398635864, -0.2425999939441681, 0.5366799831390381, -0.4459800124168396, 0.38203001022338867, 0.7479100227355957, -0.15850000083446503, -0.8431500196456909, 0.3577899932861328, -0.23125000298023224, 0.7521899938583374, 0.21629999577999115, 0.2036599963903427, -0.44468000531196594, 0.5249599814414978, 0.3437199890613556, 0.2199700027704239, 0.0038525001145899296, -0.1534699946641922, 0.29416000843048096, 0.9099500179290771, 0.39225998520851135, 0.4933300018310547, -0.05889099836349487, -0.7750999927520752, -0.4431000053882599, -0.8883399963378906, 0.14026999473571777, 0.979960024356842, 0.6456000208854675, 0.14117999374866486, 
0.17719000577926636, 0.32780998945236206, 0.3128400146961212, 0.3891099989414215, -0.6410300135612488, -0.037592001259326935, -0.18941999971866608, 0.5467699766159058, 0.038235001266002655, -0.03853899985551834, 0.23940999805927277, 0.21825000643730164, 0.06871499866247177, -0.026706000789999962, -0.1224299967288971, -0.21615999937057495, -0.01954299956560135, -0.5525100231170654, -0.052903998643159866, -0.5299000144004822, 0.23331999778747559, -0.49919000267982483, 0.42767998576164246, -0.26809000968933105, -0.04951599985361099, 0.0187389999628067, 0.2946699857711792, 0.012039000168442726, -0.0897120013833046, 0.29269999265670776, -0.5196099877357483, -0.4896700084209442, 0.03976399824023247, -1.1705000400543213, 0.13149000704288483, -0.29745998978614807, -0.04895099997520447, 0.179639995098114, -0.5142099857330322, 0.08651699870824814, 0.5452200174331665, -0.5137500166893005, -0.4144600033760071, 0.3215000033378601, 0.49307000637054443, -0.35082998871803284, 0.4210300147533417, 0.41576001048088074, -0.3450399935245514, 0.3889099955558777, -0.4281100034713745, 0.5829200148582458, 0.3765200078487396, -0.39956000447273254, -0.10429000109434128, 0.12804000079631805, -0.2037699967622757, 0.17821000516414642, -0.2788200080394745, 0.33719998598098755, -0.046202998608350754, -0.2492000013589859, -0.6338499784469604, 0.37762001156806946, 0.009081199765205383, 0.2292100042104721, 0.31589001417160034, -0.48385998606681824, 0.06653200089931488, -0.5473799705505371, -0.044234998524188995, -0.023473000153899193, -0.5034700036048889, -0.34376001358032227, -0.0369580015540123, -0.40369999408721924, -0.5415400266647339, 0.22100000083446503, -0.27553001046180725, -0.4309700131416321, -0.320250004529953, 0.11348000168800354, 0.5608400106430054, 0.12849999964237213, 0.8705599904060364, 0.3400599956512451, -0.2324499934911728, 0.5834299921989441, 0.32510998845100403, -0.21527999639511108, -0.417959988117218, 0.4119099974632263, 0.006088799796998501, -0.09378699958324432, 0.0845550000667572, 0.427700012922287, -0.009439700283110142], u'bright': [-0.07795599848031998, -0.40202000737190247, 0.049734000116586685, -0.6969299912452698, -0.21193000674247742, -0.05250399932265282, -0.0836310014128685, 0.09935200214385986, 0.05850199982523918, -1.0078999996185303, 0.3141300082206726, 0.034324001520872116, -0.17890000343322754, 0.39395999908447266, 0.15018999576568604, 0.09302199631929398, -0.4922800064086914, -0.21785999834537506, -0.038520000874996185, -0.14399999380111694, -0.1648699939250946, 0.6391800045967102, 0.13535000383853912, 0.25870001316070557, 0.2820799946784973, -0.38319000601768494, 0.38982999324798584, -0.3435699939727783, -0.06914100050926208, 0.13343000411987305, 0.12657999992370605, 0.07868599891662598, -0.7499799728393555, 0.33063000440597534, -1.0298999547958374, 0.9204099774360657, -0.0027687998954206705, -0.5104600191116333, 0.24556000530719757, 0.2842400074005127, 0.7098199725151062, 0.08877900242805481, -0.00964799989014864, 0.3682299852371216, -0.32962000370025635, -0.22378000617027283, -0.03271700069308281, 0.0355600006878376, -0.258899986743927, -0.9045699834823608, -0.22891999781131744, 0.014535999856889248, 0.7959200143814087, -0.5244899988174438, -0.24379999935626984, 0.048374999314546585, -0.227400004863739, -0.339819997549057, 0.5101500153541565, -0.16808000206947327, 0.02370000071823597, -0.4198099970817566, 0.33204999566078186, 0.004925900138914585, 0.33215001225471497, -0.10098999738693237, 0.07976900041103363, -0.2898600101470947, -0.1688999980688095, -0.09980300068855286, 
-0.315530002117157, -0.13955999910831451, -0.008422800339758396, 0.2161100059747696, -0.0014239000156521797, -0.30386999249458313, -0.2929399907588959, 0.07836999744176865, 0.3224799931049347, -0.2189899981021881, 0.030533000826835632, 0.11401999741792679, -0.21359999477863312, -0.011180000379681587, 0.19193999469280243, 0.15113000571727753, 0.046799998730421066, 0.01307000033557415, 0.1354600042104721, 0.265390008687973, 0.33449000120162964, -0.020399000495672226, -0.554669976234436, 0.11248999834060669, -0.16856999695301056, 0.6713500022888184, 0.19491000473499298, -0.22946999967098236, 0.35830000042915344, -0.4237399995326996, 0.06273499876260757, -0.15261000394821167, -0.24156999588012695, 0.43546000123023987, -0.3053399920463562, 0.2527199983596802, 0.25336000323295593, 0.20513999462127686, -0.41569000482559204, -0.3744499981403351, -0.10626000165939331, -0.06250499933958054, 0.8102200031280518, 0.2106499969959259, 0.3438200056552887, -0.0935019999742508, -0.11286000162363052, 0.5442399978637695, 0.5676800012588501, -0.20263999700546265, -0.246069997549057, -0.43476998805999756, 0.255840003490448, 0.6044099926948547, -0.04026800021529198, -0.08831500262022018, 0.044530998915433884, 0.8038100004196167, -0.008058600127696991, 0.05978899821639061, -0.028300000354647636, -0.05963899940252304, -0.05723800137639046, 0.12727999687194824, -0.29774999618530273, 0.06047099828720093, -0.07694000005722046, 0.24003000557422638, 0.12861000001430511, -0.17678000032901764, 0.2097100019454956, 0.44655001163482666, 0.3075000047683716, -0.7183700203895569, 0.1953900009393692, 0.04029899835586548, -0.24833999574184418, 0.08654200285673141, -0.013275999575853348, -0.06019200012087822, 0.052469998598098755, -0.30663999915122986, 0.21905000507831573, 0.04218300059437752, 0.7319999933242798, -0.5209599733352661, -0.1867399960756302, -0.35040000081062317, -0.3255000114440918, 0.1949000060558319, -0.04548799991607666, -0.6750800013542175, 0.38190001249313354, 0.05581099912524223, 0.2359900027513504, -0.2021699994802475, -0.36381998658180237, 0.542140007019043, 0.12394999712705612, 0.17961999773979187, 0.18002000451087952, -0.22538000345230103, -0.45831000804901123, 0.08744099736213684, 0.19509999454021454, -0.641290009021759, 0.24647000432014465, -0.019910000264644623, -0.01783600077033043, -0.10593999922275543, -0.17753000557422638, -0.2467000037431717, 0.24849000573158264, -0.11236999928951263, 0.3831700086593628, -0.24936999380588531, 0.7868800163269043, -0.22643999755382538, -0.2959800064563751, -0.3893299996852875, 0.20157000422477722, 0.017100999131798744, -0.13773000240325928, 0.13770000636577606, 0.030445000156760216, 0.05753999948501587, -0.3585500121116638, -0.3718099892139435, -0.3615100085735321, 0.15986000001430511, 0.9166899919509888, -0.1159299984574318, 0.13685999810695648, 0.05743199959397316, 0.13451999425888062, -0.3613399863243103, 0.4568600058555603, 0.7163199782371521, -0.16453999280929565, -0.07962699979543686, 0.46632999181747437, -0.49873000383377075, -0.2813499867916107, -0.44538000226020813, 0.529009997844696, -0.032705001533031464, 0.8951699733734131, -0.3186900019645691, 0.22353999316692352, -0.45045000314712524, 1.1282000541687012, -0.008663900196552277, -0.20364999771118164, 0.019512999802827835, -0.27094000577926636, -0.08269699662923813, -0.1618800014257431, -0.244609996676445, -0.1641799956560135, 0.07797200232744217, -0.1949699968099594, 0.24018000066280365, 0.18876999616622925, -0.38842999935150146, -0.6278200149536133, 0.20792999863624573, -0.4930799901485443, 
-0.24544000625610352, -0.8529199957847595, -0.32532998919487, 0.3540300130844116, 0.00441339984536171, -0.6633800268173218, 0.6234899759292603, -0.34509000182151794, 0.09598399698734283, 0.32912999391555786, -0.4106599986553192, -0.09391500055789948, -0.055018000304698944, -0.10162000358104706, -0.21678000688552856, -0.04464000090956688, -0.052202001214027405, 0.4005900025367737, 0.022206999361515045, -0.06558399647474289, -0.028074000030755997, -0.18779000639915466, 0.48993000388145447, 0.16467000544071198, -0.02718699909746647, 0.5270000100135803, 0.012558000162243843, -0.32853999733924866, -0.3606700003147125, -0.029726000502705574, 0.05677499994635582, 0.2051600068807602, 0.42065000534057617, 0.1106100007891655, -0.18725000321865082, -0.07474599778652191, -0.2121800035238266, -0.14428000152111053, 0.2617200016975403, -0.8544099926948547, -0.032517001032829285, -0.636650025844574, 0.09309300035238266, -0.012581000104546547, 0.2512199878692627, -0.03308200091123581, 0.04187000170350075, 0.3051399886608124, 0.1972299963235855, -0.05215200036764145, 0.3797900080680847, -0.2685000002384186, -0.3354800045490265, 0.3811799883842468, 0.46529000997543335, -0.27177000045776367, 0.261819988489151, -0.08762600272893906, 0.0703200027346611, 0.2617399990558624, -0.05974699929356575, 0.3023099899291992, 0.20986999571323395], u'dirty': [0.491239994764328, 0.1550700068473816, -0.29315999150276184, -0.7560200095176697, -0.49230000376701355, 0.3270699977874756, -0.6664000153541565, 0.8999999761581421, 0.3460800051689148, -0.8286600112915039, 0.13178999722003937, -0.2205200046300888, -0.12266000360250473, 0.5310800075531006, -0.1007699966430664, -0.585860013961792, -0.07777699828147888, 0.09018100053071976, 0.49309998750686646, 0.6993700265884399, 0.14610999822616577, 0.21209000051021576, 0.01319700013846159, 0.1428699940443039, 0.0016740000573918223, -0.4127799868583679, 0.27017998695373535, 0.0677890032529831, 0.3657299876213074, 0.07251899689435959, -0.760699987411499, 0.180649995803833, -0.37011000514030457, -0.1670600026845932, -0.1969500035047531, 0.6393799781799316, -0.3925899863243103, -0.3215999901294708, -0.25088998675346375, 0.09215699881315231, -0.0495930016040802, -0.3668400049209595, -0.18190999329090118, 0.030378999188542366, 0.34549999237060547, -0.002455000067129731, 0.3919000029563904, -0.43261998891830444, 0.5713599920272827, -0.2767699956893921, 0.14508000016212463, 0.10131999850273132, 0.267659991979599, -0.11015000194311142, 0.19776999950408936, -0.4381999969482422, -0.2508400082588196, 0.055073000490665436, -0.08339300006628036, -0.3778499960899353, -0.28387001156806946, -0.8384100198745728, -0.4945400059223175, 0.35717999935150146, -0.36917999386787415, -0.49529001116752625, 1.0425000190734863, -0.23948000371456146, -0.19885000586509705, -0.029580000787973404, 0.7105200290679932, 0.5424100160598755, -0.017186999320983887, 0.21038000285625458, -0.24192999303340912, -0.2715800106525421, 0.04489700123667717, 0.6245399713516235, 0.4103100001811981, -0.07989499717950821, -0.26590999960899353, -0.9563500285148621, 0.33858001232147217, 0.02098800055682659, 0.28505998849868774, -0.15960000455379486, 0.4337199926376343, 0.35523998737335205, -0.18629999458789825, 0.02542399987578392, 0.06889999657869339, 0.420989990234375, 0.49154001474380493, -0.3038100004196167, -0.06919199973344803, 0.22407999634742737, 0.29526999592781067, -0.27250000834465027, 0.5104399919509888, -0.3440299928188324, 0.15029999613761902, -0.45993998646736145, -0.6527100205421448, 0.5405099987983704, -0.5706599950790405, 
-0.6244300007820129, -0.4615499973297119, -0.06697099655866623, 0.3186500072479248, 0.1839500069618225, -0.09557799994945526, -0.09914600104093552, -0.5046300292015076, -0.2608200013637543, -0.13666999340057373, 0.2939800024032593, 0.5105500221252441, 0.08120100200176239, -0.43213000893592834, -0.7499300241470337, -0.19506999850273132, -0.14320999383926392, -0.1137000024318695, 0.2755599915981293, -0.04776100069284439, 0.40411999821662903, -0.07381899654865265, 0.33908000588417053, -0.17048999667167664, -0.1628199964761734, -0.08128099888563156, -0.3669799864292145, 0.27213001251220703, 0.12038999795913696, 0.003793099895119667, -0.3140000104904175, 0.06695099920034409, 0.3286899924278259, -0.0016725000459700823, -0.10558000206947327, 0.13676999509334564, 0.06634700298309326, 0.5230100154876709, -0.1334500014781952, -0.709879994392395, 0.45392000675201416, 0.18185000121593475, 0.5353599786758423, 0.41909000277519226, 0.2726599872112274, -0.3740200102329254, 0.5234599709510803, -0.676609992980957, 0.1476999968290329, 0.10769999772310257, -0.12541000545024872, -0.11586999893188477, 0.15373000502586365, 0.7021499872207642, 0.542110025882721, 0.18313999474048615, -0.16776999831199646, 0.4253700077533722, 0.6070200204849243, -0.055004000663757324, 0.1514499932527542, -0.21059000492095947, 0.25286999344825745, 0.0778999999165535, -0.30555999279022217, 0.08235900104045868, 0.7662400007247925, -0.7020800113677979, -0.4492799937725067, -0.6382399797439575, -0.2935200035572052, -0.5737599730491638, 0.10102999955415726, -0.18689000606536865, -0.4374699890613556, 0.22766999900341034, -0.21529999375343323, 0.4205299913883209, 0.6896700263023376, -0.14990000426769257, -0.02565000019967556, 1.2589999437332153, -0.22702999413013458, 1.0041999816894531, -0.3066500127315521, 0.5153200030326843, 0.0017353999428451061, 0.3595699965953827, -0.10371000319719315, -0.37088000774383545, -0.025102000683546066, -0.5411800146102905, -0.2576499879360199, -0.5083000063896179, 0.10892999917268753, 0.4665699899196625, 0.6877700090408325, 0.7408400177955627, -0.05626000091433525, -0.007438899949193001, -0.6105499863624573, 0.22040000557899475, 0.29416999220848083, 0.06212000176310539, -0.1340699940919876, -0.7829700112342834, 0.06991899758577347, -0.08291900157928467, 0.6532800197601318, 0.5722600221633911, 0.2191700041294098, 0.4782100021839142, -0.22885000705718994, -0.4601399898529053, -0.08395999670028687, -0.05138000100851059, 0.68190997838974, 0.32291001081466675, 0.04006199911236763, 0.04046100005507469, -0.24372999370098114, -0.08984100073575974, -0.6130899786949158, 0.036713000386953354, 0.43369001150131226, 0.9476199746131897, -0.6317999958992004, -0.36827000975608826, -0.18513000011444092, 0.035433001816272736, -0.3397899866104126, 0.10831999778747559, -0.05808499827980995, -0.15775999426841736, -0.15115000307559967, -0.12734000384807587, 0.07551799714565277, 0.11181999742984772, -0.3213300108909607, 0.14577999711036682, -0.37825000286102295, -0.3306399881839752, 0.020614000037312508, -0.04667799919843674, 0.007018299773335457, 0.13882000744342804, -0.8087400197982788, -0.4811899960041046, -0.19665999710559845, 0.8922200202941895, -0.10577999800443649, 0.243709996342659, 0.23713000118732452, 0.09298200160264969, 0.0003149700060021132, -0.2730500102043152, 0.19710999727249146, -0.46456998586654663, 0.4003700017929077, 0.6423900127410889, 0.3477500081062317, 0.005798900034278631, 0.008074600249528885, -0.3931199908256531, 0.2653700113296509, 0.02178099937736988, -0.05681400001049042, -0.3615899980068207, 
0.05755100026726723, -0.10706999897956848, 0.2552799880504608, -1.457900047302246, -0.4234899878501892, 0.21251000463962555, -0.1624699980020523, 0.0511230006814003, 0.42972999811172485, 0.022703999653458595, 0.1415800005197525, 0.21455000340938568, 0.4878099858760834, -0.09922300279140472, 0.5395200252532959, -0.15108999609947205, -0.3381099998950958, -0.3319000005722046, 0.23813000321388245, -0.405460000038147, 0.8844500184059143, -0.01396500039845705, -0.2716499865055084, 0.29653000831604004, 0.009897899813950062, -0.2991800010204315, 0.3576500117778778], u'rough': [-0.07220099866390228, -0.49511000514030457, -0.436379998922348, 0.25450998544692993, -0.8852900266647339, 0.2739199995994568, 0.22349999845027924, 0.9330400228500366, 0.13910000026226044, -0.8909000158309937, -0.3865000009536743, 0.193340003490448, -0.11958999931812286, 0.44176000356674194, -0.5069199800491333, 0.11428000032901764, -0.08141399919986725, 0.34577998518943787, 0.28286999464035034, 0.34922999143600464, -0.34942999482154846, 0.2757999897003174, -0.05626500025391579, 0.21447999775409698, -0.4430199861526489, -0.09469199925661087, 0.3838900029659271, 0.0849120020866394, -0.029178999364376068, 0.7554100155830383, 0.052792999893426895, 0.45824000239372253, -0.4332999885082245, -0.030918000265955925, -0.6415500044822693, 0.4712100028991699, -0.16850000619888306, -0.2741200029850006, -0.3123700022697449, -0.37011998891830444, -0.13176000118255615, 0.12460999935865402, 0.6731699705123901, -0.4597499966621399, 0.24769000709056854, 0.2659299969673157, -0.03799799829721451, -0.2542800009250641, -0.6279100179672241, -0.12161999940872192, -0.18735000491142273, 0.04961099848151207, 0.10942000150680542, 0.13549000024795532, -0.027141999453306198, -0.08231200277805328, -0.6539099812507629, -0.2535400092601776, -0.08764699846506119, 0.22672000527381897, -0.10299000144004822, 0.11740999668836594, 0.05702099949121475, -0.3600600063800812, -0.17967000603675842, -0.5179100036621094, 0.3410399854183197, -0.12027999758720398, 0.3275499939918518, -0.2779900133609772, 0.22057999670505524, 0.34672999382019043, -0.7438799738883972, -0.12661999464035034, -0.13822999596595764, 0.07326900213956833, 0.15228000283241272, 0.10876999795436859, 0.004908700007945299, -0.3104200065135956, -0.1615000069141388, 0.009182900190353394, 0.8577899932861328, 0.09676399827003479, -0.2820099890232086, -0.024507999420166016, 0.4775699973106384, -0.2531200051307678, -0.07075399905443192, 0.2547700107097626, -0.14538000524044037, 0.5277500152587891, -0.4906400144100189, -0.4999600052833557, 0.08769799768924713, -0.024660000577569008, 0.08274099975824356, 0.025049999356269836, -0.05482200160622597, -0.516539990901947, -0.2761799991130829, 0.2426699995994568, -0.3521200120449066, 0.32440999150276184, -0.6080999970436096, 0.5074399709701538, -0.008625499904155731, -0.17961999773979187, -0.0014032999752089381, -0.402209997177124, 0.10867000371217728, -0.5043500065803528, 0.15666000545024872, -0.25613999366760254, -0.2212499976158142, 0.20534999668598175, 0.32510998845100403, 0.027002999559044838, 0.020005999132990837, 0.2810800075531006, -0.16554999351501465, -0.5295799970626831, -0.14474999904632568, 0.4246399998664856, -0.026038000360131264, 0.514270007610321, 0.48337000608444214, 0.048413001000881195, 0.13624000549316406, -0.4858500063419342, 0.0796779990196228, 0.7534400224685669, -0.019519999623298645, -0.08512700349092484, -0.09806200116872787, 0.11428999900817871, 0.0777989998459816, -0.18921999633312225, -0.22155000269412994, 0.0590520016849041, 
0.4205000102519989, 0.7917299866676331, -0.5321000218391418, 0.10752999782562256, -0.14427000284194946, -0.2290399968624115, 0.17010000348091125, -0.21039000153541565, 0.2599799931049347, -0.004911100026220083, -0.19557000696659088, 0.021007999777793884, -0.38982999324798584, -0.12916000187397003, -0.10743000358343124, -0.02523599937558174, -0.1508300006389618, 0.16448000073432922, 0.6268699765205383, 0.5965099930763245, -0.3218800127506256, -0.43641000986099243, -0.36629998683929443, -0.2766999900341034, 0.3172999918460846, -0.34463998675346375, 0.3029800057411194, 0.11236000061035156, -0.35455000400543213, 0.08671800047159195, -0.39188000559806824, -0.11868000030517578, -0.63919997215271, 0.10886000096797943, -0.42186999320983887, -0.03378999978303909, 0.16123999655246735, 0.28022998571395874, -0.5638099908828735, -0.5723199844360352, -0.10745000094175339, 0.23056000471115112, 0.12120000272989273, 0.052368998527526855, -0.09832599759101868, -0.35293999314308167, 0.35374999046325684, -0.006082199979573488, -0.10213000327348709, 0.46588000655174255, -0.277209997177124, 0.5555199980735779, 0.795740008354187, -0.30900999903678894, 0.48030999302864075, -0.14480000734329224, -0.5375900268554688, -0.01498899981379509, -0.07423300296068192, -0.19304999709129333, 1.2360999584197998, 0.37116000056266785, 0.5682799816131592, 0.3441599905490875, 0.4907500147819519, 0.24695000052452087, -0.11845000088214874, 0.6376399993896484, -0.3024100065231323, 0.03719199821352959, -0.0032822999637573957, 0.23323999345302582, 0.11045999825000763, -0.3766799867153168, -0.6336699724197388, 0.17116999626159668, 0.6190099716186523, -0.2664499878883362, -0.31964001059532166, -0.24291999638080597, 0.8689299821853638, 0.06709299981594086, 0.015596999786794186, -0.14383000135421753, -0.024682000279426575, 0.1500999927520752, 0.4387800097465515, -0.867900013923645, -0.498199999332428, -0.3465699851512909, 0.27612999081611633, -0.2709900140762329, -0.5902900099754333, -0.27035000920295715, 0.1141899973154068, -0.15578000247478485, -0.34553998708724976, -0.1535000056028366, 0.21918000280857086, 0.11657000333070755, 0.02558100037276745, 0.04441500082612038, 0.027217000722885132, -0.18242999911308289, -0.4607599973678589, -0.1878499984741211, -0.17427000403404236, 0.37448999285697937, -0.020061999559402466, 0.2535400092601776, -0.24879999458789825, -0.33208999037742615, -0.14640000462532043, -0.8128100037574768, 0.3378799855709076, 0.30733999609947205, 0.22370000183582306, -0.020760999992489815, -0.238429993391037, 0.5919899940490723, -0.3355900049209595, 0.6298800110816956, -0.7644199728965759, -0.014632999897003174, 0.19912999868392944, 0.5520700216293335, 0.0033340000081807375, -0.39928001165390015, -0.3178899884223938, -0.08227399736642838, -0.3000899851322174, -0.35359999537467957, 0.04824899882078171, -0.2908099889755249, 0.11654999852180481, 0.2739099860191345, -0.7526599764823914, 0.06514500081539154, -0.08003599941730499, -0.03184700012207031, -0.15929999947547913, 0.09413599967956543, 0.07973100244998932, 0.42201998829841614, 0.1933099925518036, -0.43887999653816223, 0.26155999302864075, -0.07491999864578247, 0.21716000139713287, -0.20789000391960144, -0.1825300008058548, 0.5726000070571899, -0.20047999918460846, 0.4526900053024292, 0.4182699918746948, 0.6612600088119507, -0.10284999758005142, 0.23703999817371368, 0.3469099998474121, 0.11949999630451202], u'shattered': [0.322380006313324, -0.10763999819755554, -0.5844100117683411, -0.4415299892425537, 0.015542999841272831, 0.6671599745750427, -0.06814099848270416, 
0.0968329980969429, 0.2168699949979782, -0.779990017414093, 0.1797800064086914, 0.44859999418258667, 0.024855000898241997, -0.20404000580310822, -0.683459997177124, 0.02345000021159649, 0.35420000553131104, 0.7093499898910522, -0.4564799964427948, 0.12105000019073486, 0.421640008687973, 0.7002099752426147, 0.1945199966430664, -0.058733001351356506, 0.12559999525547028, 0.23107999563217163, -0.459089994430542, -0.15680000185966492, -0.048395998775959015, -0.1666399985551834, 0.1579499989748001, 0.27511999011039734, 0.39280998706817627, 0.4058699905872345, -0.15246999263763428, 0.07254599779844284, -0.5319600105285645, 0.325980007648468, 0.37338998913764954, 0.5582600235939026, 0.3675299882888794, -0.18477000296115875, -0.09326999634504318, -0.5964400172233582, 0.32082998752593994, 0.03945999965071678, -0.1471399962902069, -0.06319499760866165, -0.37828999757766724, -0.06201700121164322, 0.7383300065994263, 0.02317800000309944, 0.4692800045013428, -0.09206700325012207, 0.47648000717163086, 0.16773000359535217, -0.3434399962425232, 0.1558700054883957, -0.7423999905586243, 0.08296799659729004, -0.20454999804496765, 0.05422800034284592, -0.16176000237464905, -0.018079999834299088, 0.22943000495433807, 0.5309100151062012, -0.10527999699115753, -0.008000300265848637, 0.6288300156593323, 0.061866000294685364, -0.05529399961233139, -0.4422000050544739, -0.06656099855899811, 0.3120400011539459, 0.6144999861717224, 0.060520999133586884, 0.19735999405384064, -0.9752399921417236, -0.03617100045084953, 0.5102699995040894, -0.20104999840259552, 0.1366499960422516, 0.025219999253749847, 0.5158399939537048, -0.23722000420093536, 0.05658499896526337, -0.0713609978556633, -0.30908000469207764, 0.12276999652385712, 0.42917001247406006, 0.5993499755859375, 0.09573200345039368, 0.2027300000190735, -0.1690399944782257, 0.7557700276374817, 0.12838000059127808, 0.40356001257896423, -0.14700999855995178, 0.5969899892807007, 0.4309000074863434, 0.03244699910283089, 0.3570300042629242, 0.2465600073337555, -0.3366599977016449, 0.4939799904823303, 0.08346600085496902, 0.5945600271224976, -0.36893001198768616, 0.22192999720573425, -0.09071999788284302, -0.2519899904727936, -0.12732000648975372, -0.08625499904155731, -0.6005499958992004, 0.28317999839782715, -0.25146999955177307, -0.02635899931192398, -0.19789999723434448, 0.14119000732898712, -0.27351999282836914, 0.43011999130249023, -0.3515399992465973, -0.1863899976015091, 0.8660100102424622, -0.005323499906808138, -0.2866100072860718, -0.43105998635292053, 0.10050000250339508, -0.3301999866962433, 0.31470999121665955, 0.011156000196933746, 1.0192999839782715, 0.3782399892807007, 0.24337999522686005, 0.18322999775409698, -0.16487999260425568, -0.10040999948978424, 0.11386000365018845, 0.21560999751091003, -0.6088799834251404, -0.5408499836921692, 0.18199999630451202, -0.17994999885559082, 0.9988300204277039, 0.0011690000537782907, -0.1243399977684021, 0.499889999628067, 0.4156300127506256, -0.09386199712753296, -0.20430000126361847, 0.15710000693798065, -0.9297699928283691, -0.41808998584747314, 0.14316999912261963, 0.14790000021457672, 0.03441900014877319, 0.20197999477386475, -0.18474000692367554, 0.1475600004196167, 0.5198000073432922, 0.4876300096511841, -0.4037500023841858, -0.05717499926686287, -0.14270000159740448, 0.3369799852371216, 0.11262000352144241, 0.08641199767589569, -0.40713000297546387, 0.060139000415802, -0.4655900001525879, -0.03685599938035011, 0.10636000335216522, 0.4300000071525574, -0.36041998863220215, 0.32738998532295227, 0.3503499925136566, 
0.10478000342845917, 0.3423500061035156, -0.06235799938440323, -0.670710027217865, -0.17712000012397766, -0.1657399982213974, 0.1443299949169159, -0.4395500123500824, 0.40165001153945923, 0.1311500072479248, 0.3242399990558624, 0.2045000046491623, -0.31700000166893005, 0.35767000913619995, 0.41119998693466187, -0.7827799916267395, -0.45879998803138733, -0.32771000266075134, 0.19657999277114868, 0.49171000719070435, 0.023761000484228134, 0.45386001467704773, -0.5016499757766724, 0.18765999376773834, 0.9460999965667725, 0.20654000341892242, 0.2591699957847595, -0.10389000177383423, -0.7124199867248535, -0.6222500205039978, 0.2386299967765808, -0.6756799817085266, -0.2846600115299225, 0.6724900007247925, -0.09260600060224533, 0.3569999933242798, -0.19404999911785126, -0.06498400121927261, 0.3737199902534485, 0.11401999741792679, 0.566100001335144, -0.3589000105857849, 0.23093000054359436, 0.6329699754714966, 0.8190299868583679, -0.2904900014400482, 0.2448199987411499, -0.40838998556137085, 0.01461500022560358, -0.18110999464988708, 0.1126599982380867, -0.28088998794555664, -0.1468600034713745, -0.15666000545024872, 0.11389999836683273, -0.408050000667572, 0.06686999648809433, -0.04992299899458885, -0.4356899857521057, 0.22203999757766724, 0.3134300112724304, 0.03311700001358986, -0.3827199935913086, -0.725350022315979, -0.011253999546170235, -0.5424299836158752, -0.0965069979429245, 0.07197000086307526, -0.1826999932527542, -0.48100998997688293, -0.18577000498771667, -0.26927998661994934, -0.1609800010919571, 0.12370000034570694, 0.5859000086784363, 0.07289300113916397, 0.4909699857234955, -1.0161000490188599, 0.5109300017356873, 0.060589998960494995, -0.061420001089572906, 0.06597500294446945, 0.3634200096130371, 0.37751999497413635, 0.5676599740982056, 0.12482000142335892, -0.12477999925613403, 0.2956799864768982, 0.32249000668525696, 0.07056199759244919, -0.05985400080680847, -0.418040007352829, -0.1618500053882599, -0.026542000472545624, -1.0521999597549438, -0.1659500002861023, 0.01641399972140789, -0.08956900238990784, -0.34174999594688416, -0.018959999084472656, -1.145799994468689, 0.22731000185012817, 0.3801499903202057, -0.5310699939727783, 0.027202999219298363, 0.033677998930215836, -0.016352999955415726, -0.370059996843338, 0.16348999738693237, 0.10730999708175659, -0.5678499937057495, 0.26620998978614807, 0.6021100282669067, -0.1970299929380417, -0.08739999681711197, -0.07274399697780609, -0.14951999485492706, 0.0649150013923645, -0.0003724000125657767, 0.6193199753761292, 0.2203100025653839, -0.1386300027370453, -0.4230799973011017, 0.7224400043487549], u'cut': [-0.1674399971961975, -0.09372600167989731, -0.45100998878479004, 0.29945001006126404, 0.16243000328540802, -0.19600999355316162, 0.11411000043153763, 0.28547999262809753, -0.3203499913215637, -1.4743000268936157, -0.02266000024974346, 0.2602199912071228, 0.16425000131130219, 0.19208000600337982, -0.2711699903011322, -0.040036000311374664, -0.1768600046634674, 0.15896999835968018, -0.4630599915981293, -0.11960999667644501, -0.11396999657154083, -0.07577099651098251, 0.3098999857902527, 0.2663300037384033, 0.0802989974617958, -0.22561000287532806, -0.07385300099849701, -0.39642998576164246, -0.20171000063419342, 0.04819599911570549, 0.15977999567985535, 0.4412600100040436, 0.33566999435424805, 0.09805899858474731, -1.0916999578475952, -0.22296999394893646, 0.24151000380516052, 0.06777799874544144, 0.25303998589515686, 0.06132800132036209, -0.10939999669790268, -0.3406499922275543, -0.17735999822616577, -0.1951799988746643, 
-0.3396100103855133, 0.17541000247001648, -0.5018399953842163, -0.0160910002887249, -0.20768000185489655, -0.24131999909877777, 0.24773000180721283, -0.199070006608963, 0.21897000074386597, 0.1510400027036667, -0.34213998913764954, -0.46939000487327576, -0.3745500147342682, 0.549589991569519, 0.2859100103378296, 0.15533000230789185, -0.06116899847984314, 0.18939000368118286, -0.08706899732351303, 0.0654510036110878, 0.05297200009226799, -0.4345499873161316, -0.03922500088810921, 0.1445399969816208, 0.37665998935699463, 0.21778999269008636, 0.00810300000011921, 0.13756999373435974, 0.44624000787734985, 0.30452999472618103, 0.56072998046875, -0.008391800336539745, -0.15334999561309814, 0.1323699951171875, -0.4644100069999695, -0.25540998578071594, -0.2481600046157837, -0.19866999983787537, 0.6934000253677368, -0.10040999948978424, 0.19207000732421875, 0.06898900121450424, -0.6866899728775024, -0.05697999894618988, 0.03820699825882912, 0.04428499937057495, 0.40713000297546387, 0.5056899785995483, -0.34376001358032227, -0.37582001090049744, -0.04600600153207779, -0.550570011138916, -0.017775999382138252, 0.4309200048446655, 0.0500589981675148, -0.3980199992656708, -0.37977999448776245, 0.020468000322580338, -0.1694200038909912, -0.33601999282836914, -0.23732000589370728, 0.11355999857187271, -0.06915400177240372, -0.15400999784469604, -0.44093000888824463, 0.31536999344825745, -0.49507999420166016, -0.341729998588562, 0.11400999873876572, -0.37654000520706177, -0.2664400041103363, 0.4484899938106537, 0.3067399859428406, 0.24097999930381775, 0.31205999851226807, -0.4014799892902374, -0.0008763400255702436, -0.2966099977493286, -0.035521000623703, -0.010517000220716, -0.22982999682426453, 0.2771899998188019, -0.19277000427246094, -0.30397000908851624, -0.27810999751091003, 0.40307000279426575, -0.013762000016868114, 0.966920018196106, -0.22968000173568726, 0.2506900131702423, -0.2824699878692627, 0.034077998250722885, -0.07675299793481827, -0.13169999420642853, 0.17402000725269318, -0.05027500167489052, 0.03932199999690056, 0.1014999970793724, -0.09806700050830841, -0.4777199923992157, -0.8188999891281128, 0.3343699872493744, 0.2601200044155121, -0.13104000687599182, 0.46404001116752625, -0.42508000135421753, 0.2470400035381317, 0.017823999747633934, 0.18252000212669373, -0.2381799966096878, 0.17199000716209412, 0.3307400047779083, -0.019989000633358955, -0.09316200017929077, -0.36309000849723816, 0.05935399979352951, 0.007421399932354689, 0.37369000911712646, -0.048186998814344406, 0.16614000499248505, 0.24318000674247742, -0.123089998960495, 0.09098000079393387, 0.3418099880218506, -0.019423000514507294, 0.009162000380456448, -0.48021000623703003, 0.055073998868465424, -0.332040011882782, 0.3115899860858917, -0.23848000168800354, 0.20453999936580658, 0.07610700279474258, 0.013075999915599823, 0.09994400292634964, 0.5071600079536438, 0.11181999742984772, -0.11405999958515167, 0.39886999130249023, -0.4319300055503845, 0.08651400357484818, -0.6089699864387512, -0.0886560007929802, 0.284060001373291, 0.1359499990940094, 0.3706600069999695, -0.19797000288963318, 0.971809983253479, -0.04516100138425827, -0.02710999920964241, 0.4940199851989746, -0.17880000174045563, -0.426690012216568, -0.2635500133037567, 0.3248099982738495, 0.21829000115394592, 1.0378999710083008, 0.12511999905109406, 0.39958998560905457, 0.5772899985313416, -0.4162600040435791, 0.22371000051498413, -0.13871000707149506, 0.09162899851799011, -0.30212000012397766, -0.6184899806976318, 0.15142999589443207, -0.538569986820221, 
0.027428999543190002, 0.537880003452301, 0.03285299986600876, 0.3424600064754486, 0.6227399706840515, -0.2759299874305725, 0.32245999574661255, -0.264710009098053, 0.8680199980735779, 0.10339999943971634, -0.03181200101971626, -0.06070699915289879, 0.07971599698066711, -0.1499900072813034, -0.10407000035047531, -0.22766999900341034, -0.21772000193595886, -0.8778600096702576, 0.16532999277114868, 0.0328500010073185, 0.13902999460697174, -0.21186000108718872, 0.3383699953556061, -0.4593699872493744, 0.38578999042510986, -0.15916000306606293, -0.14240999519824982, -0.241239994764328, 0.20340999960899353, -0.4979400038719177, 0.3978799879550934, -0.00348620000295341, -1.2330000400543213, 0.10260000079870224, 0.6318899989128113, -0.012419000267982483, 0.1282999962568283, -0.6381099820137024, 0.16845999658107758, 0.006752400193363428, 0.1317099928855896, -0.38284000754356384, 0.5693299770355225, -0.11105000227689743, -0.26809000968933105, 0.5602399706840515, -0.028232000768184662, 0.28196001052856445, 0.7386299967765808, 0.07426100224256516, -0.39076998829841614, 0.1507200002670288, -0.0801210030913353, -0.011862000450491905, 0.11449000239372253, 0.3964200019836426, -0.6675300002098083, -0.46560999751091003, 0.29589998722076416, 0.24898000061511993, -0.3952299952507019, 0.25391000509262085, -0.33379998803138733, -0.08060400187969208, -1.7418999671936035, -0.33880001306533813, 0.0019456000300124288, -0.8597400188446045, -0.21459999680519104, 0.1222900003194809, 0.006058000028133392, -0.12353000044822693, 0.04002299904823303, 0.161080002784729, 0.5894100069999695, -0.2991600036621094, -0.3092699944972992, 0.10499999672174454, 0.17217999696731567, -0.2057799994945526, 0.1204800009727478, 0.23882000148296356, -0.004466299898922443, 0.1082800030708313, -0.20923000574111938, -0.18838000297546387, 0.03578399866819382, -0.20397000014781952], u'torn': [0.4594799876213074, -0.6202999949455261, -0.28161999583244324, -0.40900999307632446, -0.13346999883651733, 0.27487000823020935, -0.3173699975013733, 0.22597000002861023, 0.4314900040626526, -1.0307999849319458, 0.030991999432444572, 0.14485999941825867, 0.5734599828720093, -0.08209999650716782, -0.6362199783325195, 0.26649001240730286, 0.01551199983805418, 0.5190399885177612, -0.3642500042915344, -0.25450000166893005, -0.21913999319076538, 0.1547500044107437, 0.18456999957561493, -0.058671001344919205, -0.5779200196266174, 0.18126000463962555, -0.4904400110244751, -0.1633400022983551, 0.19894999265670776, 0.05911700055003166, 0.1831900030374527, 0.4133799970149994, -0.34995999932289124, 0.06464199721813202, -0.16161000728607178, 0.05244600027799606, -0.2802099883556366, 0.05499500036239624, -0.20503999292850494, 0.3686999976634979, -0.045430999249219894, -1.2073999643325806, -0.23136000335216522, -0.7743099927902222, 0.5135999917984009, -0.36643001437187195, 0.3421100080013275, 0.3667899966239929, -0.3802100121974945, -0.1151600033044815, 0.7340700030326843, 0.10221999883651733, 0.13147999346256256, -0.24280999600887299, -0.05630600079894066, -0.4773600101470947, 0.31512999534606934, -0.005673899780958891, -0.5225099921226501, -0.044874001294374466, -0.5193099975585938, -0.14585000276565552, -0.4703800082206726, -0.450190007686615, 0.1334100067615509, -0.41060999035835266, 0.1814900040626526, -0.28060001134872437, 0.6761000156402588, -0.1868000030517578, 0.33667001128196716, -0.5277199745178223, -0.15094000101089478, 0.4008199870586395, 0.6217600107192993, -0.09785299748182297, 0.2620899975299835, -0.2778500020503998, -0.013496000319719315, 
0.0866909995675087, 0.2709299921989441, -0.31209999322891235, -0.40310999751091003, 0.05666099861264229, -0.20669999718666077, 0.25630998611450195, -0.4128899872303009, -0.24628999829292297, -0.21331000328063965, 0.2703799903392792, 0.061030998826026917, 0.12602999806404114, -0.2640500068664551, -0.40856999158859253, -0.38697001338005066, 0.05015699937939644, 0.5655699968338013, -0.18355000019073486, 0.13266000151634216, 0.061847999691963196, 0.3600800037384033, 0.16575999557971954, 0.16064999997615814, -0.017772000283002853, -0.15466000139713287, 0.8462799787521362, 0.3206700086593628, 0.30017998814582825, 0.2297399938106537, 0.09190300107002258, -0.16152000427246094, 0.15522000193595886, -0.3244200050830841, -0.48524999618530273, 0.20521999895572662, 0.1523600071668625, 0.364329993724823, 0.034182000905275345, 0.6773599982261658, -0.5869899988174438, 0.2595899999141693, -0.5529299974441528, 0.16261999309062958, 0.4270400106906891, 0.381740003824234, 0.2386299967765808, -0.5203199982643127, -0.4724999964237213, -0.2745699882507324, 0.17095999419689178, -0.08297599852085114, 0.3723199963569641, 0.008467099629342556, 0.2305999994277954, 0.042660001665353775, -0.12916000187397003, -0.3324500024318695, 0.15042999386787415, 0.09995999932289124, -0.7338500022888184, -0.1811400055885315, 0.4151099920272827, -0.031943999230861664, 0.4011499881744385, -0.6692900061607361, 0.3712100088596344, 0.29006001353263855, 0.6008399724960327, -0.39629000425338745, -0.09952700138092041, -0.2304600030183792, -0.18125000596046448, -0.15835000574588776, -0.5619000196456909, 0.6011599898338318, -0.20082999765872955, -0.07590600103139877, 0.4386099874973297, 0.32951998710632324, 1.1134999990463257, 0.704990029335022, -0.6057299971580505, -0.11482000350952148, -0.042778998613357544, 0.11554999649524689, 0.4980500042438507, 0.5485799908638, -0.055668000131845474, 0.26607000827789307, -0.5976099967956543, 0.17976999282836914, -0.03026299923658371, -0.28633999824523926, -0.33305999636650085, 0.03858700022101402, 0.21664999425411224, 0.09772200137376785, 0.21991999447345734, -0.17685000598430634, -0.21874000132083893, -0.1998099982738495, 0.08154100179672241, -0.1710599958896637, -0.40773001313209534, 0.04262999817728996, -0.01411799993366003, -0.46713998913764954, 0.333189994096756, -0.0256740003824234, -0.15378999710083008, 0.004650699906051159, -0.06262999773025513, 0.18025000393390656, 0.5261200070381165, -0.010338000021874905, 0.8422200083732605, 0.0400100015103817, -0.025539999827742577, 0.2647700011730194, 0.35864999890327454, 0.6363700032234192, 0.1545799970626831, 0.3072499930858612, 0.031908001750707626, 0.0006860200082883239, -0.5697500109672546, 0.688289999961853, -0.05693700164556503, 0.4314199984073639, 0.47286999225616455, 0.5683599710464478, -0.08743999898433685, -0.0019039999460801482, 0.31863999366760254, 0.5030999779701233, 0.46862998604774475, 0.37397000193595886, -0.6550400257110596, 0.26116999983787537, 0.7105399966239929, 0.46349000930786133, -0.4525600075721741, 0.17459000647068024, 0.061702001839876175, 0.14735999703407288, -0.22949999570846558, -0.03838000074028969, -0.4293600022792816, -0.5771499872207642, -0.12306000292301178, 0.5946000218391418, -0.4262099862098694, -0.33816999197006226, -0.06852100044488907, 0.06214899942278862, -0.030944999307394028, 0.3347499966621399, -0.06634200364351273, -0.38763999938964844, -0.9047600030899048, 0.37915998697280884, -0.23297999799251556, 0.1793300062417984, 0.3062799870967865, -0.19177000224590302, -0.5767300128936768, 0.0766960009932518, 
0.3529999852180481, 0.18331000208854675, 0.13007000088691711, 0.3068599998950958, 0.7234500050544739, 0.012911999598145485, -0.7783899903297424, 0.3767400085926056, 0.2906799912452698, 0.05908399820327759, -0.15929999947547913, 0.4805000126361847, 0.47086000442504883, 0.12173999845981598, 0.1337299942970276, -0.5810400247573853, 0.24817000329494476, 0.20915000140666962, -0.5921099781990051, -0.34797999262809753, 0.3480899930000305, -0.0917230024933815, -0.30072999000549316, -0.41179001331329346, -0.32433998584747314, -0.2757599949836731, -0.3154299855232239, -0.6119800209999084, 0.08621499687433243, -1.1128000020980835, 0.265859991312027, 0.5189999938011169, 0.0706389993429184, 0.7869399785995483, 0.1633799970149994, -0.10955999791622162, 0.0014321999624371529, -0.3479999899864197, 0.2421800047159195, 0.1109900027513504, 0.42559999227523804, 0.23443999886512756, -0.8103700280189514, -0.18283000588417053, -0.6046599745750427, 0.22851000726222992, 0.1614300012588501, -0.24366000294685364, 0.5438399910926819, 0.5247200131416321, -0.2699599862098694, 0.41356000304222107, 0.16763000190258026], u'folded': [-0.13099999725818634, 0.14896999299526215, 0.11311999708414078, -0.525629997253418, -0.14258000254631042, -0.18708999454975128, -0.21306000649929047, 0.12444999814033508, 4.958800127496943e-05, 0.04123799875378609, -0.3128199875354767, 0.3082999885082245, 0.07235600054264069, -0.2758600115776062, 0.30619001388549805, 0.6070399880409241, 0.43296000361442566, 0.0364530012011528, 0.35526999831199646, -0.7217299938201904, -0.14715999364852905, -0.21856999397277832, -0.057794999331235886, 0.19754000008106232, 0.06663999706506729, 0.21918000280857086, -0.23393000662326813, 0.22228999435901642, -0.2046400010585785, -0.15011000633239746, -0.0628879964351654, -0.3081299960613251, -0.016269000247120857, 0.1604599952697754, -0.5430600047111511, 0.3382500112056732, -0.24300000071525574, 0.05458100140094757, 0.058132000267505646, 0.7239800095558167, -0.26023000478744507, -0.42636001110076904, -0.09493199735879898, 0.273470014333725, 0.46581000089645386, 0.20920999348163605, 0.17020000517368317, -0.050999999046325684, -0.08111900091171265, 0.2746399939060211, 0.25033000111579895, 0.015212000347673893, -0.34841999411582947, -0.6481800079345703, -0.17970000207424164, -0.30643999576568604, 0.16256999969482422, 0.2535099983215332, 0.1571899950504303, -0.2368600070476532, 0.5603700280189514, 0.12391000241041183, -0.2353300005197525, -0.27546000480651855, 0.18585999310016632, -0.4596399962902069, 0.13402999937534332, 0.20956000685691833, 0.08400599658489227, -0.10840000212192535, 0.43667998909950256, -0.20377999544143677, -0.44670000672340393, 0.6783300042152405, 0.01511599961668253, -0.15503999590873718, -0.17961999773979187, -0.31935998797416687, -0.429610013961792, 0.0263420008122921, -0.565280020236969, 0.3576500117778778, -0.3010900020599365, -0.037085000425577164, -0.06199900060892105, -0.06241700053215027, -0.15343999862670898, 0.3444100022315979, -0.44032999873161316, 0.6621599793434143, 0.21796000003814697, 0.2694000005722046, 0.03718600049614906, 0.7318400144577026, -0.3646300137042999, 0.12820999324321747, -0.5426200032234192, 0.4167799949645996, 0.5356199741363525, -0.4976100027561188, 0.8288900256156921, -0.003001099918037653, 0.2603999972343445, -0.17735999822616577, 0.20069999992847443, -0.17340999841690063, -0.17741000652313232, 0.5751500129699707, 0.2921600043773651, 0.17876000702381134, -0.3689500093460083, 0.23199999332427979, -0.3021300137042999, -0.05649999901652336, -0.6890400052070618, 
0.020507000386714935, -0.15865999460220337, 0.02528499998152256, 0.24355000257492065, -0.5221400260925293, -0.09783200174570084, -0.36768999695777893, 0.8543699979782104, 0.7183399796485901, -0.3751699924468994, 0.21860000491142273, -0.4314500093460083, -0.3264000117778778, -0.402319997549057, 0.04835499823093414, -0.08814699947834015, -0.06445199996232986, 0.4713500142097473, -0.050652001053094864, -0.012392999604344368, 0.3057500123977661, 0.3412100076675415, -0.2566699981689453, -0.3529700040817261, -0.149509996175766, -0.008430800400674343, 0.28793999552726746, -0.47102999687194824, 0.07611899822950363, 0.008674699813127518, 0.19945000112056732, 0.45566999912261963, -0.2046400010585785, 0.11841999739408493, -0.20100000500679016, -0.15922999382019043, 0.2794699966907501, -0.41110000014305115, -0.965499997138977, 0.2871899902820587, -0.38978999853134155, 0.02505899965763092, -0.4983200132846832, 0.028766000643372536, 0.25773999094963074, -0.5250899791717529, -0.4213399887084961, -0.9829699993133545, 0.7716299891471863, 0.4999000132083893, -0.020116999745368958, 0.04442699998617172, 0.2965799868106842, 0.08924899995326996, 0.25067999958992004, 0.09436900168657303, 0.1519400030374527, -0.03903700038790703, 0.00014131999341771007, -0.23444999754428864, -0.16904999315738678, 0.03240299969911575, 0.661050021648407, 0.27761000394821167, -0.48642000555992126, 0.6888200044631958, -0.4144900143146515, 0.1961199939250946, 0.2125999927520752, 0.44093000888824463, -0.5778599977493286, -0.0035218000411987305, 0.32923999428749084, 0.4308600127696991, -0.13627000153064728, 0.5598800182342529, 0.49268001317977905, 0.3433600068092346, 0.01865299977362156, 0.05028799921274185, 0.5322700142860413, -0.0923440009355545, 0.3022800087928772, 0.10970000177621841, 0.35578998923301697, 0.3752000033855438, -0.3140699863433838, 0.348690003156662, 0.02839300036430359, 0.20983000099658966, -0.5128800272941589, 0.39215999841690063, -0.29148000478744507, 0.05256300047039986, -0.34564998745918274, 0.017076000571250916, -0.1938599944114685, -0.06751000136137009, 0.5412600040435791, 0.45781999826431274, 0.5163900256156921, -0.16859999299049377, -0.675599992275238, 0.06327199935913086, 0.366890013217926, 0.41413000226020813, 0.0669260025024414, -0.3481000065803528, -0.22806000709533691, -0.12151999771595001, 0.031129999086260796, 0.20810000598430634, 0.16037000715732574, -0.5747100114822388, -0.5752300024032593, 0.88264000415802, -0.2814500033855438, -0.50968998670578, -0.48930999636650085, -0.16934999823570251, -0.5184599757194519, -0.1899300068616867, -0.32739999890327454, -0.5709999799728394, 0.1168999969959259, 0.536620020866394, -0.29490000009536743, -0.11670999974012375, -0.17302000522613525, -0.7509700059890747, 0.054214999079704285, 0.40369999408721924, 0.24630999565124512, -0.0967399999499321, 0.2007399946451187, 0.13796000182628632, 0.35405999422073364, -0.14302000403404236, -0.0034827999770641327, 0.5196099877357483, 0.2473600059747696, -0.12013000249862671, 0.07197500020265579, -0.2487799972295761, 0.29197999835014343, -0.3989599943161011, -0.6078600287437439, 0.24866999685764313, 0.2531900107860565, 0.0805480033159256, -0.42965999245643616, 0.005924399942159653, 0.14770999550819397, -0.19085000455379486, 0.031516000628471375, -0.24442000687122345, -0.012033999897539616, 0.2356099933385849, 0.1288899928331375, 0.28979000449180603, -0.3569500148296356, 0.03982900083065033, 0.017865000292658806, -0.13752000033855438, 0.39809998869895935, -0.23152999579906464, -0.23075999319553375, -0.04576199874281883, 
-0.1872899979352951, -0.17820000648498535, 0.09809400141239166, 0.5331000089645386, 0.33851000666618347, 0.3631399869918823, -0.6861699819564819, 0.6793599724769592, 0.6738799810409546, -0.37132999300956726, 0.29552000761032104, 0.27320000529289246, 0.7602400183677673, 0.29229000210762024, -0.3835799992084503, 0.4560900032520294, -1.0357999801635742], u'young': [-0.10346999764442444, 0.15125000476837158, 0.3628599941730499, -0.07876899838447571, 0.061333999037742615, 0.7150400280952454, -0.21296000480651855, 0.09468699991703033, -0.1782499998807907, -0.5221800208091736, 0.366349995136261, -0.0822329968214035, -0.30270999670028687, -0.0224629994481802, 0.08294399827718735, -0.3481900095939636, -0.3567500114440918, -0.3803600072860718, 0.07141599804162979, 0.2188899964094162, -0.46327999234199524, 0.7729399800300598, -0.08674000203609467, 0.12031999975442886, -0.14861999452114105, -0.12302999943494797, 0.4139699935913086, 0.27307000756263733, 0.1206900030374527, 0.014872999861836433, 0.2129800021648407, 0.4818800091743469, -0.20970000326633453, 0.020670000463724136, -0.9873999953269958, 0.11334999650716782, 0.21931999921798706, 0.3991599977016449, 0.5742200016975403, -0.16896000504493713, 0.8514900207519531, -0.5122600197792053, 0.1762000024318695, -0.10507000237703323, -0.5416200160980225, 0.10057999938726425, 0.1953199952840805, -0.2589400112628937, -0.16646000742912292, -0.2777999937534332, 0.023074999451637268, -0.5684000253677368, -0.5163999795913696, 0.0925619974732399, -0.0734419971704483, 0.17202000319957733, 0.0527070015668869, -0.02759299986064434, 0.04603800177574158, -0.05415400117635727, 0.13508999347686768, -0.12745000422000885, 0.8583999872207642, 0.2642799913883209, -0.2906700074672699, -0.11874999850988388, -0.1473200023174286, -0.26429998874664307, -0.4637399911880493, -0.18985000252723694, -0.14621999859809875, -0.4004499912261963, -0.2630999982357025, 0.4293400049209595, -0.1447400003671646, 0.030395999550819397, 0.04092700034379959, -0.07465499639511108, 0.14514000713825226, -0.05471799895167351, -0.2780599892139435, 0.057780999690294266, -0.023887999355793, 0.12500999867916107, -0.2775999903678894, 0.3645099997520447, -0.22421999275684357, -0.12915000319480896, 0.11796999722719193, 0.32311001420021057, -0.3028700053691864, 0.1735599935054779, -0.5307499766349792, 0.21154999732971191, 0.17208999395370483, -0.055635999888181686, 0.32969000935554504, -0.5051599740982056, 0.08725599944591522, -0.20813000202178955, -0.010332000441849232, -0.5076000094413757, 0.011429999954998493, -0.4427799880504608, -0.16701999306678772, 0.18174999952316284, -0.1806900054216385, 0.012769999913871288, -0.12381000071763992, -0.2205200046300888, 0.11221999675035477, 0.27851998805999756, 0.2795099914073944, -0.35725998878479004, 0.436599999666214, 0.2310599982738495, -0.22939999401569366, 0.3800399899482727, -0.0024804999120533466, -0.30316999554634094, -0.13237999379634857, 0.15538999438285828, 0.05514499917626381, 0.1860000044107437, 0.14485999941825867, -0.12298999726772308, -0.24548999965190887, -0.2390899956226349, 0.25637000799179077, -0.6273800134658813, -0.16651000082492828, -0.487529993057251, -0.2456900030374527, 0.2957800030708313, -0.001756099984049797, -0.09458799660205841, 0.2385600060224533, -0.3400999903678894, -0.02669999934732914, -0.039416998624801636, 0.18704000115394592, -0.17125000059604645, -0.009911499917507172, -0.1572200059890747, -0.19707000255584717, 0.08980699628591537, 0.019578000530600548, -0.051867999136447906, -0.049685001373291016, 0.06448300182819366, 
0.18413999676704407, -0.013340000063180923, 0.3128499984741211, 0.6309199929237366, 0.8030300140380859, -0.18400999903678894, -0.23712000250816345, -0.10219000279903412, 0.050085000693798065, -0.48127999901771545, -0.18928000330924988, -0.300790011882782, 0.27974000573158264, -0.18479999899864197, -0.6347100138664246, -0.4170700013637543, -0.445499986410141, 0.3018699884414673, 0.5267300009727478, -0.35065001249313354, 0.42921000719070435, -0.0336420014500618, -1.2036999464035034, -0.015456000342965126, -0.13801999390125275, -0.2782500088214874, -0.059484999626874924, -0.35512998700141907, -0.6750800013542175, 0.9410499930381775, 0.15884999930858612, -0.3062399923801422, -0.10507000237703323, -0.05900000035762787, 0.054489001631736755, 0.01915300078690052, -0.5270100235939026, -0.3187899887561798, 0.3041599988937378, 0.14190000295639038, -0.3701300024986267, -0.1237500011920929, 0.07761699706315994, 0.24235999584197998, 0.2632000148296356, 0.29490000009536743, 0.024783000349998474, -0.817550003528595, 0.21859000623226166, -0.15455999970436096, 1.618299961090088, 0.22078999876976013, 0.20993000268936157, 0.18424999713897705, -0.12387000024318695, -0.011060000397264957, 0.19191999733448029, 0.280460000038147, -0.10180000215768814, -0.22216999530792236, 0.5059400200843811, 0.0021271000150591135, 0.06912799924612045, 0.22995999455451965, 0.2884100079536438, -0.12937000393867493, 0.6126999855041504, 0.11350999772548676, -0.5397599935531616, -0.48906999826431274, 0.03928700089454651, 0.34419000148773193, -0.3364500105381012, -0.12284000217914581, -0.16779999434947968, 0.3531399965286255, 0.03347000107169151, -0.3515099883079529, -0.14880000054836273, 0.2552900016307831, -0.004479499999433756, 0.06952700018882751, -0.17329999804496765, 0.12184999883174896, -0.15520000457763672, 0.46950000524520874, -0.24842000007629395, 0.37457001209259033, -0.815500020980835, -0.006209199782460928, 0.37310001254081726, 0.11681000143289566, 0.056582000106573105, 0.2814899981021881, -0.4022600054740906, 0.13244999945163727, 0.42629000544548035, -0.09926100075244904, -0.19839000701904297, 0.10419999808073044, 0.3471499979496002, -0.12464000284671783, -0.014241999946534634, 0.028324000537395477, 1.3488999605178833, -0.15509000420570374, 0.23869000375270844, -0.06548699736595154, 0.32534000277519226, 0.04323799908161163, 0.13989000022411346, -0.523580014705658, -0.020932000130414963, 0.0846560001373291, -0.14484000205993652, -0.123259998857975, -0.26927000284194946, 0.050572000443935394, -0.19449999928474426, 0.6731799840927124, -0.2536500096321106, -0.3290799856185913, 0.3184100091457367, -0.22335000336170197, -0.2350199967622757, 0.11892999708652496, -1.7337000370025635, 0.11917000263929367, -0.03079099953174591, 0.07321599870920181, -0.25637999176979065, -0.2331400066614151, 0.2947100102901459, 0.04641899839043617, -0.3562000095844269, 0.5387899875640869, -0.029354000464081764, 0.9644700288772583, -0.19753000140190125, 0.027674999088048935, -0.04143499955534935, -0.5133100152015686, -0.1465200036764145, -0.0015521999448537827, 0.21836000680923462, 0.006457599811255932, -0.05621400102972984, 0.31696000695228577, -0.4614900052547455, -0.19091999530792236], u'wet': [0.23939000070095062, -0.2445400059223175, -0.37863001227378845, -0.5201200246810913, -0.46035000681877136, -0.38978999853134155, 0.4628300070762634, 0.9740200042724609, 0.6156299710273743, -0.7426199913024902, 0.20374999940395355, -0.5897499918937683, -0.2673099935054779, -0.18386000394821167, -0.38398000597953796, -0.6860299706459045, 
-0.26023000478744507, 0.40720999240875244, 0.5007100105285645, 0.19383999705314636, 0.0045647998340427876, -0.013048999942839146, 0.11925999820232391, -0.017253000289201736, -0.6349800229072571, -0.08685000240802765, 0.5889700055122375, 0.04456999897956848, 0.006480799987912178, -0.19845999777317047, 0.29907000064849854, -0.010317999869585037, -0.20217999815940857, -0.09749100357294083, -0.8714200258255005, 0.5038999915122986, 0.010254999622702599, 0.09984000027179718, -0.4957599937915802, 0.1974399983882904, -0.6422299742698669, 0.2528800070285797, -0.23673999309539795, -0.5044699907302856, 0.7107200026512146, -0.04077000170946121, 0.5026599764823914, 0.538349986076355, 0.03607799857854843, -0.3255000114440918, -0.01293100044131279, -0.13016000390052795, 0.053036000579595566, -0.17601999640464783, 0.07075800001621246, 0.6923199892044067, 0.010745000094175339, -0.6970900297164917, 0.5599799752235413, 0.18883000314235687, 0.28224000334739685, -0.3099600076675415, -0.03979000076651573, 0.3801400065422058, -0.4419499933719635, -0.3812200129032135, 0.11417999863624573, -0.2974100112915039, -0.40751001238822937, -0.2933099865913391, 0.21660999953746796, 0.3585500121116638, -0.46105000376701355, 0.33103999495506287, -0.9910299777984619, -0.1949699968099594, 0.54899001121521, 0.28213000297546387, -0.20250999927520752, -0.3028999865055084, 0.22415000200271606, -0.0731310024857521, -0.27333998680114746, -0.04068100079894066, -0.33998000621795654, 0.39379000663757324, -0.08015300333499908, -0.19631999731063843, -0.22499999403953552, 0.31154999136924744, 0.11636999994516373, -0.47244998812675476, 0.24448999762535095, -0.038086000829935074, -0.38582998514175415, 0.29190000891685486, 0.3709399998188019, -0.20297999680042267, 0.4067400097846985, -0.15639999508857727, -0.022545000538229942, -0.0451430007815361, -0.5865899920463562, 0.29423001408576965, -0.6603699922561646, -0.22926999628543854, -0.09878599643707275, 0.6946300268173218, -0.3681600093841553, 0.4262300133705139, -0.3774699866771698, -0.48462000489234924, -0.4819900095462799, 0.061581000685691833, -0.2381799966096878, -0.5326399803161621, 0.12352000176906586, 0.8786799907684326, -0.054575998336076736, -0.015359999611973763, -0.2836199998855591, -0.24393999576568604, 0.35089001059532166, 0.4287300109863281, -0.03770099952816963, 0.30305999517440796, 0.3093000054359436, 0.03335700184106827, 0.7907999753952026, -0.4271799921989441, 0.32269999384880066, 0.7974799871444702, 0.17813999950885773, 0.7945899963378906, -0.13895000517368317, 0.0364839993417263, -0.07079499959945679, 0.3677400052547455, 0.1637199968099594, 0.08514600247144699, 0.15937000513076782, -0.21782000362873077, -0.0545479990541935, -0.7537199854850769, -0.48131000995635986, 0.1084199994802475, -0.47687000036239624, 0.30546998977661133, 0.3068099915981293, -0.3055399954319, -0.5313000082969666, -0.30588001012802124, -0.781470000743866, -0.07694999873638153, 0.2916400134563446, 0.08303199708461761, -0.28780999779701233, 0.018962999805808067, 0.5477399826049805, 0.644760012626648, 0.3707199990749359, -1.248900055885315, -0.2631799876689911, 0.14696000516414642, 0.3360700011253357, 0.33111000061035156, 0.7504799962043762, 0.42712000012397766, 0.06814199686050415, -0.26554998755455017, 0.735759973526001, 0.7684100270271301, 0.1762399971485138, 0.5415199995040894, 0.2696000039577484, 0.18592999875545502, 0.3031100034713745, 0.291020005941391, -0.00012981999316252768, -0.4145300090312958, 0.5019800066947937, 0.16654999554157257, 0.22113999724388123, 0.06658799946308136, 
-0.1319199949502945, 0.009346099570393562, 1.132599949836731, -0.3469800055027008, 0.13765999674797058, -0.19580000638961792, 0.4399000108242035, 0.2906399965286255, -0.12132000178098679, -0.06902799755334854, -0.1810699999332428, -0.07815899699926376, -0.40672001242637634, -0.17053000628948212, -0.4207899868488312, -0.2069700062274933, 0.24377000331878662, -0.37926000356674194, 0.020501000806689262, 0.11050999909639359, 0.3032799959182739, 0.06928200274705887, -0.38741999864578247, 0.2741599977016449, -0.3772999942302704, -0.6495800018310547, -0.6189600229263306, -0.19453999400138855, 0.41905999183654785, -0.592519998550415, 0.26440998911857605, -0.013127000071108341, -0.07482600212097168, -0.9078599810600281, 0.5949100255966187, -0.5232399702072144, 0.8249800205230713, 0.3709700107574463, 0.1733199954032898, 0.20694999396800995, 0.22220000624656677, 0.26116999983787537, 0.06686200201511383, -0.7315099835395813, -0.06021900102496147, 0.27524998784065247, 1.2740000486373901, -0.4643799960613251, -0.1411599963903427, -0.2955099940299988, 0.08326199650764465, 0.14076000452041626, -0.9567899703979492, -0.17931999266147614, -0.09784000366926193, -0.29061999917030334, -0.7825300097465515, 0.27261000871658325, -0.21344999969005585, -0.04709300026297569, -1.1916999816894531, -0.26440000534057617, 0.16850000619888306, -0.03269900009036064, 0.04965699836611748, -0.16854000091552734, -0.06859199702739716, -0.2778699994087219, 0.018022999167442322, -0.6065000295639038, 0.42340999841690063, 0.5661200284957886, 0.13130000233650208, 0.15063999593257904, 0.19855999946594238, 0.263729989528656, -0.13840000331401825, -0.7542799711227417, -0.42120999097824097, 0.5979400277137756, -0.23181000351905823, 0.5377500057220459, -0.894760012626648, -0.004934399854391813, 0.36656999588012695, -0.4554400146007538, 0.1722099930047989, 0.3562000095844269, -0.5304200053215027, -0.25764000415802, 0.15206000208854675, -0.21876999735832214, -0.16229000687599182, -0.22968000173568726, -0.597100019454956, 0.19888000190258026, -0.6552500128746033, -0.10708999633789062, 0.292930006980896, 0.3352600038051605, -0.6578999757766724, 0.39087000489234924, 0.31703999638557434, -0.09390799701213837, -0.2832300066947937, -0.04404199868440628, -0.18466000258922577, 0.17767000198364258, -0.45162999629974365, -0.005859099794179201, 0.2715199887752533, 0.3685300052165985, 0.3334600031375885, 0.05107000097632408, 0.019415000453591347, 0.3728100061416626], u'cluttered': [-0.37613001465797424, 0.23823000490665436, -0.4577299952507019, -0.12342000007629395, -0.006164600141346455, 0.25088998675346375, -0.5598800182342529, -0.09535899758338928, 0.4787600040435791, 0.01838099956512451, -0.2511099874973297, 0.1290699988603592, -0.020346999168395996, 0.02335200086236, -0.013172999955713749, -0.17233000695705414, -0.4346500039100647, -0.0874980017542839, 0.19306999444961548, 0.09262800216674805, 0.42928001284599304, 0.6132199764251709, -0.048193998634815216, 0.08515699952840805, -0.6695200204849243, -0.21407000720500946, 0.3733200132846832, 0.060474999248981476, 0.3037300109863281, 0.04060199856758118, 0.09565799683332443, -0.16203999519348145, -0.01220100000500679, 0.332069993019104, 0.012357999570667744, 0.6569100022315979, -0.3117699921131134, -0.7069600224494934, -0.43974998593330383, -0.30564001202583313, -0.4731000065803528, 0.2528800070285797, -0.24437999725341797, -0.0941689983010292, 0.2882699966430664, 0.22105999290943146, 0.06002400070428848, -0.17130999267101288, -0.014533000066876411, -0.4579299986362457, 0.04478999972343445, 
-0.3705900013446808, 0.31762999296188354, -0.423880010843277, 0.5377200245857239, -0.029895000159740448, -0.1740799993276596, -0.487280011177063, -0.08324400335550308, 0.17523999512195587, 0.29780998826026917, -0.6826800107955933, 0.25075000524520874, 0.18681000173091888, 0.14316000044345856, 0.35795000195503235, 0.223690003156662, 0.30340999364852905, 0.5202500224113464, -0.5344399809837341, -0.40522998571395874, -0.1687999963760376, -0.3052099943161011, 0.5344700217247009, -0.0703750029206276, 0.14854000508785248, -0.08500000089406967, 0.25450000166893005, 0.14646999537944794, 0.21597999334335327, -0.01624700054526329, 0.2604700028896332, -0.0034205999691039324, 0.025909999385476112, -0.07752499729394913, -0.15352000296115875, 0.23810000717639923, 0.05637900158762932, -0.0036897999234497547, 0.606249988079071, 0.2702699899673462, -0.34512001276016235, 0.20458999276161194, -0.03899800032377243, -0.2829599976539612, 0.08067700266838074, 0.06584999710321426, -0.284280002117157, 0.22894999384880066, -0.48058998584747314, 0.3405199944972992, -0.12495999783277512, -0.05933599919080734, -0.1985200047492981, -0.8998399972915649, -0.191880002617836, 0.43821001052856445, -0.09264600276947021, 0.3682500123977661, -0.1252100020647049, -0.9800199866294861, 0.30052998661994934, -0.5392199754714966, 0.13138000667095184, -0.17076000571250916, -0.4192500114440918, 0.1259399950504303, -0.10717999935150146, -0.01702200062572956, 0.22606000304222107, 0.1915999948978424, -0.09616900235414505, 0.3325499892234802, 0.3880299925804138, 0.011284999549388885, 0.2626599967479706, -0.1354299932718277, -0.7135499715805054, -0.0410429984331131, 0.26440000534057617, 0.41993001103401184, -0.17437000572681427, 0.25220000743865967, -0.23241999745368958, 0.42875999212265015, -0.035773999989032745, 0.7317299842834473, -0.002030499977990985, -0.10181999951601028, -0.11406999826431274, -0.07410100102424622, 0.043108999729156494, 0.2596000134944916, 0.34935998916625977, 0.006744599901139736, -0.029374999925494194, 0.5127099752426147, 0.6934599876403809, 0.1385599970817566, -0.5212799906730652, -0.3851499855518341, -0.2217700034379959, 0.003395400010049343, -0.38120999932289124, -0.34797000885009766, 0.23829999566078186, -0.08955299854278564, 0.5340099930763245, 0.49599000811576843, 0.25360000133514404, -0.5006099939346313, 0.26078999042510986, 0.47328001260757446, 0.05288900062441826, 1.0285999774932861, -0.22098000347614288, -0.08538299798965454, -0.21518999338150024, 0.11014000326395035, -0.07539299875497818, -0.15443000197410583, 0.26903000473976135, -0.4549799859523773, -0.41769999265670776, -0.1917400062084198, -0.1775200068950653, -0.300680011510849, 0.02608799934387207, -0.24074000120162964, -0.8882499933242798, -0.08710599690675735, -0.26010000705718994, 0.23966999351978302, 0.21703000366687775, 0.5148599743843079, -0.03700200095772743, 1.4723999500274658, -0.06194400042295456, -0.024441000074148178, 0.0974230021238327, 0.1387999951839447, -0.7309799790382385, -0.3814300000667572, 0.03442100062966347, 0.28898999094963074, -0.09316399693489075, -0.8580099940299988, 0.5907300114631653, -0.7824100255966187, 0.35587000846862793, -0.25619998574256897, -0.2683199942111969, -0.09141000360250473, -0.055973999202251434, 0.6608999967575073, -0.44602999091148376, -0.16680000722408295, -0.43737998604774475, -0.0872659981250763, -0.15724000334739685, -0.5545200109481812, 0.2581000030040741, -0.05632400140166283, -0.15067000687122345, -0.011517999693751335, 0.1760299950838089, -0.09856700152158737, -0.39699000120162964, 
0.3817499876022339, 0.32447001338005066, 0.22529999911785126, -0.02369000017642975, -0.7131800055503845, 0.27542001008987427, -0.09773500263690948, -0.07466799765825272, -0.07621700316667557, 0.03339400142431259, -0.23330000042915344, -0.4074999988079071, 0.2594299912452698, 0.2651500105857849, -0.21971000730991364, -0.2047400027513504, 0.033640000969171524, 0.06822700053453445, -0.025880999863147736, 0.39607998728752136, -0.35374000668525696, -0.2101999968290329, 0.4850800037384033, -0.08920600265264511, 0.0012620999477803707, 0.654449999332428, -0.18538999557495117, -0.21963000297546387, -0.047974999994039536, 0.23534999787807465, -0.23541000485420227, 0.6129500269889832, 0.43887999653816223, -0.18967999517917633, 0.023475000634789467, -0.5706200003623962, -0.03948099911212921, 0.5087900161743164, -0.16708000004291534, -0.27821001410484314, 0.40022000670433044, 0.4612799882888794, -0.10926999896764755, -0.33368998765945435, -0.2263599932193756, -0.5127099752426147, 0.7842400074005127, 0.31411001086235046, 0.4251199960708618, -0.12794999778270721, 0.5465800166130066, 0.08838000148534775, -0.40448999404907227, 0.5743799805641174, -0.25753000378608704, 0.0978970006108284, -0.33351001143455505, 0.39563998579978943, 0.10261999815702438, 0.6149299740791321, -0.27952998876571655, -0.43939998745918274, -0.16234000027179718, 0.07012300193309784, 0.010768000036478043, -0.3777799904346466, 0.15902000665664673, -0.009314400143921375, -0.08868499845266342, 0.6187700033187866, -0.47415998578071594, -0.5267099738121033, -0.09687300026416779, 0.04100999981164932, -0.1901800036430359, 0.0905120000243187, -0.06616900116205215, -0.1669600009918213, 0.45205000042915344, -0.03736000135540962, -0.33375000953674316, 0.3743000030517578], u'verdant': [-0.1484300047159195, -0.6607000231742859, -0.07585100084543228, 0.12115000188350677, -0.13059000670909882, 0.042688999325037, 0.30910998582839966, -0.4681200087070465, 0.31742000579833984, 0.3814699947834015, -0.30856001377105713, -0.34297001361846924, -0.08717799931764603, -0.5014899969100952, -0.5234400033950806, 0.42462000250816345, -0.4544300138950348, -0.09350699931383133, 0.24979999661445618, 0.9678999781608582, -0.1774500012397766, 0.7865899801254272, -0.6778799891471863, 0.0821240022778511, -0.10493999719619751, -0.4767700135707855, -0.24732999503612518, 0.10236000269651413, -0.21145999431610107, -0.0677499994635582, 0.5391700267791748, 0.12773999571800232, -0.41363000869750977, -0.021750999614596367, 0.7596700191497803, 0.40459999442100525, -0.019627999514341354, -0.3104400038719177, -0.3350200057029724, -0.5218200087547302, 0.27441999316215515, 0.00617779977619648, -0.1961199939250946, 0.13383999466896057, 0.719760000705719, 0.04468800127506256, 0.23135000467300415, 0.08243799954652786, 0.4172399938106537, -0.259880006313324, -0.12278000265359879, -0.12345000356435776, 0.5941900014877319, -0.4664599895477295, 0.33292001485824585, -0.3092299997806549, -0.007532400079071522, -0.5742200016975403, 0.21629999577999115, -0.013233000412583351, -0.2602800130844116, -0.13874000310897827, 0.14337000250816345, 0.38019999861717224, -0.10106000304222107, 0.47350001335144043, -0.0482649989426136, 0.2919299900531769, -0.1787099987268448, -0.1949000060558319, -0.27167999744415283, 0.35585999488830566, 0.2446800023317337, -0.4544700086116791, -0.02232700027525425, -0.09015800058841705, 0.12330000102519989, -0.22101999819278717, -0.5283399820327759, 0.039080001413822174, -0.23003000020980835, 0.20274999737739563, -0.35679998993873596, 0.3797599971294403, 
0.47457000613212585, 0.5516899824142456, 0.15724000334739685, 0.07527200132608414, 0.22023999691009521, -0.008149500004947186, 0.15591999888420105, -0.33924001455307007, 0.7068899869918823, -0.03195200115442276, -0.39581000804901123, 0.13753999769687653, 0.7874000072479248, -0.03695699945092201, 0.03227800130844116, 0.3934299945831299, -0.38982000946998596, 0.6100900173187256, -0.4811500012874603, 0.6994100213050842, -0.9678000211715698, -0.1694200038909912, 0.17141999304294586, 0.009327700361609459, -0.27066999673843384, -0.42831000685691833, -0.36956000328063965, -0.4256899952888489, 0.4421199858188629, 0.3635700047016144, 0.1968899965286255, 0.13235999643802643, 0.40536001324653625, 0.4324199855327606, 0.14789000153541565, 0.47890999913215637, 0.12131000310182571, -0.3239699900150299, 0.060844000428915024, 0.38839998841285706, 0.11467999964952469, 0.3631899952888489, 0.33504000306129456, 0.37040001153945923, -0.6846200227737427, 0.02991200052201748, -0.0756009966135025, 0.2837899923324585, 0.07376699894666672, 0.16880999505519867, -0.3533399999141693, 0.2668899893760681, 0.5630699992179871, 0.24210000038146973, -0.08348400145769119, -0.41648000478744507, 0.3791700005531311, 0.24270999431610107, -0.2888700067996979, 0.1967799961566925, -0.46472999453544617, 0.2340800017118454, -0.14969000220298767, -0.3352999985218048, -0.24724000692367554, -0.27564001083374023, -0.5918099880218506, -0.2834399938583374, -0.05276300013065338, 0.1659500002861023, 0.5831900238990784, -0.2994999885559082, 0.029364999383687973, -0.07384999841451645, 0.6073399782180786, 0.42423999309539795, -0.37213000655174255, -0.19783000648021698, 0.44238999485969543, -0.059436000883579254, -0.08069299906492233, -0.6226599812507629, 0.07239899784326553, 0.009177600033581257, -0.19288000464439392, -0.5412300229072571, -0.0382860004901886, -0.11356999725103378, -0.20206999778747559, -0.18785999715328217, 0.033789001405239105, -0.060228001326322556, -0.14726999402046204, 0.24196000397205353, -0.2064799964427948, -0.3976399898529053, -0.17827999591827393, 0.06513199955224991, 0.24628999829292297, -0.10626000165939331, 0.1989700049161911, -0.11565999686717987, 0.589900016784668, -0.08389899879693985, -0.16238999366760254, 0.17122000455856323, -0.5625699758529663, 0.19268999993801117, -0.027279000729322433, -0.11112000048160553, 0.03374499827623367, 0.328110009431839, -0.29140999913215637, 0.37999001145362854, 0.0674550011754036, 0.6451600193977356, 0.00037394999526441097, -0.42719000577926636, -0.36750999093055725, 0.11597000062465668, 0.036010999232530594, -0.6380699872970581, -0.048996999859809875, 0.6000000238418579, -0.17270000278949738, -0.2521600127220154, 0.12323000282049179, 0.42173999547958374, -0.1987999975681305, -0.5236300230026245, -0.21689000725746155, 0.1096699982881546, 0.014840000309050083, -0.36948999762535095, 0.4021500051021576, -0.07045900076627731, 0.4792400002479553, -0.07331500202417374, 0.016308000311255455, -0.22462999820709229, -0.02455900050699711, -0.14699000120162964, 0.28828001022338867, 0.3961600065231323, -0.10846000164747238, -0.4557799994945526, 0.1287900060415268, 0.3331100046634674, -0.1031000018119812, 0.010707000270485878, 0.3847300112247467, -0.015961000695824623, 0.2366199940443039, -0.06552399694919586, 0.0838719978928566, -0.39621999859809875, 0.16875000298023224, -0.21126000583171844, -0.11728999763727188, 0.5414900183677673, -0.4396899938583374, -0.11939000338315964, 0.0006959399906918406, 0.4718700051307678, -0.16635000705718994, -0.41714000701904297, 0.11358000338077545, 
-0.09012100100517273, 0.4361400008201599, -0.9448800086975098, 0.1291400045156479, 0.41187000274658203, -0.27742999792099, 0.23027999699115753, -0.02449600026011467, -0.23184999823570251, 0.5824099779129028, -0.3014200031757355, 0.6185799837112427, -0.5864999890327454, 0.009240900166332722, -0.12280000001192093, -0.028813999146223068, 0.046654000878334045, 0.43470999598503113, -0.2847200036048889, -0.13930000364780426, -0.34163999557495117, -0.27884000539779663, -0.060256000608205795, 0.05956299975514412, 0.39886999130249023, 0.6798499822616577, 0.43786999583244324, -0.02412099950015545, -0.3464699983596802, -0.24964000284671783, -0.2975899875164032, -0.5866299867630005, -0.057266999036073685, -0.5755800008773804, -0.4249599874019623, 0.4103499948978424, -0.420879989862442, -0.10756000131368637, -0.07586699724197388, 0.262580007314682, 0.005894400179386139, 0.22390000522136688, 0.23061999678611755, -0.23684999346733093, 0.2526000142097473, 0.10013999789953232, 0.3832400143146515, 0.05721699818968773, -0.2399500012397766], u'sunny': [-0.07985000312328339, -0.4540500044822693, -0.4672499895095825, -0.18623000383377075, -0.6703400015830994, 0.20603999495506287, 0.28352001309394836, 0.47516998648643494, -0.11905000358819962, -0.4271399974822998, -0.23156000673770905, -0.19162000715732574, -0.10362999886274338, 0.23902000486850739, -0.44064998626708984, -0.3084999918937683, -0.33112001419067383, 0.182219997048378, 0.8637700080871582, 0.5752699971199036, 0.23055000603199005, 0.617139995098114, -0.14891000092029572, -0.3364099860191345, -0.020705999806523323, -0.3158999979496002, 0.1842299997806549, 0.1510699987411499, -0.09587699919939041, -0.48012998700141907, -0.030199000611901283, 0.16132000088691711, -0.24142999947071075, 0.02457300014793873, -0.3714900016784668, 0.2071399986743927, -0.5512999892234802, -0.057652998715639114, -0.7958999872207642, 0.14500999450683594, 0.6101199984550476, 0.10333999991416931, -0.014344999566674232, 0.026505999267101288, 0.3043299913406372, -0.3277300000190735, 0.7206599712371826, 0.3342199921607971, 0.007557900156825781, -0.8323500156402588, -0.4812699854373932, -0.5757099986076355, 0.9949700236320496, -0.42719000577926636, -0.2901099920272827, 0.25694000720977783, 0.3416000008583069, -0.10209000110626221, 0.7502300143241882, 0.12399999797344208, -0.02952600084245205, -0.12689000368118286, 0.4516400098800659, 0.11428000032901764, 0.1747100055217743, -0.23120999336242676, 0.06782899796962738, 0.4153900146484375, -0.40318000316619873, -0.5218200087547302, -0.5089499950408936, 0.02805599942803383, -0.1589599996805191, -0.008019600063562393, -0.6049299836158752, -0.27667000889778137, -0.41694000363349915, 0.2836900055408478, 0.17023999989032745, -0.3884199857711792, 0.06420399993658066, 0.6759399771690369, -0.45489999651908875, 0.6772500276565552, -0.2714900076389313, 0.5424200296401978, -0.13465000689029694, 0.14600999653339386, 0.37551000714302063, -0.05480099841952324, 0.0280930008739233, -0.3259499967098236, -0.09517399966716766, -0.21348999440670013, -0.4140099883079529, 0.4882499873638153, 0.7436800003051758, -0.058802999556064606, -0.22026999294757843, 0.04800700023770332, 0.30906999111175537, -0.6294500231742859, 0.028991999104619026, 0.4622200131416321, -0.12032999843358994, 0.5208799839019775, -0.17878000438213348, 0.19526000320911407, -0.32267001271247864, -0.2707900106906891, -0.037689000368118286, -0.5828499794006348, 0.5147799849510193, 0.5480700135231018, 0.16428999602794647, -0.30757999420166016, -0.3153800070285797, -0.1949699968099594, 
0.31279999017715454, 0.1782200038433075, -0.19068999588489532, 0.013206000439822674, -0.27998000383377075, 0.5352500081062317, -0.23232999444007874, 0.5413500070571899, 0.28909000754356384, 0.05388500168919563, 0.17625999450683594, 0.041508998721838, -0.2603299915790558, 0.8352900147438049, 0.15178999304771423, 0.008725499734282494, -0.08731000125408173, -0.31036999821662903, 0.11530999839305878, -0.14507000148296356, 0.15775999426841736, -0.47551000118255615, -0.09700600057840347, -0.10592000186443329, -0.37130001187324524, -0.4058699905872345, -0.8073499798774719, -0.04373899847269058, -0.1876399964094162, -0.031466998159885406, 0.21727000176906586, 0.16877000033855438, -0.40035000443458557, -0.4037899971008301, 0.4867500066757202, 0.13449999690055847, 0.5153800249099731, -0.05136699974536896, -0.43004998564720154, 0.42715001106262207, 0.15737999975681305, 0.5176399946212769, -0.08111699670553207, -1.0056999921798706, 0.034756001085042953, 0.22604000568389893, 0.3571699857711792, -0.19559000432491302, 0.2821199893951416, -0.08815199881792068, 0.19964000582695007, 0.1694200038909912, 0.0005959700210951269, 0.11840999871492386, -0.711080014705658, 0.12781000137329102, 0.07689099758863449, -0.07357300072908401, 0.15786999464035034, -0.6451900005340576, -0.1377899944782257, -0.3645800054073334, -0.19328999519348145, 0.27366000413894653, 0.22247999906539917, 0.22662000358104706, -0.15535999834537506, 0.22362999618053436, 0.656719982624054, -0.5686299800872803, -0.5442799925804138, 0.09300100058317184, -0.21719999611377716, 0.09474100172519684, 0.057388000190258026, -0.2985199987888336, -0.45228999853134155, -0.4242900013923645, -0.3035599887371063, -0.21050000190734863, -0.560230016708374, 0.016826000064611435, 0.5808500051498413, -0.6389399766921997, -0.2666400074958801, 0.4475499987602234, 0.42384999990463257, -0.45094001293182373, -0.06963499635457993, 0.19949999451637268, 0.06331100314855576, -0.30636000633239746, 0.07595399767160416, -0.24518999457359314, -0.27741000056266785, -0.7692000269889832, 0.2858099937438965, -0.03677000105381012, 0.2200700044631958, -0.6452000141143799, 0.5365700125694275, -0.6712599992752075, 1.2226999998092651, 0.17027999460697174, -0.38016000390052795, 0.3890100121498108, -0.8207100033760071, 0.2682499885559082, 0.6042400002479553, -0.047940999269485474, -0.005558399949222803, 0.27452999353408813, 0.49421000480651855, -0.21174000203609467, 0.39208000898361206, -0.6478899717330933, -0.11922000348567963, 0.04735200107097626, -0.9651399850845337, 0.051566001027822495, -0.750029981136322, -0.2515699863433838, -0.051479000598192215, 0.029405999928712845, -0.838699996471405, 0.47060999274253845, 0.14805999398231506, -0.3137199878692627, 0.14031000435352325, -0.22137999534606934, -0.4631099998950958, -0.10593000054359436, 0.24924999475479126, -0.08078499883413315, 0.39706000685691833, 0.3729400038719177, -0.2102999985218048, -0.16540999710559845, 0.12432000041007996, 0.015189999714493752, -0.17723000049591064, 0.39381998777389526, 0.2529999911785126, -0.14550000429153442, 0.0737529993057251, 0.04734700173139572, -0.606660008430481, -0.26357001066207886, 0.12417999655008316, -0.0927169993519783, 0.8065699934959412, -0.27978000044822693, 0.4281899929046631, -0.6720200181007385, -0.23895999789237976, -0.3457599878311157, 0.6086599826812744, -0.18799999356269836, -0.29082998633384705, 0.4127599895000458, -0.2875100076198578, 0.4300200045108795, -0.4789600074291229, -0.07858599722385406, -0.5447099804878235, 0.47936999797821045, -0.11354000121355057, 
-0.054627999663352966, 0.1561799943447113, -0.034981001168489456, -0.44843998551368713, -0.038593001663684845, 0.24792000651359558, 0.29576998949050903, -0.18182000517845154, 0.5377799868583679, -0.1981399953365326, 0.06227000057697296, 0.17016999423503876, 0.24352000653743744, -0.16453999280929565, 0.36581000685691833], u'thawed': [0.06126299872994423, -0.011748000048100948, 0.4429500102996826, 0.39504000544548035, 0.6784200072288513, -0.10870999842882156, -0.2806299924850464, -0.21965999901294708, -0.2296999990940094, -0.0551070012152195, 0.34391000866889954, -0.3979400098323822, 0.6545500159263611, -0.6696400046348572, -0.960889995098114, 0.5885199904441833, -0.3952600061893463, -0.23934000730514526, -0.4893200099468231, 0.4077399969100952, -0.298799991607666, -0.15623000264167786, 0.5298900008201599, -0.045396000146865845, -0.7493199706077576, 0.04407599940896034, -0.712689995765686, -0.10293000191450119, 0.12817999720573425, -0.6225299835205078, -0.12430000305175781, -0.38593998551368713, 0.2912200093269348, -0.13192999362945557, 0.7986599802970886, 0.47277000546455383, -0.4107399880886078, 0.5659800171852112, -0.44734999537467957, 0.8152599930763245, 0.09382499754428864, -0.02154799923300743, -0.17419999837875366, 0.5715600252151489, 0.0627020001411438, -0.2948800027370453, -0.05150900036096573, 0.5944700241088867, -0.2536099851131439, 0.3602699935436249, -0.34226998686790466, -0.4041000008583069, 0.21116000413894653, -0.4153600037097931, -0.16750000417232513, 0.08199799805879593, 1.2906999588012695, -0.4569700062274933, 0.37494000792503357, 0.27658000588417053, 0.08483599871397018, 0.6088200211524963, -0.17377999424934387, -0.042114999145269394, -0.06853300333023071, -0.18734000623226166, -0.26175999641418457, 0.3568600118160248, 0.050078000873327255, 0.076044000685215, 0.2714900076389313, 0.8108900189399719, -0.30230000615119934, 0.43241000175476074, 0.17635999619960785, -0.08236400038003922, -0.2837600111961365, -0.5864499807357788, -0.5845699906349182, -0.029402999207377434, -0.01707400009036064, 0.08756999671459198, 0.21859000623226166, 0.17264999449253082, 0.4571000039577484, -0.4195599853992462, -0.5734000205993652, -0.1377899944782257, -0.8117899894714355, -0.3451099991798401, -0.44929999113082886, 0.23038999736309052, -0.013631000183522701, 0.3404799997806549, -0.4534499943256378, 0.2967599928379059, 0.2918199896812439, 0.5026900172233582, 0.20512999594211578, 0.4671800136566162, -0.05373299866914749, -0.6695600152015686, -0.33125001192092896, -0.14701999723911285, -0.3821699917316437, 0.0506180003285408, 0.26162999868392944, 0.9065399765968323, -0.2989499866962433, 0.2831900119781494, -0.04067699983716011, 0.04639099910855293, 0.2996000051498413, -0.036382000893354416, -0.42120999097824097, -0.3342199921607971, -0.4315899908542633, -0.19166000187397003, 0.35819000005722046, 0.12570999562740326, 0.09322399646043777, -0.610069990158081, -0.20257000625133514, 0.07930099964141846, -0.0629739984869957, 1.1385999917984009, -0.3531300127506256, -0.08111400157213211, -0.042075999081134796, 0.3891899883747101, -0.5778800249099731, 0.5332000255584717, 0.3835200071334839, 0.155799999833107, -0.690750002861023, -0.5354999899864197, 0.2188899964094162, -0.43518999218940735, -0.5935400128364563, -0.1248600035905838, 0.21028000116348267, -0.19634999334812164, -0.9474200010299683, -0.3085800111293793, -0.22822000086307526, 0.013906000182032585, 0.07495799660682678, 0.07846300303936005, 0.26524001359939575, -0.37024998664855957, -1.082200050354004, 0.1648000031709671, -0.14827999472618103, 
0.21615999937057495, -0.2597000002861023, 0.2031400054693222, 0.1577800065279007, -0.07475200295448303, 0.02699200063943863, 0.3718299865722656, -0.3589099943637848, 0.22062000632286072, -0.33574000000953674, 0.012817000038921833, 1.13919997215271, -0.12134999781847, 0.22231000661849976, -0.7783499956130981, -0.0843140035867691, -0.2193399965763092, 1.0877000093460083, 0.5031599998474121, -0.420960009098053, -0.42056000232696533, 0.6133000254631042, 0.055897001177072525, 0.40821000933647156, 0.09335900098085403, 0.21362000703811646, -0.3487899899482727, -0.4027999937534332, 0.0734580010175705, -0.14673000574111938, -0.29572999477386475, -0.23311999440193176, -0.15094000101089478, 0.878030002117157, -0.30024001002311707, 0.36414000391960144, -0.9360899925231934, 0.2928900122642517, 0.3462199866771698, 0.5331699848175049, -0.4244900047779083, -0.751579999923706, 0.01915000006556511, -0.20452000200748444, 0.7105600237846375, 0.7986099720001221, -0.11322999745607376, -0.06553199887275696, -0.4372299909591675, 0.32811999320983887, 1.103700041770935, 0.03348499909043312, -0.7878900170326233, 0.8014100193977356, 0.4656200110912323, 0.03552199900150299, 0.334199994802475, -0.015731999650597572, -0.04682699963450432, -0.3390499949455261, 0.04741600155830383, 0.525879979133606, -0.4471000134944916, 0.565060019493103, -0.9081400036811829, -0.16804000735282898, 0.43452998995780945, 0.7182499766349792, 0.30581000447273254, -0.32714998722076416, -0.2846899926662445, 0.21472999453544617, -0.6889100074768066, 0.15657000243663788, 0.011517999693751335, -0.5997099876403809, 0.16086000204086304, 0.42142999172210693, -0.13937999308109283, 0.34904998540878296, -0.4698599874973297, -0.12415999919176102, -0.025543000549077988, 0.3982999920845032, 0.3043400049209595, -0.5443000197410583, -0.7260299921035767, -0.24660000205039978, -0.49761998653411865, 0.2847200036048889, -0.02589399926364422, -0.6961299777030945, -0.24341000616550446, 0.3602299988269806, 1.1054999828338623, -0.15158000588417053, -0.6172199845314026, 0.3536800146102905, 0.3177500069141388, 0.17940999567508698, -0.14560000598430634, -0.6632099747657776, -0.40904998779296875, 0.5918800234794617, 0.047207001596689224, 0.21595999598503113, 0.7156299948692322, -0.20282000303268433, 0.7051500082015991, -0.4293600022792816, 0.276309996843338, 0.11821000277996063, -0.35210999846458435, -0.18955999612808228, 0.17149999737739563, -0.14056000113487244, -0.4678399860858917, -0.2698099911212921, 0.9532600045204163, 0.029262999072670937, 0.1981000006198883, -0.14413000643253326, 0.40779998898506165, 0.8464000225067139, -0.31112998723983765, -0.5078099966049194, -0.6825900077819824, -0.19351999461650848, -0.1036899983882904, 0.28685998916625977, -0.46088001132011414, -0.2814599871635437, 0.039701998233795166, 0.3003099858760834, 0.5512300133705139, 0.031244000419974327, -0.03127000108361244, -0.29061999917030334, -0.579069972038269, 0.9102799892425537, 0.2815000116825104, -0.5371000170707703, -0.2607400119304657, 0.30379000306129456, 0.07056999951601028, 0.1452600061893463, -0.18443000316619873], u'dark': [-0.006430199835449457, -0.29673999547958374, 0.3598099946975708, -0.5496199727058411, -0.4007999897003174, 0.010707000270485878, -0.18258999288082123, 0.4269300103187561, 0.4428499937057495, -1.095900058746338, 0.3893499970436096, -0.3082900047302246, -0.32218998670578003, 0.34735000133514404, -0.5285199880599976, -0.09883999824523926, -0.12482000142335892, -0.16506999731063843, -0.06494300067424774, 0.10733000189065933, 0.14128999412059784, 
0.40821999311447144, 0.20243999361991882, 0.5134999752044678, -0.3450999855995178, -0.4000299870967865, 0.8286499977111816, -0.3483099937438965, -0.6065700054168701, 0.3969399929046631, -0.12150000035762787, 0.3126699924468994, -0.5051299929618835, -0.3500399887561798, -0.535860002040863, 0.7549399733543396, -0.0267730001360178, 0.03564799949526787, -0.2649799883365631, 0.1722400039434433, 0.2371399998664856, 0.09150400012731552, -0.09216000139713287, -0.00785870011895895, 0.35822999477386475, -0.04517799988389015, 0.05061100050806999, -0.34292998909950256, -0.5474100112915039, -0.7231400012969971, -0.2214300036430359, 0.15084999799728394, 0.45552998781204224, -0.25635001063346863, 0.18937000632286072, -0.007638799957931042, -0.03780699893832207, 0.004981399979442358, 0.6360700130462646, -0.3111000061035156, -0.4945400059223175, -0.07575800269842148, 0.2338400036096573, 0.1350499987602234, -0.28856000304222107, -0.6375700235366821, 0.04558299854397774, 0.05769500136375427, 0.4739699959754944, -0.2768999934196472, 0.41771000623703003, 0.23792000114917755, 0.021637000143527985, -0.24488000571727753, -0.08017300069332123, 0.015521000139415264, -0.3360700011253357, -0.20192000269889832, 0.4482499957084656, -0.7434499859809875, 0.41345998644828796, 0.22506999969482422, -0.020225999876856804, 0.017268000170588493, 0.14169999957084656, 0.22055000066757202, 0.527649998664856, 0.40904000401496887, -0.012775000184774399, 0.4841800034046173, -0.16042999923229218, -0.10324999690055847, 0.14361999928951263, 0.5545099973678589, -0.576259970664978, 0.6269000172615051, 0.2115900069475174, 0.22982999682426453, 0.9096400141716003, -0.539680004119873, 0.5268300175666809, -0.3124299943447113, -0.364080011844635, 0.42524001002311707, -0.37981998920440674, -0.10621999949216843, 0.2933399975299835, 0.20077000558376312, -0.293720006942749, 0.16130000352859497, -0.032311998307704926, 0.11737000197172165, 0.4671500027179718, -0.12086000293493271, 0.04326999932527542, 0.16584999859333038, -0.058782998472452164, 0.09797800332307816, -0.32227998971939087, -0.7821000218391418, -0.36032000184059143, -0.5391899943351746, -0.17318999767303467, 0.4862000048160553, -0.1389700025320053, 0.20761999487876892, 0.1500999927520752, 0.3674899935722351, -0.04250599816441536, 0.008264199830591679, 0.06353399902582169, 0.1667100042104721, -0.20479999482631683, 0.44850999116897583, -0.07944600284099579, -0.07085099816322327, -0.7239099740982056, 0.45840001106262207, -0.3834399878978729, -0.14330999553203583, 0.4941900074481964, 0.08485600352287292, -0.42160001397132874, 0.21926000714302063, 0.012505999766290188, -0.3238300085067749, -0.20159000158309937, -0.08535200357437134, 0.024484999477863312, -0.1646600067615509, -0.058455001562833786, 0.14899000525474548, -0.4424099922180176, -0.3652400076389313, 0.9471399784088135, -0.718970000743866, 0.27421998977661133, -0.27476999163627625, 0.26440998911857605, -0.053881000727415085, 0.0670820027589798, -0.5259799957275391, 0.4631099998950958, 0.006645900197327137, 0.028147999197244644, 0.19916999340057373, -0.6197800040245056, 0.700439989566803, -0.3044799864292145, -0.3634899854660034, 0.291269987821579, 0.19367000460624695, -0.047150999307632446, 0.13784000277519226, 0.03568900004029274, -0.23523999750614166, -0.10503000020980835, 0.11027999967336655, -0.16072000563144684, -0.7577999830245972, -0.20803000032901764, 0.15349000692367554, 0.05365300178527832, 0.04460600018501282, 0.12321999669075012, -0.3265799880027771, 0.9423800110816956, -0.16637000441551208, -0.1262899935245514, 
0.008988600224256516, -0.24307000637054443, 0.07134299725294113, 0.08668699860572815, 0.15062999725341797, -0.030774999409914017, -0.2918800115585327, -0.8799999952316284, 0.016450999304652214, -0.6094599962234497, 0.3098599910736084, 1.5628000497817993, 0.03918800130486488, 0.02782200090587139, -0.1364700049161911, 0.18960000574588776, -0.4333699941635132, 0.038026001304388046, 0.24018999934196472, -0.4195399880409241, -0.41613999009132385, 0.4534200131893158, -0.1728699952363968, 0.10474999994039536, 0.005872400011867285, 0.08661700040102005, 0.02467699907720089, 1.284999966621399, -0.2966899871826172, 0.5256900191307068, 0.05790000036358833, 0.655239999294281, -0.3423599898815155, -0.11743000149726868, -0.16651000082492828, -0.3552300035953522, -0.21196000277996063, -0.049963999539613724, -0.5621399879455566, -0.47407999634742737, -0.3561899960041046, 0.023097999393939972, -0.455049991607666, -0.2339800000190735, -0.1474599987268448, -0.14464999735355377, 0.04354599863290787, -0.26003000140190125, 0.05458600074052811, -0.8063700199127197, -0.09642300009727478, 0.2941800057888031, -0.03574899956583977, -0.31022998690605164, 0.12393999844789505, -0.8318799734115601, -0.12234000116586685, 0.1498199999332428, -0.154339998960495, -0.09662900120019913, -0.16492000222206116, -0.2287600040435791, 0.23580999672412872, 0.12870000302791595, 0.21390999853610992, 0.3068400025367737, 0.08326999843120575, 0.2797200083732605, -0.25481998920440674, 0.01381400041282177, 0.48532000184059143, 0.5906199812889099, -0.2919999957084656, 0.7098100185394287, -0.43400999903678894, -0.46057000756263733, 0.08205600082874298, -0.2147900015115738, -0.3624800145626068, -0.30656999349594116, 0.40101000666618347, -0.229980006814003, 0.1792300045490265, 0.15717999637126923, -0.018278000876307487, -0.44036999344825745, 0.6362199783325195, -1.118399977684021, 0.13393999636173248, 0.011378999799489975, -0.3214299976825714, -0.293969988822937, 0.524649977684021, -0.3015500009059906, -0.16975000500679016, 0.09484200179576874, 0.5492900013923645, -0.6743599772453308, 0.3968200087547302, 0.16322000324726105, 0.06306800246238708, 0.20015999674797058, 0.6160600185394287, -0.8649899959564209, 0.8141400218009949, -0.044645000249147415, 0.17699000239372253, -0.13683000206947327, 0.5848600268363953, -0.19110000133514404, 0.6656299829483032], u'windblown': [-0.6784999966621399, -0.3687799870967865, -0.3886300027370453, -0.1886100023984909, 0.31325000524520874, -0.3014400005340576, -0.01627199910581112, 0.11138000339269638, 0.3584499955177307, 0.5352200269699097, -0.3655399978160858, 0.06343799829483032, -0.2858699858188629, -0.09372399747371674, -0.04106200113892555, 0.16877000033855438, -0.275409996509552, -0.06213099882006645, -0.03151499852538109, 0.5873100161552429, 0.11240000277757645, -0.24210000038146973, -0.029247000813484192, 0.17059999704360962, -0.04437699913978577, -0.17715999484062195, 0.0205329991877079, 0.2286600023508072, -0.07412000000476837, 0.37345001101493835, 0.027111999690532684, -0.5355100035667419, -0.30970001220703125, -0.26440998911857605, 0.5263100266456604, -0.34220999479293823, -0.20844000577926636, 0.29761001467704773, 0.3133299946784973, 0.4542199969291687, -0.008870299905538559, 0.6283900141716003, 0.48949000239372253, -0.3910999894142151, 0.4995799958705902, -0.040401000529527664, -0.023831000551581383, 0.2080100029706955, -0.017865000292658806, -0.12067999690771103, 0.04219900071620941, -0.31352999806404114, -0.4211600124835968, 0.0006808500038459897, 0.3129499852657318, 0.42563000321388245, 
-0.40724000334739685, -0.3278700113296509, 0.7215099930763245, -0.04631299898028374, 0.22046999633312225, 0.07418099790811539, 0.28940001130104065, -0.22413000464439392, 0.09747499972581863, 0.17946000397205353, -0.2773599922657013, 0.22335000336170197, -0.07601100206375122, -0.013505999930202961, -0.16687999665737152, 0.010324000380933285, -0.11711999773979187, -0.11832000315189362, -0.46595001220703125, 0.09247700124979019, -0.23441000282764435, -0.6036099791526794, 0.5839599967002869, 0.2207999974489212, -0.18708999454975128, 0.4230499863624573, 0.1329900026321411, -0.06918500363826752, -0.5412999987602234, 0.07583600282669067, 0.23765000700950623, 0.17005999386310577, 0.826200008392334, 0.03844200074672699, -0.24708999693393707, -0.18231000006198883, 0.43928998708724976, 0.05671299993991852, -0.44367000460624695, 0.04360499978065491, -0.2540000081062317, 0.11545000225305557, -0.3042300045490265, 0.5459200143814087, 0.845229983329773, 0.294730007648468, 0.11715000122785568, 0.07785800099372864, -0.29019999504089355, 0.3910500109195709, 0.4100300073623657, -0.0023125000298023224, -0.4160799980163574, -0.5337799787521362, -0.002855099970474839, 0.04778600111603737, 0.05695800110697746, 0.03414199873805046, 0.1595900058746338, 0.5709800124168396, 0.8215600252151489, 0.4165799915790558, 0.043542999774217606, 0.3159399926662445, 0.3393099904060364, -0.009845900349318981, -0.40911000967025757, -0.19482000172138214, 0.3368600010871887, -0.038509998470544815, -0.28512999415397644, 0.08299099653959274, 0.2028599977493286, -0.001419799984432757, 0.05783500149846077, 0.6405900120735168, 0.07801800221204758, -0.11212000250816345, 0.12319999933242798, -0.5915600061416626, -0.3061800003051758, 0.35499000549316406, -0.05049800127744675, 0.15233999490737915, 0.14510999619960785, -0.2956799864768982, -0.15676000714302063, -0.10290999710559845, 0.15183000266551971, 0.2580699920654297, 0.2801699936389923, 0.4586600065231323, 0.0034497000742703676, -0.030744999647140503, -0.6129000186920166, -0.30177000164985657, -0.43108001351356506, -0.14788000285625458, -0.560230016708374, -0.11191999912261963, 0.24323000013828278, -0.6041399836540222, 0.4078800082206726, 0.8511800169944763, -0.12392999976873398, 0.027375999838113785, -0.10691999644041061, 0.4600900113582611, 0.6944500207901001, -0.3745400011539459, 0.19208000600337982, 0.07076899707317352, -0.3119100034236908, -0.33070001006126404, -0.24094000458717346, 0.28446000814437866, 0.35690999031066895, -0.4506700038909912, 0.1177000030875206, -0.5991500020027161, 0.05105200037360191, 0.24925999343395233, -0.36107000708580017, -0.4078100025653839, -0.15640999376773834, -0.5233100056648254, 0.12138999998569489, 0.2537499964237213, -0.36375999450683594, 0.592490017414093, 1.0322999954223633, -0.29976001381874084, -0.30886998772621155, 0.30952998995780945, -0.18414999544620514, -0.0817520022392273, -0.22359000146389008, -0.18623000383377075, 0.480430006980896, -0.20512999594211578, 0.13790999352931976, 0.009283799678087234, 0.16624000668525696, 0.461870014667511, -0.627560019493103, -0.3022499978542328, 0.5735899806022644, -0.4293600022792816, -0.06224599853157997, 0.08723899722099304, 0.014762000180780888, -0.23531000316143036, 0.2903999984264374, -0.03256800025701523, -0.21642999351024628, 0.04872399941086769, -0.06904400140047073, -0.03678100183606148, -0.6039100289344788, -0.16360999643802643, 0.4083999991416931, -0.3201900124549866, -0.2187100052833557, -0.12178999930620193, 0.21966999769210815, 0.18030999600887299, 0.22776000201702118, -0.019209999591112137, 
-0.4510200023651123, -0.2682900130748749, 0.46672001481056213, 0.31624001264572144, 0.2392600029706955, -0.00499190017580986, 0.44769999384880066, 0.4035399854183197, -0.1694899946451187, -0.1192300021648407, 0.26684001088142395, -0.15098999440670013, -0.2128400057554245, -0.07685399800539017, 0.020764000713825226, -0.5519199967384338, 0.3690600097179413, -0.13176999986171722, -0.36059001088142395, 0.01704300008714199, -0.3759799897670746, 0.44179001450538635, -0.8906499743461609, 0.12707999348640442, 0.00015298000653274357, 0.00513519998639822, 0.26104000210762024, -0.1388300061225891, 0.2740800082683563, -0.5644999742507935, 0.31648001074790955, -0.0005644600023515522, -0.09897000342607498, 0.24995000660419464, -0.536549985408783, -0.11090999841690063, 0.27156999707221985, 0.03518399968743324, 0.010222000069916248, -0.20814000070095062, 0.1486700028181076, -0.3245599865913391, 0.2594600021839142, -0.09662599861621857, -0.10091999918222427, -0.2995299994945526, -0.11449000239372253, 0.12338999658823013, -0.4600299894809723, 0.5540400147438049, -0.2528499960899353, -0.2887899875640869, 0.33934998512268066, -0.260699987411499, -0.5256999731063843, -0.19620999693870544, -0.27605998516082764, 0.32260000705718994, -0.5428799986839294, -0.0930740013718605, 0.08913899958133698, -0.1263599991798401, 0.282260000705719, -0.29980000853538513, -0.5741099715232849, -0.22639000415802002, -0.0745059996843338, 0.03063499927520752, -0.12138999998569489, 0.8322499990463257, -0.033038001507520676, -0.75941002368927, 0.553600013256073, 0.15445999801158905, 0.3863300085067749, -0.4774799942970276], u'burnt': [-0.1395999938249588, -0.3779500126838684, 0.03679399937391281, 0.20857000350952148, 0.05316000059247017, -0.4946799874305725, 0.4911800026893616, 0.40151000022888184, -0.034272000193595886, -0.38618001341819763, -0.30779001116752625, -0.121629998087883, -0.16349999606609344, 0.03835200145840645, -0.47088000178337097, 0.3637700080871582, -0.07590500265359879, 0.16746999323368073, -0.4123699963092804, -0.25240999460220337, -0.21472999453544617, 0.08051300048828125, 0.4305799901485443, -0.26118001341819763, -0.2652699947357178, -0.40740999579429626, -0.594290018081665, -0.2709699869155884, 0.03906499966979027, 0.8243799805641174, 0.2565000057220459, 0.10339000076055527, -0.7672600150108337, -0.025746000930666924, 0.30406999588012695, 0.4176599979400635, -0.31226998567581177, 0.13138000667095184, 0.26069000363349915, 0.08445700258016586, 0.23441000282764435, -0.5785800218582153, -0.1934799998998642, -0.3078100085258484, 0.4964199960231781, 0.32907000184059143, -0.10959000140428543, 0.02608500048518181, -0.016846999526023865, -0.6817799806594849, -0.05265500023961067, -0.10468000173568726, 0.30994999408721924, -0.29159998893737793, -0.031075000762939453, 0.17351000010967255, -0.2651199996471405, -0.12292999774217606, 0.44905999302864075, -0.22964000701904297, -0.4843299984931946, 0.00692680012434721, -0.3287400007247925, 0.06081400066614151, -0.014119000174105167, -0.7185699939727783, -0.13541999459266663, -0.32315999269485474, -0.3553699851036072, 0.1496099978685379, -0.14587999880313873, -0.09387200325727463, -0.2618100047111511, 0.5242400169372559, -0.05548899993300438, -0.15248000621795654, 0.2123900055885315, -0.4629000127315521, -0.05418600142002106, 0.6131700277328491, 0.005441099870949984, -0.08571100234985352, 0.03816799819469452, 0.33351999521255493, -0.29534000158309937, -0.12974999845027924, -0.23266999423503876, 0.1666399985551834, 0.38095998764038086, 0.4537299871444702, 0.3929100036621094, 
-0.6458699703216553, 0.522409975528717, 0.4556800127029419, 0.325300008058548, 0.26513001322746277, 0.12949000298976898, 0.05751100182533264, -0.26396000385284424, 0.020468000322580338, -0.1397700011730194, -0.3006500005722046, -0.012822000309824944, 0.21536000072956085, 0.20347000658512115, 0.0908450037240982, 0.592199981212616, 0.06492099910974503, -0.632830023765564, -0.4172399938106537, -0.39250001311302185, -0.270220011472702, 0.11138000339269638, -0.5389999747276306, 0.07124099880456924, -0.3933599889278412, -0.6268500089645386, 0.24897000193595886, 0.3274399936199188, -0.49184998869895935, -0.03206399828195572, -0.9390100240707397, -0.37692001461982727, 0.7202500104904175, -0.36548998951911926, -0.242249995470047, -0.4085099995136261, -0.13179999589920044, -0.29896000027656555, 0.127360001206398, -0.08800999820232391, 0.9495599865913391, 0.31926000118255615, 0.1873999983072281, 0.15139000117778778, 0.25516998767852783, 0.21699999272823334, -0.03201499953866005, 0.4637799859046936, -0.05855200067162514, 0.38008999824523926, -0.2506600022315979, -0.45462000370025635, -0.15384000539779663, -0.24086999893188477, -0.07436800003051758, -0.014328000135719776, 0.7954800128936768, -0.27215999364852905, -0.43698999285697937, 0.44453001022338867, 0.49167001247406006, -0.002839999971911311, -0.4894300103187561, 0.08776000142097473, 0.07592500001192093, 0.6431000232696533, -0.02678300067782402, 0.19637000560760498, -0.3343200087547302, 0.1793700009584427, -0.35051000118255615, -0.3857100009918213, 0.05332399904727936, -0.2798199951648712, 0.2988300025463104, 0.15220999717712402, 0.04979899898171425, -0.127360001206398, -0.3549500107765198, -0.09826800227165222, 0.2406100034713745, 0.697700023651123, -0.1064700037240982, 0.28540000319480896, 0.25012001395225525, -0.16482000052928925, -0.08762100338935852, 0.6693099737167358, -0.7938899993896484, -0.6200199723243713, 0.06082899868488312, 0.011049999855458736, -0.4408400058746338, 0.6004700064659119, -0.486050009727478, 0.3049600124359131, -0.07232800126075745, 0.39399999380111694, 0.02215299941599369, 0.26309001445770264, -0.45636001229286194, 0.02410000003874302, -0.1339000016450882, 0.3269599974155426, -0.25911998748779297, -0.19785000383853912, -0.0858670026063919, -0.12522999942302704, 0.3022800087928772, -0.15112000703811646, -0.3653700053691864, -0.21557000279426575, 0.10475999861955643, 0.05642300099134445, -0.6570900082588196, 0.8362399935722351, 0.01380200032144785, -0.4388299882411957, -0.5738700032234192, -0.1440100073814392, 0.24431000649929047, -0.7420099973678589, -0.21671999990940094, 0.022734999656677246, 0.2606300115585327, 0.7060700058937073, 0.09282500296831131, 0.24018000066280365, 0.22745999693870544, 0.10931999981403351, -0.16662000119686127, -0.24648000299930573, -0.16058999300003052, -0.226610004901886, 0.08174099773168564, -0.3810099959373474, -0.16755999624729156, -0.6086599826812744, 0.06028600037097931, -0.005377199966460466, -0.27713000774383545, -0.6463900208473206, 0.22337999939918518, 0.2520900070667267, 0.2741999924182892, 0.4067699909210205, 0.25102001428604126, -1.1470999717712402, -0.4344399869441986, 0.11489000171422958, 0.09658099710941315, -0.035326000303030014, -0.3669799864292145, -0.33500999212265015, 0.43288999795913696, 0.5572999715805054, 0.6024199724197388, 0.21943999826908112, -0.012165999971330166, 0.2773999869823456, -0.46439000964164734, -0.05963199958205223, -0.7529399991035461, 0.8259099721908569, 0.03774699941277504, -0.37422001361846924, 0.4659000039100647, 0.23315000534057617, 
-0.47822999954223633, 0.16674000024795532, -0.027256999164819717, -0.6762700080871582, 0.427480012178421, 0.2614699900150299, -0.6475899815559387, -0.32265999913215637, -0.24184000492095947, -0.05501699820160866, 0.2652699947357178, -0.34259000420570374, -0.44512999057769775, 0.22660000622272491, 0.2090200036764145, -0.3491300046443939, -0.12812000513076782, -1.1017999649047852, -0.5867199897766113, -0.5342400074005127, 0.3982999920845032, -0.25995999574661255, -0.2944599986076355, -0.21163000166416168, -0.7803000211715698, 0.017774999141693115, 0.9007200002670288, 0.04037899896502495, -0.6590700149536133, 0.5148900151252747, 0.343860000371933, -0.17776000499725342, -0.5007500052452087, 0.0728909969329834, 0.10145000368356705, -0.40865999460220337, -0.20137999951839447, 0.23899999260902405, 0.1556600034236908, -0.14322000741958618, 0.1488099992275238], u'molten': [0.26743999123573303, -0.05078599974513054, -0.01626800000667572, -0.33090999722480774, -0.19086000323295593, -0.438510000705719, 0.6675199866294861, -0.5804799795150757, 0.534280002117157, -0.4566600024700165, -0.18286000192165375, -0.4072900116443634, -0.5690699815750122, -0.4372200071811676, -0.03214399889111519, -0.03664499893784523, -0.7534400224685669, 0.6807600259780884, 0.42357000708580017, 0.37452998757362366, 0.134210005402565, 0.1582300066947937, -0.41324999928474426, 0.5585100054740906, 0.19399000704288483, -0.6327400207519531, 0.08180099725723267, 0.6266000270843506, -0.11209999769926071, -0.4770900011062622, -0.14162999391555786, 0.06255599856376648, -0.42155998945236206, 0.03899100050330162, 0.5457299947738647, 0.01333799958229065, -0.42605000734329224, 0.2916499972343445, 0.7349200248718262, 0.7406700253486633, -0.32732999324798584, -0.018915999680757523, 0.1348399966955185, 0.05515199899673462, -0.19288000464439392, -0.061482999473810196, -0.1386999934911728, -0.032106999307870865, 0.15553000569343567, -0.41150999069213867, -0.21484999358654022, 0.39473000168800354, -0.32927000522613525, 0.3423900008201599, 0.2733199894428253, 0.057992998510599136, 0.035962000489234924, -0.02037299983203411, 0.5286399722099304, 0.32969000935554504, -0.1469999998807907, 0.4778999984264374, 0.6177800297737122, 0.24221999943256378, 0.6678699851036072, 0.3757700026035309, -0.006961499806493521, 0.31367000937461853, -0.49869999289512634, 0.7005699872970581, 0.31103000044822693, -0.13636000454425812, 0.3759100139141083, 0.4645499885082245, 0.00397690013051033, 0.12370000034570694, -0.018062999472022057, -0.3503200113773346, -0.09459500014781952, -0.5162799954414368, 0.13199999928474426, -0.24573999643325806, -0.6318399906158447, 0.3234100043773651, 0.3676300048828125, 0.1866600066423416, 0.019884999841451645, -0.12319999933242798, -0.15286000072956085, 0.03585200011730194, -0.1716800034046173, 0.4508099853992462, 0.5517799854278564, -0.4361799955368042, -0.628250002861023, -0.15894000232219696, -0.24706000089645386, 0.23089000582695007, 0.2337000072002411, 0.5327399969100952, -0.2677600085735321, 0.05579699948430061, 0.16791999340057373, -0.2053000032901764, 0.5417799949645996, 0.16954000294208527, -0.048319000750780106, 0.4490399956703186, -0.3912599980831146, -0.028063999488949776, 0.3186500072479248, 0.5245299935340881, -0.29377999901771545, -0.5509300231933594, -0.18982000648975372, 0.17125999927520752, -0.4790099859237671, -0.3449999988079071, 0.18901999294757843, 0.26728999614715576, 0.054648999124765396, -0.37487998604774475, -0.47505998611450195, 0.48853999376296997, -0.0302520003169775, -0.3919599950313568, 0.14316000044345856, 
0.08356700092554092, -0.47846001386642456, -0.5074499845504761, 0.11992999911308289, 1.000599980354309, 0.4051100015640259, 0.40516000986099243, 0.3942300081253052, 0.30601999163627625, -0.9158700108528137, 0.3053300082683563, 0.17872999608516693, 0.4162899851799011, 0.154229998588562, -0.6669300198554993, -0.4237399995326996, -0.5478000044822693, 0.21055999398231506, -0.18192000687122345, -0.019710000604391098, 0.03861600160598755, 0.29284998774528503, -1.0276999473571777, 0.4342600107192993, 0.04353199899196625, -0.37894999980926514, 0.09578700363636017, 0.7729200124740601, 0.28349998593330383, 0.3503499925136566, -0.4422999918460846, 0.11913999915122986, -0.3984600007534027, -0.3614400029182434, -0.7771199941635132, -0.14053000509738922, 0.055038001388311386, 0.11003000289201736, -0.5993899703025818, 0.4090299904346466, 0.00959280040115118, 0.021455999463796616, -0.5854300260543823, 0.27140000462532043, 0.7704300284385681, 0.4434100091457367, -0.5170300006866455, 0.1493300050497055, -0.6405500173568726, -0.3364199995994568, 0.13902999460697174, 0.5655500292778015, -0.45166000723838806, 0.44179001450538635, -0.021035000681877136, -6.371999916154891e-05, -0.1615999937057495, -0.18333999812602997, -0.3912299871444702, 0.487309992313385, 0.3737100064754486, -0.10721000283956528, -0.9548699855804443, 0.51214998960495, 0.4669100046157837, -0.14293000102043152, 0.13323000073432922, 0.48319000005722046, -0.01711600087583065, 0.07153599709272385, 0.25859999656677246, -0.03148699924349785, 0.12672999501228333, 0.1788100004196167, -0.7044699788093567, 0.1522199958562851, 0.14419999718666077, -0.4091300070285797, -0.04758499935269356, 0.6173499822616577, -0.27430999279022217, -0.39250001311302185, 0.16308000683784485, 0.08530700206756592, 0.6019300222396851, 0.05649999901652336, -0.7346900105476379, -0.31036001443862915, 1.0703999996185303, 0.2526400089263916, -0.32245001196861267, 0.24417999386787415, 0.22304999828338623, -0.507610023021698, 0.5091500282287598, 0.5766100287437439, -0.42660000920295715, -0.26693999767303467, -0.017798999324440956, -0.47220999002456665, -0.11085999757051468, -0.07701200246810913, 0.18238000571727753, 0.011464999988675117, -0.6971099972724915, 0.8195800185203552, -0.4632200002670288, 0.015571000054478645, -0.2634100019931793, 0.3297699987888336, -0.2592099905014038, -0.10824000090360641, -0.18448999524116516, -0.3093299865722656, -0.05279500037431717, -0.6347000002861023, -0.3909600079059601, -0.26982998847961426, -0.3218500018119812, 0.1950100064277649, -0.387719988822937, -0.20833000540733337, 0.09083399921655655, -0.3693099915981293, -0.5053099989891052, -0.11911000311374664, -0.42590999603271484, -0.23038999736309052, -0.11371999979019165, 0.05080199986696243, -0.5235000252723694, -0.409960001707077, 0.03801700100302696, 0.12080000340938568, -0.1473899930715561, 0.08738499879837036, -0.16107000410556793, 0.25633999705314636, 0.4212999939918518, -0.34773001074790955, 0.3668299913406372, -0.0107829999178648, -0.13484999537467957, -0.573140025138855, -0.12902000546455383, 0.4590499997138977, 0.8452100157737732, -0.35499998927116394, 0.4875899851322174, -0.008892600424587727, -0.3656199872493744, -1.30840003490448, -0.26719000935554504, -0.27268001437187195, 0.10892999917268753, -0.3842400014400482, 0.25450000166893005, 0.21317000687122345, 0.22848999500274658, -0.7333400249481201, 0.10931000113487244, 0.41389000415802, 0.31452998518943787, -0.6898999810218811, 0.21085000038146973, 0.6349200010299683, 1.261299967765808, 0.9689000248908997, -0.44773000478744507, 
0.0034485000651329756, -0.1763100028038025, -0.5613499879837036, -0.12978999316692352], u'eroded': [-0.33803999423980713, -0.1598999947309494, -0.5300300121307373, 0.22853000462055206, 0.21710999310016632, 0.06877899914979935, -0.05456399917602539, 0.3486500084400177, 0.44416001439094543, -1.4106999635696411, -0.2730199992656708, -0.4899500012397766, 0.149399995803833, -0.03136099874973297, -0.8201000094413757, -0.6386600136756897, 0.28784000873565674, 0.35638999938964844, 0.5166800022125244, -0.01256600022315979, -0.15264999866485596, 0.06807799637317657, 0.07994099706411362, -0.5750100016593933, 0.03626900166273117, -0.17527000606060028, 0.1578799933195114, 0.07037000358104706, -0.26291000843048096, 0.7231299877166748, 0.4566799998283386, 0.2886199951171875, -0.3711400032043457, -0.18904000520706177, 0.04527600109577179, -0.13966000080108643, -0.2968299984931946, -0.10995999723672867, 0.949999988079071, 0.3154500126838684, 0.3929100036621094, 0.2783600091934204, 0.12737999856472015, -0.10781999677419662, -0.15815000236034393, 0.3454500138759613, 0.08948499709367752, 0.26249998807907104, 0.12700000405311584, 0.06904300302267075, -0.02190300077199936, -0.4632500112056732, -0.21570999920368195, 0.2579199969768524, 0.6104599833488464, -0.290010005235672, 0.0728989988565445, -0.17464999854564667, -0.008019199594855309, 0.6942200064659119, 0.7107999920845032, 0.3650299906730652, 0.24192999303340912, 0.1755100041627884, -0.15068000555038452, 0.17980000376701355, -0.07671400159597397, 0.10774999856948853, -0.031824998557567596, 0.4555000066757202, -0.42636001110076904, 0.32986000180244446, 0.1316699981689453, 0.3041599988937378, 0.6256399750709534, -0.17205999791622162, 0.09900599718093872, -0.9710900187492371, -0.30737999081611633, -0.06361400336027145, -0.532289981842041, 0.14240999519824982, 0.0839489996433258, 0.40692999958992004, 0.2558799982070923, -0.054625000804662704, 0.04394499957561493, -0.25238001346588135, 0.2053699940443039, 0.4258500039577484, -0.007067500147968531, 0.4360699951648712, 0.4234299957752228, 0.5370299816131592, -0.5007500052452087, -0.17767000198364258, 0.4005799889564514, 0.08672100305557251, 0.11649999767541885, 0.8787999749183655, -0.07628799974918365, 0.16582000255584717, -0.477620005607605, -0.3772999942302704, 0.05459799990057945, 0.12110999971628189, -0.13294999301433563, 0.22870999574661255, 0.4131700098514557, -0.758620023727417, -0.028085999190807343, -0.6138100028038025, 0.11477000266313553, -0.7695599794387817, -0.1324400007724762, 0.292059987783432, 0.42640000581741333, -0.10526999831199646, -0.6017500162124634, -0.6510599851608276, 0.42322999238967896, -0.3693299889564514, -0.3371700048446655, 0.2910900115966797, 0.1129399985074997, 0.2966200113296509, -0.4468500018119812, 0.17493000626564026, -0.1616699993610382, -0.0335719995200634, -0.7759900093078613, 0.620140016078949, 0.43125998973846436, 0.04742300137877464, -0.011471999809145927, -0.17307999730110168, 0.2851499915122986, 0.19343000650405884, 0.5342900156974792, 0.2708500027656555, 0.06671199947595596, -0.13736000657081604, 0.15240000188350677, 0.21458999812602997, -0.054315000772476196, -0.22899000346660614, -0.1024399995803833, 0.3128899931907654, 0.2744799852371216, -0.5304200053215027, 0.3950600028038025, -0.164900004863739, -0.30807000398635864, 0.250110000371933, -0.11178000271320343, 0.6104300022125244, 0.3297500014305115, 0.04471899941563606, 0.21852000057697296, 0.45298999547958374, -0.6917300224304199, -0.55663001537323, -0.13947999477386475, 0.8727800250053406, 0.919160008430481, 
-0.6633599996566772, -0.1927500069141388, -0.4451499879360199, -0.1324000060558319, 0.010848999954760075, -0.23557999730110168, 0.6976799964904785, 0.14975999295711517, -0.02438800036907196, -0.06309100240468979, 0.02449999935925007, 0.3568899929523468, 0.6155400276184082, 0.5848199725151062, 0.263590008020401, -0.16473999619483948, 0.09889499843120575, -0.13628000020980835, -0.2745000123977661, -0.039381999522447586, -0.06242400035262108, 0.39546999335289, 0.09786300361156464, -0.297789990901947, 0.2507700026035309, 0.27932000160217285, 0.1910099983215332, 0.10180000215768814, -0.8105199933052063, 0.3867500126361847, 0.23002000153064728, -0.009579000063240528, -0.5138700008392334, 0.6064599752426147, 0.05558900162577629, 0.3738499879837036, 0.035346999764442444, -0.3410399854183197, 0.14765000343322754, -0.2021300047636032, -0.4277600049972534, 0.15469999611377716, -0.7129200100898743, -0.6703500151634216, 0.737779974937439, 0.07442200183868408, 0.2961899936199188, 0.18289999663829803, -0.20559999346733093, 0.3344700038433075, 0.17316000163555145, 0.4097900092601776, -0.0611019991338253, 0.2814599871635437, 0.11838000267744064, 0.2921200096607208, 0.02401600033044815, -0.038743000477552414, -0.3286600112915039, 0.20960000157356262, 0.18544000387191772, 0.0015338000375777483, 0.2503199875354767, 0.15931999683380127, 0.03529300168156624, 0.44727998971939087, -0.1653899997472763, -0.16401000320911407, 0.09826499968767166, -0.5358700156211853, -0.3684000074863434, 0.35089999437332153, -0.5936300158500671, -0.2723200023174286, -0.1544100046157837, -0.06629099696874619, 0.32436999678611755, 0.6347299814224243, -0.5773199796676636, -1.7172000408172607, -0.2309200018644333, -0.1432799994945526, -0.3713200092315674, 0.11671999841928482, 0.41558998823165894, 0.006935399957001209, -0.4165700078010559, 0.31672000885009766, -0.6593300104141235, -0.4464400112628937, -0.16656999289989471, 0.11445999890565872, 0.41280001401901245, -0.28060001134872437, 0.03773000091314316, -0.4430699944496155, 0.3576599955558777, -0.4757100045681, 0.41012999415397644, 0.37040001153945923, -0.2504099905490875, 0.4719800055027008, -0.18429000675678253, -0.01970200054347515, -0.35589998960494995, -0.13479000329971313, 0.07513400167226791, -0.2583500146865845, -0.20747999846935272, 0.185139998793602, 0.21254999935626984, 0.0046732001937925816, -0.33305999636650085, 0.20666000247001648, -0.07492999732494354, -0.02427699975669384, 0.2690500020980835, -0.2593599855899811, -0.5348700284957886, -0.03410699963569641, -0.5046399831771851, 0.0415319986641407, 0.3480899930000305, 0.8214200139045715, -0.18751999735832214, 0.24758000671863556, 0.02740499936044216, 0.045292001217603683, 0.18400000035762787, -0.23693999648094177, 0.0800660029053688, -0.1890999972820282, -0.38335999846458435, -0.315420001745224, -1.2335000038146973], u'frayed': [0.4985699951648712, -0.35982000827789307, 0.09905900061130524, -0.13928000628948212, -0.2373500019311905, -0.0006950899842195213, -0.6273599863052368, -0.3200699985027313, 0.8954799771308899, -0.610450029373169, -0.15892000496387482, 0.6428700089454651, 0.3364599943161011, -0.3522700071334839, -0.6756299734115601, 0.21060000360012054, -0.21449999511241913, 0.13184000551700592, -0.6423799991607666, 0.3670800030231476, 0.020860999822616577, -0.3649100065231323, 0.11204999685287476, -0.47258999943733215, -0.21476000547409058, -0.02854900062084198, -0.017318999394774437, -0.27504000067710876, 0.08056499809026718, 0.8513000011444092, 0.11287999898195267, -0.10153999924659729, -0.35054999589920044, 
0.3841800093650818, -0.06317099928855896, 0.24679000675678253, -0.23829999566078186, -0.3149699866771698, -0.04298299923539162, 0.5946400165557861, 0.1189500018954277, -0.3170900046825409, -0.3908500075340271, -0.649869978427887, -0.0394739992916584, 0.2861199975013733, 0.042413998395204544, -0.20330999791622162, -1.1389000415802002, -0.10163000226020813, 0.23624999821186066, -0.5673999786376953, 0.2611300051212311, -0.2934899926185608, 0.3621799945831299, -0.1324000060558319, -0.21417999267578125, -0.17181000113487244, -0.5316299796104431, 0.4218299984931946, 1.0823999643325806, -0.13042999804019928, 0.18294000625610352, -0.3547399938106537, 0.2354699969291687, 0.16272999346256256, 0.34637001156806946, 0.8140199780464172, 0.8550300002098083, 0.023905999958515167, 0.12467999756336212, -0.15078000724315643, -0.027132000774145126, 0.32196998596191406, 0.45493000745773315, -0.002639699960127473, -0.6144800186157227, -0.15723000466823578, -0.632420003414154, -0.32468000054359436, 0.158160001039505, -0.3479900062084198, 0.04448999837040901, -0.08991800248622894, 0.06283500045537949, 0.0665619969367981, -0.0892229974269867, -0.4848400056362152, -0.5110899806022644, 0.3342199921607971, -0.043209999799728394, 1.105299949645996, 0.4360100030899048, 0.03560600057244301, -0.1575700044631958, 0.2241699993610382, 0.6815699934959412, 0.7527899742126465, -0.4514800012111664, 0.3717299997806549, 0.07246600091457367, -0.4369699954986572, -0.4151900112628937, 0.44023001194000244, 0.09925100207328796, -0.12081000208854675, 0.11412999778985977, 0.3607099950313568, 0.10723999887704849, -0.2626599967479706, -0.21059000492095947, 0.23994000256061554, -0.4844900071620941, -0.6505299806594849, -0.15137000381946564, -0.1316400021314621, 0.1564600020647049, 0.4738300144672394, 0.16107000410556793, -0.29253000020980835, 0.345660001039505, -0.5664399862289429, 0.29006001353263855, 0.43147000670433044, 0.08076799660921097, 0.5707899928092957, -0.7696999907493591, -0.30048999190330505, 0.2880299985408783, 0.0721379965543747, -0.10016000270843506, 0.013388000428676605, 0.45596998929977417, 0.4965499937534332, -0.3016600012779236, -0.4675000011920929, -0.16157999634742737, -0.1678600013256073, 0.5194799900054932, -0.6395900249481201, -0.4484800100326538, 0.296970009803772, 0.1783200055360794, 0.28014999628067017, 0.45875999331474304, 0.4641999900341034, 0.09476400166749954, 0.14962999522686005, 0.47683998942375183, -0.3094399869441986, -0.3597100079059601, -0.11022000014781952, -0.3031800091266632, -0.04691300168633461, -0.26337000727653503, -0.4421199858188629, 0.19720999896526337, -0.18267999589443207, 0.0851299986243248, 0.8127300143241882, -0.07259199768304825, -0.30601000785827637, -0.09435100108385086, -0.05930100008845329, 0.49083998799324036, -0.05185500159859657, -0.5850800275802612, -0.8229299783706665, 0.04835300147533417, 0.08084599673748016, -0.11485999822616577, 0.2565299868583679, -0.5560600161552429, 0.196260005235672, -0.044491998851299286, 0.43911001086235046, -0.005695300176739693, 0.5255299806594849, 0.015527999959886074, -0.2753300070762634, 0.28154999017715454, 0.1482899934053421, -0.6342999935150146, -0.11812999844551086, 0.1641400009393692, 0.426939994096756, 0.1800599992275238, 0.02164500020444393, -0.4811500012874603, 0.09712100028991699, -0.11676999926567078, -0.4687100052833557, -0.3425599932670593, -0.14601999521255493, -0.33757999539375305, 0.04629499837756157, -0.06964100152254105, 0.04483100026845932, -0.03829900175333023, -0.02113400027155876, 0.06526199728250504, 0.6550300121307373, 
1.236299991607666, 0.020493000745773315, 0.12519000470638275, -0.21811999380588531, 0.48690998554229736, 0.04274599999189377, -0.46592000126838684, 0.7511399984359741, 0.41637998819351196, 0.8958799839019775, 0.5332900285720825, 0.04934199899435043, 0.36980998516082764, 0.046810001134872437, 1.0951999425888062, -0.4476799964904785, -0.4582599997520447, 0.7959499955177307, 1.054900050163269, 0.06120000034570694, 0.08864299952983856, 0.12020000070333481, 0.4909000098705292, -0.4501799941062927, -0.5147500038146973, -0.5174700021743774, -0.5940200090408325, -0.09696099907159805, 0.6264700293540955, 0.05069899931550026, 0.24413999915122986, 0.018556000664830208, -0.075033999979496, -0.07788799703121185, 0.5841900110244751, 0.1487099975347519, -0.19461999833583832, -0.3712800145149231, 0.2448199987411499, -0.1808599978685379, -0.11736000329256058, 0.45781999826431274, -0.5297499895095825, -0.18875999748706818, -0.16338999569416046, -0.03341900184750557, 0.9585199952125549, -0.43494999408721924, 0.278219997882843, -0.3487499952316284, 0.7790700197219849, -0.4315800070762634, 0.12961000204086304, 0.39125001430511475, 0.05401400104165077, 0.7677199840545654, 0.9515500068664551, 0.13721999526023865, 0.24654999375343323, 0.1629599928855896, -0.4429300129413605, 0.4758000075817108, 0.09785500168800354, 0.624779999256134, -0.09265299886465073, -0.1905599981546402, -0.2671799957752228, 0.04347199946641922, -0.12801000475883484, 0.18365000188350677, 0.046921998262405396, -0.5202000141143799, -0.7932900190353394, 0.055438000708818436, 0.3329299986362457, 0.09441500157117844, 0.42504000663757324, 0.35558000206947327, 0.8998600244522095, 0.0332150012254715, -0.007680600043386221, -0.2853899896144867, -0.24544000625610352, -0.5501199960708618, 0.20913000404834747, 0.8054199814796448, -0.03393099829554558, -0.21084000170230865, 0.0975790023803711, -0.31419000029563904, -0.19551999866962433, 0.18595999479293823, -0.1964299976825714, -0.3499799966812134, 0.23183000087738037, -0.10822000354528427, -0.09093700349330902, -0.3429499864578247], u'blunt': [-0.3631100058555603, -0.02821500040590763, -0.05297200009226799, -0.30504998564720154, -0.19704000651836395, -0.12636999785900116, -0.40564000606536865, 0.029764000326395035, -0.03670400008559227, -0.6859800219535828, 0.1026500016450882, 0.2584500014781952, -0.04288199916481972, 0.21788999438285828, 0.02609499916434288, 0.5343700051307678, -0.028496000915765762, 0.4661099910736084, -0.02459299936890602, -0.1261499971151352, 0.5105800032615662, 0.14636999368667603, 0.023440999910235405, 0.30351999402046204, -0.5225899815559387, 0.03655799850821495, 0.14312000572681427, -0.3660700023174286, -0.015185000374913216, 0.13687999546527863, -0.7750300168991089, 0.1893800050020218, -0.7414299845695496, -0.19657999277114868, -0.3639799952507019, 0.3185099959373474, -0.05232999846339226, -0.3081899881362915, -0.008024699985980988, -0.746399998664856, 0.09208100289106369, 0.33755001425743103, 0.6531500220298767, 0.026196999475359917, -0.2524600028991699, 0.35833999514579773, -0.6065700054168701, -0.48017001152038574, -0.18690000474452972, 0.1856199949979782, 0.10518000274896622, 0.13107000291347504, 0.21789999306201935, -0.0588579997420311, -0.034846000373363495, -0.5738999843597412, -1.0343999862670898, 0.12255000323057175, 0.2968200147151947, 0.22743000090122223, 0.47777000069618225, -0.3382500112056732, 0.2839899957180023, -0.17072999477386475, 0.10040999948978424, -0.863099992275238, 0.31147000193595886, -0.15625, 0.3400900065898895, -0.2464500069618225, 
-0.42434000968933105, -0.27011001110076904, 0.36858001351356506, 0.025784000754356384, 0.7416499853134155, -0.42737001180648804, -0.2524600028991699, 0.26855000853538513, -0.06645199656486511, -0.05211399868130684, -0.1820400059223175, 0.38016998767852783, 0.41471999883651733, -0.3730199933052063, -0.5072699785232544, 0.05014999955892563, -0.05375400185585022, 0.5562999844551086, 0.08755200356245041, 0.47044000029563904, 0.11518999934196472, 0.5519700050354004, -0.21765999495983124, -0.2810499966144562, 0.5241600275039673, -0.28902000188827515, -0.18362000584602356, -0.12325000017881393, 0.3071500062942505, 0.04806400090456009, -0.12279000133275986, -0.3120799958705902, -0.07315900176763535, 0.4140999913215637, 0.3135499954223633, 0.17139999568462372, -0.40821000933647156, -0.4743199944496155, 0.36254000663757324, -0.23210999369621277, 0.1666399985551834, 0.45302000641822815, 0.42129001021385193, 0.27522000670433044, 0.24770000576972961, 0.19160999357700348, -0.2878299951553345, -0.012233000248670578, 0.40257999300956726, -0.6891400218009949, -0.1177700012922287, -0.361160010099411, -0.7470399737358093, -0.2471799999475479, -0.5379999876022339, 0.16047999262809753, -0.24987000226974487, -0.2553099989891052, 0.09522099792957306, -0.3463500142097473, 0.09686499834060669, 0.31949999928474426, -0.3543899953365326, -0.3488500118255615, -0.19569000601768494, 0.29517999291419983, -0.13912999629974365, 0.11986000090837479, 0.6121799945831299, 0.09409599751234055, 0.1229500025510788, 0.325980007648468, 0.07930400222539902, -0.022960999980568886, -0.2468000054359436, 0.19825999438762665, 0.3978999853134155, -0.3825100064277649, 0.11778999865055084, -0.15715999901294708, 0.156810000538826, -0.24788999557495117, 0.019481999799609184, 0.32495999336242676, 0.08510900288820267, 0.1289999932050705, 0.23473000526428223, -0.2313700020313263, 0.18366000056266785, 0.16269999742507935, 0.0021114000119268894, 0.2302599996328354, 0.3736799955368042, 0.08977100253105164, 0.2789900004863739, 0.36757001280784607, -0.25982001423835754, -0.17725999653339386, 0.07383900135755539, -0.4758000075817108, -0.01107800006866455, 0.1014299988746643, -0.4998199939727783, -0.3458699882030487, -0.1385599970817566, -0.2471199929714203, -0.2599399983882904, -0.21389999985694885, 0.6347100138664246, 0.11202999949455261, -0.32389000058174133, -0.8816999793052673, -0.0798799991607666, 0.2558099925518036, -0.2759599983692169, -0.3861500024795532, -0.30226999521255493, 0.31332001090049744, -0.478769987821579, -0.052073001861572266, -0.17986999452114105, -0.20730000734329224, 0.1569100022315979, 0.6302300095558167, 0.03109700046479702, 0.5756199955940247, 0.0867139995098114, -0.08522500097751617, 0.12101999670267105, -0.2575100064277649, -0.018647000193595886, -0.050606999546289444, 0.011490000411868095, 0.053939998149871826, -0.6297799944877625, 0.46358001232147217, -0.05389099940657616, 0.5002800226211548, -0.11364000290632248, 0.4948500096797943, 0.11918999999761581, 0.007191100157797337, 0.35503000020980835, 0.1739400029182434, 0.06898800283670425, -0.39996999502182007, 0.17273999750614166, -0.365229994058609, -0.7182999849319458, -0.24556000530719757, -0.06999599933624268, 0.49289000034332275, 0.2780599892139435, 0.26715001463890076, -0.47624000906944275, 0.49873998761177063, -0.2928900122642517, -0.3445099890232086, -0.03110400028526783, -0.5046399831771851, 0.13553999364376068, -0.28497999906539917, 0.1424199938774109, -0.7255600094795227, -1.0995999574661255, -0.0968720018863678, 0.0020365999080240726, 0.039987001568078995, 
-0.4248200058937073, 0.29256001114845276, -0.1780800074338913, -0.08570399880409241, 0.012474999763071537, -0.06874900311231613, 0.11331000179052353, -0.02471099980175495, 0.22759999334812164, -0.3424000144004822, -0.3718400001525879, 0.5058199763298035, 0.2569800019264221, -0.22262999415397644, 0.03892600163817406, -0.43946999311447144, -0.29111000895500183, 0.05522200092673302, -0.017023999243974686, -0.048666998744010925, -0.5784800052642822, -0.12547999620437622, -0.3429499864578247, 0.08221899718046188, -0.9366599917411804, 0.6605700254440308, -0.41005000472068787, -0.07128199934959412, 0.16095000505447388, 0.16674000024795532, 0.10976000130176544, 0.6096000075340271, 0.15943999588489532, -0.1779100000858307, -0.1994200050830841, -0.1576700061559677, -0.2379699945449829, -0.11969999969005585, -0.5587800145149231, 0.49243998527526855, 0.18008999526500702, -0.4685699939727783, -0.048813000321388245, -0.35433998703956604, 0.0777370035648346, 0.1422400027513504, 0.4539099931716919, 0.010018999688327312, -0.17361000180244446, 0.19140000641345978, 0.20127999782562256, 0.036122001707553864, -0.13387000560760498, 0.1448799967765808, 0.6239299774169922, -0.027622999623417854, 0.0808510035276413, 0.2796899974346161, -0.7466300129890442, -0.2907699942588806, -0.0654980018734932, 0.3637099862098694], u'cloudy': [-0.5051400065422058, -0.7132999897003174, -0.007846799679100513, -0.9271799921989441, -0.21979999542236328, 0.3350600004196167, -0.3073199987411499, 0.6055799722671509, 0.16144999861717224, -0.08180399984121323, -0.2521800100803375, -0.5891799926757812, -0.3332599997520447, 0.0030928999185562134, 0.2367900013923645, -0.31411999464035034, -0.3246000111103058, -0.030642999336123466, 0.7773900032043457, 0.21362000703811646, -0.1827400028705597, 0.22931000590324402, -0.62677001953125, -0.2818099856376648, -0.7423200011253357, 0.16088999807834625, 0.43634000420570374, -0.11479000002145767, -0.032829999923706055, -1.3479000329971313, 0.3359000086784363, -0.13423000276088715, 0.016638999804854393, 0.2334900051355362, -0.20406000316143036, -0.21774999797344208, -0.4657900035381317, -0.18070000410079956, -0.08147100359201431, 0.3554700016975403, 0.28650999069213867, 0.49948999285697937, -0.22412000596523285, 0.2451000064611435, 0.5842599868774414, -0.10758999735116959, 0.07711700350046158, 0.7046800255775452, -0.6637799739837646, -0.767769992351532, -0.26333001255989075, -0.37797001004219055, 0.1057099997997284, -0.7177199721336365, -0.004503299947828054, 0.10204999893903732, 0.620389997959137, 0.25867000222206116, 0.9668300151824951, -0.21115000545978546, -0.3789600133895874, 0.012740000151097775, 0.16763000190258026, 0.0507889986038208, -0.27803000807762146, 0.02098800055682659, 0.27480998635292053, 0.13659000396728516, 0.21889999508857727, -0.3239400088787079, -0.0648839995265007, 0.4137899875640869, -0.02807299979031086, -0.07103200256824493, -0.27406999468803406, -0.3140200078487396, -0.1327899992465973, 0.9929599761962891, 0.20760999619960785, -0.28878000378608704, -0.29201000928878784, 0.5583800077438354, -0.03836299851536751, 0.5488399863243103, -0.24775999784469604, 0.08409400284290314, 0.5448600053787231, 0.12744000554084778, 0.42610999941825867, 0.726360023021698, 0.27145999670028687, -0.37477999925613403, 0.15803000330924988, 0.452890008687973, 0.05379499867558479, 1.3037999868392944, 0.5077999830245972, 0.1159299984574318, -0.5348100066184998, 0.2135699987411499, 0.8478900194168091, -0.28126999735832214, -0.2024800032377243, 0.6475800275802612, -0.4086199998855591, 0.6212300062179565, 
0.5103999972343445, 0.6629999876022339, -0.48107001185417175, -0.11417999863624573, 0.3276199996471405, -1.1574000120162964, 0.3513199985027313, -0.10999000072479248, 0.31306999921798706, -0.26982998847961426, -0.09286700189113617, 0.2175299972295761, 0.7671800255775452, 0.34696999192237854, 0.5736899971961975, -0.046661000698804855, 0.1779399961233139, 0.2767300009727478, 0.1039699986577034, 0.07771600037813187, 0.0966849997639656, 0.03402300179004669, 0.1561499983072281, -0.34119999408721924, -0.3861500024795532, 0.45326000452041626, 0.0418579988181591, -0.35117998719215393, 0.2117300033569336, -0.06009799987077713, -0.5942000150680542, -0.3081499934196472, 0.6151999831199646, -0.32398998737335205, 0.16735999286174774, 0.38078999519348145, -0.7584699988365173, -0.23425999283790588, -0.7246099710464478, -0.3150700032711029, 0.0741180032491684, 0.2738899886608124, 0.18793000280857086, -0.2869099974632263, -0.029423000290989876, -0.7100300192832947, 0.16006000339984894, -0.27772000432014465, 0.24230000376701355, 0.914929986000061, 0.4084799885749817, 0.20258000493049622, 0.2696399986743927, 0.3887999951839447, -0.6249399781227112, -1.1723999977111816, -0.40303000807762146, 0.12374000251293182, 1.0216000080108643, -0.48017001152038574, 0.7680400013923645, 0.3343000113964081, -0.45552000403404236, 0.4564700126647949, 0.7117199897766113, 0.4616200029850006, -0.239889994263649, 0.1280599981546402, 0.060495998710393906, -0.583620011806488, -0.22871999442577362, -0.8119000196456909, 0.2533299922943115, -0.0970430001616478, 0.39157000184059143, 0.5493699908256531, 0.4285700023174286, -0.42346999049186707, 0.3256799876689911, 0.10751999914646149, 1.1719000339508057, -0.590499997138977, -0.9236699938774109, -0.5093700289726257, -0.16641999781131744, 0.45438000559806824, 0.4468100070953369, -0.286080002784729, -0.512499988079071, -0.6605100035667419, -0.3288399875164032, -0.3292100131511688, -0.9460700154304504, -0.3502199947834015, 0.4146000146865845, 0.24146999418735504, -0.5110099911689758, 0.028867000713944435, 0.032471999526023865, -0.6339300274848938, -0.2551099956035614, -0.07669100165367126, -0.12842999398708344, -0.025766000151634216, 0.2747099995613098, 0.19912000000476837, -0.18020999431610107, -0.5967299938201904, -0.2435699999332428, -0.144119992852211, 0.17486999928951263, -0.6938300132751465, 0.7330399751663208, -0.2791000008583069, 0.731719970703125, -0.3755899965763092, -0.18637999892234802, 0.33496999740600586, -1.1031999588012695, 0.04246100038290024, 0.08974500000476837, 0.14470000565052032, 0.3666999936103821, -0.046514999121427536, 0.6287000179290771, 0.14160999655723572, 0.7504000067710876, -0.6621299982070923, -0.3820599913597107, -0.4033600091934204, -0.8000400066375732, -0.31453999876976013, -0.8746899962425232, -0.018435999751091003, -0.5090299844741821, -0.391620010137558, -0.9152100086212158, 0.05601000040769577, -0.5405799746513367, -0.8260200023651123, 0.41332000494003296, -0.00027210000553168356, 0.03518899902701378, -0.31953999400138855, -0.19128000736236572, 0.1469700038433075, 0.37380000948905945, 0.2712799906730652, -0.7502700090408325, -0.27750998735427856, -0.2735399901866913, -0.41262999176979065, -0.020315999165177345, 0.6748499870300293, 0.4775499999523163, 0.03858400136232376, -0.45357000827789307, 0.35471001267433167, -0.34707000851631165, -0.05271900072693825, -0.1430799961090088, -0.2980400025844574, 0.391539990901947, -0.4658699929714203, 0.362060010433197, 0.449180006980896, 0.08985800296068192, 0.1332699954509735, -0.08867699652910233, 
0.12266000360250473, 0.5597400069236755, 0.4265199899673462, -0.18216000497341156, -0.4485200047492981, -0.18745000660419464, 0.4262999892234802, 0.384909987449646, 0.4808399975299835, -0.02599399909377098, -0.2817800045013428, -0.3237999975681305, 0.15498000383377075, 0.31560999155044556, 0.10797999799251556, 0.19731999933719635, -0.22182999551296234, 0.3946099877357483, 0.6682599782943726, -0.24595999717712402, -0.48767998814582825, -0.6734899878501892, 0.027719000354409218, -0.5047900080680847, 0.5594599843025208], u'large': [-0.3354499936103821, 0.31369999051094055, 0.17218999564647675, -1.0163999795913696, 0.2104800045490265, 0.37970998883247375, 0.3134300112724304, 0.3216699957847595, -0.2718999981880188, -1.596500039100647, -0.0906749963760376, 0.26172998547554016, -0.0764399990439415, 0.4013200104236603, 0.11462999880313873, -0.2283100038766861, -0.1679600030183792, 0.07009799778461456, 0.03576299920678139, -0.2727600038051605, -0.28007999062538147, -0.057548001408576965, 0.8543400168418884, 0.23177999258041382, -0.22157999873161316, -0.028082000091671944, 0.12416999787092209, 0.11826000362634659, -0.3654800057411194, 0.16930000483989716, -0.4131999909877777, 0.5769299864768982, -0.3578000068664551, 0.38054001331329346, -0.1337299942970276, 0.18108999729156494, -0.298550009727478, -0.0600770004093647, -0.316210001707077, 0.38920000195503235, -0.19528000056743622, -0.2639999985694885, 0.210099995136261, 0.3412899971008301, -0.13321000337600708, 0.19637000560760498, 0.4867100119590759, 0.0310210008174181, 0.16383999586105347, 0.4781999886035919, 0.22347000241279602, 0.49035999178886414, -0.21076999604701996, 7.907200051704422e-05, 0.05955300107598305, -0.13333000242710114, -0.5420399904251099, 0.19064000248908997, 0.6082299947738647, 0.16332000494003296, -0.08065400272607803, -0.07430200278759003, 0.27386000752449036, 0.12526999413967133, 0.18464000523090363, -0.19767999649047852, -0.18821999430656433, 0.28797999024391174, -0.06002900004386902, 0.17023999989032745, 0.07460899651050568, 0.0121069997549057, 0.2229200005531311, 0.12529000639915466, -0.2881599962711334, 0.16009999811649323, -0.11729999631643295, -0.147039994597435, 0.21774999797344208, -0.5111700296401978, -0.29388999938964844, 0.03880000114440918, -0.4671100080013275, -0.3907400071620941, 0.5015400052070618, 0.19830000400543213, 0.10030999779701233, 0.006348900031298399, -0.24199999868869781, -0.09516099840402603, 0.2967100143432617, 0.07931800186634064, -0.2599000036716461, 0.17824000120162964, -0.19280000030994415, -0.18851999938488007, 0.12161000072956085, 0.011095999740064144, 0.007953399792313576, 0.04118900001049042, -0.0677890032529831, -0.015043999999761581, -0.19151000678539276, -0.5125700235366821, -0.7231799960136414, 0.44099000096321106, 0.054329998791217804, -0.17396999895572662, -0.1030300036072731, 0.05852000042796135, -0.3573800027370453, 0.005084000062197447, -0.18761000037193298, 0.1762399971485138, 0.08197099715471268, 0.11020000278949738, -0.2963699996471405, 0.33880001306533813, 0.21302999556064606, -0.4030199944972992, 0.004871100187301636, 0.015099000185728073, 0.36487001180648804, 0.33994999527931213, 0.38582998514175415, 0.22253000736236572, -0.07423800230026245, 0.2778800129890442, -0.368010014295578, -0.1642799973487854, 0.18379999697208405, 0.5461300015449524, -0.5778499841690063, 0.21526999771595, 0.3560900092124939, -0.012222000397741795, 0.00556579977273941, -0.34073999524116516, -0.11836999654769897, 0.07437299937009811, -0.18201999366283417, 0.09683900326490402, -0.009803400374948978, 
0.08143600076436996, 0.08536999672651291, -0.18006999790668488, 0.30880001187324524, 0.06261599808931351, 0.2156900018453598, 0.08591300249099731, 0.13107000291347504, -0.057982999831438065, -0.1473899930715561, -0.11215999722480774, 0.3528999984264374, 0.14313000440597534, 0.10474000126123428, 0.012651000171899796, -0.2393600046634674, -0.3586899936199188, -0.08259300142526627, 0.43108001351356506, 0.5063300132751465, 0.21295000612735748, -0.033980000764131546, -0.42640000581741333, 0.1652899980545044, 0.028068000450730324, -0.017595000565052032, 0.16347000002861023, 0.28439998626708984, -0.33122000098228455, -0.16864000260829926, -0.2055100053548813, 0.12639999389648438, -0.11597000062465668, -0.10947000235319138, 0.15487000346183777, 0.7886899709701538, -0.07400199770927429, 0.09002000093460083, -0.0773169994354248, 0.37400001287460327, -0.1519400030374527, 0.19290000200271606, -0.5910300016403198, 0.23107999563217163, 0.44982001185417175, -0.02456199936568737, 0.08783800154924393, 0.36239999532699585, 0.5302199721336365, -0.11896000057458878, -0.0898440033197403, 0.49004000425338745, 0.2843700051307678, 0.2739900052547455, 0.019137000665068626, 0.18252000212669373, 0.06122199818491936, 0.2813200056552887, -0.16288000345230103, -0.48848000168800354, -0.18709999322891235, 0.07410900294780731, -0.05684899911284447, -0.3676599860191345, 0.0744670033454895, 0.1638599932193756, -0.45208999514579773, 0.3921000063419342, 0.08988499641418457, 0.0184480007737875, -0.39921998977661133, 0.06124500185251236, 0.14168000221252441, 0.22084000706672668, 0.060989998281002045, 0.16202999651432037, 0.1409599930047989, 0.7020900249481201, -0.018200000748038292, -0.06311400234699249, -0.47745001316070557, -0.16478000581264496, -0.11089999973773956, -0.18271000683307648, -0.00755950016900897, -0.0478769987821579, -0.3230299949645996, 0.01747100055217743, -0.04852199926972389, -0.3873400092124939, 0.32965999841690063, 0.15681999921798706, 0.3614700138568878, -0.06984899938106537, 0.0010224999859929085, -0.5912299752235413, -0.09666900336742401, 0.2779799997806549, 0.5063899755477905, 0.3530699908733368, 0.028706999495625496, -1.4104000329971313, -0.5185700058937073, 0.3076300024986267, -0.03328799828886986, 0.013209999538958073, -0.10531999915838242, 0.10451000183820724, -0.06313099712133408, -0.08995100110769272, -0.2804499864578247, 0.6590999960899353, 0.28766000270843506, -0.44802001118659973, -0.14264999330043793, -0.30983999371528625, 0.28279998898506165, 0.06024400144815445, 0.036518000066280365, 0.5230399966239929, 0.46608999371528625, 0.11379999667406082, -0.4076400101184845, -0.16152000427246094, 0.09628800302743912, 0.0914440006017685, 0.3811199963092804, -0.023088999092578888, 0.2688399851322174, -0.14504000544548035, 0.2624399960041046, 0.021259000524878502, 0.11726000159978867, -2.444000005722046, 0.2872599959373474, -0.24993999302387238, 0.30717000365257263, -0.4747200012207031, 0.3080199956893921, 0.09074900299310684, 0.06132400035858154, -0.2737799882888794, 0.4584699869155884, -0.36368000507354736, 0.44321000576019287, 0.3377799987792969, 0.17048999667167664, 0.005373199936002493, -0.2406499981880188, -0.13912999629974365, 0.06557200103998184, 0.1339700073003769, 0.5724200010299683, 0.09540300071239471, -0.4032000005245209, -0.130280002951622, -0.25446000695228577], u'whipped': [-0.19803999364376068, -0.046519000083208084, -0.4799500107765198, 0.25944000482559204, -0.11670000106096268, -0.3501499891281128, -0.15233999490737915, 0.2624500095844269, 0.05723600089550018, 0.05478399991989136, 
0.7074199914932251, -0.17459000647068024, 0.3703500032424927, -0.3605799973011017, -0.4543299973011017, 0.7481399774551392, 0.10266000032424927, -0.016301000490784645, -0.3229599893093109, 0.47328999638557434, 0.0077137998305261135, 0.10824000090360641, -0.1745299994945526, -0.14823000133037567, -0.33632001280784607, 0.2522999942302704, 0.2926500141620636, 0.35464000701904297, -0.486380010843277, -0.450439989566803, -0.5647799968719482, -0.19651000201702118, -0.2559800148010254, -0.30691999197006226, -0.5091800093650818, 0.28325000405311584, -0.6862800121307373, 0.3483400046825409, 0.04800200089812279, 0.25453999638557434, 0.12682999670505524, -0.31551000475883484, -0.021283000707626343, -0.144119992852211, 0.5990800261497498, 0.058212000876665115, -0.08476799726486206, 0.03763899952173233, 0.38648998737335205, 0.5813000202178955, -0.44093000888824463, 0.32078999280929565, 0.45357999205589294, -0.2616400122642517, -0.7612800002098083, 0.0691479966044426, -0.023067999631166458, -0.3500699996948242, -0.014740999788045883, 0.3793100118637085, 0.4530099928379059, 0.47071999311447144, -0.02275400049984455, -0.3089599907398224, -0.5877799987792969, -0.326229989528656, -0.003756199963390827, 0.1987600028514862, -0.3795900046825409, -0.13790999352931976, -0.18378999829292297, 0.10961999744176865, -0.5398499965667725, -0.006271599791944027, -0.22756999731063843, -0.1337900012731552, 0.12594999372959137, -0.4614799916744232, -0.14371000230312347, 0.25029000639915466, 0.4762499928474426, 0.20535999536514282, -0.3305000066757202, -0.3024600148200989, -0.20892000198364258, -0.4989300072193146, -0.5039899945259094, -0.08348800241947174, 0.11176999658346176, -0.34784001111984253, -0.18422000110149384, -0.2992199957370758, -0.03125099837779999, -0.149509996175766, -0.76528000831604, 0.01169500034302473, 0.350739985704422, 0.5748400092124939, -0.08636300265789032, 0.4241200089454651, 0.28137001395225525, 0.05795599892735481, -0.34415000677108765, -0.26815998554229736, 0.35242998600006104, 0.24699999392032623, 0.11320000141859055, 0.657039999961853, -0.8557199835777283, 0.3794400095939636, 0.13440999388694763, 0.07790599763393402, 0.405129998922348, -0.2842000126838684, -0.04220400005578995, 0.464029997587204, -0.7547799944877625, 0.48170000314712524, 0.001879999996162951, -0.1301400065422058, 0.1677599996328354, -0.5873399972915649, -0.26499998569488525, 0.1114099994301796, 0.00452810013666749, -0.20309999585151672, -0.19453999400138855, 0.07884500175714493, -0.45497000217437744, 0.4345099925994873, -5.599300129688345e-05, 0.9194200038909912, -0.39282000064849854, 0.7893000245094299, -0.3416900038719177, -0.298009991645813, 0.42497000098228455, 0.045743998140096664, 0.06090199947357178, 0.21074999868869781, -0.10068000108003616, 0.0891290009021759, -0.8722699880599976, 0.8843899965286255, -0.34393998980522156, 0.4914799928665161, 0.1360500007867813, -0.5173100233078003, 0.41703999042510986, -0.6712599992752075, 0.057342998683452606, 0.3072099983692169, 0.5205199718475342, 0.44749000668525696, -0.3895699977874756, 0.030899999663233757, 0.008545800112187862, -0.25551000237464905, 0.519760012626648, 0.4249800145626068, 0.02782200090587139, -0.08651100099086761, -0.23156000673770905, -0.08920200169086456, 0.2059600055217743, -0.45879998803138733, 0.2870199978351593, 0.26311999559402466, -0.3107199966907501, -0.5928900241851807, 0.4398899972438812, 0.20723000168800354, -0.6362900137901306, -0.5894700288772583, 0.31233999133110046, -0.22910000383853912, -0.5700799822807312, 0.37130001187324524, 
0.3190700113773346, -0.27246999740600586, -0.0413069985806942, -0.2785300016403198, 0.4515399932861328, -0.08447600156068802, -0.18606999516487122, -0.6740400195121765, 0.3840000033378601, -0.2709699869155884, -0.41266000270843506, -0.08203999698162079, 0.10606999695301056, 0.1826000064611435, -0.23607000708580017, -0.20664000511169434, 0.31095999479293823, -0.5365999937057495, -0.37713998556137085, 0.16824999451637268, 0.4641599953174591, 0.2254599928855896, 0.41909998655319214, -0.05129300057888031, 0.46757999062538147, 0.13181999325752258, -0.2673799991607666, -0.15081000328063965, 0.42618998885154724, 0.35047000646591187, 0.18749000132083893, 0.23013000190258026, -0.08240900188684464, 0.0401419997215271, -0.16148999333381653, -0.1781100034713745, -0.15505999326705933, 0.07610099762678146, 0.2639800012111664, -0.6748899817466736, 0.12408000230789185, 0.08327099680900574, 0.924560010433197, 0.7080100178718567, -0.06171000003814697, -0.559939980506897, -0.6568899750709534, 0.17940999567508698, 0.4675700068473816, -0.19979000091552734, 0.07945699989795685, -0.4676400125026703, 0.46244001388549805, -0.18371999263763428, -0.26743000745773315, -0.1506199985742569, 0.4193899929523468, -0.03315100073814392, -0.4353500008583069, 0.12239000201225281, -0.47933998703956604, 0.13413000106811523, -0.23760999739170074, 0.055093999952077866, -0.9546200037002563, -0.33083000779151917, -0.1689399927854538, 0.1405400037765503, 0.6995800137519836, 1.1500999927520752, -0.0950469970703125, -0.4610700011253357, -0.09536000341176987, -0.3613699972629547, 0.3438799977302551, -0.5472599864006042, 0.226500004529953, -0.46759000420570374, 0.009162399917840958, -0.29916998744010925, -0.03813000023365021, 0.030175000429153442, -0.20658999681472778, -0.1167600005865097, -0.037262000143527985, 0.08881299942731857, -0.32986998558044434, -0.7580900192260742, -0.37959998846054077, -0.292059987783432, -0.20670999586582184, -0.14018000662326813, -0.3433000147342682, -0.35763999819755554, 0.09616100043058395, 0.5788800120353699, -0.0883219987154007, 0.5335800051689148, -0.14528000354766846, -0.1276099979877472, -1.0224000215530396, -0.4331600069999695, 0.11043000221252441, 0.3984900116920471, -0.2540000081062317, -0.01120499987155199, 0.18727000057697296, 0.8396099805831909, 0.39342001080513, -0.0004248199984431267, 0.19292999804019928, 0.10632999986410141, 0.02346700057387352, -0.3551200032234192, 0.20048999786376953, 0.24661999940872192, 0.47268998622894287, -0.2913999855518341, 0.48427000641822815, 0.24005000293254852, 0.18055999279022217, -0.42239001393318176], u'small': [-0.43299001455307007, 0.32829999923706055, -0.09427499771118164, -0.7457699775695801, 0.09729400277137756, 0.3034299910068512, 0.24456000328063965, 0.23423999547958374, 0.11643999814987183, -1.3854000568389893, -0.20632000267505646, 0.33972999453544617, -0.053957000374794006, 0.31498000025749207, 0.11494000256061554, 0.2925100028514862, -0.26183998584747314, -0.031321000307798386, 0.05107299983501434, -0.3513599932193756, -0.06878799945116043, 0.27994999289512634, 0.6613399982452393, 0.4903799891471863, -0.4678100049495697, -0.09004499763250351, -0.2037699967622757, -0.03209200128912926, -0.280129998922348, 0.344870001077652, -0.15876999497413635, 0.46024999022483826, -0.3542099893093109, 0.4900999963283539, -0.3040800094604492, 0.4970000088214874, -0.09775999933481216, 0.18095999956130981, -0.07873500138521194, 0.04361899942159653, -0.03572700172662735, -0.0554720014333725, 0.5286499857902527, 0.3684700131416321, 0.05800199881196022, 
-0.03285299986600876, 0.4468100070953369, 0.1057099997997284, 0.23000000417232513, 0.5418400168418884, 0.40108999609947205, 0.2713199853897095, 0.260670006275177, 0.1648399978876114, -0.1965000033378601, -0.06748200207948685, -0.6909199953079224, 0.052956998348236084, 0.710070013999939, 0.013009999878704548, 0.3177900016307831, -0.35350000858306885, 0.47409000992774963, 0.060419999063014984, 0.2960900068283081, -0.08097100257873535, -0.04022299870848656, 0.3108600080013275, -0.05904800072312355, 0.08952700346708298, -0.023670999333262444, -0.0031117000617086887, 0.48291999101638794, 0.25220999121665955, -0.4905500113964081, 0.015608999878168106, -0.21219000220298767, -0.2625400125980377, 0.10605999827384949, -0.5484899878501892, -0.07945399731397629, 0.38383999466896057, -0.13756999373435974, 0.22811999917030334, 0.49772998690605164, 0.24302999675273895, 0.25780001282691956, -0.10300999879837036, -0.4755899906158447, -0.12228000164031982, 0.5821599960327148, 0.28679001331329346, -0.24309000372886658, -0.04845599830150604, 0.28119999170303345, -0.23228999972343445, 0.3141300082206726, 0.09697499871253967, -0.21615999937057495, 0.11934000253677368, 0.1415800005197525, 0.12211000174283981, -0.24722999334335327, -0.5694699883460999, -0.6579399704933167, 0.0891529992222786, 0.21461999416351318, -0.3493199944496155, 0.1714099943637848, 0.09115400165319443, -0.557640016078949, -0.10458000004291534, 0.01065600011497736, -0.09652400016784668, 0.08570399880409241, 0.06460999697446823, -0.0687279999256134, 0.1804099977016449, 0.15594999492168427, 0.12814000248908997, 0.01645199954509735, 0.0016550000291317701, 0.4145599901676178, -0.02121400088071823, 0.07385999709367752, 0.21315999329090118, 0.029834000393748283, 0.24059000611305237, -0.05165399983525276, -0.10135000199079514, 0.034421999007463455, -0.1905599981546402, -0.02182300016283989, 0.13194000720977783, 0.489439994096756, -0.05034999921917915, 0.3111400008201599, -0.07296700030565262, -0.08268699795007706, -0.3533399999141693, -0.013307999819517136, 0.2494100034236908, -0.13371999561786652, 0.1616699993610382, -0.5195000171661377, -0.087008997797966, 0.21789999306201935, 0.2538599967956543, -0.07347600162029266, 0.16087999939918518, -0.05938199907541275, -0.5024899840354919, -0.15806999802589417, -0.16756999492645264, 0.44968000054359436, 0.22458000481128693, 0.1919499933719635, 0.330020010471344, -0.03356600180268288, -0.22206999361515045, 0.15534000098705292, 0.06911300122737885, 0.26469001173973083, -0.005183400120586157, -0.20115000009536743, -0.22658999264240265, -0.12001000344753265, -0.07406000047922134, 0.05587099865078926, -0.0437919981777668, -0.06620900332927704, -0.4711500108242035, -0.27074000239372253, 0.17172999680042267, -0.2523699998855591, -0.2657800018787384, -0.28925999999046326, -0.006259600166231394, 0.4977099895477295, 0.048774998635053635, 0.3230299949645996, -0.12184999883174896, 0.212459996342659, -0.06889600306749344, 0.37685999274253845, -0.497079998254776, 0.15415999293327332, 0.1789799928665161, 0.18014000356197357, -0.10665000230073929, 0.4231399893760681, 0.48976999521255493, 0.12410999834537506, 0.17776000499725342, 0.19731000065803528, 0.5033900141716003, 0.021515000611543655, -0.19975000619888306, -0.1969500035047531, -0.2827500104904175, 0.6962400078773499, 0.016805000603199005, -0.28102999925613403, 0.2284799963235855, 0.12902000546455383, -0.2939299941062927, -0.383870005607605, 0.07118599861860275, -0.04830700159072876, -0.09056700021028519, 0.28123998641967773, 0.07756999880075455, 
-0.06627500057220459, -0.2098499983549118, 0.12430000305175781, 0.08687499910593033, 0.4458099901676178, -0.02241699956357479, -0.42190998792648315, 0.19197000563144684, 0.4508199989795685, 0.11105000227689743, -0.49147000908851624, -0.09086000174283981, 0.13809999823570251, 0.0023229001089930534, 0.021260999143123627, -0.22925999760627747, 0.07124000042676926, -0.09533900022506714, 0.08205900341272354, -0.5006200075149536, -0.302590012550354, 0.061177000403404236, 0.45982998609542847, 0.47468000650405884, 0.09174799919128418, 0.2226399928331375, -0.37202998995780945, 0.10785999894142151, 0.3549000024795532, 0.21040000021457672, 0.19999000430107117, 0.07610400021076202, -1.3044999837875366, -0.5774700045585632, 0.5189999938011169, 0.06846900284290314, -0.38346999883651733, -0.12574000656604767, 0.04781100153923035, -0.21213999390602112, -0.24007000029087067, -0.26385998725891113, 0.5067899823188782, 0.6416000127792358, -0.1755100041627884, -0.20360000431537628, -0.22473999857902527, 0.2270900011062622, 0.20689000189304352, 0.08132000267505646, 0.28826001286506653, 0.15282000601291656, 0.19621999561786652, -0.35670000314712524, -0.2560400068759918, 0.21318000555038452, -0.1404300034046173, 0.5110599994659424, -0.1319500058889389, 0.17746999859809875, -0.12300000339746475, 0.19979999959468842, -0.20782999694347382, 0.3435499966144562, -2.408900022506714, 0.34228000044822693, -0.3987399935722351, 0.2849400043487549, -0.5406000018119812, 0.5463100075721741, -0.14949999749660492, 0.016374999657273293, -0.30254000425338745, 0.2932499945163727, -0.0723629966378212, 0.1933099925518036, 0.44071000814437866, 0.31411001086235046, -0.06312499940395355, -0.28707000613212585, -0.16933000087738037, -0.15031999349594116, -0.30976998805999756, 0.8141099810600281, -0.2567700147628784, -0.1940699964761734, -0.11113999783992767, -0.05807400122284889], u'engraved': [-0.43452998995780945, 0.3598499894142151, 0.23623999953269958, -0.23125000298023224, 0.16184000670909882, 0.6300299763679504, -0.13207000494003296, -0.15051999688148499, -0.2340400069952011, 0.036600999534130096, 0.0414000004529953, -0.11862999945878983, -0.23813000321388245, -0.22032000124454498, -0.051819998770952225, 0.07554099708795547, -0.3864000141620636, -0.021784000098705292, -0.3029100000858307, -0.9160400032997131, -0.09522400051355362, 0.25808998942375183, -0.2265699952840805, 0.22559000551700592, -0.2434699982404709, -0.526170015335083, 0.10474000126123428, 0.550790011882782, 0.33421000838279724, -0.12020000070333481, 1.1718000173568726, 0.722320020198822, -0.48045000433921814, 0.25949999690055847, 0.2859799861907959, 0.2923299968242645, -0.04812299832701683, -0.5016999840736389, 0.22888000309467316, -0.62882000207901, -0.335750013589859, 0.1511400043964386, 0.22524000704288483, -0.30204999446868896, -0.16975000500679016, 0.4307200014591217, 0.14041000604629517, -0.10211999714374542, -0.025241000577807426, 0.2550399899482727, -0.010137000121176243, 0.16328999400138855, 0.36671000719070435, 0.5118899941444397, 0.04336300119757652, -0.1809699982404709, -0.5410199761390686, 0.8869900107383728, 0.15702000260353088, 0.271369993686676, 0.5035099983215332, 0.13218000531196594, 0.10242000222206116, 0.0695900022983551, 0.8793500065803528, 0.09108799695968628, -0.11868000030517578, -0.3486799895763397, 0.2399500012397766, 0.010135999880731106, 0.3633100092411041, -0.2924700081348419, -0.07973700016736984, 0.3780499994754791, 0.10877999663352966, 0.12227000296115875, 0.8129799962043762, 0.007986299693584442, -0.1334100067615509, 
-0.5207200050354004, 0.21051999926567078, 0.26704999804496765, -0.16944000124931335, -0.05673300102353096, 0.009598899632692337, 0.1214900016784668, -0.38909998536109924, -0.4415299892425537, 0.16071000695228577, 0.6309700012207031, 0.45263001322746277, 0.0677110031247139, -0.27188000082969666, 0.0200009997934103, 0.056063998490571976, 0.3075000047683716, 0.19128000736236572, -0.22513000667095184, 0.00781320035457611, -0.47088000178337097, 0.16303999722003937, 0.40997999906539917, -0.08120600134134293, 0.5678600072860718, -0.258789986371994, -0.17026999592781067, 0.40540000796318054, -0.19999000430107117, 0.12530000507831573, -0.5343400239944458, -0.04160400107502937, 0.6162700057029724, 0.1999099999666214, -0.3034200072288513, -0.7403900027275085, -0.22538000345230103, -0.5112199783325195, 0.3515700101852417, 0.38969001173973083, -0.6724299788475037, -0.5938799977302551, 0.19550999999046326, 0.10120999813079834, 0.35275998711586, -0.09288199990987778, -0.05041300132870674, -0.13977999985218048, -0.3655700087547302, -0.4263499975204468, 0.27312999963760376, -0.16662000119686127, 0.3405500054359436, 0.3432700037956238, 0.18984000384807587, -0.6032199859619141, -0.11957000195980072, 0.39153000712394714, 0.28439998626708984, 0.3676699995994568, 0.13123999536037445, -0.23631000518798828, 0.5982499718666077, -0.15025000274181366, 0.002215699991211295, 0.7170500159263611, -0.4022499918937683, 0.14749999344348907, -0.10916999727487564, 0.5312700271606445, -0.4554400146007538, -0.17357000708580017, 0.22471000254154205, -0.35767999291419983, -0.5989199876785278, -0.007253699935972691, -0.07427600026130676, -0.5131300091743469, -0.5176900029182434, 0.14603999257087708, -0.3369300067424774, -0.35003000497817993, 0.6688600182533264, 0.3122299909591675, 0.12713000178337097, 0.6713700294494629, 0.6833900213241577, 0.559909999370575, 0.5911499857902527, 0.0428679995238781, -0.6751999855041504, -0.06499800086021423, -0.10187000036239624, 0.16915999352931976, -0.3193100094795227, 0.2950800061225891, -0.2865299880504608, 0.336899995803833, 0.1320600062608719, 0.004507699981331825, -0.6428599953651428, -0.15780000388622284, -0.1160999983549118, 0.4821000099182129, -0.17260999977588654, 0.06718699634075165, -1.1044000387191772, -0.20441000163555145, 0.5193799734115601, 0.3139899969100952, 0.07821100205183029, 0.6814799904823303, -0.13176999986171722, 0.7002099752426147, 0.6751300096511841, 0.20118999481201172, -0.10294000059366226, -0.07170800119638443, -0.2946999967098236, -0.17454999685287476, -0.06045199930667877, 0.6505600214004517, -0.21549999713897705, -0.16272999346256256, -0.04428799822926521, 0.8347100019454956, -0.23317000269889832, -0.1394300013780594, -0.06198300048708916, -0.8511599898338318, -0.05264599993824959, 0.7227500081062317, -0.12734000384807587, 0.43129000067710876, -0.2655999958515167, 0.0923290029168129, -0.018882999196648598, 0.11440999805927277, -0.17291000485420227, -0.19148999452590942, 0.2525799870491028, -0.5490599870681763, 0.21570000052452087, 0.480540007352829, -0.09147900342941284, -0.6113899946212769, 0.10791999846696854, 0.1572200059890747, 0.07649099826812744, 0.01720299944281578, -0.49577999114990234, -0.6250200271606445, -0.028231000527739525, -0.7504400014877319, -0.22338999807834625, 0.08939500153064728, 0.11841999739408493, -0.5070099830627441, -0.9208400249481201, -0.3536500036716461, -0.0852229967713356, -0.3002299964427948, -0.026938000693917274, 0.06625799834728241, 0.013562999665737152, -0.0501209981739521, -0.05583899840712547, -0.7778599858283997, 
0.03979700058698654, -0.10211999714374542, 0.4994199872016907, 0.15320000052452087, -0.37296000123023987, 0.36699000000953674, -0.27355000376701355, 0.29249998927116394, 0.0962510034441948, 0.12309999763965607, 0.10307999700307846, 0.14619000256061554, 0.03869200125336647, -0.501579999923706, 0.44736000895500183, 0.7435899972915649, 0.1474200040102005, 0.41835999488830566, 0.08231300115585327, 0.06412000209093094, 0.4334299862384796, -0.2728300094604492, -0.12284000217914581, 0.1950799971818924, -0.16332000494003296, 0.5615500211715698, -0.22585000097751617, -0.3653799891471863, 0.1667100042104721, 0.03258100152015686, -0.7032999992370605, -0.22800999879837036, 0.11344999819993973, 0.2585200071334839, -0.09365600347518921, -0.2404700070619583, -0.384660005569458, -0.018775999546051025, 0.3822300136089325, -0.04211999848484993, 0.008228999562561512, 0.3146800100803375, -0.13062000274658203, 0.03703499957919121, 0.07483900338411331, 0.39965999126434326, 0.25878000259399414, -0.2984499931335449, -0.3012999892234802, 0.8981299996376038, 0.35822001099586487, -0.05330900102853775, -0.3840799927711487], u'heavy': [-0.13578000664710999, -0.06529100239276886, -0.5670199990272522, -0.3486599922180176, -0.3861300051212311, -0.5605199933052063, 0.4320400059223175, 0.9040399789810181, -0.13005000352859497, -1.6455999612808228, 0.2558700144290924, -0.19011999666690826, -0.25925999879837036, 0.629289984703064, 0.1531199961900711, -0.4971199929714203, -0.2600899934768677, 0.1381399929523468, -0.22084000706672668, -0.29736998677253723, 0.36847999691963196, -0.5065400004386902, 0.448419988155365, -0.008393200114369392, -0.3974300026893616, -0.32607001066207886, 1.121500015258789, -0.2834799885749817, -0.14452999830245972, 0.11264000087976456, -0.3057200014591217, 0.14454999566078186, -0.07509499788284302, 0.05433199927210808, -0.4008899927139282, -0.2137099951505661, -0.5329700112342834, -0.19859999418258667, -0.11337999999523163, 0.5511400103569031, -0.081557996571064, 0.06276199966669083, 0.7843000292778015, -0.13333000242710114, 0.14530999958515167, 0.19388000667095184, 0.40257999300956726, -0.7350500226020813, 0.14365999400615692, 0.2283799946308136, 0.18345999717712402, 0.39201998710632324, -0.4188399910926819, -0.0023791000712662935, -0.6967099905014038, 0.07688800245523453, -0.025460999459028244, -0.37970998883247375, 0.7129600048065186, 0.43707001209259033, 0.22919000685214996, -0.056710001081228256, -0.03832799941301346, -0.2110700011253357, -0.19598999619483948, 0.05630800127983093, -0.4298799932003021, 0.6636599898338318, -0.19163000583648682, 0.11703000217676163, 0.17500999569892883, -0.047318000346422195, -0.20674000680446625, -0.3388400077819824, -0.2175000011920929, 0.2011599987745285, -0.2533800005912781, 0.40797001123428345, -0.05505499988794327, -0.48721998929977417, -0.12570999562740326, 0.004084100015461445, 0.16957999765872955, -0.5475299954414368, -0.09499000012874603, -0.05362899973988533, 0.08155699819326401, 0.19363999366760254, 0.1930599957704544, 0.05285000056028366, 0.6945099830627441, -0.12664000689983368, -0.028881000354886055, -0.2084600031375885, -0.4299600124359131, 0.11116000264883041, -0.155239999294281, -0.16673000156879425, 0.10035999864339828, 0.2867400050163269, -0.3159500062465668, 0.5745900273323059, 0.03458299860358238, -0.13009999692440033, -0.1622299998998642, 0.012409999966621399, 0.11828000098466873, -0.16923999786376953, -0.16911999881267548, -0.260919988155365, -0.39482998847961426, -0.3335399925708771, -0.29194000363349915, 0.03290500119328499, 
0.3743700087070465, 0.48131999373435974, -0.3035700023174286, -0.13644999265670776, 0.20401999354362488, -0.7091400027275085, 0.09368100017309189, -0.4850099980831146, -0.2875500023365021, 0.23449000716209412, 0.15237000584602356, 0.0851140022277832, 0.2286600023508072, 0.11197999864816666, -0.12922999262809753, 0.12891000509262085, 0.2966499924659729, 1.3634999990463257, -0.11800999939441681, -0.10118000209331512, 0.43195998668670654, 0.04748399928212166, -0.33360999822616577, 0.5836600065231323, 0.38780999183654785, 0.5909600257873535, -0.3102799952030182, 0.13141000270843506, 0.01898300088942051, -0.2009200006723404, -0.08831799775362015, -0.23925000429153442, 0.4243699908256531, -0.20352999866008759, -0.4397999942302704, 0.035771001130342484, 0.3730100095272064, -0.6192799806594849, -0.5001699924468994, 0.00834949966520071, 0.6571900248527527, 0.0132020004093647, 0.3142099976539612, -0.0960799977183342, 0.21040000021457672, 0.47569000720977783, 0.3190299868583679, -0.2547299861907959, 0.4888699948787689, -0.21125000715255737, 0.007092100102454424, -0.09973999857902527, 1.1324000358581543, 0.3127099871635437, -0.030541999265551567, -0.3701600134372711, 0.4009000062942505, 0.63919997215271, -0.366129994392395, -0.2535400092601776, 0.053050000220537186, -0.3894200026988983, -0.004502600058913231, 0.08182699978351593, -0.122079998254776, 0.46713998913764954, 0.002755699912086129, 0.07050599902868271, 0.05032400041818619, -0.007806300185620785, -0.21679000556468964, -0.035496000200510025, 0.20127999782562256, 0.22092999517917633, -0.15065999329090118, -0.823639988899231, -0.1437000036239624, 0.1886799931526184, -0.2992199957370758, -0.2959200143814087, 0.3587299883365631, -0.4736500084400177, -0.3378700017929077, 0.18127000331878662, 0.7256100177764893, -0.0451509989798069, 0.18172000348567963, -0.5284900069236755, 0.40911000967025757, 0.4581199884414673, 0.08561699837446213, 0.22984999418258667, -0.33197999000549316, 0.2001499980688095, 0.3929600119590759, -0.2687399983406067, 0.16051000356674194, 0.16089999675750732, -0.27630001306533813, 0.1497499942779541, 0.17323000729084015, -0.5038899779319763, 0.5059999823570251, -0.05981700122356415, 0.5348899960517883, 0.25148001313209534, 0.5533900260925293, 0.15994000434875488, 0.32934999465942383, -0.11613000184297562, -0.36838001012802124, 0.6706100106239319, 0.22569000720977783, -0.6144300103187561, 0.2549000084400177, -0.33878999948501587, 0.21265999972820282, -0.3494099974632263, -0.32760998606681824, -0.256989985704422, 0.646809995174408, -0.034758999943733215, -0.35725000500679016, -0.30663999915122986, -0.3630000054836273, 0.013372000306844711, -0.05564400181174278, -0.05540600046515465, 0.20987999439239502, -0.3871000111103058, -0.9238899946212769, 0.05221499875187874, 0.023062000051140785, -0.4336499869823456, -0.3144899904727936, -0.05601000040769577, -0.4280700087547302, 0.2253500074148178, 0.5399100184440613, -0.04361699894070625, 0.33230000734329224, -0.29513999819755554, -0.49876999855041504, -0.4372299909591675, -0.4097000062465668, -0.11853999644517899, -0.04733600094914436, 0.14226999878883362, -0.5440099835395813, 0.3964399993419647, 0.12347999960184097, 0.21887999773025513, -0.2982900142669678, -0.44075000286102295, -0.22890999913215637, -0.01723100058734417, -0.037772998213768005, 0.19226999580860138, -0.2600800096988678, 0.0706150010228157, -0.4999000132083893, 0.3316600024700165, -1.743499994277954, -0.6188899874687195, -0.42879000306129456, 0.24769000709056854, -0.4759800136089325, 0.2597599923610687, 0.3974500000476837, 
-0.14527000486850739, 0.6950700283050537, 0.2907100021839142, -0.6821200251579285, 0.04848200082778931, 0.11294999718666077, -0.5275700092315674, -0.13375000655651093, 0.00598399993032217, -0.08500900119543076, -0.053745999932289124, 0.6357399821281433, 1.2187999486923218, -0.34549999237060547, -0.15076999366283417, -0.18821999430656433, 0.17374999821186066], u'old': [-0.3785800039768219, -0.06678199768066406, -0.37432000041007996, -0.3420499861240387, 0.5022100210189819, -0.08733399957418442, -0.3554700016975403, -0.5183699727058411, 0.1834300011396408, -1.0293999910354614, 0.1830500066280365, -0.13017000257968903, 0.17023999989032745, -0.35791000723838806, 0.18327000737190247, 0.06514299660921097, -0.02112000063061714, -0.14507000148296356, 0.3589400053024292, -0.15388000011444092, -0.005598700139671564, -0.13589000701904297, 0.207519993185997, -0.1268099993467331, -0.815559983253479, -0.19618000090122223, -0.19070999324321747, -0.23140999674797058, -0.10531999915838242, 0.5067200064659119, 0.009219200350344181, 0.46568000316619873, -0.14369000494480133, 0.4954499900341034, -0.6873000264167786, 0.3394100069999695, -0.01938300020992756, -0.29058998823165894, 0.2322700023651123, 0.43838000297546387, 0.9489399790763855, -0.265639990568161, -0.054600998759269714, 0.18342000246047974, 0.14369000494480133, 0.11625999957323074, 0.33893999457359314, 0.5922799706459045, -0.37279000878334045, -0.6063799858093262, 0.27862998843193054, -0.8024100065231323, -0.3012999892234802, 0.1830500066280365, 0.41179999709129333, -0.13805000483989716, 0.43661999702453613, -0.15162000060081482, -0.6118999719619751, -0.40237000584602356, 0.8246999979019165, 0.11190000176429749, 0.9744499921798706, 0.02654300071299076, -0.5165200233459473, -0.27167001366615295, -0.1117900013923645, 0.21585999429225922, 0.48104000091552734, 0.06661000102758408, -0.27654001116752625, -0.15873000025749207, -0.18477000296115875, 0.16584999859333038, -0.006548999808728695, 0.4541800022125244, -0.3147200047969818, -0.5801200270652771, 0.2353699952363968, 0.33851000666618347, 0.034297000616788864, -0.07563100010156631, 0.43160000443458557, -0.035808999091386795, -0.21894000470638275, 0.012994999997317791, -0.30327001214027405, -0.003152499906718731, 0.396450012922287, 0.4835500121116638, 0.05906299874186516, 0.08168099820613861, 0.12898999452590942, -0.0874829962849617, -0.04108100011944771, 0.16259999573230743, 0.5898900032043457, -0.15484000742435455, 0.16869999468326569, -0.1381099969148636, -0.03777199983596802, 0.28314998745918274, 0.5532299876213074, -0.40863001346588135, -0.10078000277280807, 0.23221999406814575, 0.25971999764442444, -0.1354600042104721, 0.12189000099897385, -0.016534000635147095, -0.04877299815416336, -0.17985999584197998, -0.3939799964427948, -0.2212499976158142, 0.2595599889755249, 0.2168000042438507, -0.6478300094604492, 0.5326499938964844, 0.2705099880695343, -0.06788399815559387, 0.32910001277923584, 0.2815600037574768, -0.01957700029015541, 0.2997699975967407, 0.0890130028128624, 0.22524000704288483, 0.031196000054478645, 0.34046000242233276, 0.6869699954986572, -0.5680699944496155, 0.2508699893951416, -0.2263299971818924, -0.18107999861240387, -0.2668600082397461, 0.42462000250816345, 0.33004000782966614, 0.3547999858856201, -0.12249000370502472, -0.22832000255584717, 0.1130099967122078, 0.6870700120925903, -0.21111999452114105, 0.3806999921798706, 0.3690299987792969, 0.029291000217199326, 0.459850013256073, -0.030047999694943428, 0.7087399959564209, -0.09295599907636642, 0.07261800020933151, 
0.13203999400138855, 0.049692001193761826, -0.033055998384952545, 0.19449999928474426, 0.27191999554634094, 0.3380900025367737, -0.8259900212287903, -0.3857100009918213, -0.2738899886608124, 0.1964000016450882, 0.6307200193405151, -0.1137700006365776, 0.21955999732017517, -0.23965999484062195, -0.2851400077342987, 0.6158000230789185, 0.24237999320030212, 0.6168699860572815, 0.5202500224113464, -0.5633000135421753, -0.3201900124549866, 0.2687999904155731, -0.6193000078201294, 0.16301999986171722, -0.3964200019836426, 0.42570000886917114, 0.3624899983406067, -0.31769001483917236, 0.11440999805927277, 0.21875999867916107, 0.22081999480724335, 0.6196399927139282, 0.5738300085067749, 0.058921001851558685, 0.3974300026893616, 0.030050000175833702, -0.05147000029683113, -0.15988999605178833, 0.8932899832725525, 0.5665000081062317, -0.07131599634885788, -0.06486299633979797, -0.04782800003886223, -0.027481000870466232, -0.38530999422073364, -0.2588599920272827, -0.35596999526023865, -0.5031700134277344, -0.11175999790430069, 0.1759900003671646, 1.7202999591827393, -0.03616899996995926, 0.10885000228881836, 0.46296000480651855, 0.007524800021201372, -0.6669600009918213, -0.36131998896598816, 0.49011000990867615, 0.41422000527381897, 0.425029993057251, 0.1154400035738945, -0.1540299952030182, 0.04034300148487091, -0.21299000084400177, 0.5292500257492065, -0.19267000257968903, -0.0716169998049736, -0.4687800109386444, -0.32229000329971313, 0.09903199970722198, 0.21412000060081482, -0.45096999406814575, 0.1299699991941452, -0.34529000520706177, -0.012385999783873558, 0.5382400155067444, 0.37185001373291016, -0.5532400012016296, -0.1687300056219101, 0.529990017414093, 0.21663999557495117, 0.06574399769306183, 0.21014000475406647, -0.015118000097572803, 0.25505000352859497, 0.3856399953365326, 0.22741000354290009, -0.26782000064849854, -0.35427001118659973, 0.13133999705314636, -0.2889400124549866, -0.1404300034046173, 0.30943000316619873, -0.03412099927663803, -0.35332998633384705, 0.22269000113010406, 0.0332380011677742, -0.11766000092029572, 0.05524099990725517, -0.06635899841785431, 0.1278800070285797, -0.09824000298976898, -0.1558700054883957, -0.2620300054550171, 1.3269000053405762, -0.18580999970436096, -0.16731999814510345, 0.0380220003426075, 0.34237000346183777, -0.7374600172042847, -0.21876999735832214, 0.6086699962615967, -0.28422999382019043, 0.22999000549316406, 0.19847999513149261, 0.6673300266265869, 0.18241000175476074, 0.2603900134563446, 0.07524199783802032, -0.06287399679422379, -0.3416700065135956, -0.09212300181388855, 0.7612199783325195, -0.2893100082874298, 0.20387999713420868, 0.054545000195503235, -2.1073999404907227, 0.23086999356746674, 0.13876000046730042, 0.14904999732971191, 0.5039299726486206, 0.5610799789428711, 0.13950000703334808, -0.0757180005311966, 0.27039000391960144, 0.8401700258255005, -0.2964099943637848, -0.027130000293254852, -0.8069800138473511, -0.03687100112438202, 0.08108600229024887, 0.450219988822937, 0.029342999681830406, 0.012153999879956245, -0.09494899958372116, 0.10762999951839447, 0.06310799717903137, -0.4196400046348572, -0.8080099821090698, 1.124500036239624], u'diced': [0.011966000311076641, -0.10313999652862549, 0.14655999839305878, -0.34894001483917236, 0.35367000102996826, -0.36485999822616577, 0.45416000485420227, -0.06583499908447266, -0.5989000201225281, 0.8279299736022949, 0.6877700090408325, 0.05584599822759628, 0.31852999329566956, 0.9975900053977966, -0.1906300038099289, 0.4380199909210205, -0.8342099785804749, 0.050078000873327255, 
-0.5665199756622314, 0.7531899809837341, 0.28738000988960266, 0.13391999900341034, 0.4502499997615814, -0.05903400108218193, -0.11640000343322754, -1.0751999616622925, -0.1610099971294403, 0.9544199705123901, -0.8421199917793274, -0.4630599915981293, -0.8270599842071533, 0.2243800014257431, 0.06439399719238281, 0.4270699918270111, 0.3651899993419647, 0.8377199769020081, 0.1507200002670288, 0.24525000154972076, -0.34053999185562134, -0.4510599970817566, 0.23643000423908234, -0.3209500014781952, 0.1529099941253662, 0.20994000136852264, 0.22824999690055847, -0.04747600108385086, -0.3606399893760681, 0.7257400155067444, -0.49928000569343567, 0.15871000289916992, -0.17628000676631927, 0.29548001289367676, 0.43077000975608826, 0.10081999748945236, -0.7648800015449524, -0.5348100066184998, -0.10781999677419662, -0.10767000168561935, 0.36675000190734863, -0.30118998885154724, 0.5103700160980225, -0.20931999385356903, -0.6982600092887878, 0.2435300052165985, -0.48500001430511475, -0.45107999444007874, -0.4526900053024292, 0.45013999938964844, 0.612030029296875, -0.5950899720191956, 0.21683000028133392, -0.29225000739097595, 0.20124000310897827, 0.08918999880552292, -0.7924100160598755, 1.0058000087738037, 1.0917999744415283, 0.002845000009983778, 0.4643099904060364, -0.40345999598503113, -0.2141599953174591, -0.2360299974679947, -0.17188000679016113, -0.16249999403953552, 0.022105000913143158, -0.27785998582839966, -0.9045400023460388, -0.21067999303340912, 0.3101699948310852, 0.23738999664783478, 0.7404900193214417, 0.029436999931931496, 0.2209399938583374, -0.09822099655866623, -0.30893999338150024, 0.3340800106525421, -0.1690800040960312, 0.5028300285339355, 0.2688800096511841, 0.7790799736976624, -0.15094000101089478, 0.08971499651670456, 0.6944599747657776, -1.0983999967575073, -1.2821999788284302, 0.7837899923324585, -0.014074999839067459, 0.18645000457763672, -0.3144499957561493, -0.4045099914073944, 0.3259600102901459, 0.28130999207496643, -0.008569399826228619, -0.3627299964427948, -0.36730000376701355, -0.2682900130748749, -0.6024699807167053, 0.6187499761581421, 0.7735199928283691, 0.27775999903678894, 0.11458999663591385, -0.6573299765586853, 0.1503800004720688, 0.2632400095462799, -0.42340001463890076, 0.13694000244140625, -0.22880999743938446, 0.10819000005722046, -0.421999990940094, 0.8273299932479858, -0.17166000604629517, 1.0699000358581543, -0.15699000656604767, 0.7244499921798706, -0.24872000515460968, -0.033702999353408813, -0.13693000376224518, 0.049198001623153687, -0.8662099838256836, -0.06523899734020233, 0.256089985370636, 0.21356000006198883, -0.512939989566803, -0.06709600239992142, -0.5572800040245056, -0.3509399890899658, -0.15500999987125397, -0.19032999873161316, 1.0223000049591064, -0.6503000259399414, -1.2239999771118164, 0.4749999940395355, 0.1604599952697754, 0.7439200282096863, -0.9620599746704102, -0.4883599877357483, 0.29017001390457153, -0.07528900355100632, -1.135200023651123, -0.0836080014705658, -0.18418000638484955, 0.698419988155365, 0.21935999393463135, -0.6560800075531006, 0.6469600200653076, -0.4895699918270111, -0.5887600183486938, 0.16629000008106232, -0.49246999621391296, -0.6952000260353088, 0.3795500099658966, 0.19009000062942505, 0.030220000073313713, -0.007156100124120712, -0.1604900062084198, -0.6818699836730957, 0.20689000189304352, -0.46156999468803406, 0.6667500138282776, -0.1573300063610077, 0.09140200167894363, 0.01662999950349331, 0.5243300199508667, -0.5934600234031677, -0.5359100103378296, -0.6521099805831909, 0.2865999937057495, 
0.36430999636650085, 0.9602599740028381, 0.31731998920440674, 0.13842999935150146, 0.9325299859046936, -0.20489999651908875, 0.08205299824476242, 0.5018799901008606, 0.10320000350475311, -0.11354999989271164, 0.6639000177383423, 0.465719997882843, -0.03064499981701374, -0.22436000406742096, -0.03510100021958351, 0.41095998883247375, 0.32910001277923584, 0.21412000060081482, 1.2137000560760498, -0.09852699935436249, 0.6180999875068665, 0.08336199820041656, 0.05687100067734718, 0.5037400126457214, -0.08250299841165543, -0.34894999861717224, 0.1942799985408783, 0.7544299960136414, 0.15018999576568604, 1.2623000144958496, -0.44960999488830566, 0.2899700105190277, 0.5621600151062012, 0.025971999391913414, 0.2240699976682663, -0.8214200139045715, -0.08159899711608887, -0.025169000029563904, -0.16023999452590942, -0.030090000480413437, -0.45680001378059387, -0.050554998219013214, -0.19913999736309052, -0.13635000586509705, 0.676609992980957, 0.02268899977207184, -0.41165000200271606, 0.39316999912261963, 0.7414500117301941, 0.025266999378800392, 0.13043999671936035, -0.8199599981307983, 0.23193000257015228, -0.1859399974346161, -0.12730999290943146, 0.05341099947690964, 0.08136399835348129, 0.03945299983024597, -0.37994998693466187, 0.5092099905014038, 0.07654400169849396, -0.18313999474048615, -1.0015000104904175, -0.17121000587940216, 0.3834500014781952, -0.6383100152015686, -0.43004000186920166, -0.42921000719070435, 0.46643999218940735, -0.10909000039100647, 0.28913000226020813, -0.6144999861717224, 0.42941999435424805, 0.37606000900268555, -0.06342600286006927, 0.5750799775123596, 0.2888700067996979, 0.38659998774528503, -0.606440007686615, -0.17041000723838806, 0.031543999910354614, -0.16182999312877655, -0.07624799758195877, -0.22202999889850616, 0.4221400022506714, -0.001893899985589087, 0.32409000396728516, -0.5765299797058105, 0.32236000895500183, 0.38304999470710754, -0.36351001262664795, -0.6772900223731995, -1.4574999809265137, 0.27897000312805176, -0.5560700297355652, 0.34314998984336853, 0.28951001167297363, 0.45583999156951904, -0.3115600049495697, 0.3588100075721741, 0.04856700077652931, -0.3453800082206726, 0.4162999987602234, 0.01886500045657158, 0.14101000130176544, 0.22121000289916992, -0.28268998861312866, -0.4235000014305115, -1.0627000331878662, -0.34567001461982727, -0.4325999915599823, 0.8613600134849548, 0.28110000491142273], u'rusty': [0.3589499890804291, 0.15086999535560608, -0.3316799998283386, -0.010436000302433968, -0.14811000227928162, -0.05069100111722946, -0.22645999491214752, 0.17053000628948212, 0.5123500227928162, 0.6370400190353394, 0.00402839994058013, -0.06331799924373627, -0.8127300143241882, 0.04369499906897545, -0.49116000533103943, -0.1244100034236908, -0.3550499975681305, 0.1813800036907196, 0.02537900023162365, -0.3089100122451782, 0.7411199808120728, 0.2838299870491028, -0.10191000252962112, 0.16631999611854553, -0.1810699999332428, -0.1964000016450882, 0.2505800127983093, 0.17418000102043152, -0.16107000410556793, -0.02121499925851822, 0.21938000619411469, 0.3338499963283539, 0.011101000010967255, 0.14213000237941742, -0.45138001441955566, 0.41203999519348145, -0.25255998969078064, -0.0029132000636309385, 0.10200999677181244, 0.18231000006198883, 0.09123100340366364, 0.5217199921607971, 0.3476099967956543, -0.6840100288391113, 0.14318999648094177, -0.07542099803686142, -0.3157599866390228, 0.1396999955177307, -0.18685999512672424, -0.31518998742103577, 0.0368649996817112, -0.26607000827789307, 0.16753000020980835, -0.11012999713420868, 
0.08008900284767151, 0.06265799701213837, 0.122809998691082, -0.24747000634670258, 0.22396999597549438, -0.1934799998998642, 0.1360899955034256, 0.1222200021147728, 0.3047400116920471, 0.3570399880409241, 0.2728399932384491, -0.4553399980068207, -0.4538399875164032, 0.049518000334501266, 0.5612199902534485, 0.005668399855494499, -0.3352299928665161, 0.0064941998571157455, 0.21660999953746796, 0.9763299822807312, -0.4798699915409088, -0.08244600147008896, -0.35036998987197876, 0.35604000091552734, -0.04118400067090988, -0.2778699994087219, 0.4377099871635437, 0.20765000581741333, 0.1277099996805191, -0.04747400060296059, 0.026959000155329704, -0.316540002822876, 0.11739999800920486, 0.0670740008354187, -0.3115899860858917, -0.08907999843358994, 0.5805699825286865, 0.5746999979019165, -0.5234799981117249, -1.179800033569336, -0.4703199863433838, 0.49300000071525574, 0.39487001299858093, -0.28543001413345337, 0.15207000076770782, -0.31306999921798706, -0.09061499685049057, -0.12138000130653381, -0.3875100016593933, -0.34488001465797424, 0.1604599952697754, -0.29381000995635986, 0.18647000193595886, -0.025498000904917717, -0.6065400242805481, -0.3456200063228607, -0.36733999848365784, 0.09311699867248535, -0.3034000098705292, -1.0808000564575195, -0.3693599998950958, 0.16259999573230743, -0.2791599929332733, 0.4841099977493286, 0.17045000195503235, 0.39267000555992126, -0.33913999795913696, -0.6438400149345398, -0.07168000191450119, -0.2817299962043762, 0.09761899709701538, -0.04388599842786789, -0.23819999396800995, 0.14628000557422638, -0.18614999949932098, 0.3844499886035919, -0.04369200021028519, -0.03560600057244301, 0.29903000593185425, -0.17182999849319458, 0.11477000266313553, 0.053279999643564224, 0.38732001185417175, -0.05216600000858307, 0.15185999870300293, 0.19133999943733215, 0.3030799925327301, 0.06765300035476685, 0.23458999395370483, -0.4150800108909607, 0.2606399953365326, 0.13459999859333038, -0.04493600130081177, 0.6767299771308899, -0.20438000559806824, -0.05549800023436546, 0.18491999804973602, -0.07366099953651428, -0.4737800061702728, -0.14393000304698944, -0.1013299971818924, -0.6888999938964844, 0.4224399924278259, -0.5041400194168091, 0.352620005607605, 0.6248400211334229, 0.3610199987888336, 0.012884000316262245, -0.17749999463558197, -0.5255399942398071, -0.2683500051498413, 0.22020000219345093, -0.2971999943256378, 0.3912000060081482, 0.046539999544620514, -0.11643999814987183, -0.07336000353097916, 0.8762400150299072, 0.192440003156662, -0.46994999051094055, -0.054120998829603195, -0.3230699896812439, 0.21397000551223755, -0.4611800014972687, 0.3148599863052368, -0.1381099969148636, 0.4322099983692169, 0.7524799704551697, 0.42963001132011414, 0.18567000329494476, -0.1888200044631958, -0.21448999643325806, 0.34376999735832214, 0.1651500016450882, 0.6547499895095825, 0.2320600003004074, -0.5556899905204773, -0.4830000102519989, 0.33656999468803406, 0.7341399788856506, 0.33302000164985657, -0.1521800011396408, -0.3334900140762329, -0.06613600254058838, -0.440420001745224, -0.46340999007225037, 0.8845700025558472, 0.003378999885171652, -0.20413999259471893, -0.12471000105142593, 0.47095000743865967, 0.2689000070095062, 0.9251400232315063, -0.7149199843406677, -0.38934001326560974, 0.13670000433921814, -0.07590299844741821, 0.3302899897098541, -0.17274999618530273, -0.47453001141548157, 0.11275000125169754, -0.04537099972367287, 0.2658199965953827, 0.7965099811553955, -0.47282999753952026, -0.08234000205993652, 0.32190001010894775, -0.40108001232147217, 
-0.1276800036430359, 0.09072200208902359, 0.07256600260734558, 0.2899399995803833, -0.016428999602794647, -0.17922000586986542, -0.3117299973964691, -0.21834999322891235, 0.09499599784612656, -0.25690001249313354, -0.06894899904727936, 0.2549000084400177, -0.3507100045681, 0.7877399921417236, -0.12791000306606293, -0.032464999705553055, -0.3131999969482422, -0.38071000576019287, 0.1939300000667572, -0.3364199995994568, -0.3216400146484375, -0.3255299925804138, 0.26965001225471497, -0.37428998947143555, 0.4415000081062317, -0.16231000423431396, 0.3570300042629242, 0.3934299945831299, -0.5858299732208252, -0.12015999853610992, 0.08549799770116806, -0.7776700258255005, 0.9037899971008301, -0.13947999477386475, 0.13342000544071198, -0.4952299892902374, 0.18445000052452087, 0.12195999920368195, -0.6122099757194519, 0.19006000459194183, -0.1473200023174286, -0.08844900131225586, 0.18624000251293182, -0.30774998664855957, -0.3872799873352051, -0.2130099982023239, -0.42555999755859375, 0.25859999656677246, -0.19856999814510345, -0.46560001373291016, -0.415149986743927, -0.2337999939918518, -0.26006001234054565, 0.24627000093460083, 0.07649099826812744, 0.026606999337673187, -0.7224100232124329, 0.12201999872922897, 0.15494999289512634, 0.26669999957084656, -0.019536999985575676, 0.16058999300003052, 0.17980000376701355, -0.3450999855995178, 0.08337599784135818, 0.28185999393463135, -0.32879000902175903, 0.15228000283241272, 0.3415600061416626, -0.17744000256061554, -0.015312000177800655, 0.3476000130176544, -0.3615399897098541, 0.052048999816179276, 0.07923799753189087, -0.21788999438285828, -0.198730006814003, 0.505050003528595], u'inflated': [0.7446900010108948, 0.5210599899291992, 0.38201001286506653, 0.2749199867248535, 0.10175999999046326, -0.06488999724388123, 0.27452999353408813, 0.4761500060558319, -0.10036999732255936, -0.7969800233840942, -0.7605100274085999, 0.09193799644708633, -0.1372700035572052, 0.07847800105810165, -0.4244900047779083, 0.6371300220489502, 0.3089999854564667, 0.1607300043106079, 0.2923299968242645, 0.12524999678134918, 0.5001400113105774, 0.04458199813961983, -0.023507999256253242, -0.004742399789392948, 0.03998500108718872, 0.28227001428604126, 0.2530600130558014, 0.17684000730514526, 0.22146999835968018, 0.9616199731826782, -0.6115599870681763, 0.1271899938583374, -0.46904999017715454, 0.10283999890089035, 0.10847000032663345, 0.03799999877810478, -0.2937600016593933, -0.40782999992370605, 0.13558000326156616, 0.2017199993133545, -0.42287999391555786, -0.4734500050544739, -0.40564000606536865, 0.469650000333786, -0.1844100058078766, -0.15690000355243683, -0.4509600102901459, -0.10012999922037125, 0.4013899862766266, 0.6919800043106079, 0.70933997631073, -0.39612001180648804, -0.5105900168418884, -0.162540003657341, -0.0071159000508487225, 0.06723900139331818, -0.02276100032031536, 0.19776000082492828, -0.27028000354766846, -0.3732700049877167, 0.29434001445770264, -0.336899995803833, -0.04615199938416481, -0.5659700036048889, 0.05907600000500679, 0.03058600053191185, 0.039976999163627625, -0.6438199877738953, 0.036393001675605774, 0.40845000743865967, 0.31490999460220337, 0.4459899961948395, 0.3933500051498413, 0.4027099907398224, 0.2409300059080124, -0.006198499817401171, 0.9829800128936768, -0.01551000028848648, -0.04663800075650215, -0.3825699985027313, -0.4330100119113922, 0.5571799874305725, 0.17372000217437744, 0.37038999795913696, 0.23601999878883362, 0.915440022945404, 0.1624699980020523, -0.3675200045108795, 0.043540000915527344, 0.712909996509552, 
0.20420999825000763, 0.3139199912548065, -0.005445499904453754, 0.21612000465393066, -0.028140999376773834, 0.6580700278282166, -0.6716499924659729, 0.21738000214099884, -0.5853599905967712, -0.11665999889373779, -0.16962000727653503, -0.15982000529766083, -0.16496999561786652, -0.20764000713825226, 0.4228399991989136, 0.20589999854564667, -0.8206800222396851, 0.05876300111413002, -0.07170499861240387, -0.006776699796319008, 0.06852100044488907, 0.6305999755859375, 0.034758999943733215, -0.39348000288009644, 0.600380003452301, -0.11271999776363373, 0.1863200068473816, -0.09860900044441223, 0.03350900113582611, -0.6133700013160706, 0.7075200080871582, -0.5535600185394287, 0.5861999988555908, 0.46841999888420105, 0.15002000331878662, 0.027194999158382416, -0.5203400254249573, -0.4796200096607208, -0.11258000135421753, -0.335099995136261, -0.420989990234375, 0.6390500068664551, -0.06223899871110916, 0.658240020275116, 0.006413700059056282, -0.15357999503612518, 0.09366100281476974, -0.34731000661849976, -0.13412000238895416, 0.6646999716758728, 0.12637999653816223, -0.23806999623775482, -0.004153899848461151, 0.3269999921321869, -0.008877900429069996, 0.5081400275230408, -0.03635700047016144, -0.5515400171279907, 1.0849000215530396, 0.4077399969100952, -0.020152999088168144, 0.0651170015335083, -0.05298899859189987, -0.3339099884033203, -0.46751001477241516, 0.7845500111579895, 0.15721000730991364, -0.010958000086247921, -0.19035999476909637, 0.10046999901533127, -0.31749001145362854, -0.5464299917221069, -0.054016001522541046, -0.05049699917435646, 0.7362300157546997, -0.986810028553009, -0.1640399992465973, 0.02959199994802475, -0.11215999722480774, -0.09952999651432037, 0.05914999917149544, 0.15803000330924988, -0.5074899792671204, -0.2306399941444397, 0.35175999999046326, -0.4529699981212616, 0.03135799989104271, 0.18639999628067017, -0.23763999342918396, 0.13443000614643097, 0.31738001108169556, -0.3694800138473511, -0.22460000216960907, 0.13673999905586243, 0.5093700289726257, 0.07156900316476822, 0.3177900016307831, -0.026507999747991562, -0.1383499950170517, -0.021198000758886337, 0.6035699844360352, 0.14284999668598175, -0.1262200027704239, -0.16565999388694763, -0.14404000341892242, 0.16072000563144684, 0.32265999913215637, 0.49709999561309814, 0.29403001070022583, 0.27577000856399536, -0.462909996509552, -0.22511999309062958, -0.459089994430542, -0.6212499737739563, 0.6215699911117554, 0.1873600035905838, -0.24683000147342682, 0.3369799852371216, -0.6226999759674072, -0.0427200011909008, 0.06773199886083603, 0.4148699939250946, 0.06169100105762482, -0.5471100211143494, 0.22537000477313995, 0.11163000017404556, 0.25523000955581665, 0.2418999969959259, -0.29194000363349915, 0.47554999589920044, 0.367249995470047, 0.20618000626564026, 0.11958999931812286, -0.4672600030899048, -0.09388499706983566, -0.004397499840706587, -0.8908799886703491, 0.3649600148200989, 0.23440000414848328, -0.34922999143600464, -0.2731899917125702, -0.16333000361919403, -0.5620700120925903, 0.07197900116443634, -0.5419899821281433, -0.17021000385284424, -0.29030001163482666, -0.4329800009727478, -0.4326300024986267, -0.13747000694274902, 0.4782400131225586, -0.33904001116752625, 0.4171200096607208, 0.1493300050497055, -0.8820899724960327, -1.145300030708313, 0.7820299863815308, -0.47415000200271606, 0.20904000103473663, 0.07560399919748306, -0.3130599856376648, -0.05954800173640251, 0.20666000247001648, -0.5373700261116028, -0.39980998635292053, 0.03975699841976166, -0.3270399868488312, 0.17499999701976776, 
-0.16068999469280243, -0.4926399886608124, -0.028158999979496002, -0.1779100000858307, -0.11314000189304352, 0.08161900192499161, -0.1237500011920929, 0.31407999992370605, 0.6134999990463257, -0.7365599870681763, -0.11875999718904495, 0.28554999828338623, -0.16630999743938446, 0.8116499781608582, 0.44686999917030334, 0.38385000824928284, -0.23962000012397766, 0.052685000002384186, -0.44795000553131104, -0.11818999797105789, -0.147039994597435, -0.1398099958896637, 0.13609999418258667, 0.027494000270962715, 0.3862900137901306, -0.013996000401675701, -0.026321999728679657, 0.14701999723911285, 0.26590999960899353, -0.31341999769210815, -0.5815899968147278, -0.2827000021934509, -0.20773999392986298, -0.4511699974536896, 0.10108000040054321, -0.0049080997705459595, 0.3524700105190277, -0.5781300067901611, 0.1853400021791458, -0.2198999971151352, -0.272460013628006, -0.21137000620365143], u'ruffled': [-0.17609000205993652, -0.35888999700546265, 0.0735659971833229, 0.35958001017570496, -0.35273998975753784, -0.1856600046157837, -0.15154999494552612, 0.0025315999519079924, 0.44780001044273376, -0.25692999362945557, -0.32071998715400696, 0.6065700054168701, 0.01755400002002716, 0.1409199982881546, -0.5308300256729126, 0.41971999406814575, 0.20002999901771545, 0.14768999814987183, 0.10136999934911728, 0.3071500062942505, -0.06778399646282196, 0.2195100039243698, 0.27279001474380493, 0.2890799939632416, -1.141800045967102, -0.07773599773645401, -0.023645000532269478, 0.1935500055551529, -0.013686000369489193, 0.5383599996566772, 0.14597000181674957, -0.5724999904632568, -0.4596799910068512, -0.011273999698460102, 0.10087999701499939, 0.2119700014591217, -0.1462399959564209, -0.29997000098228455, 0.2552199959754944, -0.15725000202655792, -0.5885699987411499, -0.21689000725746155, 0.08321700245141983, -0.44699999690055847, -0.13559000194072723, -0.0235580001026392, -0.22623999416828156, -0.2144400030374527, -0.8785799741744995, -0.009295100346207619, -0.28529998660087585, -0.5990700125694275, 0.6368100047111511, -0.29756999015808105, -0.25113001465797424, 0.06439699977636337, -0.8350200057029724, 0.06524500250816345, -0.030462000519037247, 0.24536000192165375, 0.9441400170326233, -0.05403999984264374, -0.13116000592708588, -0.6266800165176392, -0.48976999521255493, -0.19812999665737152, 0.9238200187683105, 0.516260027885437, 0.16779999434947968, -0.06577199697494507, 0.4283300042152405, 0.29177001118659973, 0.09906700253486633, 0.03976999968290329, 0.3889800012111664, 0.46086999773979187, -0.2925400137901306, -0.3805699944496155, -0.4378100037574768, 0.1289999932050705, -0.07726100087165833, 0.14521999657154083, 0.09201599657535553, -0.12134999781847, 0.774150013923645, 0.6377800107002258, 0.16346000134944916, -0.6713899970054626, -0.20327000319957733, -0.19968000054359436, 0.1377200037240982, 0.15569999814033508, -0.10963000357151031, 0.26273998618125916, -0.23454000055789948, -0.046546999365091324, 0.5550900101661682, 0.28248998522758484, -0.5739499926567078, 0.7012799978256226, 0.1417900025844574, -0.367000013589859, -0.6893699765205383, -0.2914600074291229, -0.6497200131416321, 0.5338900089263916, -0.0015691999578848481, 0.046574998646974564, 0.06812600046396255, 0.04167899861931801, -0.01611199975013733, 0.38201001286506653, 0.018511999398469925, -0.2719700038433075, -0.33702000975608826, 0.10619000345468521, -0.21682000160217285, 0.545490026473999, 0.022700000554323196, -0.741100013256073, 0.5268200039863586, -0.42972999811172485, 0.4614799916744232, -0.02605300024151802, -0.012505999766290188, 
0.047345999628305435, -0.5852100253105164, 0.3603599965572357, -0.2337300032377243, -0.26326000690460205, 0.11309000104665756, 0.029326999559998512, 0.00796709954738617, -0.18815000355243683, -0.6710100173950195, -0.24874000251293182, -0.26186999678611755, -0.30656999349594116, 0.5209800004959106, -0.07674600183963776, -0.15634000301361084, -0.397460013628006, 0.0011295999865978956, 0.26719000935554504, 0.10972999781370163, 0.23711000382900238, 0.12110000103712082, -0.23296000063419342, 0.33566999435424805, 0.2537499964237213, -0.047189000993967056, -0.2141299992799759, -0.08642300218343735, 0.36739999055862427, -0.5279399752616882, -0.21961000561714172, 0.12846000492572784, 0.025543000549077988, 0.36164000630378723, 0.43386000394821167, -0.2730199992656708, -0.2760699987411499, -1.291200041770935, 0.3594299852848053, 0.5911399722099304, -0.21423999965190887, -0.6683200001716614, 0.1551699936389923, 0.2672500014305115, -0.1264999955892563, -0.25870999693870544, -0.5036900043487549, -0.3549500107765198, -0.38363000750541687, -0.3841800093650818, -0.5601900219917297, -0.045896001160144806, 0.5711699724197388, -0.32673999667167664, -0.28891998529434204, -0.38874998688697815, -0.5297300219535828, -0.29600000381469727, -0.02340400032699108, -0.0939830020070076, -0.05863700062036514, 0.3929699957370758, 0.10678000003099442, -0.2837499976158142, -0.023336999118328094, -0.2875699996948242, 0.030469000339508057, -0.7466599941253662, -0.07932499796152115, 0.2791000008583069, 0.0565200001001358, 0.1616699993610382, -0.27998998761177063, -0.34696000814437866, -0.18010999262332916, 0.45249998569488525, 0.14416000247001648, 0.30671998858451843, 0.5064600110054016, 0.4343299865722656, -0.37286999821662903, 0.12163999676704407, 0.27184998989105225, -0.34303998947143555, 0.2580200135707855, 0.040460001677274704, -0.2610799968242645, 0.7001000046730042, 0.13784000277519226, 0.2137400060892105, -0.6163700222969055, 0.5452399849891663, -0.8688600063323975, -0.4376699924468994, -0.058504000306129456, 0.5267599821090698, -0.130390003323555, 0.0015713999746367335, -0.42603999376296997, -0.19480000436306, -0.6966699957847595, -0.18102000653743744, -0.6116600036621094, -0.040160998702049255, -0.6179100275039673, 0.47929999232292175, -0.03929800167679787, 0.20262999832630157, -0.14952999353408813, 0.2547700107097626, -0.02991200052201748, 0.34929001331329346, -0.18276000022888184, -0.3622500002384186, -0.39732998609542847, 0.28119000792503357, -0.0476670004427433, -0.7611299753189087, 0.5879899859428406, -0.24932999908924103, 0.502269983291626, 0.2774699926376343, -0.09909900277853012, 0.2996799945831299, -0.16133999824523926, -0.1525000035762787, -0.4755600094795227, 0.35618001222610474, -0.3040800094604492, 0.11348000168800354, -0.11298999935388565, 0.4999600052833557, 0.8591099977493286, -0.3662799894809723, -0.21985000371932983, 0.033358000218868256, 0.5215299725532532, 0.36134999990463257, 0.34031999111175537, -0.30869999527931213, -0.294979989528656, -0.07150500267744064, 0.2669200003147125, -0.4867199957370758, 0.5740000009536743, 0.013757999986410141, -0.35409000515937805, 0.2035199999809265, -0.5303199887275696, 0.009911599569022655, 0.4235199987888336, 1.127500057220459, -0.1453399956226349, -0.3542500138282776, -0.09583500027656555, 0.40838000178337097, 0.5229600071907043, 0.1926099956035614, 0.08637800067663193, -0.3071399927139282, -0.46358999609947205, 0.5384899973869324, 0.7268199920654297, -0.14313000440597534, -0.4339599907398224, 0.05225500091910362, -0.0036295000463724136, 0.3246900141239166, 
0.7442299723625183, 0.357369989156723, -0.7246500253677368, 0.3153499960899353, 0.18459999561309814, 0.6110799908638, -0.2739799916744232], u'steaming': [-0.13163000345230103, -0.1920199990272522, 0.5442100167274475, -0.1286800056695938, -0.3221000134944916, -0.16678999364376068, 0.3677699863910675, -0.3281700015068054, 0.2506200075149536, -0.16874000430107117, -0.12645000219345093, 0.25824999809265137, -0.3693599998950958, -0.48636001348495483, 0.24075999855995178, 0.19329999387264252, 0.3599199950695038, -0.34645000100135803, 0.20208999514579773, 0.391620010137558, -0.16452999413013458, -0.1124500036239624, -0.3316600024700165, -0.24000999331474304, -0.703719973564148, -0.48517000675201416, -0.010325999930500984, 0.1890600025653839, 0.37272000312805176, 0.1506499946117401, -0.46860000491142273, 0.3809399902820587, 0.18190999329090118, -0.22226999700069427, 0.1029599979519844, 1.0485999584197998, -0.30768999457359314, 0.2504799962043762, -0.5103800296783447, -0.007208399940282106, -0.2749199867248535, 0.10542000085115433, -0.1313299983739853, -0.708840012550354, 0.6108199954032898, -0.004699099808931351, 1.0621000528335571, 0.6338099837303162, -0.22245000302791595, 0.21473999321460724, 0.2858999967575073, 0.4708000123500824, -0.08790899813175201, -0.2609800100326538, -0.324070006608963, 0.1944900006055832, -0.24517999589443207, -0.04124699905514717, -0.2885200083255768, 0.5937899947166443, -0.2872599959373474, -0.7327399849891663, 0.15088999271392822, 0.25960999727249146, -0.4994199872016907, 0.4918299913406372, -0.06489600241184235, 0.3778499960899353, -0.4454900026321411, -0.20329000055789948, 0.03370799869298935, 0.2206300050020218, -0.2286899983882904, -0.8544999957084656, -0.660040020942688, 0.1298999935388565, 0.7818400263786316, -0.035211000591516495, -0.38837000727653503, -0.28499001264572144, -0.3692399859428406, -0.589959979057312, -0.11253999918699265, 0.0418579988181591, 0.03987500071525574, -0.5505800247192383, 0.26155000925064087, 0.08763299882411957, 0.5313299894332886, -0.7662500143051147, 0.20171000063419342, -0.23624999821186066, -0.249549999833107, -0.1726199984550476, -0.6242300271987915, 0.07408899813890457, -0.04798100143671036, 0.17599999904632568, -0.22166000306606293, 0.5908399820327759, -0.04017899930477142, -0.0487309992313385, -0.3164899945259094, -0.33267998695373535, -0.2869899868965149, 0.1813800036907196, 0.379830002784729, 0.13617999851703644, 0.2889600098133087, 0.2231599986553192, 0.43112999200820923, 0.17591999471187592, -0.6615800261497498, -0.6037300229072571, -0.36777999997138977, -0.6704599857330322, -0.5088499784469604, 0.12453000247478485, -0.08170600235462189, 0.11590000241994858, 0.008635899983346462, 0.07579100131988525, -0.259909987449646, 0.0582440011203289, 0.23062999546527863, -0.14358000457286835, -0.21630999445915222, -0.6976600289344788, 0.18672999739646912, 0.05871500074863434, -0.004474100191146135, 0.7597900032997131, 0.40707001090049744, 0.18453000485897064, -0.1912200003862381, 0.46834999322891235, -0.2424899935722351, -0.011497999541461468, 0.3562900125980377, 0.48315000534057617, 0.12003999948501587, 0.5739200115203857, -0.2875699996948242, 0.26802998781204224, -0.3135699927806854, -0.2116899937391281, -0.490339994430542, 0.6936200261116028, 0.06985300034284592, -0.5743399858474731, -0.1162400022149086, -0.19102999567985535, -0.21170000731945038, -0.162990003824234, 0.38732999563217163, -0.05188300088047981, 0.002175800036638975, -0.4398599863052368, 0.024629000574350357, -0.11595000326633453, -0.4263400137424469, 
-0.40623000264167786, -0.302700012922287, -0.4318999946117401, 0.3428100049495697, -0.07446199655532837, 0.4743399918079376, 0.1840900033712387, 0.03837500140070915, -0.26677998900413513, 0.30856001377105713, 0.3427799940109253, -0.07773499935865402, -0.6313999891281128, -0.3577499985694885, -0.4165700078010559, -0.400519996881485, 0.42344000935554504, 0.6249499917030334, -0.34553998708724976, 0.036357998847961426, 0.35297998785972595, 0.5111299753189087, 0.13186000287532806, 0.2091600000858307, 0.1166900023818016, 0.5629000067710876, 0.3521299958229065, -0.008126599714159966, -1.0987000465393066, -0.0066877999342978, 0.6217700242996216, -0.15705999732017517, -0.24887999892234802, -0.09701599925756454, -0.48673999309539795, 0.03477099910378456, 0.11386000365018845, -0.5888199806213379, -0.4360699951648712, 0.05620099976658821, -0.2570199966430664, 0.10217999666929245, 0.054058000445365906, 0.3092699944972992, -0.12560999393463135, 0.26524001359939575, -0.41220998764038086, 0.23225000500679016, -0.546180009841919, -0.17217999696731567, -0.09967800229787827, 0.12785999476909637, 0.22041000425815582, -0.17422999441623688, 0.057711999863386154, -0.0588500015437603, -0.6140000224113464, -0.13742999732494354, -0.10036999732255936, 0.8664199709892273, 0.22919000685214996, 0.06752599775791168, -0.4012199938297272, -0.30331000685691833, -0.5530999898910522, -0.08914399892091751, -0.38207998871803284, 0.403329998254776, -0.033348001539707184, 0.16110999882221222, -0.18176999688148499, -0.11650999635457993, 0.7713599801063538, 0.08498399704694748, -0.4567199945449829, 0.006076099816709757, 0.08161800354719162, -0.12941999733448029, -0.370959997177124, -0.9083999991416931, -0.0359329991042614, -0.5314900279045105, 0.2478400021791458, 0.054680999368429184, -0.5075100064277649, 0.2965399920940399, 0.4820599853992462, -0.24108000099658966, 0.22578999400138855, -0.07761699706315994, -0.9019899964332581, -0.07494799792766571, 0.36131998896598816, -0.3300600051879883, 0.05034999921917915, -0.7294399738311768, -0.10267999768257141, -0.402319997549057, 0.08621499687433243, 0.1595499962568283, -0.5216599702835083, 0.17922000586986542, -0.03137800097465515, 0.11782000213861465, 0.22666999697685242, -0.22056999802589417, -0.47435998916625977, -0.25356000661849976, -0.33428001403808594, -0.8691400289535522, -0.18643000721931458, 0.006151299923658371, 0.7702699899673462, 0.12172999978065491, 0.06430699676275253, -0.1257600039243698, 0.18856999278068542, -0.9592700004577637, -0.3277699947357178, -0.5229099988937378, -0.02279599942266941, -0.28648000955581665, -0.04737100005149841, 0.1773100048303604, -0.20401999354362488, 0.20781999826431274, -0.34556999802589417, 0.7011299729347229, 0.22506000101566315, -0.46584999561309814, 0.5174400210380554, 0.00831419974565506, 0.24786999821662903, -0.31463000178337097, 0.16954000294208527, 0.1656000018119812, -0.4154900014400482, -0.35447999835014343, -0.03244499862194061], u'unripe': [0.2581399977207184, 0.018199000507593155, 0.642549991607666, -0.16637000441551208, 0.8677700161933899, -0.6459599733352661, 0.2749899923801422, 0.6733199954032898, -0.07122799754142761, 1.1871000528335571, -0.16347000002861023, -0.49915000796318054, 0.18211999535560608, -0.9434599876403809, 0.05178700014948845, -0.23492999374866486, -0.35409998893737793, -0.5893099904060364, -0.3805899918079376, -0.23555000126361847, -0.26844000816345215, 0.4318999946117401, 0.05338200181722641, -0.3859899938106537, -0.5138599872589111, -0.8748000264167786, -0.07736100256443024, 0.005160199943929911, 
-0.023327000439167023, 0.1033800020813942, -0.4998599886894226, 0.035969000309705734, -0.5328999757766724, -0.17329999804496765, 0.13474999368190765, 0.3203299939632416, -0.5596100091934204, -0.9842299818992615, 0.06649799644947052, -0.20701999962329865, 0.33489999175071716, 0.2663100063800812, 0.38071998953819275, -0.24500000476837158, -0.502560019493103, 0.42671000957489014, 0.03044000081717968, -0.18102000653743744, -0.4769200086593628, 0.06161699816584587, -0.2784099876880646, -0.13639000058174133, 0.6489700078964233, -0.44912999868392944, -0.33559998869895935, -0.7152299880981445, 0.9919300079345703, -0.07607399672269821, 0.5651199817657471, 0.288129985332489, -0.11894000321626663, -0.8536700010299683, -0.5864999890327454, 0.3921299874782562, -0.34007999300956726, -0.35899001359939575, -0.267520010471344, -0.6710000038146973, 0.14891000092029572, 0.21302999556064606, -0.022842999547719955, -0.029397999867796898, 0.11776000261306763, 0.2935500144958496, 0.26280999183654785, 0.3534500002861023, 0.23643000423908234, -0.20444999635219574, 0.29089999198913574, 0.2699800133705139, -0.16527999937534332, 0.15444999933242798, -0.027861999347805977, 0.016939999535679817, 0.10860999673604965, -0.33472999930381775, -0.26085999608039856, 0.09712100028991699, 0.24236999452114105, 0.17833000421524048, -0.2045300006866455, 0.21458999812602997, 0.07340200245380402, -0.7831799983978271, -0.45798999071121216, 0.26725998520851135, -0.20645000040531158, 0.033865999430418015, -0.24528999626636505, 0.5717300176620483, -0.0751819983124733, -0.04054800048470497, -0.1348000019788742, 0.7198299765586853, -0.34470999240875244, 0.3221000134944916, -0.055045999586582184, 0.15388000011444092, -0.05774100124835968, -0.5886200070381165, 0.7476999759674072, 0.562030017375946, 0.2890099883079529, 0.16367000341415405, 0.44655999541282654, -0.005057500209659338, -0.07774099707603455, 0.07755400240421295, 0.03691700100898743, 0.8013499975204468, -0.4079299867153168, -0.4039100110530853, -0.08391200006008148, 0.5358099937438965, -0.13223999738693237, 0.08136799931526184, -0.0971980020403862, 0.06262999773025513, 0.6481999754905701, 0.14069999754428864, -0.7120800018310547, 0.013512999750673771, 0.3244900107383728, -0.43367999792099, -0.6759200096130371, -0.011568999849259853, 0.05354899913072586, 0.02284100092947483, -0.009438999928534031, 0.10497000068426132, 0.32760998606681824, 0.5708000063896179, -0.0037175999023020267, -0.04093199968338013, 0.3737800121307373, -0.4380800127983093, -0.07753600180149078, -0.5090799927711487, 0.9219300150871277, -0.5863500237464905, -0.8077999949455261, -0.08833499997854233, 0.057301998138427734, 0.07485800236463547, -0.20502999424934387, -0.2073100060224533, 0.6690499782562256, 0.28047001361846924, 0.011297999881207943, 0.18328000605106354, -0.2998799979686737, -0.06164899840950966, -0.13970999419689178, 0.18113000690937042, -0.0277319997549057, -0.753250002861023, -0.5247799754142761, -0.004347099922597408, -0.41982999444007874, -0.44620999693870544, -0.015363000333309174, -0.4341199994087219, 0.6093299984931946, -0.05136699974536896, 0.2728799879550934, -0.09243500232696533, 0.31529000401496887, 0.3567799925804138, 0.2488500028848648, -0.4145300090312958, 0.08755599707365036, -0.1625099927186966, 0.02673099935054779, -0.5346800088882446, -0.4212400019168854, -0.43472999334335327, 0.5053099989891052, 0.10583999752998352, 0.7502999901771545, -0.11979000270366669, -0.0636420026421547, 0.3300899863243103, 0.3909499943256378, -0.05845699831843376, 0.018830999732017517, 
0.022034000605344772, -0.19952000677585602, 0.6334800124168396, -0.207179993391037, -0.10360000282526016, -0.8142799735069275, -0.4451099932193756, 0.25380000472068787, 0.12500999867916107, -0.2581700086593628, -0.23704999685287476, 0.23399999737739563, 0.6521999835968018, 0.4972200095653534, -0.01153900008648634, 0.5544000267982483, -0.001762300031259656, -0.257099986076355, 0.05259399861097336, 0.20539000630378723, 0.08598200231790543, 1.0782999992370605, 0.16933999955654144, -0.20725999772548676, 0.16854000091552734, 0.134770005941391, -0.12492000311613083, -0.6861000061035156, -0.1428699940443039, -0.766730010509491, -0.5274900197982788, 0.17971999943256378, 0.20693999528884888, 0.40902000665664673, -0.07378499954938889, 0.2233400046825409, -0.20809000730514526, 0.09723000228404999, 0.544439971446991, -0.006854100152850151, 0.5757700204849243, -0.027255000546574593, -0.10221999883651733, -0.3031899929046631, -0.18460999429225922, 0.596530020236969, 0.22806000709533691, -0.31101998686790466, -0.13745999336242676, -0.2625899910926819, 0.28970998525619507, 0.22019000351428986, 0.12208999693393707, -0.11981000006198883, -0.004918999969959259, 0.04537200182676315, 0.12917999923229218, -1.0104000568389893, -0.2782300114631653, -0.7398099899291992, 0.44567999243736267, 0.13134999573230743, -0.0700870007276535, 0.21624000370502472, -0.08827199786901474, 0.03187499940395355, 0.3073599934577942, 0.1565999984741211, -0.3662700057029724, 0.5841400027275085, 0.0001746600028127432, 0.35484999418258667, -0.3232699930667877, 0.05176199972629547, 0.4368399977684021, -0.3049300014972687, 0.41666001081466675, -0.2162500023841858, 0.59934002161026, -0.4907599985599518, -0.6288700103759766, 0.19558000564575195, -0.24243000149726868, -0.5022799968719482, -0.16377000510692596, -0.2607699930667877, -0.4474000036716461, -0.02324499934911728, -0.11767999827861786, -0.6305099725723267, 0.22317999601364136, -0.03282000124454498, 0.01975400000810623, 0.2506200075149536, 0.35322999954223633, 0.13139000535011292, 0.052786000072956085, 0.9693999886512756, -0.14695000648498535, 0.2534700036048889, -0.15939000248908997, -0.5284500122070312, -0.35833999514579773, 0.19064000248908997, -0.8554099798202515], u'moldy': [0.023076999932527542, 0.11131999641656876, -0.36719998717308044, 0.09723000228404999, 0.32951998710632324, -0.2857399880886078, -0.2829799950122833, -0.03387900069355965, 0.48572999238967896, 0.6061800122261047, -0.15707999467849731, -0.4094499945640564, 0.11585000157356262, -0.34619998931884766, 0.21195000410079956, -0.32638001441955566, -0.1865299940109253, -0.48871999979019165, 0.2480199933052063, 0.07789299637079239, 0.1786399930715561, 0.15835000574588776, -0.32050999999046326, -0.4259699881076813, -0.05028799921274185, -0.6898699998855591, -0.016690000891685486, -0.1711300015449524, 0.08015900105237961, 0.16683000326156616, -0.3654699921607971, -0.4691599905490875, -0.510640025138855, -0.12047000229358673, 0.0920419991016388, 0.3117299973964691, -0.718970000743866, -0.6953499913215637, 0.2284799963235855, 0.18818999826908112, 0.355459988117218, -0.13229000568389893, -0.22739000618457794, -0.12849999964237213, 0.16962000727653503, 0.4796200096607208, 0.0036720000207424164, 0.0674239993095398, -0.8260599970817566, -0.32659998536109924, 0.09504099935293198, -0.18562999367713928, 0.3686800003051758, 0.22925999760627747, 0.4778900146484375, -0.6942800283432007, -0.21842999756336212, 0.06343100219964981, -0.28876999020576477, -0.2550399899482727, 0.16015000641345978, -0.3824700117111206, 0.04225600138306618, 
0.5802199840545654, -0.9297099709510803, 0.2976199984550476, 0.9105200171470642, 0.20874999463558197, 0.4108999967575073, -0.12253999710083008, 0.7265999913215637, -0.1601399928331375, -0.21052999794483185, -0.3089599907398224, -0.33559998869895935, -0.23104000091552734, 0.20295000076293945, 0.30153000354766846, 0.09621000289916992, 0.04148999974131584, 0.1399500072002411, -0.5499200224876404, 0.4454599916934967, 0.13631999492645264, -0.23523999750614166, -0.3599100112915039, -0.2307800054550171, 0.14348000288009644, 0.04858100041747093, 0.4366700053215027, 0.3869599997997284, 0.043779000639915466, 0.23047000169754028, -0.14503000676631927, -0.40026000142097473, -0.02611600048840046, 0.1958799958229065, 0.27098000049591064, -0.35479000210762024, 0.3874399960041046, -0.10206999629735947, 0.1464100033044815, 0.13225999474525452, 0.18068000674247742, -0.9162399768829346, -0.32811999320983887, -0.1551699936389923, 0.09369199723005295, -0.5098000168800354, -0.21367000043392181, -0.18398000299930573, 0.44269001483917236, -0.34286001324653625, -0.41273999214172363, -0.146479994058609, 0.043063998222351074, 0.26886001229286194, 0.7992500066757202, 0.17295999825000763, 0.23492999374866486, 0.2356099933385849, -0.4668700098991394, 0.056418001651763916, 0.8784000277519226, 0.017534000799059868, 0.07828199863433838, -0.4747700095176697, 0.27605998516082764, 0.3928300142288208, 0.13370999693870544, -0.16335999965667725, -0.09049700200557709, 0.46206000447273254, -0.1571899950504303, 0.34064000844955444, -0.3732999861240387, -0.13471999764442444, 0.8006700277328491, -0.5448899865150452, -0.060851000249385834, 0.4013800024986267, 0.21044999361038208, 0.1603900045156479, -0.07330700010061264, 0.17839999496936798, -0.3229700028896332, -0.8254200220108032, 0.1694599986076355, 0.10582000017166138, -0.4920099973678589, -0.38938000798225403, 0.20770999789237976, -0.1537500023841858, 0.00854090042412281, -0.7846500277519226, 0.5831800103187561, -0.05505099892616272, -0.0083553995937109, 0.17732000350952148, 0.5093299746513367, -0.28964999318122864, -0.14607000350952148, 0.03534400090575218, 0.05023600161075592, 0.5659800171852112, -0.2022400051355362, -0.20206999778747559, -0.20958000421524048, 0.6416500210762024, -0.46086999773979187, 0.21261000633239746, 0.5271700024604797, 0.2758300006389618, 0.11802999675273895, -0.11665000021457672, 0.24900999665260315, 0.2834300100803375, 0.08072300255298615, 0.44152000546455383, -0.8723400235176086, -0.046925000846385956, -0.05173100158572197, -0.005035200156271458, -0.46386998891830444, 0.1331299990415573, 0.03751799836754799, 1.010200023651123, -0.06535399705171585, 0.20654000341892242, 0.3031199872493744, 0.5047399997711182, -0.1565999984741211, -0.4407300055027008, -0.6196699738502502, 0.05369500070810318, -0.5371800065040588, -0.6744300127029419, 0.3463999927043915, -0.08087699860334396, 0.31338000297546387, -0.729390025138855, -0.2376600056886673, 0.16512000560760498, -0.2025900036096573, -0.25064000487327576, -0.4191100001335144, 0.27921000123023987, -0.2689400017261505, -0.3025299906730652, 0.18871000409126282, -0.7278500199317932, 0.29649001359939575, 0.28422999382019043, 0.04222000017762184, 0.1455399990081787, -0.27788999676704407, 0.5072900056838989, -0.013309000059962273, 0.023979999125003815, 0.2032800018787384, 0.39493000507354736, -0.4892500042915344, 0.0819609984755516, 0.52947998046875, -0.333869993686676, -0.05803599953651428, -0.21222999691963196, 0.09520599991083145, 0.4756999909877777, -0.13710999488830566, 0.12234000116586685, 
-0.005540600046515465, -0.0015026000328361988, -0.7502599954605103, -0.3600099980831146, -0.015170999802649021, 0.33893999457359314, -0.2870199978351593, -0.8062800168991089, -0.4089699983596802, 0.4647200107574463, -0.2392899990081787, -0.07331199944019318, -0.30956000089645386, -0.3195900022983551, 0.18554000556468964, -0.22030000388622284, -0.02029299922287464, -0.01711300015449524, -0.28567999601364136, -0.10206999629735947, -0.34804001450538635, -0.4433700144290924, -0.40507999062538147, 0.35565999150276184, 0.5631700158119202, -0.1751299947500229, -0.6636599898338318, 0.7685400247573853, 0.14811000227928162, -0.10109999775886536, -0.02991200052201748, -0.19314999878406525, -0.28161999583244324, 0.7391300201416016, -0.021601999178528786, -0.05890500172972679, -0.3285599946975708, -0.10777000337839127, 0.1304199993610382, -0.3037300109863281, 0.916670024394989, -0.2712700068950653, 0.23154999315738678, -0.741599977016449, -0.1355700045824051, 0.6347600221633911, -0.21118000149726868, -0.3763499855995178, -0.12690000236034393, -0.024038000032305717, 0.2589600086212158, -0.1276800036430359, 0.2191299945116043, 0.2936199903488159, 0.5212200284004211, 0.26750001311302185, 0.39434999227523804, 0.1151999980211258, -0.20529000461101532, -0.1526300013065338, -0.20246000587940216, 0.6830599904060364, -0.05640999972820282, -0.0810910016298294, -1.0044000148773193, 0.052393000572919846, 0.3549500107765198, -0.6046500205993652, 0.07589200139045715], u'closed': [0.11336000263690948, -0.2579199969768524, -0.052698999643325806, 0.11604999750852585, 0.4495199918746948, -0.3488200008869171, -0.2971999943256378, 0.24997000396251678, 0.5176699757575989, -1.5504000186920166, -0.4903799891471863, 0.21504999697208405, 0.31637999415397644, 0.5331500172615051, 0.06844999641180038, -0.5721499919891357, 0.09312800318002701, 0.31988000869750977, -0.13380999863147736, -0.33445000648498535, -0.2130099982023239, -0.5467600226402283, 0.3090200126171112, -0.37327998876571655, 0.19112999737262726, 0.09407299757003784, -0.3256100118160248, -0.15161000192165375, 0.15894000232219696, 0.09517399966716766, 0.47714999318122864, 0.13165000081062317, 0.09290900081396103, 0.8852800130844116, -0.7392500042915344, -0.08809000253677368, -0.2776699960231781, 0.03026299923658371, -0.15680000185966492, 0.15275000035762787, -0.4865399897098541, 0.34786999225616455, -0.4719200134277344, 1.0282000303268433, -0.41176000237464905, 0.2200700044631958, 0.24663999676704407, 0.48827001452445984, -0.2886199951171875, 0.13676999509334564, -0.1890300065279007, -0.25088998675346375, -0.4450699985027313, -0.23765000700950623, 0.3740299940109253, 5.1402999815763906e-05, 0.3363400101661682, 0.48548999428749084, 0.4590199887752533, -0.5743200182914734, 0.3956199884414673, 0.05204299837350845, -0.10920999944210052, -0.23513999581336975, 0.0299529992043972, -0.7893000245094299, 0.2463800013065338, -0.2747099995613098, -0.1666100025177002, -0.17274999618530273, -0.44449999928474426, -0.028248000890016556, 0.20715999603271484, 0.371069997549057, 0.34606000781059265, -0.15237000584602356, -0.30125001072883606, 0.38631999492645264, -0.4121299982070923, -0.45364999771118164, -0.07933899760246277, -0.5354899764060974, -0.3633599877357483, 0.07116200029850006, 0.16060000658035278, 0.21533000469207764, 0.17587999999523163, -0.06349600106477737, -0.2447900027036667, 0.07012999802827835, -0.11226999759674072, -0.7314199805259705, 0.039351001381874084, 0.25209999084472656, 0.0401029996573925, 0.14925000071525574, -0.3304600119590759, 0.31115999817848206, 
-0.22269000113010406, -0.5621500015258789, 0.6611700057983398, 0.10332000255584717, -0.2860899865627289, -0.35168999433517456, -0.12173999845981598, -0.2048799991607666, 0.11174999922513962, 0.20040999352931976, -0.4067299962043762, 0.3658599853515625, -0.8601400256156921, -0.6618800163269043, 0.2084600031375885, -0.008277099579572678, -0.16662999987602234, 0.36419999599456787, 0.12827999889850616, 0.12942999601364136, 0.05962200090289116, 0.091280996799469, -0.2857699990272522, -0.45638999342918396, 0.44275999069213867, -0.3288699984550476, 0.1447100043296814, -0.029955999925732613, -0.11437000334262848, 0.07021799683570862, -0.5988199710845947, 0.016333000734448433, 0.22686000168323517, 0.6117299795150757, 0.06244400143623352, -0.20178000628948212, -0.11066000163555145, 0.07011699676513672, -0.0010370999807491899, 0.028901999816298485, 0.16224999725818634, 0.027638999745249748, 0.2119400054216385, 0.051291000097990036, -0.5224400162696838, 0.47832000255584717, -0.7702999711036682, -0.3288800120353699, 0.42274001240730286, -0.3947499990463257, -0.15650999546051025, -0.1872200071811676, 0.49160000681877136, -0.20194999873638153, -0.045848000794649124, 0.11907999962568283, 0.7610700130462646, -0.07969299703836441, -0.5167199969291687, -0.3487899899482727, 0.6670799851417542, 0.07275599986314774, -0.13630999624729156, -0.04736199975013733, -0.5185099840164185, 0.10174000263214111, -0.17246000468730927, 0.42691999673843384, 0.32276999950408936, -0.0016343999886885285, 0.27643001079559326, -0.09407699853181839, 0.09136199951171875, -0.08952099829912186, 0.11405999958515167, 0.2426300048828125, 0.47437000274658203, 0.1470700055360794, 0.07256700098514557, 0.4443399906158447, 0.44617000222206116, 0.28661999106407166, -0.01372199971228838, 0.6611599922180176, -0.4436599910259247, 0.009575899690389633, -0.4064599871635437, 0.04743799939751625, -0.06543800234794617, -0.15536999702453613, 0.26945000886917114, 0.27336999773979187, -0.06419400125741959, -0.3648900091648102, -0.41517001390457153, -0.014585000462830067, -0.029743999242782593, -0.26895999908447266, -0.4986400008201599, -0.28891998529434204, -0.4555799961090088, -0.12366999685764313, 0.3927200138568878, -0.17315000295639038, -0.09940599650144577, 0.3900899887084961, -0.4906100034713745, 0.36574000120162964, -0.3085399866104126, -0.8977100253105164, 0.40424999594688416, -0.19633999466896057, -0.06814400106668472, -0.30496999621391296, -0.6050800085067749, -0.11445000022649765, -0.034384001046419144, 0.19113999605178833, 0.09234199672937393, -0.00606010016053915, 0.4475100040435791, 0.5085999965667725, 0.9874799847602844, -0.4251900017261505, -0.14723999798297882, -0.31529000401496887, -0.16769999265670776, -0.051621001213788986, -0.11421000212430954, -0.262580007314682, -0.13816000521183014, -0.03140300139784813, 0.3072499930858612, -0.6319100260734558, -0.315310001373291, -0.2322400063276291, 0.42113998532295227, 0.3805600106716156, 0.26135000586509705, -0.11964000016450882, 0.11924999952316284, 0.6006699800491333, -0.15474000573158264, 0.13023999333381653, -0.07242800295352936, -0.04631400108337402, -0.12037999927997589, -0.7528899908065796, 0.3667899966239929, -0.47106000781059265, 0.019943000748753548, -0.195810005068779, 0.17994999885559082, 0.25415998697280884, 0.10676000267267227, -0.09086000174283981, -0.6699100136756897, -0.22043000161647797, -0.15821999311447144, -0.1518000066280365, -0.08159100264310837, 0.4435499906539917, -0.07368099689483643, 0.02420699968934059, -0.07635600119829178, -0.21602000296115875, 0.0738309994339943, 
-0.0859609991312027, 0.5145300030708313, -0.20336000621318817, 0.05870499834418297, -0.38447999954223633, 0.14381000399589539, -0.039037998765707016, 0.4691300094127655, -0.25922998785972595, -0.4352700114250183, 0.2698099911212921, -1.2098000049591064, -0.5157399773597717, -0.010590000078082085, 0.8353700041770935, -0.7598400115966797, -0.16484999656677246, 0.08658099919557571, -0.9365599751472473, 0.16529999673366547, -0.058733001351356506, -0.09713400155305862, -0.09156499803066254, -0.08815599977970123, -0.47936001420021057, 0.16820000112056732, -0.18213999271392822, -0.4281800091266632, 0.6791099905967712, -0.07255999743938446, 1.0047999620437622, -0.15932999551296234, -1.2136000394821167, 0.2816599905490875, -0.07649099826812744], u'new': [-0.6233299970626831, -0.42434000968933105, -0.03532100096344948, -0.02669299952685833, 0.23119999468326569, 0.1763100028038025, 0.2872200012207031, -0.24921000003814697, 0.222120001912117, -1.6779999732971191, 0.3710300028324127, -0.11761999875307083, 0.07749100029468536, 0.0906670019030571, 0.3978999853134155, 0.6239299774169922, -0.46143999695777893, -0.0950779989361763, 0.09238400310277939, -0.014494000002741814, -0.36805999279022217, 0.370959997177124, 0.4930900037288666, 0.3688800036907196, 0.10792999714612961, 0.1754399985074997, 0.25029999017715454, 0.4418700039386749, -0.1150600016117096, 0.10503000020980835, 0.2095700055360794, 0.19103999435901642, -0.17090000212192535, 0.891480028629303, -1.0973000526428223, 0.45796999335289, 0.17506000399589539, -0.00014930999896023422, 0.35701000690460205, 0.15610000491142273, -0.20892000198364258, 0.5382199883460999, -0.3108299970626831, 0.39403998851776123, 0.029394999146461487, 0.3453800082206726, -0.02304000034928322, 0.3652600049972534, -0.09128200262784958, -0.24337999522686005, -0.09748999774456024, -0.5985900163650513, -0.15198999643325806, -0.27588000893592834, -0.04460800066590309, 0.01334299985319376, -0.17576000094413757, 0.06688299775123596, 0.12439999729394913, -0.41804999113082886, 0.09104499965906143, 0.10093999654054642, 0.2768000066280365, -0.3449400067329407, 0.3817099928855896, 0.16173000633716583, 0.33254000544548035, -0.25571000576019287, 0.11421000212430954, 0.11263000220060349, -0.06486999988555908, 0.18190999329090118, -0.04058299958705902, 0.00441939989104867, -0.4027999937534332, 0.22875000536441803, -0.4865100085735321, 0.2034199982881546, -0.13837000727653503, -0.027806999161839485, -0.409960001707077, 0.17770999670028687, -0.005822900217026472, -0.10168000310659409, 0.021131999790668488, 0.14128999412059784, 0.1146399974822998, -0.4013800024986267, 0.37797999382019043, -0.195810005068779, 0.13854999840259552, -0.30581000447273254, 0.20364999771118164, -0.3731299936771393, -0.718529999256134, -0.34696000814437866, -0.7994899749755859, -0.432669997215271, 0.1897200047969818, -0.5842800140380859, -0.08920899778604507, 0.2359900027513504, 0.22807000577449799, -0.12161000072956085, -0.07857199758291245, -0.29761001467704773, 0.35760000348091125, 0.48434001207351685, 0.057360000908374786, 0.061778999865055084, 0.6281300187110901, -0.10907000303268433, -0.6646299958229065, -0.1392199993133545, 0.13837000727653503, -0.11042000353336334, -0.17531000077724457, 0.1453000009059906, 0.0033203999046236277, -0.9372599720954895, 0.343860000371933, -0.2601799964904785, -0.11917000263929367, 0.2045300006866455, -0.1109199970960617, -0.35100001096725464, 0.2174299955368042, -0.0461760014295578, 0.09403499960899353, -0.14177000522613525, 0.3123199939727783, 0.11681000143289566, 
-0.05601099878549576, -0.32106998562812805, 0.11947999894618988, -0.02967200055718422, 0.009140499867498875, -0.024924999102950096, -0.19246000051498413, 0.0176170002669096, -0.28365999460220337, 0.016294000670313835, 0.1660500019788742, 0.2710700035095215, -0.4182800054550171, 0.12791000306606293, -0.17348000407218933, 0.04563299939036369, -0.21362000703811646, -0.09851100295782089, 0.8175600171089172, -0.16543999314308167, 0.1680299937725067, -0.08945100009441376, 0.17106999456882477, -0.02994599938392639, -0.8314999938011169, 0.2027900069952011, -0.14590999484062195, -0.24493999779224396, 0.5791599750518799, 0.026843000203371048, -0.6091499924659729, 0.21699999272823334, -0.21601000428199768, 0.09811999648809433, 0.18690000474452972, 0.2801100015640259, 0.11309000104665756, 0.0951479971408844, 0.16718000173568726, -0.17298999428749084, -0.7786300182342529, 0.49693000316619873, -0.08809299767017365, -0.12110000103712082, 0.8159000277519226, -0.3386799991130829, -0.15790000557899475, 0.3224000036716461, 0.10812000185251236, 0.4168500006198883, 0.3488599956035614, 0.20563000440597534, 0.10628999769687653, 0.10474999994039536, -0.464819997549057, -0.23170000314712524, 0.17055000364780426, -0.3699299991130829, 0.1707800030708313, 0.26872000098228455, 0.0008636100101284683, -0.019946999847888947, -0.1561499983072281, 0.05335500091314316, -0.07825800031423569, -0.13425999879837036, -0.03585999831557274, 0.4826900064945221, 0.6561099886894226, 0.37292999029159546, -0.12687000632286072, -0.20077000558376312, -0.12775999307632446, -0.11849000304937363, -0.07305499911308289, 0.6110900044441223, -0.09624800086021423, -0.1341399997472763, -0.13631999492645264, -0.18695999681949615, 0.37692001461982727, 0.1463399976491928, -0.1275700032711029, -0.33055999875068665, 0.09097100049257278, 0.3413600027561188, -0.19731999933719635, -0.10374999791383743, 0.6424099802970886, 0.3172900080680847, 0.007083899807184935, 0.6125800013542175, 0.3071799874305725, -0.06650400161743164, 0.36864998936653137, 0.4057700037956238, -0.34727999567985535, 0.37602999806404114, -0.1970600038766861, 0.3204599916934967, -0.13319000601768494, 0.3225100040435791, 0.3225100040435791, -0.04604800045490265, 0.28165000677108765, -0.06755000352859497, 0.16264000535011292, 0.009487899951636791, -0.4144099950790405, -0.30511000752449036, 0.5883899927139282, 0.32493001222610474, -0.890470027923584, 0.003149600001052022, 0.20548999309539795, -0.229420006275177, -0.3486799895763397, 0.02744700014591217, 0.34775999188423157, -0.29881998896598816, -0.3077099919319153, 0.39355000853538513, 0.2671299874782562, -0.2293200045824051, 0.04659400135278702, -0.150409996509552, 0.36368998885154724, 0.11947999894618988, -0.37470999360084534, -0.21202999353408813, 0.4194500148296356, 0.18002000451087952, 0.2841300070285797, -0.09569399803876877, -0.07278600335121155, 0.4898099899291992, -0.2620300054550171, -0.24184000492095947, 0.04769200086593628, 0.10931999981403351, 0.02446500025689602, -0.07646200060844421, 0.30094999074935913, 0.6703100204467773, -2.0557000637054443, 0.2428400069475174, 0.5646799802780151, 0.4503600001335144, -0.25780999660491943, -0.0274059996008873, -0.04879099875688553, 0.06238500028848648, -0.2771100103855133, 0.05746399983763695, -0.5352699756622314, 0.7819100022315979, -0.03819999843835831, -0.5181999802589417, -0.36520999670028687, -0.9028599858283997, -0.42851001024246216, 0.10705000162124634, 0.03800300136208534, 0.6803399920463562, -0.040102001279592514, -0.13613000512123108, 0.09867999702692032, 0.609000027179718], 
u'filled': [-0.2484399974346161, 0.1095300018787384, 0.14767999947071075, -0.29607999324798584, 0.16097000241279602, 0.3223699927330017, 0.6643000245094299, -0.026207000017166138, 0.22109000384807587, -0.6866899728775024, -0.21559999883174896, -0.03875499963760376, -0.36779001355171204, 0.07008799910545349, -0.28582000732421875, 0.023416999727487564, -0.35133999586105347, 0.24106000363826752, 0.42166000604629517, 0.46748000383377075, 0.0347599983215332, 0.04793199896812439, 0.04811900109052658, 0.3060300052165985, -0.31380999088287354, -0.01955600082874298, -0.0861629992723465, 0.06063299998641014, 0.27955999970436096, -0.11123999953269958, -0.11067000031471252, 0.038697000592947006, -0.43981999158859253, 0.06891799718141556, -0.4462699890136719, 0.36757999658584595, -0.32460999488830566, -0.3749200105667114, -0.07600300014019012, 0.25161001086235046, -0.4065200090408325, 0.0271029993891716, 0.17093999683856964, 0.25393998622894287, 0.04888699948787689, 0.08045099675655365, 0.29679998755455017, 0.05970599874854088, 0.164560005068779, 0.15233999490737915, 0.3853999972343445, 0.18209999799728394, -0.4308199882507324, 0.09931900352239609, -0.06942299753427505, 0.018081000074744225, 0.2519400119781494, -0.17725999653339386, 0.5076500177383423, 0.34228000044822693, -0.08672299981117249, -0.23496000468730927, 0.4071199893951416, -0.06883999705314636, 0.01160299964249134, -0.4270800054073334, 0.0003502700128592551, 0.11940000206232071, -0.15028999745845795, -0.1347000002861023, -0.16032999753952026, -0.26622000336647034, 0.14815999567508698, 0.1133200004696846, -0.12738999724388123, 0.02592400088906288, 0.7854999899864197, 0.17135000228881836, 0.1316400021314621, -0.3087500035762787, 0.32666999101638794, 0.22734999656677246, -0.19935999810695648, 0.25964999198913574, -0.041659001260995865, 0.4351400136947632, 0.3679499924182892, -0.27845999598503113, -0.007259699981659651, 0.11597999930381775, 0.4353100061416626, -0.23619000613689423, -0.2195899933576584, -0.40400999784469604, -0.08863099664449692, 0.4063799977302551, 0.1309099942445755, -0.05802199989557266, 0.4953399896621704, -0.7809699773788452, 0.27526000142097473, 0.43094998598098755, -0.09029699862003326, -0.9166600108146667, -0.15401999652385712, 0.043772000819444656, 0.15681999921798706, 0.023382000625133514, -0.14801999926567078, -0.14305000007152557, -0.46790000796318054, 0.18424999713897705, 0.2476000040769577, -0.16372999548912048, -0.2600899934768677, 0.4108799993991852, -0.050287000834941864, 0.5880600214004517, 0.29872000217437744, -0.5498800277709961, 0.18077999353408813, -0.32482999563217163, -0.22378000617027283, 0.7628200054168701, -0.3179199993610382, -0.031325001269578934, -0.008356899954378605, -0.24459999799728394, 0.01396199967712164, 0.2893899977207184, 0.33092001080513, 0.2844400107860565, 0.12246999889612198, 0.28839001059532166, 0.09957800060510635, -0.14256000518798828, -0.03643200173974037, 0.41978999972343445, -0.0903019979596138, -0.00687129981815815, 0.13476000726222992, -0.15355999767780304, -0.18106000125408173, 0.14902999997138977, -0.02625199966132641, 0.13882000744342804, 0.2849699854850769, 0.1531900018453598, -0.14079999923706055, 0.06332899630069733, 0.2328999936580658, 0.5783900022506714, -0.4207000136375427, -0.2068600058555603, 0.5579699873924255, -0.03434300050139427, -0.2724800109863281, 0.1341399997472763, 0.34575000405311584, 0.08697500079870224, 0.11089999973773956, -0.04084400087594986, 0.797569990158081, 0.00405769981443882, 0.2235099971294403, -0.13854999840259552, 0.18876999616622925, 
0.3432700037956238, -0.11083000153303146, -0.3651300072669983, 0.19343000650405884, 0.5355100035667419, -0.4408699870109558, -0.03242599964141846, -0.15723000466823578, -0.018827000632882118, -0.44005998969078064, 0.28119000792503357, -0.11168999969959259, -0.3910999894142151, 0.33917999267578125, 0.3506399989128113, 0.4812999963760376, -0.060798998922109604, -0.02805499918758869, -0.333050012588501, 0.4968299865722656, -0.060593001544475555, 0.25731000304222107, -0.031077999621629715, 0.4228900074958801, -0.28883999586105347, -0.2603699862957001, 0.05926299840211868, 0.16827000677585602, 0.022616000846028328, -0.6137099862098694, 0.6521099805831909, -0.36880001425743103, 0.6741700172424316, 0.23406000435352325, -0.19704000651836395, -0.013499000109732151, -0.021748000755906105, 0.06271400302648544, -0.31874001026153564, 0.09165599942207336, 0.09640000015497208, -0.022670000791549683, 0.023632999509572983, -0.058357998728752136, -0.1801300048828125, -0.06636500358581543, -0.0034990001004189253, -0.1962900012731552, 0.02959199994802475, 0.12853999435901642, -0.5999000072479248, 0.346670001745224, 0.11123000085353851, 0.7455899715423584, 0.29328998923301697, -0.07658799737691879, -0.08907300233840942, 0.008762500248849392, 0.20077000558376312, -0.09180200099945068, -0.09661699831485748, 0.2370299994945526, -0.6121000051498413, -0.15649999678134918, -0.2337699979543686, -0.04465499892830849, -0.36083999276161194, 0.36250001192092896, 0.7058299779891968, 0.24514999985694885, -0.1455100029706955, -0.9977999925613403, 0.13986000418663025, -0.11462000012397766, -0.013830999843776226, -0.060054998844861984, 0.4337399899959564, -0.528689980506897, -0.48726001381874084, 0.5799700021743774, 0.16056999564170837, 0.2845500111579895, -0.0032567998860031366, 0.527679979801178, -0.31856998801231384, 0.13884000480175018, -0.34165000915527344, 0.13892999291419983, -0.25694000720977783, 0.007579899858683348, -0.4309000074863434, 0.40801000595092773, 0.2732900083065033, -0.3561600148677826, -0.22548000514507294, 0.33230000734329224, -0.07362499833106995, 0.49254000186920166, -0.5676299929618835, -0.18644000589847565, -0.296779990196228, -0.12415000051259995, 0.08393500000238419, -0.26600998640060425, 0.07288599759340286, 0.22297999262809753, 0.0346749983727932, 0.056435998529195786, -0.09437499940395355, -1.6384999752044678, 0.5309299826622009, 0.16824999451637268, 0.07121200114488602, -0.5063400268554688, 0.13030000030994415, -0.07644400000572205, -0.07983800023794174, 0.23447999358177185, 0.33612000942230225, 0.0049760001711547375, 0.39340001344680786, -0.11890000104904175, -0.1842000037431717, 0.11462999880313873, -0.01002699974924326, -0.027356000617146492, 0.4434100091457367, -0.08795499801635742, -0.10666000097990036, 0.13200999796390533, -0.25001001358032227, -0.6524699926376343, -0.14404000341892242], u'pressed': [-0.08301199972629547, -0.026364000514149666, -0.4564799964427948, -0.46507999300956726, 0.32161998748779297, -0.4771699905395508, 0.19077999889850616, -0.2235099971294403, 0.3104900121688843, -0.9205399751663208, -0.33586999773979187, -0.15672999620437622, 0.26802000403404236, 0.3125300109386444, -0.14343999326229095, 0.31617000699043274, -0.2884500026702881, -0.14885999262332916, -0.1261100023984909, -0.11269000172615051, 0.22227999567985535, -0.15300999581813812, 0.2311200052499771, -0.09746900200843811, -0.5300400257110596, -0.040467001497745514, 0.4168199896812439, -0.16196000576019287, -0.12256000190973282, 0.054976001381874084, 0.14319999516010284, -0.2483299970626831, 
-0.2200700044631958, -0.23853999376296997, -0.734749972820282, 0.05989300087094307, -0.1806900054216385, 0.0070269000716507435, -0.18347999453544617, -0.11618000268936157, -0.1059499979019165, 0.007067999802529812, 0.148049995303154, 0.10597000271081924, 0.48333999514579773, 0.254830002784729, -0.1612900048494339, -0.49818000197410583, -0.0879879966378212, -0.05274700000882149, 0.3654400110244751, 0.14192000031471252, -0.009357799775898457, -0.06186800077557564, 0.328110009431839, 0.10361000150442123, -0.22101999819278717, -0.01663300022482872, 0.17844000458717346, 0.031980000436306, 0.25742000341415405, -0.29649999737739563, -0.2943600118160248, 0.22604000568389893, 0.13395999372005463, 0.1779100000858307, -0.1341100037097931, 0.03645699843764305, -0.02811400033533573, -0.4084399938583374, 0.09756100177764893, -0.08131500333547592, 0.19047999382019043, 0.4947099983692169, 0.6145899891853333, 0.29513999819755554, -0.2724800109863281, 0.44047001004219055, -0.2267100065946579, -0.35436999797821045, -0.11218000203371048, 0.042854998260736465, 0.03231799975037575, -0.08418499678373337, -0.10322999954223633, -0.060373999178409576, -0.29892000555992126, 0.33136001229286194, -0.2523399889469147, -0.2443999946117401, 0.020831000059843063, 0.2718999981880188, -0.5549600124359131, 0.3526900112628937, -0.212459996342659, 0.17942999303340912, -0.2576200067996979, 0.3217799961566925, 0.10110999643802643, 0.1078300029039383, -0.12678000330924988, -0.23297999799251556, -0.10328000038862228, 0.17229999601840973, 0.292820006608963, -0.039889998733997345, -0.03768699988722801, 0.09892600029706955, -0.2390899956226349, 0.025107000023126602, 0.47411999106407166, 0.6269999742507935, -0.18592000007629395, -0.09502600133419037, -0.6489499807357788, 0.021087000146508217, -0.23319999873638153, -0.14263999462127686, 0.2533699870109558, -0.5001400113105774, -0.32899001240730286, -0.053909000009298325, 0.40639999508857727, -0.11066000163555145, -0.21491999924182892, 0.324319988489151, -0.16189999878406525, -0.006978900171816349, 0.1255899965763092, 0.2006099969148636, -0.002268299926072359, 0.18967999517917633, -0.01614600047469139, 0.30612000823020935, -0.20125000178813934, 0.4208100140094757, 0.051297999918460846, 0.1077599972486496, 0.18893000483512878, -0.16500000655651093, 0.34275001287460327, 0.02758900076150894, -0.3714999854564667, -0.005630100145936012, -0.16234999895095825, 0.4625900089740753, -0.20169000327587128, -0.47885000705718994, 0.05803399905562401, -0.006945100147277117, -0.07366299629211426, -0.02951500006020069, 0.26096001267433167, 0.0195700004696846, -0.1564899981021881, -0.4374000132083893, -0.12421000003814697, -0.6821699738502502, 0.004951400216668844, 0.14047999680042267, 0.03586199879646301, -0.14778999984264374, 0.09483399987220764, 0.11751999706029892, 0.6356099843978882, -0.0009079599985852838, -0.2259799987077713, 0.18967999517917633, 0.20343999564647675, -0.186039999127388, -0.24506999552249908, 0.18421000242233276, -0.029366999864578247, -0.23965999484062195, 0.4632300138473511, 0.034800998866558075, -0.302590012550354, 0.4293000102043152, -0.17907999455928802, 0.38690000772476196, -0.1739799976348877, -0.21884000301361084, -0.17844000458717346, -0.30632999539375305, -0.22627000510692596, -0.14979000389575958, 0.02472900040447712, 0.195250004529953, 0.6827099919319153, 0.5151299834251404, 0.14439000189304352, -0.05569100007414818, 0.10270000249147415, 0.25321999192237854, 0.3206999897956848, 0.1316400021314621, 0.09596099704504013, 0.04500199854373932, -0.16463999450206757, 
0.05651199817657471, 0.19708000123500824, -0.22495000064373016, 0.32315000891685486, 0.4497700035572052, 0.14196999371051788, -0.04551900178194046, 0.2917500138282776, 0.16843000054359436, -0.21310000121593475, -0.015143999829888344, 0.2248300015926361, 0.20332999527454376, 0.008471200242638588, 0.6275299787521362, 0.3645299971103668, 0.0786449983716011, -0.22664999961853027, -0.2688699960708618, 0.07626300305128098, 0.1062999963760376, 0.9330899715423584, 0.20509999990463257, 0.18400999903678894, 0.15904000401496887, -0.07730100303888321, 0.2693899869918823, -0.03988400101661682, 0.044047001749277115, -0.45642998814582825, -0.22176000475883484, 0.12020000070333481, -0.2220499962568283, 0.3035399913787842, 0.3841699957847595, 0.17453999817371368, -0.1052900031208992, -0.11986999958753586, 0.39452001452445984, 0.06115400046110153, -0.3098599910736084, -0.33643001317977905, -0.006455599796026945, 0.03866500034928322, -0.1415500044822693, 0.18188999593257904, 0.09803599864244461, 0.19574999809265137, -0.04686899855732918, -0.30292001366615295, -0.19609999656677246, 0.14354999363422394, 0.06365600228309631, 0.1833599954843521, 0.17829999327659607, 0.7864300012588501, 0.036010999232530594, -0.5100499987602234, 0.5902100205421448, -0.5464199781417847, 0.25760000944137573, -0.2319599986076355, -0.2059199959039688, 0.11044000089168549, 0.5289099812507629, -0.14462999999523163, -0.11236999928951263, -0.3322399854660034, 0.0016716000391170382, -0.21953999996185303, 0.1378300040960312, -0.18366999924182892, 0.13449999690055847, -0.35780999064445496, 0.01054100040346384, -0.49702998995780945, 0.25565001368522644, -1.2630000114440918, -0.30660998821258545, 0.5122399926185608, -0.4102100133895874, 0.2401600033044815, -0.35102999210357666, 0.13021999597549438, 0.24955999851226807, -0.16117000579833984, -0.2188200056552887, 0.14636999368667603, 0.11361999809741974, 0.35109999775886536, 0.13561999797821045, -0.09620700031518936, 0.12892000377178192, 0.43083998560905457, 0.21164999902248383, -0.09333600103855133, 0.38839998841285706, -0.07981300354003906, -0.23662999272346497, -0.15262000262737274, 0.19527000188827515], u'ripped': [0.5440499782562256, -0.36103999614715576, -0.3600899875164032, 0.03627000004053116, 0.08781400322914124, 0.2030699998140335, -0.4982300102710724, 0.4430600106716156, -0.008371200412511826, -0.2395700067281723, 0.10093999654054642, 0.3501800000667572, 0.08918800204992294, -0.19523000717163086, -0.8097599744796753, 0.8827400207519531, -0.17817999422550201, 0.48747000098228455, 0.12759999930858612, 0.6452699899673462, 0.2492000013589859, 0.018554000183939934, -0.12894999980926514, -0.11537999659776688, -0.017256999388337135, 0.52920001745224, -0.16087999939918518, -0.001994800055399537, -0.03067299909889698, -0.46832001209259033, 0.1732800006866455, 0.41572999954223633, -0.33184999227523804, 0.32269999384880066, -0.3243499994277954, -0.10661999881267548, -0.38082998991012573, -0.2773900032043457, 0.19594000279903412, 0.49755001068115234, 0.16422000527381897, -0.12654000520706177, -0.2025199979543686, -0.4089300036430359, 0.14788000285625458, 0.3882899880409241, 0.36904001235961914, -0.38113000988960266, -0.09807199984788895, -0.28815001249313354, 0.12291000038385391, -0.1131099984049797, 0.2561599910259247, -0.5065799951553345, -0.14948999881744385, -0.03820899873971939, -0.2058199942111969, -0.20714999735355377, -0.3061099946498871, -0.4458099901676178, -0.34606000781059265, -0.35155999660491943, -0.16419999301433563, -0.34248998761177063, 0.15703000128269196, -0.29420000314712524, 
0.314410001039505, -0.3371700048446655, -0.3893199861049652, 0.11027000099420547, 0.5384100079536438, -0.1477700024843216, 0.0788009986281395, 0.3475799858570099, 0.38892000913619995, -0.2785399854183197, -0.17993000149726868, -0.48666998744010925, -0.3842400014400482, 0.2809799909591675, 0.3250899910926819, -0.6640599966049194, 0.05525299906730652, 0.18685999512672424, -0.15785999596118927, -0.047249000519514084, -0.30972999334335327, -0.23284000158309937, 0.46355000138282776, 0.38253000378608704, 0.47088000178337097, 0.1703599989414215, -0.0006557800224982202, -0.5617700219154358, 0.1399500072002411, 0.2748199999332428, 0.18333999812602997, -0.16120000183582306, 0.4258500039577484, -0.4076699912548065, 0.2002599984407425, 0.21522000432014465, 0.1333799958229065, -0.12093999981880188, 0.5710300207138062, 0.08769900351762772, 0.5751699805259705, 0.18974000215530396, -0.2871899902820587, -0.21692000329494476, -0.31689000129699707, 0.19121000170707703, 0.24105000495910645, -0.1161699965596199, -0.1736299991607666, 0.6675099730491638, 0.21696999669075012, 0.1632000058889389, 0.4400300085544586, -0.6578199863433838, 0.14890000224113464, -0.8824700117111206, 0.09760800004005432, 0.33785000443458557, -0.03671000152826309, -0.2391500025987625, -0.4025300145149231, -0.25418001413345337, 0.06729499995708466, -0.16718000173568726, 0.15059000253677368, 0.47266000509262085, -0.13446000218391418, -0.11176999658346176, 0.05962499976158142, 0.13463999330997467, -0.20702999830245972, 0.3492400050163269, 0.4587000012397766, 0.015317000448703766, -0.6258000135421753, 0.0541049987077713, 0.1844799965620041, 0.76528000831604, -0.532509982585907, 0.4132100045681, 0.5280600190162659, 0.5415300130844116, -0.027488000690937042, -0.11032000184059143, 0.16585999727249146, -0.1353600025177002, 0.08364800363779068, -0.5613700151443481, 0.1281999945640564, 0.04467400163412094, 0.13154999911785126, -0.007688600104302168, 0.5499699711799622, 0.3528200089931488, 0.25231000781059265, -0.12343999743461609, 0.10752999782562256, -0.1638299971818924, 0.1680299937725067, -0.1748100072145462, 0.1629199981689453, -0.13888999819755554, 0.13907000422477722, -0.8507000207901001, -0.42607998847961426, -0.24324999749660492, 0.22842000424861908, -0.31400999426841736, 0.18584999442100525, 0.328110009431839, -0.265859991312027, 0.9803799986839294, 0.21152999997138977, -0.7243899703025818, 0.4495300054550171, -0.6973199844360352, 0.18129000067710876, -0.47095999121665955, 0.26249000430107117, 0.2718299925327301, -0.014422999694943428, 0.6351400017738342, 0.05372000113129616, -0.08246000111103058, 0.2969299852848053, -0.3625899851322174, -0.16539999842643738, -0.043094001710414886, 0.6153299808502197, 0.22735999524593353, -0.522599995136261, 0.13771000504493713, -0.3493199944496155, 0.31286999583244324, 0.435699999332428, -0.2486100047826767, 0.04661000147461891, -0.30737999081611633, 0.08231999725103378, -0.013495000079274178, 0.29752999544143677, 0.12721000611782074, -0.015223000198602676, 0.6355400085449219, -0.5234100222587585, 0.0746690034866333, -0.14211000502109528, 0.08680299669504166, 0.4652000069618225, 0.1771100014448166, 0.3708899915218353, -0.5366700291633606, 0.0072546000592410564, 0.20242999494075775, 1.020900011062622, 0.48871999979019165, -0.25720998644828796, -0.6790900230407715, 0.3113499879837036, 0.0742729976773262, -0.5831999778747559, -0.8118199706077576, -0.011172999627888203, -0.23029999434947968, 0.4513300061225891, 0.02407499961555004, -0.1999099999666214, -0.04141300171613693, 0.10999000072479248, 
-0.07419099658727646, 0.6347699761390686, -0.027056999504566193, -0.6163700222969055, -0.358599990606308, 0.5167099833488464, -0.006665899883955717, -0.2762199938297272, -0.25644999742507935, -0.0530100017786026, -0.7048900127410889, 0.08827599883079529, 0.01003700029104948, 0.13616999983787537, 0.17653000354766846, 0.22573000192642212, 0.03118699975311756, -0.022621000185608864, -0.9043099880218506, 0.7182199954986572, -0.23246000707149506, 0.07546400278806686, -0.040706999599933624, -0.01578499935567379, -0.13474999368190765, -0.033472999930381775, 0.13707999885082245, -0.12108000367879868, 0.4257499873638153, 0.135110005736351, -0.32622000575065613, -0.08049099892377853, -0.1463800072669983, -0.301800012588501, -0.12411999702453613, -0.5653799772262573, -0.5177900195121765, -0.13829000294208527, -0.09048999845981598, -0.7254199981689453, -0.3946300148963928, -0.6691200137138367, 0.1648000031709671, -0.48458001017570496, 0.23420999944210052, -0.10407000035047531, 0.3562699854373932, 0.3971500098705292, 0.020493000745773315, -0.8027099967002869, 0.534529983997345, -0.06752800196409225, -0.06341399997472763, 0.4223499894142151, -0.38119998574256897, 0.19227999448776245, -0.08831100165843964, 0.016064999625086784, 0.4280500113964081, -0.010083000175654888, 0.6279699802398682, 0.16524000465869904, -0.48691999912261963, 0.208639994263649, 0.21006999909877777], u'full': [-0.31240999698638916, -0.01357599999755621, -0.040824998170137405, -0.16829000413417816, -0.1109900027513504, 0.42392998933792114, 0.09300100058317184, -0.30862998962402344, 0.1830900013446808, -1.582900047302246, -0.09753599762916565, 0.3878200054168701, -0.10337000340223312, 0.01447100006043911, -0.5398399829864502, 0.24076999723911285, 0.1871899962425232, 0.1465499997138977, 0.08146899938583374, 0.13689999282360077, 0.20059999823570251, -0.03168800100684166, 0.000683640013448894, 0.11490999907255173, 0.0279690008610487, 0.27588000893592834, -0.14343999326229095, -0.27636000514030457, 0.33840999007225037, 0.1030300036072731, -0.0684949979186058, -0.10502000153064728, -0.04906399920582771, 0.05467600002884865, -0.8385900259017944, 0.28602999448776245, 0.04468400031328201, -0.044537000358104706, -0.3036099970340729, 0.1204100027680397, -0.1996999979019165, -0.1348000019788742, -0.1440500020980835, -0.159620001912117, 0.43907999992370605, 0.044992998242378235, 0.22915999591350555, 0.2643899917602539, 0.02570899948477745, -0.47516998648643494, -0.0901700034737587, 0.21945999562740326, 0.04332999885082245, 0.10401000082492828, -0.2699599862098694, 0.38791000843048096, 0.23419000208377838, 0.10628999769687653, 0.2368900030851364, 0.024974999949336052, -0.005063999909907579, -0.2539899945259094, 0.1101899966597557, -0.3119499981403351, -0.48532000184059143, -0.3865300118923187, 0.452210009098053, -0.3343999981880188, 0.2543399930000305, -0.2659899890422821, 0.1907700002193451, 0.21907000243663788, 0.4102100133895874, -0.22860999405384064, 0.2011999934911728, 0.24492000043392181, -0.03442399948835373, 0.219310000538826, -0.18735000491142273, 0.04595699906349182, -0.002543400041759014, -0.04234699904918671, 0.014147999696433544, 0.15491999685764313, 0.2755100131034851, -0.0823260024189949, 0.21828000247478485, 0.1493300050497055, -0.042608000338077545, 0.2401600033044815, -0.13544000685214996, -0.4683299958705902, -0.18700000643730164, -0.10527999699115753, -0.3761500120162964, 0.2455900013446808, -0.523389995098114, -0.42430999875068665, 0.30215001106262207, -0.6250600218772888, -0.29385998845100403, -0.10735999792814255, 
-0.21020999550819397, -0.514549970626831, -0.16592000424861908, 0.007136300206184387, 0.38433998823165894, 0.47870001196861267, -0.3895399868488312, -0.40421000123023987, 0.46160998940467834, 0.007226500194519758, -0.14184999465942383, -0.11291000247001648, -0.05782200023531914, 0.0968250036239624, -0.16056999564170837, -0.15796999633312225, 0.3112500011920929, -0.17483000457286835, 0.020323999226093292, -0.1905200034379959, 0.2567000091075897, 0.5149099826812744, -0.12363000214099884, -0.17104999721050262, 0.11738000065088272, 0.479449987411499, 0.09005700051784515, 0.32739999890327454, 0.0025774999521672726, 0.2793999910354614, 0.0037136999890208244, 0.03731200098991394, -0.007642900105565786, 0.28780001401901245, -0.008460099808871746, -0.06688699871301651, 0.09051299840211868, 0.06502900272607803, -0.04130899906158447, 0.13239000737667084, 0.07184100151062012, -0.46129998564720154, -0.27469998598098755, -0.5365800261497498, -0.5389999747276306, -0.03149800002574921, 0.025436999276280403, 0.5450699925422668, 0.43340998888015747, -0.050873998552560806, -0.3394699990749359, -0.06284199655056, 0.7656099796295166, 0.0027151000685989857, -0.6161100268363953, -0.3273800015449524, 0.15288999676704407, -0.03862399980425835, 0.38222000002861023, 0.024375999346375465, 0.009404599666595459, 0.0508899986743927, -0.3354400098323822, -0.1436000019311905, -0.10565000027418137, 0.5697500109672546, 0.0703589990735054, 0.5422000288963318, -0.010553999803960323, 0.6090400218963623, -0.4766499996185303, 0.33191001415252686, 0.2362000048160553, -0.2535800039768219, -0.056327998638153076, 0.4451200067996979, 0.153889998793602, 0.19645999372005463, -0.02246199920773506, -0.17273999750614166, 0.41051000356674194, 0.44764000177383423, -0.04877899959683418, -0.09530799835920334, -0.15690000355243683, 0.16061000525951385, 0.12947000563144684, 0.07820100337266922, 0.09734199941158295, 0.0026581999845802784, -0.08608700335025787, -0.359360009431839, 0.17506000399589539, 0.13872000575065613, -0.1035899966955185, 0.7298099994659424, -0.08879200369119644, 0.3058899939060211, 0.7699900269508362, 0.043682001531124115, -0.16050000488758087, 0.0951479971408844, 0.3563399910926819, -0.23194999992847443, -0.16637000441551208, 0.2636300027370453, 0.02791599929332733, -0.5003299713134766, 0.13646000623703003, -0.588919997215271, -0.27599000930786133, 0.5594800114631653, 0.25560998916625977, 0.1388999968767166, -0.25909000635147095, -0.2356400042772293, 0.037101998925209045, -0.01642400026321411, 0.34178999066352844, 0.27623000741004944, -0.4994800090789795, 0.14419999718666077, -0.12506000697612762, -0.040686000138521194, -0.36535999178886414, 0.150409996509552, -0.07221399992704391, -0.5252799987792969, -0.26969000697135925, -0.1503400057554245, 0.1808300018310547, -0.4564000070095062, 0.24653999507427216, 0.2778800129890442, 0.20191000401973724, -0.3079099953174591, -0.7050399780273438, 0.14527000486850739, 0.34356001019477844, 0.2084600031375885, 0.3361699879169464, 0.15026000142097473, -0.6608200073242188, -0.08402200043201447, 0.4792900085449219, 0.05412000045180321, -0.289359986782074, 0.14408999681472778, -0.12424000352621078, 0.2904199957847595, 0.004477200098335743, -0.20315000414848328, -0.1829500049352646, -0.1734600067138672, -0.09787800163030624, 0.06348899751901627, 0.3792800009250641, 0.2881599962711334, -0.060440000146627426, 0.18793000280857086, 0.43413999676704407, 0.7384300231933594, -0.1835400015115738, -0.08534800261259079, -0.06778600066900253, -0.3532699942588806, 0.019951999187469482, 
0.3219299912452698, -0.2989700138568878, -0.025259999558329582, -0.03766600042581558, 0.19612999260425568, 0.32993999123573303, -0.41668999195098877, -2.2235000133514404, 0.0702119991183281, 0.47881999611854553, -0.2445800006389618, 0.0794060006737709, -0.07999599725008011, 0.004836599808186293, -0.18174000084400177, 0.3596999943256378, -0.00779950013384223, 0.02508999966084957, 0.2650600075721741, 0.009178600274026394, -0.1864199936389923, -0.1666399985551834, -0.21352000534534454, 0.12626999616622925, -0.3060699999332428, -0.005703900009393692, 0.32510998845100403, 0.20603999495506287, 0.0898979976773262, 0.2839199900627136, -0.5108799934387207], u'squished': [-0.34244999289512634, 0.48385000228881836, -0.14688000082969666, 0.23637999594211578, -0.23723000288009644, 0.21841999888420105, -0.014457999728620052, -0.46184998750686646, -0.22721000015735626, 0.6403099894523621, -0.38694998621940613, -0.643809974193573, -0.07682099938392639, -0.19905999302864075, -0.6960999965667725, 0.09310399740934372, -0.03923099860548973, 0.704200029373169, 0.06169600039720535, 0.05894999951124191, 0.509909987449646, 0.11844000220298767, -0.03735800087451935, 0.23145000636577606, -0.33608999848365784, 0.16158999502658844, -0.053063999861478806, -0.282039999961853, 0.21770000457763672, 0.07814399898052216, -0.0063137998804450035, -0.5088000297546387, -0.0510220006108284, -0.16288000345230103, 0.6398900151252747, 0.217289999127388, -0.5277400016784668, -0.2558499872684479, -0.483489990234375, 0.05889099836349487, -0.27265000343322754, 0.33256998658180237, 0.3632200062274933, -0.1720999926328659, 0.022948000580072403, 0.4301699995994568, 0.11866000294685364, 0.28325000405311584, -0.01358800008893013, -0.3371700048446655, -0.20789000391960144, -0.11004000157117844, -0.1553799957036972, -0.035471998155117035, -0.354420006275177, 0.025844000279903412, 0.29750001430511475, -0.6186800003051758, -0.24355000257492065, 0.06798399984836578, 0.5981500148773193, -0.03574899956583977, -0.759630024433136, 0.399370014667511, -0.290120005607605, 0.08360099792480469, -0.09764699637889862, 0.263619989156723, 0.049851998686790466, 0.40713998675346375, -0.16395999491214752, 0.671500027179718, -0.17910000681877136, 0.10349000245332718, 0.3745099902153015, -0.19134999811649323, -0.08291900157928467, -0.13884000480175018, 0.026420999318361282, -0.10965999960899353, -0.36476001143455505, -0.2777099907398224, 0.1995300054550171, -0.24556000530719757, -0.5867800116539001, 0.005154000129550695, -0.09120699763298035, -0.7579399943351746, 0.1970299929380417, 0.16481000185012817, -0.22741000354290009, 0.22059999406337738, 0.711870014667511, 0.32934001088142395, -0.13287000358104706, -0.22231000661849976, 0.042263999581336975, 0.06009100005030632, -0.03139499947428703, 0.2771500051021576, 0.0778530016541481, -0.3914099931716919, 0.1411599963903427, -0.5103600025177002, -0.012039000168442726, 0.08843400329351425, 0.08111199736595154, -0.3050599992275238, 0.013586999848484993, -0.32620999217033386, 0.07796099781990051, 0.04200400039553642, -0.09624099731445312, -0.18355000019073486, -0.462799996137619, -0.4845699965953827, 0.023314999416470528, 0.08026900142431259, 0.29989999532699585, 0.39476001262664795, 0.4582900106906891, -0.8633900284767151, -0.12127000093460083, 0.21198999881744385, 0.30842000246047974, -0.34358999133110046, 0.2847500145435333, -0.39914000034332275, 0.547569990158081, -0.021175000816583633, 0.05323899909853935, 0.04237800091505051, -0.12172999978065491, -0.0077124000526964664, -0.22675999999046326, -0.3871600031852722, 
-0.15906000137329102, 0.15004000067710876, -0.20473000407218933, -0.2176399976015091, 0.08934599906206131, 0.47102999687194824, -0.44411998987197876, 0.05390100181102753, -0.022487999871373177, -0.09368699789047241, 0.003211099887266755, 0.2908799946308136, 0.44635000824928284, -0.5770000219345093, -0.4319800138473511, -0.4402799904346466, -0.0784280002117157, 0.17170999944210052, -0.9954800009727478, -0.26888999342918396, 0.5006300210952759, -0.6160500049591064, 0.4394400119781494, 0.3479500114917755, -0.5042300224304199, 0.34060999751091003, -0.24214999377727509, 0.24511000514030457, 0.8881000280380249, -0.4802899956703186, -0.08929599821567535, 0.29221999645233154, 0.10434000194072723, -0.24598999321460724, -0.09065599739551544, 0.08881600201129913, 0.2760300040245056, -0.3438200056552887, -0.6004199981689453, -0.3566499948501587, -0.08564399927854538, 0.6499699950218201, 0.34463998675346375, -0.7547199726104736, 0.4548799991607666, -0.5493199825286865, -0.04005400091409683, 0.661870002746582, 0.18252000212669373, -0.5677599906921387, 0.3682500123977661, 0.32585999369621277, 0.36864998936653137, 0.2283100038766861, 0.19346000254154205, -0.7522600293159485, -0.09154599905014038, -0.01496300008147955, 0.20173999667167664, -0.08432900160551071, -0.24085000157356262, -0.11653999984264374, -0.020409999415278435, -0.6511600017547607, -0.269569993019104, -0.18039999902248383, 0.48423999547958374, -0.09281200170516968, 0.08705999702215195, 0.06174499914050102, -0.2961699962615967, -0.28878000378608704, -0.15035000443458557, 0.15724000334739685, -0.24771000444889069, 0.9333599805831909, 0.16120000183582306, 0.17247000336647034, 0.26137998700141907, 0.38308000564575195, 0.30153000354766846, 0.33333998918533325, 0.031870000064373016, 0.03186199814081192, 0.20862999558448792, 0.45715001225471497, -0.18088999390602112, 0.2708199918270111, -0.0676020011305809, 0.03833699971437454, 0.503250002861023, 0.37588000297546387, 0.3815099895000458, 0.1021599993109703, -0.11122000217437744, 0.07957199960947037, 0.00773209985345602, -0.6422299742698669, 0.13378000259399414, -0.22053000330924988, -0.411080002784729, 0.1404699981212616, -0.12544000148773193, -0.08150099962949753, 0.34244000911712646, -0.2908099889755249, -0.1899300068616867, 0.29131999611854553, -0.5397999882698059, -0.38666999340057373, 0.06435299664735794, 0.35975998640060425, -0.1261100023984909, 0.15163999795913696, -0.3747299909591675, -0.6726599931716919, 0.12690000236034393, -0.1462399959564209, -0.47336000204086304, 0.2090499997138977, 0.3590500056743622, 0.24332000315189362, -0.15952999889850616, 0.09401299804449081, 0.3742400109767914, 0.10890000313520432, 0.6215999722480774, 0.013674000278115273, 0.1938299983739853, 0.10516999661922455, -0.1338600069284439, 0.05406099930405617, 0.009904500097036362, 0.45938000082969666, 0.3248400092124939, 0.01617399975657463, -0.757830023765564, 0.2930600047111511, -0.5577399730682373, -0.01603199914097786, 0.9431599974632263, -1.09089994430542, -0.1862500011920929, -0.20667999982833862, -0.24049000442028046, -0.14323000609874725, 0.0739080011844635, -0.21020999550819397, -0.09980600327253342, 0.13947999477386475, 0.7135800123214722, 0.2586899995803833, -0.16625000536441803, -0.26903998851776123, 0.05157199874520302, -0.24277999997138977, 0.43713998794555664, -0.08553300052881241, -0.3085300028324127, -0.2724500000476837, 0.510919988155365, 0.449319988489151, -0.16264000535011292, 0.12807999551296234], u'peeled': [0.07986100018024445, -0.6189600229263306, -0.06793499737977982, -0.27730000019073486, 
0.3383600115776062, -0.9883300065994263, 0.04370399937033653, -0.17552000284194946, -0.9785100221633911, 0.6529200077056885, 0.6718000173568726, 0.6746900081634521, 1.183500051498413, -0.12660999596118927, -0.7865200042724609, 0.9339399933815002, 0.05103300139307976, -0.014694999903440475, -0.5760999917984009, 0.3489300012588501, 0.15320000052452087, 0.28130999207496643, 0.5695899724960327, -0.3210799992084503, -0.19839000701904297, -0.46459001302719116, -0.19171999394893646, 0.5552700161933899, -0.809660017490387, -0.4836199879646301, -0.503279983997345, 0.4499100148677826, -0.034699998795986176, -0.01688399910926819, 0.04799399897456169, 0.7301499843597412, -0.4578799903392792, 0.523580014705658, -0.19292999804019928, 0.158720001578331, 0.7450500130653381, -0.15949000418186188, 0.06856899708509445, -0.12387000024318695, 0.1919800043106079, 0.5758299827575684, -0.031530000269412994, 0.7150599956512451, -0.6212700009346008, 0.03238200023770332, 0.19820000231266022, -0.13357999920845032, 0.4251999855041504, 0.14778999984264374, -0.347790002822876, -0.573639988899231, -0.24079999327659607, 0.21696999669075012, 0.15175999701023102, 0.00515210023149848, 0.12168999761343002, 0.24040000140666962, -0.7770000100135803, 0.9240700006484985, -0.03896800056099892, -0.4475499987602234, -0.2204499989748001, -0.24605999886989594, -0.04351000115275383, -0.7107700109481812, 0.10241000354290009, -0.23294000327587128, 0.007580699864774942, 0.3975200057029724, -0.15824000537395477, 0.5801799893379211, 0.848110020160675, -0.36566001176834106, 0.3355500102043152, -0.563480019569397, -0.5797299742698669, -0.2504900097846985, -0.04702100157737732, -0.12054000049829483, -0.6999300122261047, 0.08151400089263916, -0.9364100098609924, 0.45691001415252686, 0.7303699851036072, 0.4103100001811981, 0.42719998955726624, 0.07830200344324112, -0.01862799935042858, -0.2384900003671646, -0.18749000132083893, -0.5656300187110901, -0.6791399717330933, 0.6180199980735779, 0.4643299877643585, 0.6359400153160095, -0.11087000370025635, -0.4840399920940399, 0.8615999817848206, -0.8451099991798401, -0.44554001092910767, 0.5615000128746033, 0.050032999366521835, 0.22306999564170837, -0.4996899962425232, 0.03658699989318848, 0.6922399997711182, 0.31345000863075256, 0.6802600026130676, -0.42541998624801636, -1.023300051689148, -0.03223999962210655, -0.1414099931716919, 0.5714499950408936, 0.3755300045013428, -0.5226500034332275, 0.1607999950647354, -0.6214600205421448, 0.06825999915599823, 0.767520010471344, -0.3449699878692627, 0.12460999935865402, -0.6374800205230713, 0.22257000207901, -0.14519000053405762, 0.34297001361846924, 0.09367100149393082, 1.05649995803833, -0.31692999601364136, -0.3278000056743622, 0.10683999955654144, 0.35885000228881836, -0.16323000192642212, 0.06856100261211395, -0.14869000017642975, -0.5184900164604187, 0.490229994058609, 0.5338699817657471, -0.12221000343561172, 0.02507299929857254, -0.004557500127702951, -0.2692500054836273, 0.34125998616218567, 0.2020300030708313, 0.5038099884986877, -0.0724639967083931, -0.7108399868011475, 0.30935001373291016, 0.2062000036239624, 0.40893998742103577, -0.22179000079631805, -0.2395700067281723, 0.12953999638557434, 0.18907000124454498, -0.059537000954151154, 0.3697899878025055, -0.15362000465393066, -0.023099999874830246, -0.12687000632286072, -0.06819000095129013, 1.1311999559402466, -0.3591800034046173, 0.12707999348640442, 0.35962000489234924, 0.18129999935626984, -0.7623100280761719, -0.5226399898529053, -0.09510800242424011, 0.24128000438213348, 
0.006469400133937597, 0.3145799934864044, -0.4652099907398224, 0.2817299962043762, 0.5385900139808655, 0.8054800033569336, -0.3245899975299835, 0.13922999799251556, -0.36191999912261963, 0.11750999838113785, -0.7340099811553955, -0.47343000769615173, -0.55690997838974, 1.2414000034332275, 0.7110300064086914, 1.5413999557495117, 0.7033299803733826, 0.2541100084781647, 1.0684000253677368, -0.8092899918556213, 0.2814500033855438, 0.5725799798965454, 0.4391399919986725, -0.29585000872612, -0.11963000148534775, 0.46560001373291016, -0.493910014629364, -0.05106600001454353, 0.005758299957960844, 0.006535099819302559, 0.29385000467300415, 0.3130899965763092, 0.8406299948692322, 0.21334999799728394, 0.28488001227378845, 0.10699000209569931, 0.0212980005890131, 0.3415899872779846, 0.08353199809789658, -0.9171199798583984, 0.4553399980068207, 0.8494600057601929, -0.21392999589443207, 1.0634000301361084, -0.5402799844741821, 0.3440600037574768, -0.37560999393463135, 0.9243599772453308, 0.4482699930667877, -0.18142999708652496, -0.026984000578522682, -0.20329999923706055, -0.06447300314903259, -0.25850000977516174, -0.49160999059677124, -0.3806299865245819, -0.5054500102996826, 0.2773999869823456, 0.34619998931884766, -0.04339899867773056, -0.2902100086212158, -0.047547001391649246, 0.0002013199991779402, 0.5176200270652771, -0.45434001088142395, -0.5537300109863281, -0.27625998854637146, 0.22277000546455383, 0.06561200320720673, -0.3492400050163269, -0.17533999681472778, -0.5393099784851074, -0.3159399926662445, 0.45798999071121216, 0.08337199687957764, -0.2071000039577484, -0.9250400066375732, 0.4735200107097626, 0.9170500040054321, -1.0946999788284302, -0.8060600161552429, -0.1659500002861023, 0.13268999755382538, -0.3937700092792511, -0.4380899965763092, -0.5870699882507324, 0.974839985370636, 0.08905100077390671, 0.08563099801540375, -0.131400004029274, 0.996999979019165, 0.33566001057624817, -0.37797999382019043, 0.026034999638795853, -0.483379989862442, -0.28624001145362854, 0.5432599782943726, 0.07804500311613083, 0.555679976940155, -0.3675999939441681, 0.12065999954938889, -0.8104100227355957, 0.2874700129032135, 0.010978000238537788, -0.5900300145149231, -1.0885000228881836, -0.9687399864196777, 0.17170000076293945, -0.022127000615000725, 0.3526400029659271, 0.18577000498771667, -0.11985000222921371, -0.006141200195997953, 0.41898998618125916, 0.2706100046634674, 0.37828001379966736, 0.6998199820518494, -0.25172001123428345, 0.5098199844360352, -0.03372199833393097, -0.5414999723434448, -0.273140013217926, -0.2507700026035309, -0.2678399980068207, 0.025831999257206917, 0.6459599733352661, 0.6179800033569336], u'broken': [-0.41819998621940613, -0.06783399730920792, -0.631659984588623, -0.16255000233650208, -0.28411999344825745, -0.053355999290943146, -0.1617799997329712, -0.04227700084447861, 0.13287000358104706, -1.0652999877929688, -0.05471799895167351, 0.24568000435829163, -0.511680006980896, -0.014480000361800194, -0.05324900150299072, -0.24835999310016632, -0.010173000395298004, 0.7535300254821777, -0.2945699989795685, 0.06570599973201752, -0.03291599825024605, 0.2508400082588196, 0.18074999749660492, -0.0951320007443428, -0.17363999783992767, -0.19554999470710754, -0.3045400083065033, -0.47075000405311584, -0.03793400153517723, -0.017109999433159828, -0.04517500102519989, 0.5387399792671204, 0.1315000057220459, 0.37117999792099, -0.6607300043106079, -0.1732800006866455, -0.22776000201702118, 0.0890749990940094, 0.04219700023531914, 0.702530026435852, -0.3374899923801422, 
-0.47440001368522644, -0.28883999586105347, -0.3052299916744232, -0.3530299961566925, 0.3412399888038635, -0.228860005736351, 0.1288599967956543, -0.05370299890637398, 0.36864998936653137, 0.26291999220848083, 0.05880900099873543, 0.04358699917793274, 0.10262999683618546, 0.033661000430583954, -0.006239099893718958, -0.23201000690460205, -0.036688998341560364, -0.4483200013637543, 0.22428999841213226, 0.4679200053215027, 0.3217799961566925, -0.27869999408721924, 0.429390013217926, -0.2568199932575226, -0.48162001371383667, 0.2711600065231323, 0.24219000339508057, 0.5400599837303162, 0.15972000360488892, -0.006765199825167656, -0.8050400018692017, 0.10610999912023544, 0.5536199808120728, -0.10984999686479568, -0.21261000633239746, -0.016493000090122223, -0.39006999135017395, -0.2630400061607361, 0.5021399855613708, 0.14170999825000763, -0.031091999262571335, 0.5118499994277954, 0.041241999715566635, -0.12090999633073807, 0.003783399937674403, 0.25415998697280884, -0.15842999517917633, -0.18711000680923462, 0.29833000898361206, 0.43334001302719116, 0.32427000999450684, 0.31494998931884766, 0.21717000007629395, 0.3128499984741211, 0.005180600099265575, 0.3219600021839142, -0.047495998442173004, 0.5054100155830383, -0.34463998675346375, 0.39858999848365784, 0.15953999757766724, 0.018559999763965607, -0.1139800027012825, 0.17354999482631683, 0.3496299982070923, 0.36107999086380005, -0.24431000649929047, -0.4563100039958954, -0.3065600097179413, -0.4139299988746643, -0.5663400292396545, -0.19137999415397644, -0.5317699909210205, -0.3508099913597107, -0.08984100073575974, -0.6286600232124329, -0.12883000075817108, 0.10350000113248825, -0.3606700003147125, 0.12213999778032303, -0.532069981098175, -0.2864600121974945, 0.06527099758386612, 0.0039968001656234264, -0.1973399966955185, -0.4347899854183197, -0.2190299928188324, -0.032777998596429825, 0.13514000177383423, 0.2204899936914444, 0.8335199952125549, 0.3677699863910675, 0.4717499911785126, 0.4304800033569336, 0.16392000019550323, 0.631659984588623, 0.18604999780654907, 0.26978999376296997, -0.57805997133255, -0.05358999967575073, 0.5554900169372559, -0.16339999437332153, 0.5577099919319153, -0.2688399851322174, 0.11905000358819962, 0.050182998180389404, 0.5628600120544434, -0.033562999218702316, 0.012396999634802341, -0.03870199993252754, -0.19357000291347504, -0.43132999539375305, -0.15181000530719757, 0.32058000564575195, -0.3444400131702423, 0.24674999713897705, -0.31505000591278076, -0.3129200041294098, 0.5070199966430664, 0.2718600034713745, -0.0964680016040802, -0.19438999891281128, 0.009580000303685665, 0.6439499855041504, 0.6980900168418884, 0.2007399946451187, -0.11766999959945679, -0.02739799953997135, 0.2330400049686432, -0.4804899990558624, 0.27202001214027405, 0.03877300024032593, 0.016731999814510345, 0.0036406998988240957, 0.04779699817299843, 0.3900800049304962, 0.32093000411987305, -0.04110400006175041, -0.24730999767780304, 0.14868000149726868, 0.032391998916864395, 0.2757500112056732, -0.38694000244140625, 0.12963999807834625, -0.7285100221633911, 0.007037099916487932, 0.2143400013446808, 0.20728999376296997, -0.06538400053977966, -0.0058852001093328, -0.36699000000953674, 0.21342000365257263, 0.17378999292850494, -0.10187999904155731, 0.19874000549316406, -0.46143001317977905, -0.10666999965906143, -0.23477999866008759, 0.19976000487804413, 1.0029000043869019, 0.6875, 0.41293999552726746, -0.13660000264644623, -0.24216000735759735, -0.29976001381874084, 0.03275100141763687, -0.177279993891716, -0.4720799922943115, 
0.16805000603199005, 0.11392000317573547, -0.04361899942159653, -0.11614999920129776, 0.38102999329566956, 0.41391000151634216, 0.2042199969291687, 0.6006100177764893, 0.11085999757051468, 0.3156599998474121, 0.5296400189399719, 0.4248400032520294, 0.024940000846982002, 0.7414299845695496, -0.44435998797416687, 0.15439000725746155, 0.23920999467372894, -0.016441000625491142, -0.3375000059604645, 0.020052000880241394, 0.3843899965286255, 0.24455000460147858, -0.6682400107383728, 0.026644999161362648, -0.5950700044631958, -0.2787899971008301, 0.2924500107765198, -0.03255299851298332, -0.042381998151540756, -0.14680999517440796, -0.5315499901771545, -0.0047173998318612576, -0.7576000094413757, 0.14994999766349792, -0.49000999331474304, -0.18219999969005585, 0.07380300015211105, -0.12613999843597412, 0.24855999648571014, 0.12932999432086945, 0.08466199785470963, 0.4924899935722351, 0.168830007314682, -0.25325000286102295, -0.7411199808120728, 0.6215999722480774, 0.06285399943590164, -0.402319997549057, -0.18182000517845154, 0.22877000272274017, 0.22267000377178192, -0.08665399998426437, -0.1388300061225891, -0.3344399929046631, 0.16313999891281128, 0.03843099996447563, 0.08140899986028671, -0.1717900037765503, -0.22366000711917877, -0.42469000816345215, 0.3900899887084961, -0.6144199967384338, -0.34595000743865967, 0.05090299993753433, -0.12791000306606293, -0.3379499912261963, -0.04326200112700462, -1.6916999816894531, 0.10010000318288803, 0.21297000348567963, -0.2416200041770935, 0.06850399821996689, -0.263700008392334, 0.3837200105190277, -0.022546999156475067, -0.0294599998742342, 0.3117600083351135, -0.432559996843338, -0.0368879996240139, 0.3689199984073639, -0.2888599932193756, -0.16200999915599823, 0.09984000027179718, -0.19222000241279602, 0.2836900055408478, -0.3386699855327606, 0.16836999356746674, 0.0057657998986542225, -0.6230199933052063, 0.2714099884033203, 0.3090499937534332], u'mashed': [-0.15400999784469604, 0.1858700066804886, 0.8317499756813049, 0.8797500133514404, -0.18252000212669373, -0.004838299937546253, -0.1263899952173233, -0.19539999961853027, -0.26256000995635986, 0.4867900013923645, 0.17599999904632568, 0.03615500032901764, -0.04193300008773804, 0.5983099937438965, -0.12711000442504883, 0.33145999908447266, -0.5501599907875061, 0.022221000865101814, -0.14811000227928162, 0.3856799900531769, -0.12853999435901642, 0.4383699893951416, -0.356550008058548, -0.054492998868227005, -0.6507200002670288, -0.10346999764442444, 0.6888200044631958, 0.1670600026845932, 0.11886999756097794, -0.2672699987888336, -0.8540099859237671, 0.2626599967479706, -0.2703799903392792, 0.025499999523162842, 0.07083100080490112, 0.8816900253295898, -0.42177000641822815, 0.4796999990940094, 0.08557499945163727, -0.02819100022315979, 0.07366199791431427, -0.11242000013589859, 0.3543199896812439, -0.5605599880218506, 0.5188000202178955, 0.6758700013160706, 0.5380300283432007, 0.12946000695228577, 0.14101000130176544, 0.06372399628162384, -0.0980909988284111, 0.17050999402999878, 0.35113999247550964, 0.053711000829935074, -0.6121000051498413, -0.1087300032377243, 0.024129999801516533, -0.22200000286102295, -0.21845999360084534, 0.25332000851631165, 0.11585000157356262, -0.035624999552965164, -0.3835799992084503, 0.2768999934196472, -0.8335899710655212, -0.26249998807907104, 0.2990899980068207, 0.2379399985074997, 0.08913999795913696, -0.03939399868249893, -0.16234999895095825, -0.020812999457120895, -0.48256000876426697, 0.011943000368773937, -0.20979000627994537, -0.01670899987220764, 
0.8386600017547607, -0.020323999226093292, 0.15004000067710876, -0.1357100009918213, 0.0357850007712841, 0.33917999267578125, 0.13499000668525696, -0.39524999260902405, 0.23669999837875366, -0.18558000028133392, -0.28644001483917236, 0.4899199903011322, 0.19530999660491943, -0.36750999093055725, 0.1374099999666214, -0.013019000180065632, 0.13740000128746033, 0.0010065999813377857, -0.5142599940299988, -0.1600400060415268, -0.4018700122833252, 0.4009999930858612, 0.02744399942457676, 0.8424800038337708, -0.20601999759674072, -0.4499000012874603, 0.46553999185562134, -0.4960800111293793, -0.6521099805831909, 0.3998199999332428, -0.2007800042629242, 0.04098200052976608, -0.6916300058364868, -0.6477100253105164, 0.5373799800872803, 0.684719979763031, -0.15692000091075897, -0.25084999203681946, -0.5601999759674072, 0.055883001536130905, -0.524649977684021, 0.5112599730491638, 0.41947999596595764, 0.030750000849366188, -0.0006708800210617483, -0.06712199747562408, 0.08765199780464172, 0.2499299943447113, -0.6653500199317932, -0.43626999855041504, 0.20527000725269318, 0.8896899819374084, 0.03698499873280525, 0.8212599754333496, -0.21897000074386597, 1.1263999938964844, -0.10882999747991562, 0.3021099865436554, -0.24594999849796295, -0.43119001388549805, 0.6019899845123291, 0.26736000180244446, -0.3487200140953064, 0.27333998680114746, 0.16309000551700592, 0.12385000288486481, -0.00964209996163845, -0.0857739970088005, 0.4557499885559082, 0.05596400052309036, -0.4465700089931488, -0.09934800118207932, 0.30180999636650085, -0.6714500188827515, -0.9287999868392944, 0.5757499933242798, 0.6327599883079529, 0.5654600262641907, -0.7533599734306335, 0.16869999468326569, -0.06900700181722641, -0.03520999848842621, 0.4199399948120117, 0.3631899952888489, -0.3852899968624115, 0.0961650013923645, -0.012186000123620033, -0.27577999234199524, 0.49428999423980713, -0.050269998610019684, 0.2605000138282776, 0.3237200081348419, 0.012884999625384808, -0.8923699855804443, 0.1385599970817566, 0.1795399934053421, -0.16357000172138214, -0.28633999824523926, -0.24671000242233276, -0.17257000505924225, -0.12567000091075897, 0.18752999603748322, 0.2189200073480606, -0.7139999866485596, 0.6374599933624268, 0.10352999716997147, -0.021119000390172005, -0.455049991607666, -0.22382000088691711, -0.2442599982023239, 0.4530700147151947, 0.7095100283622742, -0.09087900072336197, 0.2073500007390976, -0.149399995803833, 0.7115700244903564, -0.15939000248908997, -0.2009200006723404, -0.06352700293064117, -0.011229000054299831, -0.44235000014305115, 0.20777000486850739, 0.07980500161647797, 0.09408699721097946, -0.20590999722480774, -0.059661999344825745, 0.4815399944782257, 0.490200012922287, 0.7463200092315674, 0.20789000391960144, 0.3989900052547455, 0.315310001373291, -0.3401300013065338, -0.45802000164985657, 0.04104100167751312, -0.10535000264644623, 0.09223400056362152, 0.44593000411987305, 0.23664000630378723, 0.056435998529195786, 0.4396600127220154, -0.8150799870491028, 0.15804000198841095, 0.2869200110435486, 0.20276999473571777, -0.24445000290870667, -0.24985000491142273, -0.5147799849510193, -0.23019999265670776, -0.7464399933815002, 0.267410010099411, 0.05429299920797348, -0.10430999845266342, -0.17709000408649445, -0.5163599848747253, 0.26620998978614807, -0.2379000037908554, -0.20884999632835388, 0.2797999978065491, -0.055031001567840576, 0.5691800117492676, 0.2758199870586395, 0.007667299825698137, -0.41725000739097595, -0.8449500203132629, -0.30803999304771423, 0.1779700070619583, -0.13530999422073364, 
-0.3670499920845032, -0.6147199869155884, 0.5738000273704529, 0.6443300247192383, -0.12387000024318695, -0.9534500241279602, 0.17009000480175018, 0.1425900012254715, -0.24991999566555023, 0.14069999754428864, -0.5730400085449219, 0.037289999425411224, -0.4187299907207489, -0.16572000086307526, -0.38558998703956604, 0.1758500039577484, -0.32010000944137573, -0.0359640009701252, 0.4165000021457672, 0.030004000291228294, 0.29280000925064087, 0.2091899961233139, -0.23260000348091125, -0.3028300106525421, 0.2078000009059906, 0.3851099908351898, -0.7724599838256836, -0.19930000603199005, 0.08893200010061264, 0.8156700134277344, 0.033810000866651535, 0.34525999426841736, 0.09325099736452103, 0.10220000147819519, -0.598770022392273, -0.809440016746521, 0.002129900036379695, -0.1665399968624115, 0.532039999961853, -0.01515199989080429, -0.09445899724960327, 0.09635800123214722, 0.7471299767494202, -0.1128700003027916, 0.6272199749946594, 0.13948999345302582, -0.10740000009536743, 0.011269999668002129, 0.040307000279426575, -0.6204900145530701, -0.5863699913024902, -0.8980100154876709, 0.18190999329090118, 0.07096599787473679, -0.17045000195503235, 0.3719399869441986], u'pureed': [0.2803800106048584, -0.12020999938249588, 0.8449599742889404, -0.1314300000667572, 0.24044999480247498, -0.0838489979505539, 0.2709299921989441, 0.09199900180101395, -0.46467000246047974, 0.15522000193595886, 0.41940000653266907, -0.48392999172210693, 0.76419997215271, 0.3816100060939789, -0.40817001461982727, -0.14961999654769897, -0.5145300030708313, 0.22431999444961548, -0.020061999559402466, 0.774150013923645, -0.6811599731445312, -0.6165199875831604, -0.14203999936580658, -0.5895199775695801, 0.03767300024628639, -0.4836899936199188, -0.047231998294591904, 0.10606999695301056, -0.12208999693393707, -0.16843000054359436, -1.1094000339508057, 0.08125299960374832, -0.061223000288009644, 0.148499995470047, 0.8066099882125854, 1.0384999513626099, -0.5956199765205383, 0.43011000752449036, -0.06851200014352798, 0.05641999840736389, -0.17982999980449677, -0.5804100036621094, -0.210099995136261, -0.7005500197410583, 0.738070011138916, 0.3904699981212616, 0.12714999914169312, 0.7078800201416016, -0.1799899935722351, 0.8437399864196777, -0.21062999963760376, 0.5518500208854675, 0.3824099898338318, -0.21797999739646912, -0.5781599879264832, 0.16315999627113342, 0.31317999958992004, -0.30946001410484314, 0.42100000381469727, 0.34261998534202576, 0.42197999358177185, -0.16322000324726105, -0.09329099953174591, 0.6807699799537659, -0.8309599757194519, 0.1589599996805191, 0.5263800024986267, 0.3362399935722351, 0.5370200276374817, -0.24688999354839325, 0.5347099900245667, -0.24921999871730804, -0.3164199888706207, 0.12280000001192093, -0.4118900001049042, 0.82955002784729, 0.8317499756813049, -0.11083000153303146, 0.5514699816703796, -0.13603000342845917, 0.0047109997831285, -0.0534450002014637, -0.051902998238801956, 0.024744000285863876, 0.20180000364780426, -0.6283100247383118, -0.6197699904441833, 0.3551599979400635, -0.14469000697135925, -0.23672999441623688, -0.5117300152778625, -0.17308999598026276, -0.3203999996185303, -0.12941999733448029, -0.19900000095367432, -0.509660005569458, -0.02223600074648857, 0.23532000184059143, -0.01576099917292595, 0.5919399857521057, 0.07629899680614471, -0.0952259972691536, 1.0135999917984009, 0.1563200056552887, -0.8643199801445007, 0.1965699940919876, -0.04580099880695343, -0.4193499982357025, -0.41196998953819275, -0.1329600065946579, 0.2422100007534027, 0.6164299845695496, 
-0.011893999762833118, -0.011150999926030636, -0.021261999383568764, 0.37088000774383545, -0.731249988079071, 0.4893699884414673, 0.27816998958587646, 0.9111700057983398, -0.35242000222206116, -0.509440004825592, 0.5952600240707397, 0.23406000435352325, -0.045928001403808594, -1.0637999773025513, 0.2459300011396408, 0.37386998534202576, -0.016377000138163567, 0.8689799904823303, -0.5737199783325195, 0.8433700203895569, 0.3496699929237366, 0.38468998670578003, 0.09955599904060364, 0.1945600062608719, 0.024486999958753586, 0.01770699955523014, -0.2855300009250641, 0.10490000247955322, -0.09564799815416336, 0.6741099953651428, -0.5014299750328064, 0.22248999774456024, 0.5663599967956543, -0.17184999585151672, -0.18655000627040863, 0.05882500112056732, 0.5952900052070618, -0.6559900045394897, -1.3955999612808228, -0.27775999903678894, 0.08461999893188477, 0.1396999955177307, -0.6153500080108643, -0.3810499906539917, 0.210439994931221, -0.6295499801635742, 0.5968300104141235, 0.5817999839782715, -0.4490100145339966, -0.3510400056838989, 0.21018999814987183, -0.5985400080680847, 0.001752799958921969, -0.16221000254154205, 0.10693000257015228, 0.006219599861651659, -0.4144600033760071, -0.4361099898815155, 0.44958001375198364, 0.40139999985694885, -0.418940007686615, -0.03652799874544144, -0.06707999855279922, 0.06382100284099579, 0.1623699963092804, -0.07669799774885178, 0.7346900105476379, -0.14339999854564667, -0.19744999706745148, -0.3792699873447418, 0.08125200122594833, 0.11320000141859055, 0.37768998742103577, -0.1610099971294403, 0.6520500183105469, 0.17958000302314758, -0.155799999833107, -0.035905998200178146, 0.1330299973487854, 0.38367000222206116, 0.020103000104427338, -0.8852900266647339, -0.09735000133514404, 0.28150999546051025, 0.028074000030755997, 1.0636999607086182, 0.20068000257015228, 0.22156000137329102, -0.7237799763679504, -0.4429999887943268, 1.1948000192642212, 0.512499988079071, 0.2991800010204315, 0.5044100284576416, 0.11661999672651291, 0.14936000108718872, -0.44670000672340393, -0.19338999688625336, 0.6331999897956848, -0.3937399983406067, -0.4381200075149536, 0.5612599849700928, -0.11055000126361847, -0.008088699541985989, 0.32973000407218933, -0.510420024394989, 0.04005400091409683, 0.5829200148582458, 0.18959000706672668, -0.015313000418245792, -0.3953000009059906, -0.494269996881485, -0.04871800169348717, -0.5575500130653381, -0.03330700099468231, 0.6849700212478638, 0.0196749996393919, -0.5978000164031982, -0.07518500089645386, 0.4217199981212616, 0.42765000462532043, -0.13415999710559845, 0.12929999828338623, 1.0717999935150146, 0.09229700267314911, 0.5896999835968018, -0.6348000168800354, -0.08995600044727325, -0.3666200041770935, -0.3271400034427643, -0.19147999584674835, 0.48680999875068665, -0.4125500023365021, -0.28529998660087585, 1.0371999740600586, 0.05515599995851517, -0.23799000680446625, -0.8902300000190735, -0.011775000020861626, -0.05325600132346153, -0.062185000628232956, -0.1786399930715561, -1.027500033378601, -0.00526219978928566, -0.0828929990530014, 0.39259999990463257, -0.282260000705719, 0.29311999678611755, 0.0219930000603199, 0.8593000173568726, -0.13759000599384308, 0.17497999966144562, 0.2834399938583374, -0.16541999578475952, 0.30869999527931213, -0.22951999306678772, 0.25262999534606934, 0.10595999658107758, -0.25143998861312866, 0.23624999821186066, -0.020201999694108963, 0.7269999980926514, 0.1445000022649765, 0.1092199981212616, 0.7182199954986572, -0.5603200197219849, -0.23810000717639923, -0.13535000383853912, 
0.3135699927806854, -0.23388999700546265, 0.5079500079154968, 0.27775999903678894, -0.09686999768018723, -0.004205599892884493, 0.3812600076198578, 0.6888099908828735, 0.13339999318122864, -0.20297999680042267, 0.019866999238729477, -0.24211999773979187, 0.42677998542785645, -0.3961299955844879, -0.09815800189971924, -0.9291200041770935, -0.05937499925494194, -0.16925999522209167, 0.38109999895095825, 0.20178000628948212], u'dry': [0.15289999544620514, -0.005403299815952778, -0.3314400017261505, -0.6283800005912781, 0.25418001413345337, 0.027487000450491905, 0.6869999766349792, 0.4720500111579895, 0.6048300266265869, -1.4944000244140625, 0.21390999853610992, -0.5304499864578247, -0.12701000273227692, -0.3584800064563751, -0.07543899863958359, -0.3135499954223633, -0.13840000331401825, -0.0030149000231176615, 0.2897700071334839, 0.37018001079559326, -0.052661001682281494, -0.29811999201774597, 0.48069000244140625, 0.20201000571250916, -0.5689600110054016, -0.05577699840068817, 0.1438400000333786, 0.023409999907016754, -0.43230998516082764, -0.296889990568161, -0.016950000077486038, 0.8777499794960022, -0.38951000571250916, -0.177839994430542, -0.5053499937057495, 0.37542998790740967, -0.15154999494552612, 0.2881999909877777, -0.41600000858306885, 0.30066999793052673, -0.23333999514579773, 0.326229989528656, 0.4441399872303009, -0.006952300202101469, 0.6920099854469299, -0.17911000549793243, 0.3488999903202057, 0.7972000241279602, -0.04105599969625473, 0.027041999623179436, 0.3297800123691559, -0.06867299973964691, 0.3500699996948242, 0.078404001891613, -0.05593699961900711, 0.21907000243663788, -0.14896999299526215, -0.7351499795913696, 0.49404001235961914, 0.11924000084400177, 0.3060399889945984, -0.23386000096797943, 0.47788000106811523, -0.003222400089725852, -0.4574899971485138, -0.6755300164222717, -0.2145799994468689, 0.2952499985694885, -0.07964999973773956, -0.2964000105857849, 0.19701999425888062, -0.231330007314682, -0.23220999538898468, 0.012911000289022923, -0.5203400254249573, -0.3815700113773346, 0.4448699951171875, 0.3401699960231781, -0.38262999057769775, -0.11326000094413757, -0.16516000032424927, 0.09528300166130066, -0.6661199927330017, -0.2165900021791458, -0.21900999546051025, 0.34139999747276306, -0.5781999826431274, -0.02717900089919567, 0.03133799880743027, 0.2125999927520752, 0.120619997382164, 0.29987001419067383, 0.4097200036048889, 0.03527799993753433, 0.13033999502658844, 0.13885000348091125, 0.2413100004196167, -0.10228999704122543, 0.4290899932384491, 0.10299000144004822, 0.14831000566482544, 0.06208699941635132, -0.6787199974060059, 0.039972998201847076, -0.852370023727417, 0.04872800037264824, -0.29875001311302185, 0.5361800193786621, -0.1666799932718277, -0.018825000151991844, -0.4662100076675415, -0.4290499985218048, -0.043630000203847885, 0.005871700122952461, -0.490229994058609, -0.2088100016117096, -0.40953001379966736, 0.652400016784668, 0.6366000175476074, 0.007924799807369709, -0.2465900033712387, -0.7132200002670288, -0.16060000658035278, 0.704289972782135, 0.10982000082731247, 0.44176000356674194, -0.03670699894428253, 0.3389799892902374, 0.6519700288772583, 0.1723400056362152, 0.3310000002384186, 0.6677500009536743, 0.4497300088405609, 0.46974000334739685, -0.32635998725891113, 0.22755999863147736, 0.06997399777173996, 0.6946200132369995, -0.08340699970722198, -0.33202001452445984, 0.6356899738311768, -0.18761000037193298, -0.5335299968719482, -0.34779998660087585, -0.6511099934577942, 0.0458110012114048, -0.03227299824357033, 
-0.09360300004482269, 0.26012998819351196, 0.18150000274181366, -0.4813399910926819, -0.04008999839425087, -0.44110000133514404, -0.2808699905872345, 0.4093399941921234, -0.19637000560760498, 0.0557899996638298, -0.21295000612735748, 0.29117000102996826, 0.9233300089836121, -0.11112000048160553, -0.702489972114563, 0.1214200034737587, 0.22431999444961548, 0.40821999311447144, -0.1800300031900406, 0.4966000020503998, -0.26183000206947327, 0.038589999079704285, 0.31167998909950256, 0.44988998770713806, 0.7782400250434875, -0.3474000096321106, 0.455949991941452, 0.19377000629901886, 0.4033699929714203, 0.2810699939727783, -0.6003400087356567, 0.2978599965572357, -0.7170699834823608, 0.3276999890804291, 0.32067999243736267, 0.1703599989414215, 0.017458999529480934, 0.14143000543117523, -0.2335200011730194, 1.3503999710083008, -0.3646000027656555, -0.1250700056552887, -0.38931000232696533, 0.06065699830651283, 1.2367000579833984, -0.1639000028371811, 0.21541999280452728, -0.04382200166583061, -0.05809900164604187, -0.19393999874591827, -0.4487999975681305, -0.4392699897289276, 0.2379000037908554, 0.13816000521183014, 0.13527999818325043, 0.1809300035238266, 0.3705100119113922, 0.23183000087738037, 0.11264999955892563, -0.28384000062942505, 0.12276999652385712, -0.032033998519182205, -0.7879599928855896, -0.06435299664735794, -0.5548200011253357, -0.21017999947071075, -0.39256998896598816, 0.3619900047779083, -0.33044999837875366, 0.12035000324249268, -0.6043000221252441, 0.2671799957752228, -0.4914799928665161, 0.794409990310669, 0.19351999461650848, -0.1612199991941452, -0.10541000217199326, -0.2625899910926819, -0.056233000010252, 0.15838000178337097, -0.1333799958229065, -0.1006999984383583, 0.0893229991197586, 0.7503899931907654, -0.32293999195098877, -0.36528000235557556, -0.34272000193595886, 0.2978900074958801, 0.21608999371528625, -0.6894500255584717, -0.33754000067710876, -0.6105599999427795, -0.2967199981212616, -0.5813199877738953, 0.19411000609397888, -0.11438000202178955, -0.0033227999228984118, -0.7329400181770325, 0.0015737999929115176, 0.5920199751853943, 0.18029999732971191, -0.016797000542283058, -1.0812000036239624, 0.4685800075531006, -0.1863200068473816, 0.4359999895095825, -0.10102999955415726, 0.3570699989795685, 0.09970100224018097, -0.019222000613808632, 0.09495200216770172, 0.023631000891327858, 0.9517099857330322, -0.01234500017017126, -0.3777799904346466, -0.9020100235939026, 0.015177000313997269, -0.3447999954223633, -0.21392999589443207, -0.41464999318122864, -0.10570000112056732, 0.5497499704360962, -0.2963100075721741, 0.10831999778747559, 0.03918199986219406, -0.5602200031280518, 0.17037999629974365, 0.33235999941825867, -0.2263599932193756, -0.6844800114631653, 0.29027000069618225, -0.05076700076460838, -0.0610090009868145, -0.6776999831199646, 0.0351639986038208, 0.1160999983549118, 0.30667001008987427, 0.04535200074315071, 0.6385899782180786, 0.47457998991012573, -0.5680400133132935, -0.0585240013897419, 0.24556000530719757, -0.059783000499010086, 0.36980000138282776, -0.36586999893188477, 0.053509000688791275, 0.1770000010728836, 0.24266000092029572, -0.4117099940776825, -0.1237500011920929, 0.1321299970149994, 0.7542300224304199], u'chipped': [-0.31213000416755676, 0.17440000176429749, -0.2814599871635437, 0.3399899899959564, 0.2832300066947937, -0.29054999351501465, 0.044259000569581985, 0.03573499992489815, 0.4390699863433838, 0.0983320027589798, 0.20866000652313232, 0.24650999903678894, -0.04144499823451042, 0.19431999325752258, -0.49748000502586365, 
0.13369999825954437, -0.2092200070619583, 0.1030300036072731, 0.13003000617027283, 0.5257099866867065, -0.12191999703645706, 0.29047998785972595, 0.124269999563694, -0.6374899744987488, -0.2933799922466278, -0.19913999736309052, -0.35986998677253723, 0.2797499895095825, -0.08085799962282181, 0.1178399994969368, 0.1554899960756302, 0.14575999975204468, 0.3708699941635132, 0.07855899631977081, -0.5904899835586548, -0.5648900270462036, 0.45781999826431274, 0.3752799928188324, -0.5678899884223938, 0.11339999735355377, -0.2514899969100952, 0.07832799851894379, 0.4076699912548065, -0.4118799865245819, 0.3138499855995178, 0.06852500140666962, 0.24747000634670258, -0.16561999917030334, -0.5995200276374817, 0.038152001798152924, -0.36798998713493347, -0.19283999502658844, 0.2998499870300293, -0.002689500106498599, -0.08223100006580353, -0.5086299777030945, -0.020889999344944954, 0.3306100070476532, -0.5103899836540222, -0.3896700143814087, 0.1300400048494339, 0.318340003490448, -0.12590999901294708, -0.03598799929022789, 0.22657999396324158, -0.1614599972963333, 0.4141499996185303, -0.3605499863624573, 0.4461199939250946, -0.7955800294876099, 0.3567099869251251, -0.24301999807357788, 0.6027600169181824, 0.3677000105381012, 0.3043000102043152, -0.18032999336719513, 0.15760000050067902, -0.6226599812507629, -0.00901539996266365, -0.05819699913263321, 0.06472499668598175, 0.005552100017666817, 0.8971400260925293, -0.3842200040817261, -0.4587000012397766, 0.39897000789642334, 0.11146000027656555, -0.3595600128173828, -0.0537789985537529, -0.3857499957084656, 0.7480000257492065, 0.8907899856567383, -0.41144001483917236, -0.07215700298547745, -0.31349000334739685, -0.3682200014591217, -0.044089000672101974, 0.13443000614643097, -0.2596200108528137, 0.5684300065040588, -0.06359799951314926, -0.14981000125408173, -0.18795999884605408, 0.06435099989175797, 0.23055000603199005, -0.08258199691772461, -0.020764999091625214, -0.16367000341415405, -0.22654999792575836, -0.4594700038433075, -0.21491999924182892, -0.08380699902772903, -0.13446000218391418, -0.7379699945449829, -0.5849400162696838, 0.17966000735759735, -0.26712000370025635, 0.03632400184869766, -0.16126999258995056, 0.11378999799489975, 0.030711999163031578, -0.008472800254821777, -0.20271000266075134, 0.5613499879837036, -0.3160499930381775, -0.03436100110411644, -0.049800001084804535, -0.1419599950313568, 0.15960000455379486, 0.11545000225305557, 0.1505099982023239, -0.05409200116991997, 0.0011642000172287226, 0.17045000195503235, 0.32982000708580017, 0.012551000341773033, -0.028769999742507935, -0.03336299955844879, -0.10087999701499939, -0.09474299848079681, -0.021877000108361244, 0.7356699705123901, -0.1815200001001358, 0.21514999866485596, -0.33246999979019165, 0.5279300212860107, 0.1601399928331375, 0.4466100037097931, -0.27952998876571655, -0.704990029335022, -0.11247999966144562, 0.4198000133037567, -0.6775299906730652, -0.4258599877357483, -0.37117999792099, 0.012826000340282917, -0.2498600035905838, -0.5240200161933899, 0.15164999663829803, 0.12536999583244324, -0.22612999379634857, -0.20875999331474304, -0.32809001207351685, -0.6744800209999084, 0.61531001329422, 0.0173799991607666, -0.12421999871730804, -0.27316001057624817, 0.10653000324964523, 0.06531299650669098, -0.5431600213050842, -0.3152799904346466, 0.293040007352829, -0.06997100263834, -0.5095499753952026, -0.010332000441849232, -0.023391999304294586, 0.7914800047874451, -0.1816300004720688, -0.20297999680042267, -0.40696999430656433, 0.2074899971485138, 0.6003199815750122, 
-0.7117499709129333, -0.5030800104141235, -0.46654999256134033, 0.43101000785827637, -0.10463999956846237, 0.3248800039291382, 0.8973000049591064, 0.7640299797058105, 0.11668000370264053, 0.45344001054763794, 0.034811001271009445, -0.16223999857902527, -0.2821199893951416, -0.08381500095129013, -0.2591100037097931, -0.13318000733852386, -0.028603000566363335, 1.0161999464035034, 0.16701999306678772, 0.05335899814963341, 0.21376000344753265, -0.3868899941444397, 0.09831299632787704, -0.13662000000476837, -0.2997500002384186, -0.7913699746131897, 0.4883800148963928, 0.8431599736213684, 0.013741999864578247, 0.0315449982881546, 0.3930099904537201, 0.398140013217926, -0.18313999474048615, 0.37288999557495117, 0.006524200085550547, 0.07588999718427658, 0.3142400085926056, 0.2828899919986725, 0.0433959998190403, -0.3062799870967865, -0.20317000150680542, -0.17434999346733093, -0.22936999797821045, 0.32396000623703003, -0.17462000250816345, -0.061795998364686966, 0.3161799907684326, -0.3197900056838989, 0.18961000442504883, 0.06836000084877014, 0.7779600024223328, -0.32934999465942383, 0.14464999735355377, -0.3129499852657318, 0.030355000868439674, 0.16132999956607819, 0.0022609999869018793, -0.07354699820280075, -0.03841700032353401, -0.7515299916267395, -0.02432600036263466, -0.6569399833679199, 0.08754999935626984, 0.06603699922561646, 0.19585999846458435, 0.4095900058746338, 0.1247899979352951, -0.20318999886512756, -0.5753499865531921, 0.4552600085735321, -1.1859999895095825, 0.19102999567985535, 0.02333899959921837, -0.07354100048542023, -0.4078899919986725, -0.6258599758148193, 0.3794800043106079, -0.07962000370025635, 0.4128499925136566, -0.06111399829387665, -0.12769000232219696, -0.16458000242710114, -0.08386500179767609, -0.28665000200271606, -0.3297500014305115, -0.35565000772476196, -0.2019599974155426, -0.21122999489307404, -0.030748000368475914, -0.4294399917125702, 0.11552000045776367, -0.06970100104808807, -0.38288000226020813, 0.13714000582695007, -0.07909700274467468, -0.8637599945068359, -0.446370005607605, -0.18560999631881714, -0.15151000022888184, 0.48431000113487244, 0.24356000125408173, 0.3934899866580963, -0.04483100026845932, 0.21377000212669373, -0.2247599959373474, 0.42285001277923584, 0.40373000502586365, 0.5245100259780884, 0.15343999862670898, 0.30858999490737915, 0.4869599938392639, -0.06780499964952469, -0.48166000843048096, 0.5959699749946594, -0.9977700114250183, 0.14478999376296997, -0.3744699954986572], u'spilled': [0.2516399919986725, 0.1715400069952011, 0.26618000864982605, 0.2796500027179718, -0.07316700369119644, 0.41416001319885254, 0.1384900063276291, 0.03096500039100647, 0.6639000177383423, -0.5573599934577942, -0.19859999418258667, 0.23868000507354736, -0.5473700165748596, -0.4575900137424469, -0.14222000539302826, -0.004537399858236313, -0.11461000144481659, 0.672249972820282, -0.09132800251245499, 0.6270999908447266, -0.13370999693870544, 0.24652999639511108, 0.22411000728607178, -0.5345699787139893, -0.3923799991607666, -0.17178000509738922, 0.18991999328136444, 0.10955999791622162, 0.060858000069856644, -0.09887400269508362, 0.3723500072956085, 0.0832270011305809, 0.20010000467300415, 0.34404000639915466, 0.29868999123573303, -0.42952999472618103, -0.29256001114845276, -0.10728999972343445, -0.008739699609577656, 0.31650999188423157, -0.46255001425743103, -0.34731999039649963, 0.249439999461174, 0.16147999465465546, 0.6529399752616882, -0.01792999915778637, 0.09309399873018265, 0.03417100012302399, -0.21528999507427216, 0.3900099992752075, 
0.0200399998575449, 0.36039999127388, -0.11055999994277954, -0.2888599932193756, -0.03341300040483475, 0.40156999230384827, 0.6434400081634521, 0.17951999604701996, -0.06594300270080566, -0.14979000389575958, -0.0045934999361634254, -0.08480899780988693, -0.1071000024676323, -0.24975000321865082, -0.35106998682022095, -0.7042199969291687, -0.02121499925851822, -0.10847999900579453, -0.07600200176239014, -0.33212000131607056, -0.05744300037622452, -0.30131998658180237, 0.6276999711990356, 0.05038300156593323, 0.3420200049877167, -0.555429995059967, 0.8152400255203247, -0.3684200048446655, -0.3330000042915344, -0.44725000858306885, 0.1399500072002411, -0.6014999747276306, 0.40779998898506165, 0.12999999523162842, 0.2440900057554245, -0.33212000131607056, 0.27619999647140503, -0.43557998538017273, 0.3490599989891052, -0.011908999644219875, -0.11768999695777893, 0.18633000552654266, 0.06981600075960159, -0.02029399946331978, -0.6523100137710571, 0.19271999597549438, 0.6222599744796753, -0.022213999181985855, -0.22970999777317047, 0.15127000212669373, 0.1261499971151352, 0.07760299742221832, -0.2688399851322174, -0.2378299981355667, 0.5869200229644775, 0.2027599960565567, 0.38815000653266907, 0.054381001740694046, 0.13564999401569366, 0.6456000208854675, -0.5281999707221985, -0.2848399877548218, 0.11788000166416168, -0.006899999920278788, -0.3158299922943115, 0.3361400067806244, 0.15658000111579895, 0.44481998682022095, 0.025286000221967697, -0.21755999326705933, 0.05016100034117699, -0.4997900128364563, -0.2629599869251251, 1.1526000499725342, 0.2908099889755249, -0.33594998717308044, -0.12939000129699707, -0.21943999826908112, 0.5133500099182129, -0.0011458999942988157, 0.15789000689983368, 0.4708400070667267, 0.08987399935722351, 0.166360005736351, 0.5331199765205383, 0.2906300127506256, -0.0039033000357449055, 0.14111000299453735, 0.017225999385118484, -0.19571000337600708, -0.10131999850273132, -0.20913000404834747, -0.11404000222682953, 0.04696999862790108, -0.21383999288082123, 0.1472499966621399, -0.1199600026011467, -0.013539000414311886, 0.03714999929070473, 0.2549999952316284, 0.08311399817466736, 0.5992500185966492, 0.3423300087451935, 0.0028562000952661037, 0.2976900041103363, 0.5685399770736694, 0.20952999591827393, 0.23513999581336975, 0.6204100251197815, -0.030215999111533165, 0.4132300019264221, -0.23587000370025635, 0.11226999759674072, 0.19799000024795532, -0.020089000463485718, -0.6419699788093567, 0.01982799917459488, -0.13471999764442444, -0.6039699912071228, -0.04891800135374069, -0.2150299996137619, 0.125450000166893, 0.1618099957704544, -0.6684399843215942, -0.1789499968290329, -0.34251001477241516, 0.11298999935388565, 0.1602499932050705, 0.3097800016403198, -0.3069100081920624, -0.2692300081253052, 0.18077999353408813, -0.2583500146865845, -0.07897400110960007, 0.05466099828481674, -0.2670699954032898, 1.1629999876022339, -0.24944999814033508, 0.15809999406337738, -0.13738000392913818, 0.6810700297355652, -0.5912899971008301, -0.0020896000787615776, -0.23317000269889832, 0.2847999930381775, -0.336899995803833, 0.17030000686645508, -0.004288500174880028, -0.6960800290107727, 0.10485000163316727, 0.2671999931335449, -0.0029174000956118107, 0.6483299732208252, 0.010173000395298004, 0.2544800043106079, -0.20521999895572662, 0.3499299883842468, 0.28843000531196594, -0.3269999921321869, 0.36441001296043396, -0.2446800023317337, 0.48816999793052673, -0.20430000126361847, 0.1851000040769577, 0.25374001264572144, 0.40459001064300537, 0.3023799955844879, -0.41137999296188354, 
0.23785999417304993, 0.15376999974250793, 0.5321199893951416, 0.35857999324798584, -0.36890000104904175, -0.31905999779701233, 0.2666099965572357, -0.329910010099411, 0.1031000018119812, -0.31477999687194824, 0.13371999561786652, 0.16362999379634857, -0.31095001101493835, 0.048889998346567154, -0.7164099812507629, 0.48969000577926636, 0.57955002784729, 0.4916900098323822, 0.42906999588012695, 0.023333000019192696, -0.4900200068950653, -0.39590999484062195, -0.31099000573158264, 0.17388999462127686, -0.289359986782074, -0.06300599873065948, 0.20000000298023224, -0.4370799958705902, 0.41909000277519226, 0.5019500255584717, 0.07653199881315231, 0.1035500019788742, 0.36921000480651855, -0.5248799920082092, -0.3464600145816803, -0.44012001156806946, -0.06886400282382965, -0.10386999696493149, -0.554639995098114, -0.3576900064945221, 0.20855000615119934, 0.32613998651504517, 0.007543900050222874, 0.5048900246620178, -0.3609200119972229, -0.30976998805999756, 0.12256000190973282, 0.25266000628471375, -0.7429100275039673, -0.11597000062465668, -0.2584199905395508, -0.3729400038719177, 0.2005700021982193, 0.1749500036239624, -0.295960009098053, -0.1758899986743927, -0.15645000338554382, -0.28659000992774963, -0.6169700026512146, -0.4735400080680847, -0.21443000435829163, -0.14124000072479248, -0.3570599853992462, 0.3025299906730652, 0.7387099862098694, 0.09503400325775146, 0.3376699984073639, -0.22867999970912933, 0.06550700217485428, -0.3055399954319, 0.08460000157356262, 0.6154500246047974, -0.08884699642658234, 0.23533999919891357, 0.8664500117301941, 0.5340099930763245, 0.5305299758911133, -0.026141000911593437, 0.7993999719619751, -0.47150999307632446, -0.0158929992467165, 0.08520399779081345], u'coiled': [-0.12775999307632446, 0.5202199816703796, -0.04901999980211258, -0.0532820001244545, 0.1365399956703186, 0.5410400032997131, -0.2401999980211258, -0.02729799970984459, 0.07803100347518921, 0.24646000564098358, -0.3703800141811371, 0.029179999604821205, -0.15277999639511108, -0.31134000420570374, -0.36201998591423035, 0.12484999746084213, -0.3248400092124939, 0.22864000499248505, 0.10225000232458115, 0.055612001568078995, 0.1835400015115738, 0.26756998896598816, -0.6685000061988831, 0.6442800164222717, 0.3086499869823456, -0.035725001245737076, 0.05925999954342842, 0.2856999933719635, -0.08648999780416489, 0.6449400186538696, 0.1506499946117401, -0.37003999948501587, 0.20843000710010529, -0.07938499748706818, 0.7426499724388123, 0.5715699791908264, -0.18706999719142914, 0.25018998980522156, -0.41102999448776245, 0.5163999795913696, -0.022036999464035034, -0.48895999789237976, 0.412200003862381, -0.47275999188423157, -0.15526999533176422, -0.055424999445676804, -0.2652699947357178, -0.18727000057697296, 0.2792400121688843, 0.4655599892139435, -0.08371700346469879, 0.3043000102043152, 0.35989999771118164, 0.03155599907040596, 0.22078999876976013, -0.8164600133895874, -0.3437800109386444, 0.32635000348091125, 0.22039000689983368, 0.4273200035095215, 0.6479899883270264, 0.4041999876499176, 0.6875500082969666, 0.7195500135421753, 0.38207000494003296, -0.38732999563217163, -0.4708699882030487, 0.4292599856853485, 0.6961299777030945, 0.6334599852561951, -0.2906300127506256, 0.32631000876426697, 0.24718999862670898, 0.10920999944210052, 0.04060199856758118, 0.7221900224685669, 0.1326500028371811, -0.3433699905872345, -0.481330007314682, -0.09199199825525284, 0.34321001172065735, -0.8288099765777588, 0.10444000363349915, 0.37106001377105713, -0.45263999700546265, 0.2823199927806854, 
-0.33449000120162964, 0.4866200089454651, -0.1295900046825409, 0.49595001339912415, 0.3508799970149994, 0.005773699842393398, -0.17788000404834747, 0.0640449970960617, -0.6214699745178223, 0.0505560003221035, -0.2413800060749054, 1.0347000360488892, 0.35471999645233154, 0.290800005197525, 0.033757999539375305, 0.09017699956893921, -0.4771699905395508, -0.3653700053691864, 0.24289000034332275, 0.16711999475955963, 0.5926600098609924, 0.28262001276016235, -0.5277600288391113, 0.10317999869585037, 0.007893599569797516, 0.42697998881340027, -0.19681000709533691, -0.09323199838399887, 0.019550999626517296, 0.045625001192092896, -0.7581499814987183, -0.13686999678611755, 0.1331000030040741, 0.0014416000340133905, -0.28589001297950745, -1.1035000085830688, 0.2590799927711487, 0.5345600247383118, 0.03163899853825569, -0.022422000765800476, -0.6533399820327759, -0.10976000130176544, -0.7371900081634521, 0.32468000054359436, 0.2900199890136719, -0.0662980005145073, -0.46000999212265015, -0.002981900004670024, -0.79721999168396, 0.31038999557495117, -0.04358899965882301, -0.47437000274658203, 0.3434000015258789, -0.07131899893283844, 0.10202000290155411, -0.14451000094413757, 0.16088999807834625, -0.49355000257492065, 0.11917000263929367, -0.2319599986076355, -0.0760200023651123, -0.14900000393390656, 0.21995000541210175, -0.1646600067615509, -0.08970700204372406, 0.09528899937868118, -0.46893998980522156, -0.021568000316619873, -0.21881000697612762, -0.7824900150299072, 0.6100800037384033, -0.5583099722862244, -0.09916400164365768, 0.632610023021698, -0.3292900025844574, 0.07863499969244003, -0.8154100179672241, -0.23633000254631042, 0.21773000061511993, 0.23506000638008118, -0.25023001432418823, -0.013697000220417976, 0.28672999143600464, -0.5021399855613708, -0.23333999514579773, 0.5179100036621094, 0.31512999534606934, 0.25659000873565674, -0.15547999739646912, 0.06775400042533875, -0.4268999993801117, 0.491210013628006, 0.3779599964618683, -0.8979899883270264, 0.1118599995970726, -0.557200014591217, 0.2808400094509125, 0.9380300045013428, 0.40874001383781433, 0.016140999272465706, 0.6212499737739563, 0.4159199893474579, 0.11924999952316284, -0.5316799879074097, -0.3208000063896179, -0.0177449993789196, -0.056412000209093094, -0.05732500180602074, -0.006222300231456757, -0.3390499949455261, -0.26493000984191895, -0.5656999945640564, -0.1640699952840805, 0.2689799964427948, -0.11456000059843063, 0.7756199836730957, 0.2631700038909912, 0.2274399995803833, 0.05685799941420555, 0.6024399995803833, -0.3803099989891052, -0.4346199929714203, -0.17267000675201416, 0.5512499809265137, -0.1740500032901764, 0.42129001021385193, 0.6624500155448914, -0.4183099865913391, -0.7219099998474121, 0.2885200083255768, 0.293940007686615, -0.8342499732971191, -0.018036000430583954, -0.3240799903869629, -0.11813999712467194, 0.0704289972782135, 0.28172001242637634, -0.1234000027179718, -0.49195000529289246, 0.3117299973964691, -0.3790299892425537, 0.21067999303340912, -0.09180299937725067, -0.6584299802780151, 0.19750000536441803, -0.4314500093460083, 0.2463800013065338, 0.0974079966545105, -0.1657799929380417, 0.0772470012307167, -0.3026899993419647, -0.8722400069236755, -0.9085500240325928, 0.49838000535964966, 0.25053998827934265, -0.06983499974012375, -0.48313000798225403, 0.2399200052022934, -0.20614999532699585, -0.2557699978351593, 0.36289000511169434, 0.4161800146102905, -0.45548000931739807, -0.4045099914073944, -0.27955999970436096, 0.26475000381469727, 0.08611500263214111, -0.32168999314308167, 
0.2552799880504608, -0.14833000302314758, 0.5610499978065491, 0.1979999989271164, 0.10544999688863754, 0.32951000332832336, -0.1445399969816208, -0.15076999366283417, 0.5740699768066406, -0.5286200046539307, -0.4288800060749054, -0.4156099855899811, 0.5113300085067749, -0.2223999947309494, 0.4246799945831299, 0.866129994392395, -0.20513999462127686, -0.11085999757051468, 0.09714499861001968, 0.8520100116729736, -0.1861799955368042, 0.06672900170087814, 0.28777000308036804, -0.07134400308132172, -0.5644599795341492, 0.09599900245666504, 0.23066000640392303, -0.5483599901199341, -0.04496699944138527, 0.5896400213241577, -1.055999994277954, -0.38269999623298645, 0.2820900082588196, 1.016800045967102, 0.061599001288414, -0.43362998962402344, 0.44749999046325684, 0.009499600157141685, 0.042433999478816986, 0.3701399862766266, 0.608460009098053, 0.2591499984264374, 0.8380500078201294, 0.007387000136077404, 0.3268299996852875, -0.32311001420021057], u'wrinkled': [-0.5680099725723267, -0.21379999816417694, 0.050533000379800797, -0.6460400223731995, -0.11457999795675278, -0.2645600140094757, -0.05771299824118614, 0.05344599857926369, 0.25933000445365906, 0.18523000180721283, -0.6527699828147888, 0.007204900030046701, 0.21129000186920166, 0.19092999398708344, -0.3531799912452698, 0.5329999923706055, -0.013275000266730785, -0.3520300090312958, -0.15241000056266785, -0.034529998898506165, 0.005095100030303001, 0.1072700023651123, -0.1083500012755394, 0.06477800011634827, -1.0252000093460083, 0.11565999686717987, 0.6279900074005127, 0.05815200135111809, 0.2672500014305115, 0.4013899862766266, -0.0694890022277832, 0.1335200071334839, -0.6624699831008911, -0.11331000179052353, 0.1282999962568283, 0.4243600070476532, -0.38826999068260193, -0.4787899851799011, 0.10823000222444534, 0.18681000173091888, 0.2378299981355667, -0.07175300270318985, 0.33281999826431274, -0.5527300238609314, 0.3106200098991394, 0.01659799925982952, 0.5769500136375427, 0.13801999390125275, -0.34804001450538635, -0.6721699833869934, 0.150409996509552, -0.6197599768638611, 0.25567999482154846, 0.027480000630021095, 0.2969000041484833, -0.47404998540878296, 0.18750999867916107, -0.3749699890613556, 0.5598999857902527, -0.179639995098114, 0.08192700147628784, -0.46924999356269836, -0.3663400113582611, 0.31571000814437866, -0.30948999524116516, 0.07088799774646759, 0.23940999805927277, 0.08974599838256836, 0.23598000407218933, -0.38317999243736267, 0.289110004901886, -0.0061425999738276005, -0.08184400200843811, 0.5829499959945679, 0.7932599782943726, 0.2602100074291229, -0.4632200002670288, -0.1802700012922287, 0.09619300067424774, -0.1745000034570694, -0.5113300085067749, 0.002235099906101823, -0.27546000480651855, -0.2740899920463562, -0.3887900114059448, 0.3721100091934204, -0.1410199999809265, 0.03738600015640259, -0.02719699963927269, 0.48177000880241394, -0.3556300103664398, -0.10109999775886536, -0.4088200032711029, 0.10198000073432922, -0.6096299886703491, 0.08680099993944168, -0.20440000295639038, 0.04738900065422058, 0.34692999720573425, 0.5085999965667725, 0.28428998589515686, -0.1551699936389923, 0.030889999121427536, 0.06318700313568115, -0.25018998980522156, 0.01506900042295456, 0.12598000466823578, -0.10102000087499619, 0.000208760000532493, -0.2633500099182129, -0.3432300090789795, 0.616599977016449, -0.2945599853992462, -0.3318699896335602, -0.05559000000357628, 0.21244999766349792, -0.16333000361919403, 0.488319993019104, 0.07063499838113785, 0.004807299934327602, -0.030525999143719673, -0.7148500084877014, 
0.2606399953365326, 0.3253200054168701, -0.10773999989032745, 0.3242500126361847, -0.2076999992132187, 0.08510900288820267, 0.1660899966955185, -0.12492000311613083, -0.36052998900413513, -0.4885599911212921, -0.35923001170158386, 0.1205499991774559, -0.5592700242996216, 0.12126000225543976, 0.3912999927997589, 0.11819999665021896, 0.38179001212120056, 0.2726399898529053, 0.8351399898529053, 0.28720998764038086, -0.15219999849796295, 0.05793999880552292, 0.19280999898910522, 0.3586600124835968, -0.28374001383781433, 0.33730998635292053, 0.5071600079536438, -0.17615999281406403, -0.8306000232696533, -0.0035455001052469015, -0.5570600032806396, -0.5996500253677368, -0.28501999378204346, -0.13605999946594238, 0.12272000312805176, -0.6401699781417847, 0.029145000502467155, 0.785290002822876, -0.5562899708747864, -0.6279900074005127, -0.27932998538017273, 0.02360299974679947, 0.3019300103187561, -0.4187000095844269, -0.1371700018644333, 0.4659700095653534, 0.5446100234985352, -0.651960015296936, 0.01740100048482418, 0.46713998913764954, -0.1370999962091446, -0.23114000260829926, -0.7392399907112122, -0.5148500204086304, 0.5205100178718567, 0.10712999850511551, 0.22863000631332397, -0.0628260001540184, 0.4331899881362915, -0.12927000224590302, -0.18793000280857086, 0.05008799955248833, -0.08012799918651581, -0.4882200062274933, 1.2359000444412231, 0.06383399665355682, 0.011358000338077545, 0.052639998495578766, 0.3387799859046936, 0.3655799925327301, 0.19304999709129333, 0.29864001274108887, -0.3073900043964386, 0.33500000834465027, 0.06735499948263168, -0.3090899884700775, -0.3195199966430664, -0.07559199631214142, 0.044491998851299286, 0.03881699964404106, 0.16098999977111816, 0.2662400007247925, 0.3274900019168854, -0.45794999599456787, 0.13642999529838562, -0.029217999428510666, 0.13962000608444214, -0.4729500114917755, 0.11249999701976776, 0.24988999962806702, -0.6179699897766113, 0.041117001324892044, 0.6456300020217896, 0.1049799993634224, 0.5491799712181091, -0.831059992313385, 0.176269993185997, 0.21039000153541565, 0.17124000191688538, 0.15934999287128448, 0.514710009098053, -0.06460099667310715, -0.5721700191497803, 0.24557000398635864, -0.2717599868774414, -0.12329000234603882, -0.29159998893737793, 0.01962899975478649, 0.6112499833106995, 0.18480999767780304, -0.25815001130104065, 0.29743000864982605, -0.121629998087883, -0.3627299964427948, -0.1357399970293045, -0.33094000816345215, -0.4333699941635132, -0.23331999778747559, 0.1636500060558319, -0.1693200021982193, -0.09502299875020981, 0.19625000655651093, -0.41244998574256897, 0.5011399984359741, -0.3583599925041199, 0.19171999394893646, 0.1442600041627884, -0.1418900042772293, -0.1531199961900711, -0.02758900076150894, 0.051725998520851135, -0.4771899878978729, 0.4968799948692322, 0.8049600124359131, 0.03275299817323685, 0.7173900008201599, -0.37373000383377075, 0.05906900018453598, -0.09941600263118744, 0.007719200104475021, 0.12703999876976013, 0.25512000918388367, 0.06663300096988678, 0.21202999353408813, 0.29159000515937805, 0.11873999983072281, -0.17323000729084015, 0.36177998781204224, -0.21073000133037567, 0.15875999629497528, 0.4731200039386749, 0.03966100141406059, -0.28992998600006104, -0.10450000315904617, 0.98430997133255, -0.8133100271224976, -0.3727099895477295, 0.20146000385284424, 0.3268600106239319, 0.13437999784946442, 0.08814600110054016, -0.04892599955201149, -0.49904000759124756, 0.1287900060415268, 0.1007699966430664, 0.12246000021696091, 0.36687999963760376, -0.10677000135183334, -0.20453999936580658, 
0.37755998969078064, -0.4664100110530853, 0.2656700015068054, -0.19089999794960022, -0.572409987449646, 0.22768999636173248, 0.19311000406742096, 0.12703000009059906, 0.020260000601410866], u'unpainted': [0.0484750010073185, -0.054019998759031296, -0.6508299708366394, -0.4651699960231781, -0.13765999674797058, 0.16218000650405884, -0.069582998752594, -0.24924999475479126, -0.5485799908638, 0.1623699963092804, -0.7757200002670288, -0.02963699959218502, -0.05329599976539612, -0.5814700126647949, 0.1593800038099289, 0.1366499960422516, -0.1190899983048439, 0.4608199894428253, -0.1523600071668625, -0.0836699977517128, 0.08420000225305557, 0.2937999963760376, 0.5667399764060974, 0.33417999744415283, 0.048868998885154724, -0.512969970703125, 0.738569974899292, 0.18316000699996948, -0.21478000283241272, 0.05291000008583069, 0.5954800248146057, 0.21056999266147614, 0.05113999918103218, -0.3222599923610687, 0.7050600051879883, 0.17204000055789948, -0.14823000133037567, -0.031408000737428665, 0.25929999351501465, 0.20830999314785004, 0.3378100097179413, -0.12782999873161316, -0.03741300106048584, -0.4547800123691559, -0.2946699857711792, 0.3354800045490265, 0.08483000099658966, -0.1352899968624115, -0.08942300081253052, 0.5170699954032898, -0.13842999935150146, 0.12950000166893005, 0.49149999022483826, -0.3334699869155884, 0.3421100080013275, -0.07402300089597702, -0.012117999605834484, -0.05748099833726883, -0.2100600004196167, 0.1402900069952011, -0.25679999589920044, -0.009081199765205383, -0.4584299921989441, -0.19363999366760254, 0.17773999273777008, -0.2754499912261963, 0.42640000581741333, -0.6273099780082703, 0.3476400077342987, -0.3922699987888336, -0.2727299928665161, 0.35359999537467957, -0.366100013256073, -0.1443299949169159, 0.015768999233841896, -0.12949000298976898, -0.13096000254154205, -0.011188000440597534, 0.06954900175333023, -0.23861999809741974, -0.33586999773979187, 0.05789700150489807, 0.16911999881267548, -0.32199999690055847, 0.1134599968791008, 0.36517998576164246, -0.08680500090122223, -0.09848300367593765, -0.02214200049638748, 0.6692399978637695, 0.15459999442100525, 0.36274999380111694, 0.6518300175666809, -0.17698000371456146, -0.34578999876976013, -0.5353299975395203, 0.16207000613212585, -0.31042999029159546, 0.09087099879980087, 0.23407000303268433, 0.3011600077152252, -0.29493001103401184, 0.1872200071811676, 0.3642899990081787, -0.038635000586509705, 0.007895800285041332, -0.03305500000715256, -0.8318799734115601, -0.1658799946308136, -0.5459499955177307, -0.7013499736785889, 0.18814000487327576, -0.5276299715042114, -0.6772300004959106, -0.43011000752449036, -0.1403599977493286, 0.011617000214755535, 0.5383300185203552, -0.17757999897003174, 0.5134699940681458, -0.34810999035835266, -0.46472999453544617, -0.12050999701023102, 0.454800009727478, 0.07160600274801254, 0.39280998706817627, -0.6725900173187256, -0.39410001039505005, -0.5232399702072144, -0.20895999670028687, -0.20767000317573547, -0.1743299961090088, 0.321399986743927, -0.005056300200521946, -0.2025499939918518, 0.29848000407218933, -0.1772100031375885, 0.8346199989318848, -0.16315999627113342, 0.016565000638365746, 0.5166299939155579, 0.456820011138916, -0.38798001408576965, -0.6556800007820129, 0.05239399895071983, -0.28944000601768494, 0.38227999210357666, -0.13436999917030334, -0.24360999464988708, -0.2992999851703644, -0.5415499806404114, 0.05764099955558777, -0.05343100056052208, -0.8559899926185608, -0.3363800048828125, 0.16481000185012817, 0.2985000014305115, -0.18806999921798706, 
0.2414799928665161, 0.21324999630451202, -0.047589998692274094, 0.6351000070571899, -0.10544999688863754, 0.2601499855518341, 0.23391999304294586, 0.4277699887752533, -0.021902000531554222, 0.3931399881839752, 0.4555499851703644, -0.16218000650405884, -0.5195299983024597, 0.2688800096511841, 0.4119499921798706, -0.16261999309062958, -0.19314000010490417, -0.5144699811935425, 0.1587499976158142, -0.3039200007915497, 0.09608899801969528, -1.3020999431610107, 0.22669999301433563, -0.12472999840974808, 0.38363000750541687, 0.26291999220848083, -0.07770299911499023, -0.14817999303340912, 0.5799400210380554, -0.00917890015989542, -0.003190699964761734, 0.17130999267101288, 0.1974799931049347, -0.21077999472618103, 0.4242100119590759, -0.03801500052213669, 0.31551000475883484, -0.12408000230789185, -0.6810500025749207, 0.37147000432014465, -0.164450004696846, -0.3163599967956543, -0.025232000276446342, -0.33425000309944153, -0.46922001242637634, -0.186039999127388, 0.00014077000378165394, 0.29884999990463257, 0.153889998793602, 0.1652200073003769, -0.5487200021743774, 0.20162999629974365, 0.5693100094795227, 0.44617998600006104, -0.010913999751210213, 0.05114400014281273, -0.5629500150680542, 0.026207000017166138, 0.23287999629974365, -0.454010009765625, 0.27011001110076904, -0.017199000343680382, 0.39737001061439514, -0.16785000264644623, 0.11136999726295471, -0.7521499991416931, -0.013886000029742718, 0.2335200011730194, -0.030921999365091324, -0.07893099635839462, -0.2221599966287613, -0.709119975566864, -0.1293099969625473, -0.48346999287605286, -0.08663400262594223, -0.16335999965667725, 0.012246999889612198, -0.07206299901008606, 0.020167000591754913, -0.037411998957395554, -0.6321499943733215, 0.07067599892616272, -0.013918999582529068, 0.05753900110721588, -0.15789000689983368, 0.29631999135017395, -0.08957500010728836, -0.19584999978542328, 0.25422999262809753, -0.3437100052833557, -0.004139599855989218, 0.19957999885082245, -0.3773599863052368, 0.10811000317335129, 0.27368998527526855, -0.3608100116252899, -0.09392800182104111, 0.7405400276184082, -0.22718000411987305, -0.06227099895477295, 0.3265399932861328, 0.22245000302791595, 0.003875100053846836, 0.42983999848365784, -0.11975999921560287, -0.43953999876976013, 0.09118100255727768, 0.368259996175766, 0.12623000144958496, -0.12247999757528305, -0.20795999467372894, 0.18267999589443207, -0.1802300065755844, 0.1994200050830841, -0.23333999514579773, -0.6029999852180481, -1.0961999893188477, 0.49160000681877136, 0.732990026473999, 0.4974699914455414, -0.8760200142860413, 0.2009900063276291, 0.9696000218391418, -0.10130000114440918, -0.719290018081665, 0.241689994931221, 0.5187399983406067, 0.1687999963760376, 0.4159500002861023, 0.45386001467704773, -0.26023998856544495, -0.02514299936592579, -0.6554200053215027, 0.03244499862194061, 0.5915499925613403, 0.2056799978017807, -0.15688000619411469, 0.009970500133931637, -0.2387399971485138, -0.08876200020313263, 0.22905999422073364, 0.22529999911785126], u'narrow': [-0.3843500018119812, -0.16827000677585602, -0.25613999366760254, -0.0030886998865753412, 0.272460013628006, 0.4359799921512604, 0.3502199947834015, 0.10391999781131744, 0.586870014667511, -1.4782999753952026, -0.47760000824928284, -0.22930000722408295, -0.3739300072193146, 0.14585000276565552, -0.08918499946594238, 0.21637000143527985, -0.3196699917316437, -0.03787299990653992, 0.605459988117218, -0.27974000573158264, -0.030626000836491585, -0.13051000237464905, 0.14142000675201416, 0.2053699940443039, -0.7974100112915039, 
-0.3990600109100342, 0.30952000617980957, -0.2538500130176544, -0.20809000730514526, 0.427480012178421, 0.22391000390052795, 0.2162500023841858, -0.04972999915480614, 0.08652599900960922, -0.6320800185203552, 0.7570099830627441, -0.0018615999724715948, -0.09445100277662277, -0.5245400071144104, -0.2597000002861023, -0.08195900171995163, 0.6264899969100952, 0.05332000181078911, 0.03905700147151947, -0.2545599937438965, 0.8190000057220459, -0.07481999695301056, 0.14342999458312988, 0.006480500102043152, -0.15166999399662018, -0.7053899765014648, 0.3755599856376648, 0.21699999272823334, -0.3568499982357025, -0.004267300013452768, -0.04601699858903885, -0.63850998878479, -0.6990299820899963, 0.3584800064563751, 0.7464100122451782, 0.212909996509552, -0.2534500062465668, 0.06824400275945663, 0.10040999948978424, 0.6213899850845337, -0.2318899929523468, -0.04388900101184845, 0.5128600001335144, 0.03828499838709831, 0.017568999901413918, -0.12580999732017517, 0.3192700147628784, 0.22664999961853027, 0.5204399824142456, 0.44269999861717224, -0.39952000975608826, -0.4012199938297272, 0.19628000259399414, -0.07975099980831146, -0.591920018196106, 0.05147400125861168, 0.24855999648571014, 0.20548999309539795, -0.23342999815940857, -0.17792999744415283, 0.2525700032711029, 0.5158100128173828, -0.09721600264310837, 0.13473999500274658, 0.6139299869537354, 0.3971399962902069, -0.02185099944472313, -0.04022099822759628, -0.4303399920463562, -0.30983999371528625, 0.3047899901866913, 0.41554999351501465, 0.39427000284194946, 0.3068700134754181, -0.0025567999109625816, -0.06969700008630753, 0.11800000071525574, -0.13210999965667725, -0.3522700071334839, -0.7900500297546387, 0.40342000126838684, 0.370959997177124, -0.46050000190734863, 0.21675999462604523, 0.2108200043439865, -0.5036900043487549, -0.09506800025701523, 0.5617700219154358, -0.5183600187301636, 0.1426600068807602, -0.1844799965620041, 0.07818000018596649, 0.26357999444007874, -0.04739199951291084, -0.042725998908281326, 0.24959999322891235, -0.3214600086212158, 0.19346000254154205, -0.41857999563217163, 0.12690000236034393, 0.18389999866485596, 0.039115000516176224, 0.18345999717712402, -0.22371000051498413, -0.14762000739574432, -0.29638001322746277, -0.046549998223781586, -0.07047200202941895, 0.4702500104904175, 0.550819993019104, 0.4477500021457672, 0.15805000066757202, 0.5064200162887573, 0.3126699924468994, -0.3415200114250183, 0.29030001163482666, -0.08523699641227722, -0.12650999426841736, 0.028666000813245773, -0.661080002784729, 0.2445099949836731, -0.04680899903178215, 0.04642099887132645, -0.07426299899816513, 0.07990700006484985, -0.24176999926567078, 0.31172001361846924, 0.31852999329566956, -0.4109500050544739, 0.9584299921989441, 0.4721899926662445, -0.022283999249339104, 0.5471600294113159, 0.35580000281333923, 0.22719000279903412, -0.5806699991226196, 0.18328000605106354, 0.6488800048828125, 0.2794800102710724, 0.2999500036239624, -0.07333700358867645, -0.030724000185728073, -0.11597000062465668, -0.6334900259971619, 0.03931700065732002, -0.037608999758958817, -0.18052999675273895, 0.40149998664855957, -0.18264000117778778, -0.11210999637842178, -0.009101400151848793, -0.7544000148773193, 0.5438600182533264, 0.11708000302314758, -0.24518999457359314, 0.361519992351532, 0.17069999873638153, -0.06318599730730057, 0.39948999881744385, -0.09421700239181519, -0.2666400074958801, 0.2538299858570099, 0.02652899920940399, 0.5087000131607056, 0.8512099981307983, 0.38012999296188354, 0.4152100086212158, -0.2522900104522705, 
0.4528599977493286, 0.07220199704170227, -0.1371700018644333, -0.2994300127029419, -0.48697999119758606, 0.0034153000451624393, 0.2398100048303604, 0.5454000234603882, 0.1453399956226349, -0.06259199976921082, -0.19855999946594238, 0.002239000052213669, -0.1383100003004074, -0.305620014667511, -0.41179001331329346, 0.5314599871635437, 0.3130300045013428, 0.045775000005960464, -0.14941999316215515, 0.4350000023841858, 0.5905399918556213, -0.08189799636602402, -0.1603900045156479, 0.29701998829841614, -0.34442999958992004, 0.25367000699043274, -0.5094000101089478, 0.833299994468689, -0.16143999993801117, -0.14061999320983887, 0.0572969987988472, 0.025728000327944756, 0.082505002617836, -0.1372700035572052, -0.1594800055027008, -0.3707300126552582, -0.01068899966776371, 0.0008281799964606762, -0.25174999237060547, -0.09517599642276764, 0.18896999955177307, 0.09772499650716782, -0.06120600178837776, -0.438400000333786, 0.19196000695228577, -0.2547299861907959, 0.16321000456809998, 0.3323200047016144, -0.06297100335359573, 0.4655500054359436, 0.3285300135612488, -1.579300045967102, -0.5052800178527832, 0.2560499906539917, 0.3677000105381012, 0.14384999871253967, 0.31619998812675476, -0.19886000454425812, -0.480679988861084, 0.39228999614715576, -0.8505799770355225, -0.5113599896430969, 0.11142999678850174, -0.4243899881839752, 0.3996399939060211, -0.24352000653743744, 0.33851999044418335, 0.3697499930858612, -0.5473099946975708, 0.18791000545024872, -0.10228999704122543, -0.16067999601364136, 0.12256000190973282, 0.030709000304341316, -0.4235000014305115, -0.07968500256538391, -0.6065000295639038, 0.09026700258255005, -0.07610800117254257, 0.4264799952507019, 0.1264200061559677, -0.799019992351532, -0.18964000046253204, -0.8261200189590454, 0.13779999315738678, 0.2737799882888794, 0.46663999557495117, 0.14480000734329224, 0.19485999643802643, -0.04396099969744682, -0.43571001291275024, 0.10716000199317932, 0.17622999846935272, -0.4142799973487854, 0.07224299758672714, 0.04208099842071533, -0.01104000024497509, 0.02437400072813034, 0.3019700050354004, -0.4708099961280823, 0.7061799764633179, 0.03355500102043152, 1.2359999418258667, -0.6992200016975403, 0.053755998611450195, 0.25297999382019043, 0.20513999462127686], u'fallen': [-0.3457300066947937, -0.48080000281333923, 0.35850000381469727, 0.222680002450943, -0.18663999438285828, 0.465719997882843, -0.153329998254776, 0.5331299901008606, 0.06461499631404877, -0.9145399928092957, -0.2727999985218048, 0.2406100034713745, -0.23416000604629517, 0.0066281999461352825, -0.2399500012397766, -0.3573499917984009, 0.0867689996957779, -0.2609100043773651, -0.2089499980211258, -0.4496699869632721, -0.23975999653339386, 0.419050008058548, 0.5429999828338623, -0.044176001101732254, 0.3980399966239929, -0.2918800115585327, -0.12766000628471375, -0.2367199957370758, 0.0797479972243309, 0.21373000741004944, 0.23062999546527863, 0.3177100121974945, 0.02435299940407276, -0.31946998834609985, -0.8154299855232239, -0.5670700073242188, 0.13133999705314636, -0.11096999794244766, 0.5985599756240845, 0.049956999719142914, 0.1161699965596199, -0.09246599674224854, -0.3447299897670746, -0.053296998143196106, 0.03928200155496597, 0.1665399968624115, 0.2450300008058548, -0.008183499798178673, 0.1714099943637848, -0.04855100065469742, 0.2791999876499176, -0.3280999958515167, -0.38075000047683716, 0.2630699872970581, -0.13413000106811523, -0.15679000318050385, 0.047731999307870865, 0.290039986371994, 0.3895600140094757, -0.5005999803543091, 0.04409699887037277, 
0.06182999908924103, 0.4687800109386444, 0.1038300022482872, -0.4624199867248535, -0.6405900120735168, 0.13431000709533691, 0.31731000542640686, -0.39403000473976135, 0.5870400071144104, -0.037480998784303665, 0.5260000228881836, -0.4442099928855896, -0.08574900031089783, 0.2064100056886673, 0.32186999917030334, 0.45107999444007874, -0.1625799983739853, -0.13339999318122864, -0.027726000174880028, -0.17395000159740448, 0.024980999529361725, 0.1633400022983551, 0.2846600115299225, -0.028620000928640366, 0.6403200030326843, -0.14905999600887299, 0.02937299944460392, -0.02123500034213066, 0.16978999972343445, 0.8588500022888184, 0.12942999601364136, -0.0077228001318871975, -0.0488400012254715, 0.050342999398708344, -0.02579299919307232, -0.09522400051355362, 0.0408720001578331, 0.3572399914264679, 0.0437610000371933, 0.3576200008392334, 0.31428998708724976, 0.1338600069284439, 0.04005799815058708, 0.12129999697208405, -0.225600004196167, 0.11834000051021576, 0.06663700193166733, -0.08366800099611282, -0.14430999755859375, -0.4903700053691864, -0.06728599965572357, -0.12905000150203705, -0.6591399908065796, 0.032896000891923904, 0.05693800002336502, 0.21222999691963196, -0.21258999407291412, -0.07882600277662277, -0.29148000478744507, 0.17517000436782837, -0.6338199973106384, -0.17017999291419983, 1.089900016784668, -0.2518700063228607, 0.5914300084114075, -0.3244200050830841, 0.08224800229072571, 0.17789000272750854, 0.16469000279903412, -0.39544999599456787, 0.6742500066757202, 0.46522998809814453, 0.5672799944877625, 0.4217199981212616, 0.1795399934053421, 0.03632500022649765, -0.07518800348043442, 0.04693000018596649, -0.19064000248908997, 0.3596400022506714, 0.06165200099349022, -0.23836000263690948, 0.416810005903244, -0.2032800018787384, 0.16428999602794647, -0.07153400033712387, 0.0028574999887496233, -0.0036319000646471977, -0.6676700115203857, 0.13348999619483948, 0.0082547003403306, -0.26715001463890076, 0.05615299940109253, 0.49671998620033264, 0.36664000153541565, -0.055521998554468155, -0.18844999372959137, -0.07959099858999252, 0.14090999960899353, 0.04964600130915642, 0.024606000632047653, 0.09203699976205826, 0.019204000011086464, 0.6639500260353088, 0.14972999691963196, 0.20740999281406403, -0.3931199908256531, -0.3560599982738495, 0.0060132998041808605, -0.4280700087547302, 0.21443000435829163, 0.05213199928402901, 0.19163000583648682, 0.2427300065755844, 0.08962900191545486, 0.4822799861431122, 0.30597999691963196, -0.20327000319957733, 0.6648600101470947, -0.4550600051879883, -0.12246000021696091, 0.26396000385284424, -0.2630600035190582, 0.11626999825239182, -0.18217000365257263, 0.4788399934768677, -0.20708000659942627, 0.09737499803304672, 0.289139986038208, 0.23962000012397766, -0.6081299781799316, 0.3540000021457672, -0.24944999814033508, -0.6883100271224976, 0.08171900361776352, -0.5153499841690063, -0.04387500137090683, 0.36313000321388245, 0.027780000120401382, 0.9591400027275085, 0.07507500052452087, 0.2123900055885315, 0.18105000257492065, -0.7178900241851807, -0.06535500288009644, 0.14541000127792358, -0.07846699655056, -0.16853000223636627, 0.2966099977493286, -0.12816999852657318, 0.017923999577760696, 0.12545999884605408, -0.6294400095939636, 0.23532000184059143, 0.11987999826669693, 0.13718000054359436, 0.3004699945449829, 0.047008998692035675, 0.2837800085544586, 0.46152999997138977, -0.18378999829292297, 0.04102100059390068, -0.45614001154899597, -0.18783000111579895, 0.0486610010266304, -0.1109900027513504, 0.037709999829530716, 0.06535100191831589, 
0.23980000615119934, -0.1848900020122528, -0.37536001205444336, -0.1589300036430359, 0.34064000844955444, 0.0635870024561882, 0.09266600012779236, 0.1684499979019165, 0.04363299906253815, -0.1031700000166893, -0.5142199993133545, 0.1505099982023239, -0.6083400249481201, 0.4562000036239624, -0.5163300037384033, -0.36061999201774597, -0.03053000010550022, 0.25380998849868774, 0.13880999386310577, 0.17648999392986298, 0.11569999903440475, 0.3629100024700165, 0.2630400061607361, 0.29559001326560974, -0.20993000268936157, 0.633620023727417, -0.11676999926567078, 0.3089999854564667, -0.09956300258636475, -0.4043000042438507, 0.12711000442504883, -0.3442299962043762, 0.7283899784088135, -0.39904001355171204, -0.13204999268054962, -0.1973699927330017, 0.5176900029182434, 0.24993999302387238, -0.37863001227378845, -0.2798599898815155, -0.054055001586675644, -0.042118001729249954, 0.22788000106811523, -0.16992999613285065, -0.598360002040863, -0.5868300199508667, -0.04710099846124649, -1.0118999481201172, -0.7867900133132935, 0.27643001079559326, -0.15745000541210175, -0.36579999327659607, -0.27970001101493835, 0.05076799914240837, -0.6947199702262878, -0.07672899961471558, 0.09528599679470062, 0.04050000011920929, -0.3002200126647949, 0.1557600051164627, 0.10869000107049942, 0.4050399959087372, 0.273250013589859, 0.10378000140190125, 0.12896999716758728, 0.31262001395225525, 0.44315001368522644, -0.11080999672412872, -0.3230400085449219, -0.10527999699115753, -0.10307999700307846], u'muddy': [-0.07881399989128113, -0.17417000234127045, -0.35673001408576965, 0.2466599941253662, -0.2605000138282776, -0.3694399893283844, 0.5315300226211548, -0.02218100056052208, 0.6573899984359741, 0.015943000093102455, -0.3164899945259094, 0.0119420001283288, -0.39017000794410706, -0.2204499989748001, -0.13476000726222992, -0.40143999457359314, -0.44703999161720276, -0.009136900305747986, 0.9582200050354004, 0.2531000077724457, 0.09302400052547455, 0.33656999468803406, -0.21744999289512634, 0.005783699918538332, -0.7428900003433228, -0.37738001346588135, 0.4637500047683716, -0.0827070027589798, 0.13620999455451965, 0.7778000235557556, -0.22267000377178192, -0.08745600283145905, -0.17958000302314758, 0.1849299967288971, 0.14875000715255737, 0.7362599968910217, 0.3674300014972687, -0.11518000066280365, -0.14339999854564667, -0.14151999354362488, 0.03879399970173836, 0.19183999300003052, 0.5409299731254578, 0.23617999255657196, 0.3214699923992157, 0.825950026512146, 0.5636600255966187, 0.24332000315189362, -0.23770000040531158, -0.29497000575065613, -0.2948800027370453, -0.008986099623143673, 0.12803000211715698, -0.3925800025463104, 0.27312999963760376, 0.23011000454425812, 0.0424950011074543, -0.7258300185203552, 0.12025000154972076, 0.7104200124740601, -0.17890000343322754, -0.3406600058078766, 0.4323199987411499, 0.0757950022816658, 0.023442000150680542, -0.2718900144100189, 0.5322099924087524, -0.09651099890470505, -0.3058899939060211, -0.013350999914109707, 0.45611000061035156, 0.5356600284576416, -1.0296000242233276, 0.2210800051689148, -0.37136000394821167, -0.5721200108528137, 0.03765600174665451, 0.6896499991416931, 0.4278300106525421, -0.6969199776649475, 0.66184002161026, -0.16811999678611755, 0.05260099843144417, -0.3428800106048584, 0.004031000193208456, -0.10653000324964523, 0.12364000082015991, -0.0924450010061264, 0.39045000076293945, -0.04288699850440025, 0.2729400098323822, 0.43827998638153076, 0.38708001375198364, -0.17149999737739563, -0.4027400016784668, 0.13973000645637512, 0.6298099756240845, 
-0.011714999563992023, 0.009743199683725834, 0.05212799832224846, 0.15103000402450562, 0.22145000100135803, -0.5778099894523621, 0.17749999463558197, -1.0477999448776245, 0.37022000551223755, 0.8099200129508972, 0.06441599875688553, -0.08541099727153778, -0.047821998596191406, -0.4258599877357483, -0.3419800102710724, -0.18589000403881073, -0.04604100063443184, -0.17847000062465668, -0.1782200038433075, 0.5251299738883972, 0.014162000268697739, 0.03908099979162216, -0.37891000509262085, 0.1751900017261505, -0.37623998522758484, 0.6626899838447571, 0.311379998922348, 0.11912000179290771, 0.4784199893474579, 0.14621999859809875, -0.271589994430542, -0.27055999636650085, -0.32284998893737793, -0.6636000275611877, 0.8454700112342834, 0.47560998797416687, 0.5524299740791321, -0.024755999445915222, -0.2996799945831299, 0.39542001485824585, 0.2985000014305115, -0.1035500019788742, -0.017914999276399612, 0.14267000555992126, -0.0602170005440712, 0.038982000201940536, -0.5570799708366394, -0.49202999472618103, -0.08960700035095215, 0.050422001630067825, 0.7524999976158142, -0.19679999351501465, 0.025452999398112297, -0.7284600138664246, -0.10313999652862549, -0.4360800087451935, -0.6318399906158447, 0.6423500180244446, 0.2569600045681, 0.7495200037956238, -0.3081600069999695, 0.8285300135612488, 0.8631899952888489, -0.35795000195503235, -0.8553000092506409, 0.05733000114560127, 0.09184200316667557, -0.02145799994468689, -0.21153999865055084, -0.00658079981803894, 0.38378000259399414, -0.561269998550415, -0.5382099747657776, 0.30138999223709106, 0.8531699776649475, 0.4457800090312958, -0.4734399914741516, -0.6007800102233887, -0.17994000017642975, 0.096110999584198, -0.15922999382019043, -0.2993899881839752, -0.1856600046157837, 0.2109300047159195, 0.7297400236129761, 0.12883000075817108, 0.8126599788665771, -0.50382000207901, -0.5432000160217285, 0.4703800082206726, 0.10435999929904938, 0.20182999968528748, 0.256089985370636, 0.014514000155031681, -0.0008769099949859083, -0.414249986410141, -0.5109500288963318, 0.21852999925613403, 0.07348299771547318, 0.002150400076061487, -0.12071000039577484, -0.3222000002861023, 0.24390999972820282, 0.6590399742126465, 0.2817299962043762, 0.5123699903488159, 0.15294000506401062, 0.2946699857711792, -0.09406699985265732, 0.40105000138282776, -0.001464199973270297, -0.3610199987888336, 0.11326000094413757, -0.5136100053787231, 0.30928000807762146, -0.2325499951839447, -0.2579900026321411, -0.26912999153137207, 0.5395200252532959, 0.020893000066280365, -0.3863300085067749, 0.32642999291419983, -0.2980499863624573, 0.8685299754142761, -0.05977199971675873, 0.3145900070667267, -0.19314999878406525, 0.5576099753379822, 0.12759000062942505, -0.21573999524116516, -0.11320000141859055, 0.12502999603748322, 0.08494400233030319, 0.5345100164413452, 0.2934400141239166, -0.5549299716949463, -0.05372900143265724, 0.3565100133419037, 0.5420100092887878, -0.08880999684333801, -0.16574999690055847, -0.4345499873161316, -0.15087999403476715, -0.08510400354862213, 0.6943699717521667, -0.10022000223398209, -0.7767300009727478, -0.5959699749946594, -0.5797500014305115, 0.2952600121498108, 0.21301999688148499, -0.053300000727176666, 0.17399999499320984, 0.33539000153541565, -0.14776000380516052, 0.24818000197410583, -0.4929099977016449, 0.4514800012111664, -0.09160599857568741, -0.21730999648571014, 0.0976099967956543, 0.1636199951171875, 0.30469000339508057, 0.39013001322746277, -0.0978889986872673, -0.3083299994468689, -0.2888599932193756, 0.09766600281000137, 0.435479998588562, 
-0.43077000975608826, -0.40619999170303345, -0.17330999672412872, -0.3517000079154968, -0.3675999939441681, -0.12921999394893646, -0.21693000197410583, 0.2771199941635132, 0.0027826998848468065, 0.04095299914479256, -0.268779993057251, -0.022864999249577522, -0.16175000369548798, -0.06718999892473221, -0.11947999894618988, -0.10559999942779541, 0.7979400157928467, -0.6437100172042847, -0.28648999333381653, -0.06192699819803238, 0.09409199655056, -0.060812000185251236, 0.17937999963760376, -0.3142299950122833, -0.3785800039768219, 0.03545700013637543, 0.046135999262332916, 0.7069000005722046, -0.19750000536441803, -0.06167199835181236, 0.33375000953674316, 0.8209599852561951, -0.25804999470710754, 0.2644299864768982], u'sliced': [0.2107599973678589, -0.047933001071214676, 0.21748000383377075, 0.0011232000542804599, 0.5576000213623047, -0.0896259993314743, -0.20023000240325928, 0.05078800022602081, -0.3491100072860718, -0.04533100128173828, 0.5632299780845642, 0.19036999344825745, 0.027163999155163765, 0.25964999198913574, -0.5776399970054626, 0.39862000942230225, -0.3337700068950653, -0.16915999352931976, -0.32541000843048096, 0.6125800013542175, -0.006275299936532974, 0.23499000072479248, -0.11894000321626663, -0.48583000898361206, 0.20545999705791473, -0.47143998742103577, -0.24835999310016632, 0.031012000516057014, -0.5071300268173218, -0.6012200117111206, -0.735260009765625, 0.556689977645874, 0.1741199940443039, 0.08443299680948257, -0.4653699994087219, 0.24773000180721283, -0.07545100152492523, 0.05602699890732765, -0.38787999749183655, 0.16547000408172607, -0.09047500044107437, -0.38631999492645264, -0.08863899856805801, -0.0994039997458458, 0.42263999581336975, 0.5582200288772583, -0.1792600005865097, 0.17312000691890717, -0.4768100082874298, 0.32565000653266907, -0.3986800014972687, 0.0023169999476522207, 0.38400998711586, 0.4982199966907501, -0.8013799786567688, -1.0398000478744507, -0.21083000302314758, 0.15699000656604767, -0.014328000135719776, 0.32339999079704285, 0.013513999991118908, 0.1149199977517128, -0.5245800018310547, 0.2056799978017807, 0.08072599768638611, -0.11670999974012375, -0.1880200058221817, -0.0656680017709732, 0.24299000203609467, -0.27748000621795654, 0.29471999406814575, 0.41492000222206116, 0.3015199899673462, 0.49851998686790466, 0.03595900163054466, 0.4825499951839447, 0.7806599736213684, 0.15567000210285187, -0.20543000102043152, 0.24267999827861786, -0.21353000402450562, 0.09734299778938293, 0.456930011510849, -0.15101000666618347, 0.08085399866104126, 0.0710889995098114, -0.7318599820137024, -0.11722999811172485, -0.17264999449253082, -0.12689000368118286, 0.7570499777793884, -0.06881500035524368, -0.1010499969124794, -0.4368300139904022, -0.1783899962902069, -0.11034999787807465, -0.1842299997806549, 0.7304099798202515, 0.23061999678611755, 0.7373300194740295, 0.09120800346136093, 0.010728999972343445, 0.5657299757003784, -0.7963100075721741, -0.5873799920082092, -0.17961999773979187, 0.24255000054836273, 0.3475399911403656, -0.5513899922370911, -0.04035799950361252, 0.2726899981498718, 0.7152900099754333, 0.5270400047302246, -0.805840015411377, -0.14219999313354492, 0.15900999307632446, -0.04412499815225601, 0.542739987373352, 0.7749999761581421, 0.016600999981164932, -0.10296999663114548, -0.6305999755859375, 0.1536799967288971, 0.0863339975476265, -0.36653000116348267, 0.49483999609947205, -0.40782999992370605, -0.07726799696683884, -0.25786998867988586, 0.39208000898361206, -0.15512000024318695, 1.0931999683380127, -0.05376499891281128, 
0.5036900043487549, -0.1034500002861023, 0.46035999059677124, -0.10429000109434128, 0.17625999450683594, -0.5569800138473511, 0.17534999549388885, 0.6125900149345398, 0.06643100082874298, -0.363290011882782, 0.12049999833106995, -0.6060100197792053, 0.026102999225258827, 0.04826999828219414, -0.458979994058609, 0.4893699884414673, -0.5557900071144104, -0.5260300040245056, 0.23925000429153442, 0.6039800047874451, 0.3465000092983246, -0.11094000190496445, -0.2414100021123886, 0.14542999863624573, -0.1491599977016449, 0.03743400052189827, 0.39559999108314514, -0.335750013589859, 0.2091200053691864, -0.2062699943780899, -0.005764800123870373, 0.5204100012779236, -0.39566999673843384, 0.1855500042438507, -0.10662999749183655, -0.2376299947500229, -0.884090006351471, 0.05832599848508835, -0.33302998542785645, -0.3935900032520294, -0.021098000928759575, -0.2908700108528137, -0.04408600181341171, -0.17467999458312988, 0.07216800004243851, 0.5371299982070923, -0.32315999269485474, 0.3433400094509125, 0.1335899978876114, 0.4369199872016907, -0.5641700029373169, -0.01435100007802248, -0.7563499808311462, 0.26868999004364014, 0.2822200059890747, 0.3845199942588806, 0.8928800225257874, 0.08329000324010849, 1.0277999639511108, -0.14313000440597534, 0.28001999855041504, 0.4681699872016907, -0.018254000693559647, -0.5981299877166748, -0.31025999784469604, -0.006105899810791016, -0.050613999366760254, 0.25582998991012573, -0.12011999636888504, 0.16825999319553375, 0.052691999822854996, -0.13078999519348145, 0.768310010433197, 0.06616699695587158, 0.3956199884414673, -0.058097999542951584, 0.4036400020122528, 0.36719000339508057, -0.1569100022315979, -0.12772999703884125, 0.04493099823594093, 0.463019996881485, -0.08798199892044067, 1.0384000539779663, -0.7986099720001221, 0.3396899998188019, -0.12001000344753265, 0.6549000144004822, 0.5654199719429016, -0.6055799722671509, -0.2078000009059906, -0.09837900102138519, -0.34630998969078064, -0.11947000026702881, -0.8618199825286865, -0.014445999637246132, -0.2838500142097473, 0.20781999826431274, 0.15702000260353088, 0.11050999909639359, -0.2914299964904785, 0.19192999601364136, 0.22477999329566956, 0.18693000078201294, 0.1137399971485138, -0.7406499981880188, -0.4259200096130371, -0.21132999658584595, -0.2338699996471405, -0.03520699962973595, -0.2647300064563751, -0.6987199783325195, -0.17666999995708466, 0.29482999444007874, 0.23785999417304993, 0.2607100009918213, -1.0263999700546265, 0.5579599738121033, 0.47301000356674194, 0.20527000725269318, -0.904229998588562, -0.13860000669956207, 0.1381099969148636, -0.3496699929237366, 0.08154399693012238, -0.3795900046825409, 0.5296099781990051, 0.4538800120353699, 0.03888300061225891, 0.03604999929666519, 0.323529988527298, 0.007317000068724155, -0.3905099928379059, -0.36879000067710876, 0.17696000635623932, -0.22776000201702118, 0.10158000141382217, -0.10238000005483627, 0.3333800137042999, -0.3198600113391876, 0.9359400272369385, -0.35725998878479004, 0.19040000438690186, -0.22026999294757843, -0.07388900220394135, -1.3005000352859497, -1.041700005531311, -0.16673000156879425, -0.19014999270439148, 0.35234999656677246, -0.009963000193238258, 0.2659299969673157, 0.10644999891519547, 0.39772000908851624, -0.19221000373363495, -0.1766500025987625, 0.6199600100517273, 0.20524999499320984, 0.041377998888492584, 0.09436800330877304, -0.26298999786376953, -0.1604200005531311, -0.14234000444412231, -0.35842999815940857, -0.3224000036716461, 0.4677000045776367, -0.2793999910354614], u'sharp': [0.1820800006389618, 
-0.010888000018894672, -0.04377700015902519, -0.17396999895572662, -0.2607699930667877, -0.27008000016212463, -0.059429001063108444, 0.017322000116109848, 0.3670800030231476, -1.7067999839782715, -0.3711700141429901, 0.5894799828529358, -0.072564996778965, 0.12422999739646912, 0.3122600018978119, -0.049323998391628265, -0.3757599890232086, 0.541920006275177, 0.14733999967575073, -0.40362998843193054, -0.05249800160527229, 0.14622999727725983, 0.4770500063896179, 0.20205999910831451, 0.4202899932861328, -0.07429099828004837, 0.013718999922275543, -0.5710600018501282, -0.04606800153851509, -0.04977000132203102, -0.3152399957180023, 0.5346199870109558, -0.6736999750137329, 0.43112000823020935, -1.2761000394821167, -0.15185999870300293, -0.2172500044107437, -0.2172199934720993, 0.33103999495506287, 0.35148000717163086, 0.14437000453472137, 0.7562400102615356, 0.6957200169563293, -0.17324000597000122, -0.2513599991798401, 0.16921000182628632, -0.6578699946403503, -0.29142001271247864, -0.2347400039434433, 0.24303999543190002, -0.31367000937461853, 0.17418000102043152, 0.3158699870109558, 0.058115001767873764, -0.05507100000977516, -0.3654400110244751, -0.581570029258728, 0.11183000355958939, 0.09990499913692474, 0.23859000205993652, 0.46869999170303345, 0.2972100079059601, 0.3546200096607208, 0.11441999673843384, -0.10734999924898148, -0.5180400013923645, -0.559719979763031, 0.0006833000225014985, 0.13270999491214752, -0.44642001390457153, -0.6218400001525879, 0.23454999923706055, 0.2815600037574768, -0.15977999567985535, 0.5632200241088867, -0.4138599932193756, -0.3354699909687042, 0.21207000315189362, -0.8078100085258484, -0.7756800055503845, -0.19513000547885895, -0.3806599974632263, 0.016131000593304634, 0.01395300030708313, 0.27757999300956726, -0.17463000118732452, -0.23750999569892883, 0.00924570020288229, 0.11361999809741974, 0.2333499938249588, 0.6911500096321106, 0.6723499894142151, -0.7782099843025208, -0.38098999857902527, 0.22100000083446503, -0.0461370013654232, -0.21775999665260315, 0.18306000530719757, -0.04059699922800064, 0.06375200301408768, -0.5095400214195251, -0.045708999037742615, -0.322409987449646, 0.2044599950313568, -0.5356600284576416, -0.0019886998925358057, -0.017006000503897667, 0.30838000774383545, -0.17782999575138092, 0.053777001798152924, -0.13872000575065613, -0.15300999581813812, 0.21167999505996704, -0.036173999309539795, 0.39851999282836914, 0.0693420022726059, -0.15028999745845795, 0.1383499950170517, -0.5005199909210205, -0.5591800212860107, -0.17444999516010284, -0.5323399901390076, -0.6340199708938599, -0.3528600037097931, -0.24045999348163605, 0.6266499757766724, -0.48003000020980835, 0.06601700186729431, 0.4816400110721588, -0.4045200049877167, -0.3131299912929535, 0.9553800225257874, -0.6645500063896179, 0.47279998660087585, -0.5767099857330322, -0.17880000174045563, -0.532800018787384, 0.025496000424027443, 0.504830002784729, 0.3666999936103821, 0.4190399944782257, 0.19946999847888947, 0.08709000051021576, -0.19077999889850616, -0.12284000217914581, 0.5391799807548523, -0.18222999572753906, -0.08961299806833267, 0.14024999737739563, -0.2888300120830536, 0.4081200063228607, 0.3119199872016907, 0.11016999930143356, 0.012988000176846981, 0.2597399950027466, -0.0685420036315918, -0.15929999947547913, -0.11309999972581863, 0.20769000053405762, -0.04625999927520752, -0.07197199761867523, -0.05009299889206886, 0.6161999702453613, 0.12675000727176666, -0.11184000223875046, 0.2989000082015991, 0.4219900071620941, -0.02936900034546852, -0.054492998868227005, 
-0.0467739999294281, -0.2503100037574768, -0.24754999577999115, -0.021794000640511513, -0.08595000207424164, -0.4269599914550781, -0.27796000242233276, -0.17882999777793884, -0.16179999709129333, 0.19046999514102936, 0.7340899705886841, 0.15117000043392181, -0.1517000049352646, -0.2768999934196472, -0.060825999826192856, -0.33799999952316284, 0.10993999987840652, -0.12279000133275986, -0.12758000195026398, -0.44975000619888306, 0.6259400248527527, -0.22095000743865967, 0.12197999656200409, -0.10080999881029129, 0.02040099911391735, -0.20777000486850739, 0.8035299777984619, -0.5374100208282471, -0.5461699962615967, 0.14924000203609467, 0.14789000153541565, 0.6489999890327454, -0.4084799885749817, 0.14736999571323395, 0.23916000127792358, -0.5307700037956238, 0.3693099915981293, -0.07921099662780762, 0.538919985294342, -0.3784399926662445, -0.10582000017166138, 0.30098000168800354, -0.38196998834609985, -0.029196999967098236, -0.2081100046634674, 0.12791000306606293, -0.10678999871015549, 0.3792699873447418, 0.048955000936985016, -0.3490400016307831, -0.16338999569416046, -0.1262899935245514, -0.07849100232124329, 0.294840008020401, 0.21517999470233917, -0.22036999464035034, 0.07375200092792511, -0.16453999280929565, 0.20116999745368958, 0.024142000824213028, -0.2982099950313568, -0.1174900010228157, -0.1789100021123886, 0.15544000267982483, -0.9215099811553955, -0.2559199929237366, 0.4379099905490875, -0.3477500081062317, -0.22641000151634216, -0.36414000391960144, 0.29458001255989075, 0.7387199997901917, -0.6427900195121765, -0.47808998823165894, 0.16975000500679016, -0.8323699831962585, -0.4183099865913391, -0.016367999836802483, -0.2779200077056885, 0.22067999839782715, -0.31011998653411865, 0.25356999039649963, -0.2697100043296814, 0.28251001238822937, 0.23994000256061554, -0.056411001831293106, 0.0537789985537529, 0.487419992685318, -0.18232999742031097, -0.7522600293159485, 0.8946400284767151, -0.3132300078868866, 0.18355999886989594, -0.6101400256156921, 0.2512199878692627, -0.0027759999502450228, -0.3690600097179413, -0.10540000349283218, 0.3275899887084961, -0.7913600206375122, 0.24235999584197998, 0.18066999316215515, -0.18775999546051025, -0.1347299963235855, -0.2576200067996979, 0.15816999971866608, -0.2032500058412552, -0.797730028629303, 0.25547999143600464, -0.1423799991607666, -0.05604200065135956, -0.1257999986410141, 0.5911300182342529, 0.13524000346660614, 0.23029999434947968, 0.22877000272274017, -0.5716699957847595, 0.20145000517368317, -0.512440025806427, 0.29429998993873596, 0.2880299985408783, 0.3916800022125244, 0.36261001229286194, 0.34869998693466187, 0.2337300032377243, -0.10119999945163727, 0.9408900141716003, -0.025616999715566635, -0.00802330020815134, 0.37257999181747437, -0.25165998935699463], u'thick': [-0.514930009841919, -0.2964499890804291, -0.18660999834537506, -0.5892500281333923, -0.45903998613357544, -0.2935500144958496, 0.7203199863433838, 0.947160005569458, 0.28992000222206116, -1.1787999868392944, -0.004043799825012684, 0.508899986743927, -0.33456000685691833, 0.19781999289989471, -0.33687999844551086, -0.004278200212866068, -0.3326700031757355, 0.35989999771118164, 0.17470000684261322, 0.1910499930381775, 0.0029859000351279974, -0.38877999782562256, -0.5571500062942505, 0.5522599816322327, -0.16118000447750092, 0.04484599828720093, 0.643339991569519, -0.1589599996805191, -0.5793700218200684, 0.21292999386787415, -0.24556000530719757, 0.4159500002861023, -0.816100001335144, -0.27020999789237976, -0.1758500039577484, 0.09703599661588669, 
-0.17684000730514526, -0.08414799720048904, 0.20562000572681427, 0.5306900143623352, -0.49077001214027405, -0.23064999282360077, 0.26736998558044434, 0.05273300036787987, 0.351639986038208, 0.22318999469280243, 0.3655399978160858, -0.08468399941921234, -0.2989000082015991, -0.32420000433921814, 0.1418199986219406, 0.34852999448776245, -0.01830199919641018, -0.3396100103855133, -0.20693999528884888, 0.1520799994468689, -0.06948699802160263, -1.0461000204086304, 0.34022998809814453, 0.17964999377727509, 0.038221001625061035, -0.04958200082182884, 0.28224000334739685, 0.09704899787902832, 0.17479999363422394, -0.16951000690460205, 0.0747620016336441, 0.5671600103378296, 0.2572900056838989, -0.6130099892616272, 0.2936899960041046, -0.23586000502109528, -0.19133000075817108, -0.29927000403404236, -0.19880999624729156, -0.05774800106883049, 0.488070011138916, -0.029380999505519867, -0.020611999556422234, -0.2736400067806244, -0.06873500347137451, 0.03603300079703331, 0.33695998787879944, -0.3796600103378296, -0.23257000744342804, 0.18604999780654907, 0.36011001467704773, 0.0055518001317977905, -0.11665999889373779, 0.23920999467372894, 0.11078999936580658, -0.357699990272522, -0.41269999742507935, 0.2622799873352051, 0.00714130001142621, 0.16619999706745148, -0.3163900077342987, 0.5895900130271912, 0.5920299887657166, -0.09269700199365616, 0.1735599935054779, 0.10626000165939331, -0.31147000193595886, -0.6957200169563293, -0.37046000361442566, 0.04158300161361694, 0.45528000593185425, 0.19585999846458435, -0.5260900259017944, 0.08729799836874008, -0.4062199890613556, 0.4324299991130829, -0.0709879994392395, -0.9937899708747864, -0.28216999769210815, 0.16584999859333038, -0.4386399984359741, 0.03857700154185295, 0.09362400323152542, -0.6528900265693665, -0.34365999698638916, -0.8098800182342529, -0.007272500079125166, 0.6051899790763855, -0.3784399926662445, 0.30730000138282776, 0.3163599967956543, 0.08342500030994415, 0.09172499924898148, 0.1280599981546402, 0.12716999650001526, 1.0408999919891357, -0.39458000659942627, 0.3615399897098541, 0.600059986114502, -0.02810000069439411, -0.12154000252485275, 0.3606399893760681, 0.1676200032234192, -0.037337999790906906, 0.7956500053405762, 0.16572999954223633, -0.17940999567508698, -0.2937900125980377, -0.3210099935531616, 0.16298000514507294, 0.14907999336719513, -0.1882299929857254, -0.3369700014591217, 0.07560200244188309, -0.18257999420166016, 0.10046999901533127, -0.25501999258995056, -0.19452999532222748, 0.34624001383781433, -0.40112000703811646, 0.39899998903274536, -1.0246000289916992, 0.5758500099182129, 0.3787899911403656, -0.38738998770713806, -0.09255299717187881, 0.6556900143623352, -0.31283000111579895, 0.27390000224113464, -0.26444000005722046, 0.3845599889755249, 0.29747000336647034, 0.0189800001680851, -0.88919997215271, 0.12115000188350677, 0.03810599818825722, -0.3666499853134155, 0.3608799874782562, -0.12804999947547913, -0.47887998819351196, 0.09923899918794632, 0.15300999581813812, 0.3521299958229065, -1.2519999742507935, 0.34845998883247375, -0.1764799952507019, 0.06211499869823456, -0.3126699924468994, -0.25266000628471375, -0.8638799786567688, 0.7656400203704834, 0.2761699855327606, 0.18177999556064606, 0.3330099880695343, 0.17312000691890717, 0.9821799993515015, 0.3219600021839142, 0.19175000488758087, 0.8319600224494934, -0.7551299929618835, -0.395220011472702, 0.26172998547554016, -0.012404000386595726, 0.11922000348567963, 0.6129400134086609, -0.01571900025010109, 0.43977001309394836, 0.46632999181747437, 
0.21753999590873718, -0.07754600048065186, -0.0755470022559166, -0.19404000043869019, -0.18494999408721924, -0.22032000124454498, 0.3421800136566162, -0.09079200029373169, -0.08360700309276581, -0.4997900128364563, 0.0795229971408844, -0.005776300095021725, 0.5695000290870667, -0.558139979839325, 0.3153499960899353, -0.5265200138092041, 0.9581400156021118, -0.0299260001629591, 0.5057500004768372, -0.049219001084566116, -0.2959100008010864, 0.368120014667511, -0.2703399956226349, -0.4646100103855133, -0.3582099974155426, -0.5222899913787842, 0.5680500268936157, 0.3237000107765198, -0.3876200020313263, -0.20975999534130096, 0.04035099968314171, -1.0002000331878662, 0.19988000392913818, -0.32280999422073364, -0.4429900050163269, 0.06591100245714188, 0.275519996881485, -0.10057999938726425, -0.19377000629901886, -0.03002000041306019, -1.033400058746338, -0.22383999824523926, 0.3404400050640106, 0.0896570011973381, 0.1090800017118454, -0.7318099737167358, -0.05723100155591965, -0.3862299919128418, 0.6227499842643738, -1.0738999843597412, 0.26677998900413513, 0.1390099972486496, -0.312610000371933, -0.2752299904823303, -0.22729000449180603, 0.38969001173973083, 0.48104000091552734, 0.22227999567985535, 0.3473599851131439, 0.012222999706864357, 0.1670999974012375, -0.07928100228309631, -0.3228600025177002, -0.25297999382019043, 0.2726399898529053, 0.0070720999501645565, -0.2275799959897995, -0.23330999910831451, -0.3087500035762787, 0.2050500065088272, -0.5001199841499329, 0.5350900292396545, -0.6263499855995178, 0.022492000833153725, -0.9655799865722656, -0.25523000955581665, 0.2500999867916107, -0.28856998682022095, -0.17476999759674072, -0.40582001209259033, -0.32074999809265137, -0.03718600049614906, -0.026743000373244286, 0.7074499726295471, 0.39827999472618103, 0.008241499774158001, 0.6285600066184998, 0.4882200062274933, -0.5789300203323364, -0.16649000346660614, 0.256089985370636, 0.3158000111579895, 0.09753400087356567, 0.28415000438690186, -0.3956199884414673, 0.16338999569416046], u'open': [-0.26464998722076416, -0.24112999439239502, -0.09739299863576889, -0.5604299902915955, 0.26969000697135925, 0.5215700268745422, -0.33869001269340515, 0.04112499952316284, 0.41211000084877014, -1.5943000316619873, 0.5292199850082397, 0.46474000811576843, -0.07146800309419632, 0.19329999387264252, -0.7118399739265442, -0.10515999794006348, -0.3193399906158447, 0.26642000675201416, 0.4515100121498108, 0.5453799962997437, 0.13933999836444855, -0.3451099991798401, -0.2583500146865845, -0.2429399937391281, -0.32396000623703003, -0.18738999962806702, 0.1473200023174286, -0.00409960001707077, -0.04346499964594841, 0.2381500005722046, 0.15484000742435455, 0.20076000690460205, 0.20634999871253967, 0.41098999977111816, -0.7900699973106384, 0.17083999514579773, 0.16689999401569366, -0.5867800116539001, -0.2717300057411194, 0.25887998938560486, -0.09176500141620636, -0.270440012216568, -0.130840003490448, 0.25999000668525696, -0.1735299974679947, 0.2452400028705597, 0.3755599856376648, 0.42925000190734863, -0.7740399837493896, 0.06834699958562851, 0.272379994392395, -0.2864300012588501, 0.17208999395370483, -0.289110004901886, -0.3390200138092041, 0.13516999781131744, 0.10064999759197235, -0.04580799862742424, 0.47626999020576477, -0.2399899959564209, 0.021709999069571495, -0.1357100009918213, -0.2360599935054779, 0.18774999678134918, -0.5312399864196777, -0.7277399897575378, 0.3435400128364563, -0.04974900186061859, -0.3615800142288208, -0.3655099868774414, -0.29137998819351196, -0.26010000705718994, 
0.6204699873924255, 0.530709981918335, 0.24139000475406647, 0.17337000370025635, -0.18553000688552856, -0.06481699645519257, -0.1956299990415573, -0.48087000846862793, -0.02090900018811226, -0.393530011177063, 0.0015318000223487616, -0.17020000517368317, 0.3376300036907196, -0.31593000888824463, 0.04390000179409981, 0.1082099974155426, 0.11032000184059143, -0.24232999980449677, 0.11448000371456146, -0.7907699942588806, -0.22518999874591827, 0.04301900044083595, 0.328110009431839, -0.24589000642299652, -0.144679993391037, 0.427700012922287, -0.14116999506950378, -1.118499994277954, 0.5083000063896179, -0.10329999774694443, -0.5839899778366089, -0.43988001346588135, -0.2465900033712387, 0.14541999995708466, -0.18082000315189362, -0.23791000247001648, -0.7254999876022339, -0.23883000016212463, -0.09284599870443344, -0.6507099866867065, 0.050349000841379166, -0.27963000535964966, -0.5459100008010864, 0.3119199872016907, -0.36952000856399536, 0.21613000333309174, 0.43055999279022217, 0.28519999980926514, -0.3432599902153015, -0.013492000289261341, 0.3891800045967102, -0.08618699759244919, 0.09626200050115585, -0.193790003657341, -0.125560000538826, 0.09884700179100037, -0.3465999960899353, -0.21283000707626343, 0.14277000725269318, 0.4727100133895874, 0.12623000144958496, -0.28101998567581177, -0.3956100046634674, 0.09313700348138809, 0.5503799915313721, 0.23587000370025635, 0.33945000171661377, -0.09863100200891495, 0.1770000010728836, 0.02243500016629696, 0.0913499966263771, 0.5608000159263611, -0.825219988822937, -0.5847300291061401, 0.1701900064945221, 0.29923000931739807, -0.469539999961853, 0.35457998514175415, 0.3734999895095825, -0.4604499936103821, -0.10236000269651413, -0.11387000232934952, 0.6621900200843811, -0.04563299939036369, -0.5354999899864197, 0.22634999454021454, 0.32907000184059143, 0.33577999472618103, -0.05077299848198891, 0.16997000575065613, -0.3265700042247772, 0.18708999454975128, -0.31321999430656433, 0.12963999807834625, 0.34373000264167786, -0.004516200162470341, 0.41677001118659973, -0.29142001271247864, 0.01789500005543232, 0.23667000234127045, -0.3968900144100189, 0.14988000690937042, 0.021161999553442, 0.024373000487685204, 0.14212000370025635, 0.4034000039100647, 0.08564399927854538, 0.17328999936580658, 0.5354400277137756, 0.02335299924015999, 0.23824000358581543, -0.08795300126075745, -0.0633929967880249, 0.012839999981224537, -0.3515399992465973, 0.4313099980354309, 0.4130699932575226, 0.21199999749660492, 0.0728600025177002, 0.19044999778270721, -0.15129999816417694, 0.012702999636530876, 0.32916000485420227, -0.06260000169277191, 0.15835000574588776, 0.13451999425888062, -0.4215700030326843, 0.16008999943733215, 1.5988999605178833, -0.02271600067615509, -0.4650599956512451, -0.23494000732898712, -0.04673299938440323, -0.06047400087118149, -0.6405500173568726, -0.6226199865341187, 0.018470000475645065, 0.08370800316333771, -0.22382000088691711, -0.12400999665260315, -0.24472999572753906, -0.03831300139427185, -0.3508700132369995, -0.3174000084400177, -0.10279999673366547, 0.19539999961853027, -0.010840999893844128, -0.396699994802475, 0.8539100289344788, 0.042472999542951584, -0.24794000387191772, -0.2041500061750412, -0.38185998797416687, 0.1823900043964386, -0.5756499767303467, -0.35771000385284424, 0.1396999955177307, -0.1831900030374527, 0.5048900246620178, -0.16019000113010406, 0.3657900094985962, -0.17396999895572662, 0.31624001264572144, 0.3702000081539154, 0.03533000126481056, 0.3388800024986267, -0.531499981880188, 0.2987099885940552, 
-0.900600016117096, 0.24783000349998474, 0.050606999546289444, -0.03163199871778488, -0.1070299968123436, 0.20040999352931976, -0.10160999745130539, -0.2618800103664398, -0.11191999912261963, -0.6275500059127808, 0.31905999779701233, 0.22390000522136688, -0.19191999733448029, -0.7875099778175354, -0.39807000756263733, -0.20454999804496765, 0.3601599931716919, 0.056689999997615814, 0.01075700018554926, 0.704479992389679, 0.34637999534606934, -0.6401200294494629, -0.15910999476909637, -0.25178998708724976, 0.20172999799251556, -0.3168399930000305, 0.32850000262260437, 0.2272700071334839, 0.16548000276088715, -0.2471199929714203, 0.03987099975347519, 0.5881199836730957, 0.9093800187110901, -0.15591999888420105, -0.03474099934101105, -0.7272599935531616, -1.8011000156402588, 0.2503199875354767, 0.3488999903202057, 0.2500700056552887, -0.15343999862670898, -0.6090899705886841, 0.458979994058609, 0.1401199996471405, -0.6883699893951416, -0.32242000102996826, -0.2641800045967102, -0.06370200216770172, 0.08653999865055084, -0.20629000663757324, -0.15062999725341797, -0.1339299976825714, -0.3808700144290924, 0.9434199929237366, 0.4322499930858612, 0.8225700259208679, -0.17910000681877136, -0.38113999366760254, 0.5359899997711182, 0.2935999929904938], u'runny': [0.19286000728607178, 0.5246999859809875, 0.17779000103473663, -0.24163000285625458, -0.09245999902486801, -0.32141000032424927, -0.09771399945020676, -0.11253000050783157, 0.17941999435424805, 0.38166001439094543, -0.36983999609947205, -0.020517000928521156, -0.022586999461054802, -0.24672000110149384, -0.009180399589240551, -0.2803100049495697, -0.3743700087070465, -0.6874899864196777, 0.2502700090408325, 0.24873000383377075, 0.2297700047492981, 0.1399800032377243, -0.25547000765800476, 0.529770016670227, -0.9703999757766724, -0.05473899841308594, 0.5150600075721741, -0.9917299747467041, 0.3632499873638153, -0.2910600006580353, -0.2639000117778778, 0.4864799976348877, 0.12269999831914902, -0.4140399992465973, 0.5846199989318848, 0.6188600063323975, -0.7126299738883972, 0.36994999647140503, 0.023276999592781067, 0.2916400134563446, 0.25898998975753784, 0.46786999702453613, 0.8884900212287903, -0.2933099865913391, -0.018449999392032623, 0.5474399924278259, -0.2935999929904938, 0.9279800057411194, -0.16306999325752258, -0.04763999953866005, 0.9708600044250488, -0.35806000232696533, 0.5613600015640259, -0.18151000142097473, -0.8845000267028809, -0.2004999965429306, -0.4787899851799011, -0.5477499961853027, 0.2386700063943863, 0.09669200330972672, 0.14363999664783478, -0.12374000251293182, -0.6440100073814392, 0.7576500177383423, -0.36932000517845154, -0.2420700043439865, 0.9833499789237976, -0.04442699998617172, 0.09197299927473068, -0.5966600179672241, 0.36030998826026917, 0.020627999678254128, -0.030616000294685364, 0.2712799906730652, 0.7761200070381165, -0.5695599913597107, 0.36932000517845154, 0.6828699707984924, -0.3724299967288971, 0.006556100212037563, -0.42357000708580017, -0.1747100055217743, -0.341729998588562, -0.49873000383377075, 0.18637999892234802, 0.25102999806404114, 0.983680009841919, -0.056171998381614685, 0.05138000100851059, -0.05154100060462952, -0.44710999727249146, 0.0005206000059843063, -0.20145000517368317, 0.5123299956321716, -0.6048399806022644, -0.10231000185012817, -0.0077148000709712505, 0.5864700078964233, -0.2875100076198578, 0.5553100109100342, 0.5331900119781494, -0.43046998977661133, -0.45135000348091125, 0.2913999855518341, -0.6075500249862671, 0.3085300028324127, -0.7626000046730042, 0.03194800019264221, 
-0.7636200189590454, 0.17422999441623688, -0.1512800008058548, -0.12598000466823578, -0.6930099725723267, 0.2990500032901764, -0.46856001019477844, -0.1896200031042099, -0.9263299703598022, 0.9986000061035156, -0.013334999792277813, 0.3399899899959564, 0.01945200003683567, -1.1953999996185303, -0.11403000354766846, 0.6284199953079224, 0.4318000078201294, -0.31095999479293823, -0.7368599772453308, 0.09923700243234634, 0.17228999733924866, -0.2749499976634979, 0.6417800188064575, 0.15335999429225922, -0.43136999011039734, 1.0556999444961548, 0.01843300089240074, 0.16832999885082245, 0.24672000110149384, -0.2902800142765045, 0.03669999912381172, 0.22923000156879425, 0.6999499797821045, 0.22462999820709229, -0.4860199987888336, -0.33935999870300293, 0.5483499765396118, 0.4909000098705292, 0.045510999858379364, -0.42149001359939575, -0.4162999987602234, 0.6670500040054321, 0.037351999431848526, 0.5173500180244446, 0.39427000284194946, 0.48917001485824585, 0.34599000215530396, 0.02796499989926815, -0.12707999348640442, 0.1602499932050705, 0.46327999234199524, 0.3788900077342987, -0.31321001052856445, -0.34762999415397644, -0.15581999719142914, -0.25016000866889954, 0.48802000284194946, -0.17332999408245087, -0.2643199861049652, 0.12456999719142914, 0.2411700040102005, -0.4551199972629547, 0.7092999815940857, 0.45085999369621277, 0.31407999992370605, -0.1288899928331375, -0.9036999940872192, -0.5920900106430054, 0.4069400131702423, -0.4336499869823456, 0.030594000592827797, -0.5931299924850464, -0.23704999685287476, 0.5640699863433838, 0.011699000373482704, 0.3159100115299225, -0.8569499850273132, -0.36083000898361206, 0.8839200139045715, -0.8210099935531616, -0.5696300268173218, -0.42618998885154724, 0.013261999934911728, -0.038036998361349106, -0.5924500226974487, -0.4679900109767914, 0.45085999369621277, 0.19210000336170197, -0.23331999778747559, -0.15353000164031982, 0.6756299734115601, 0.22968000173568726, -0.3129099905490875, 0.07426299899816513, 0.6639099717140198, 0.2786400020122528, 0.3133000135421753, 0.175369992852211, -0.7055400013923645, -0.5516499876976013, 0.29840001463890076, -0.13782000541687012, -0.02433899976313114, -0.3308899998664856, -0.16224999725818634, -0.355320006608963, 0.41025999188423157, 0.04611799865961075, -0.1779700070619583, -0.0546329990029335, 0.21831999719142914, 0.35346999764442444, -0.0898820012807846, -0.1434600055217743, 0.6265299916267395, -0.024403000250458717, -0.31172001361846924, 0.09467300027608871, 0.4099999964237213, 0.4491899907588959, 0.42732998728752136, -0.07311400026082993, 0.628059983253479, -0.14767999947071075, 0.39629000425338745, -0.46035999059677124, 0.18672999739646912, 0.23980000615119934, 0.22142000496387482, 0.30900999903678894, -0.28968000411987305, -0.8424000144004822, 0.18086999654769897, -0.9722300171852112, -0.629010021686554, -0.22009000182151794, 0.17096999287605286, -0.0825209990143776, 0.28481999039649963, 0.44578999280929565, -0.18738000094890594, -0.7771400213241577, 0.3865000009536743, -0.1764100044965744, -0.034398000687360764, -0.15423999726772308, -1.0839999914169312, 0.8177499771118164, -0.7846800088882446, -0.15076999366283417, -0.4834800064563751, 0.5302199721336365, -0.5745199918746948, -0.16868999600410461, 0.06484100222587585, 0.5532199740409851, -0.20541000366210938, 0.07991600036621094, 0.22154000401496887, -0.05899199843406677, 0.37988001108169556, 1.0154000520706177, 0.47277000546455383, -0.4300599992275238, 0.11898999661207199, -0.13899999856948853, -0.2603999972343445, 0.2517400085926056, 
0.32100000977516174, -0.21570999920368195, -0.47468000650405884, -0.7043399810791016, 0.7173500061035156, 0.5989500284194946, 0.6586499810218811, -0.1208299994468689, 0.43452998995780945, 0.07610700279474258, 0.13582000136375427, 0.3375599980354309, -0.288349986076355, -0.38666999340057373, -0.2233400046825409, -0.40417999029159546, 0.41251999139785767, -0.23959000408649445, -0.1829500049352646, -1.1231000423431396, 0.12399999797344208, 0.027766000479459763, -0.0011542000574991107, 0.33122000098228455], u'standing': [-0.07662300020456314, -0.044557999819517136, -0.12822000682353973, -0.41047000885009766, -0.3423500061035156, 0.06074199825525284, 0.17190000414848328, 0.10773999989032745, 0.0564659982919693, -1.1282000541687012, -0.13086000084877014, 0.10316000133752823, 0.05017799884080887, -0.39155998826026917, -0.10700000077486038, 0.36934998631477356, 0.07501699775457382, 0.22776000201702118, -0.14303000271320343, -0.29510998725891113, -0.15670999884605408, 0.24753999710083008, 0.450439989566803, -0.13741999864578247, -0.11078000068664551, 0.2614299952983856, 0.11032000184059143, -0.2749199867248535, 0.04405000060796738, 0.41220998764038086, 0.032919999212026596, -0.17451000213623047, 0.2030400037765503, -0.18951000273227692, -1.1990000009536743, 0.0313429981470108, 0.2786700129508972, -0.3421100080013275, 0.03283200040459633, 0.36090999841690063, 0.32471001148223877, -0.03961599990725517, -0.0396369993686676, 0.1780800074338913, 0.08296900242567062, 0.2537800073623657, 0.20054000616073608, -0.0682850033044815, 0.21448999643325806, 0.23563000559806824, -0.2016099989414215, -0.2682499885559082, -0.4538300037384033, 0.004545200150460005, 0.04217899963259697, 0.21618999540805817, 0.1338299959897995, 0.2798599898815155, -0.18535000085830688, 0.405460000038147, 0.6658499836921692, -0.06129999831318855, 0.10583999752998352, 0.3940800130367279, -0.4074600040912628, -0.5960999727249146, 0.058476001024246216, 0.387470006942749, -0.05758700147271156, 0.1545100063085556, -0.0660649985074997, 0.057036999613046646, -0.2913599908351898, -0.5083799958229065, -0.12219999730587006, 0.06114700064063072, 0.08432299643754959, -0.07107599824666977, -0.030263999477028847, -0.1058100014925003, -0.022026000544428825, 0.39478999376296997, -0.16797000169754028, 0.3660700023174286, 0.22023999691009521, 0.2537600100040436, -0.012160000391304493, -0.040897998958826065, -0.13446000218391418, -0.16056999564170837, 0.40389999747276306, -0.23377999663352966, 0.025273000821471214, 0.22078000009059906, -0.19994999468326569, -0.12775999307632446, 0.019735999405384064, -0.05281699821352959, 0.016109999269247055, 0.006651800125837326, -0.06711500138044357, 0.22809000313282013, -0.34766000509262085, -0.4165799915790558, -0.462009996175766, 0.47387000918388367, 0.25902000069618225, 0.10819999873638153, -0.08917000144720078, -0.18488000333309174, -0.09481199830770493, 0.060596998780965805, -0.2299100011587143, -0.1418900042772293, -0.0901390016078949, 0.17586000263690948, -0.0614360012114048, 0.14880000054836273, -0.12836000323295593, -0.023691000416874886, -0.4203700125217438, 0.2941800057888031, 0.1983499974012375, 0.2687300145626068, -0.3932400047779083, -0.36438998579978943, -0.007804399821907282, -0.09981399774551392, 0.3640500009059906, -0.6302899718284607, -0.37362000346183777, 0.14139999449253082, 0.03854500129818916, 0.10050000250339508, -0.07310199737548828, 0.041731998324394226, 0.03853899985551834, -0.042746998369693756, 0.310589998960495, 0.3833799958229065, -0.4690299928188324, 0.15285000205039978, 
0.01724799908697605, 0.1364700049161911, -0.49222999811172485, 0.14034000039100647, 0.2790299952030182, 0.45993998646736145, -0.14058999717235565, -0.41273999214172363, 0.04051100090146065, 0.22269000113010406, 0.12466999888420105, -0.09563799947500229, 0.02829599939286709, 0.45291000604629517, -0.02371799945831299, 0.32221999764442444, 0.2594299912452698, 0.6240500211715698, 0.1530900001525879, 0.5085700154304504, 0.039774999022483826, 0.06643900275230408, -0.08910399675369263, 0.044902000576257706, -0.3672800064086914, 0.18975000083446503, -0.04534199833869934, -0.0184749998152256, 0.13898000121116638, 0.4536300003528595, -0.054639000445604324, -0.6113899946212769, 0.10540000349283218, 0.048941001296043396, -0.06449300050735474, 0.5118299722671509, -0.03315800055861473, -0.15208999812602997, 0.03996099904179573, -0.12370999902486801, 0.43737998604774475, 0.07667600363492966, -0.29423001408576965, -0.05012499913573265, 0.11569999903440475, 0.1787099987268448, -0.14879000186920166, 0.5176100134849548, -0.44538000226020813, -0.08406899869441986, 0.05869099870324135, -0.057617999613285065, -0.31940001249313354, -0.34463000297546387, -0.258899986743927, -0.11100000143051147, -0.4035399854183197, -0.06620799750089645, 1.3783999681472778, 0.038231998682022095, 0.4670400023460388, -0.060589998960494995, 0.16740000247955322, -0.2943800091743469, 0.1438799947500229, -0.25519999861717224, 0.3236599862575531, 0.2887299954891205, 0.27316999435424805, 0.04504700005054474, -0.31553998589515686, -0.5052300095558167, -0.09548799693584442, -0.07488200068473816, -0.4787200093269348, 0.18690000474452972, -0.05212799832224846, 0.3422600030899048, 0.1325799971818924, -0.20767000317573547, 0.3957900106906891, 0.03326600044965744, -0.01852799952030182, 0.013605000451207161, -0.039285000413656235, -0.08621600270271301, -0.5659199953079224, 0.03632799908518791, 0.28499001264572144, -0.24945999681949615, -0.07225599884986877, 0.057479001581668854, -0.01736599951982498, -0.52360999584198, -0.15078000724315643, -0.46452999114990234, 0.3742699921131134, 0.014150000177323818, -0.13040000200271606, 0.1370300054550171, -0.16297000646591187, 0.10193999856710434, -0.524399995803833, -0.0014284000499173999, 0.122529998421669, -0.08393199741840363, -0.13790999352931976, -0.05134100094437599, 0.4658699929714203, -0.052345000207424164, 0.8083400130271912, -0.6633099913597107, 0.5253999829292297, -0.024423999711871147, -0.6037499904632568, -0.16912999749183655, 0.00576619990170002, 0.10440000146627426, 0.1913899928331375, 0.06453900039196014, 0.36805999279022217, -0.12976999580860138, -0.13333000242710114, -0.3528900146484375, -0.05359499901533127, -0.08466000109910965, -0.25508999824523926, -0.2256699949502945, -0.6590099930763245, -0.18411999940872192, -0.28821998834609985, -0.08090300112962723, 0.10540000349283218, 0.12021999806165695, -1.623900055885315, 0.18264000117778778, 0.6505200266838074, 0.21356000006198883, 0.26941999793052673, -0.46786999702453613, 0.0023479999508708715, -0.37887001037597656, 0.30305999517440796, 0.6798200011253357, 0.23547999560832977, 0.5517100095748901, 0.4196999967098236, -0.04390399903059006, 0.3471199870109558, 0.4264200031757355, -0.32447001338005066, -0.1548299938440323, -0.24778999388217926, 0.041370000690221786, 0.09977000206708908, 0.04495500028133392, 0.016320999711751938, 0.0831189975142479], u'ancient': [0.07925000041723251, -0.4121200144290924, -0.3648900091648102, -0.09852100163698196, 0.43149998784065247, -0.2943600118160248, 0.20134000480175018, 0.16780999302864075, 
0.42614999413490295, -1.2059999704360962, -0.3405599892139435, -0.06162299960851669, -0.35144999623298645, 0.4043999910354614, 0.31334999203681946, -0.21357999742031097, -0.15379999577999115, 0.619949996471405, -0.1945600062608719, 0.1603900045156479, -0.6560400128364563, -0.13850000500679016, 0.07389000058174133, 0.9286900162696838, 0.4762200117111206, -0.5522199869155884, -0.10214000195264816, -0.8024200201034546, -0.05412000045180321, 0.9965100288391113, 0.619920015335083, 0.9068700075149536, -1.2424999475479126, 0.1647000014781952, 0.20636999607086182, 0.09434700012207031, 0.21730999648571014, 0.01076900027692318, -0.12407000362873077, -0.1147800013422966, 0.8507999777793884, -0.5394799709320068, -0.02676199935376644, -0.12714999914169312, 0.5200200080871582, 0.06517300009727478, 0.5862399935722351, 0.816789984703064, -0.17562000453472137, -0.3003300130367279, 0.07223299890756607, -0.15026000142097473, 0.29881998896598816, -0.29725998640060425, 0.056492000818252563, -0.08480799943208694, -0.23430000245571136, 0.04483100026845932, 0.14847999811172485, 0.3466300070285797, -0.002430099993944168, 0.6433299779891968, 0.8232600092887878, 0.30164000391960144, -0.047784000635147095, 0.46713000535964966, -0.3490400016307831, 0.8692899942398071, 0.27312999963760376, 0.022221999242901802, -0.062442000955343246, -0.40195998549461365, 0.15504999458789825, -0.23929999768733978, -0.38971999287605286, 0.4873200058937073, 0.24142999947071075, -0.7748200297355652, 0.2381100058555603, -0.12610000371932983, -0.2690800130367279, -0.16095000505447388, -0.9326300024986267, 0.1533699929714203, 0.08197099715471268, 0.8297899961471558, -0.04685800150036812, 0.865119993686676, 0.49099001288414, 0.3963100016117096, 0.059602998197078705, 0.14151999354362488, 0.339819997549057, 0.06648600101470947, 0.29260000586509705, 0.5137400031089783, 0.7304499745368958, 0.05510900169610977, 0.38767001032829285, -0.0606440007686615, 0.25905999541282654, 0.8640000224113464, 0.17710000276565552, 0.10588999837636948, 0.10385999828577042, 0.19588999450206757, 0.2006099969148636, -0.49428001046180725, -0.0103169996291399, 0.14474999904632568, 0.2674500048160553, -0.3370800018310547, -0.40031999349594116, -0.1873299926519394, -0.5985900163650513, -0.23473000526428223, -0.11404000222682953, 0.050331998616456985, 0.2662000060081482, -0.4256899952888489, -0.659500002861023, 0.1979299932718277, -0.8297600150108337, 0.1799200028181076, -0.5117899775505066, 0.5305600166320801, -0.2176000028848648, 0.7406700253486633, -0.19458000361919403, -0.5259799957275391, 0.044346000999212265, 0.27305999398231506, 0.7355200052261353, -0.028996000066399574, -0.18330000340938568, -0.37836000323295593, 0.04144199937582016, -0.08832400292158127, -0.42820000648498535, -0.22477999329566956, 0.16519999504089355, -0.5485699772834778, 0.07937400043010712, -0.2389499992132187, 0.33333998918533325, -0.12723000347614288, 0.14879000186920166, 0.35881999135017395, -0.19603000581264496, -0.41082000732421875, -0.0467820018529892, -0.15002000331878662, -0.2033900022506714, -0.015776000916957855, 0.2184000015258789, 0.2669599950313568, -0.6042799949645996, 0.42932000756263733, 0.5001400113105774, -0.16810999810695648, 0.49404001235961914, 0.1395300030708313, 0.5095700025558472, 0.3728100061416626, -0.4828900098800659, 0.19550000131130219, 0.1900700032711029, 0.08821599930524826, 0.0827070027589798, -0.5041000247001648, -0.37505999207496643, -0.5341500043869019, 0.4429500102996826, 0.37661999464035034, 0.3484799861907959, 0.49507999420166016, -0.02464500069618225, 
0.266620010137558, -0.04751100018620491, -0.5983099937438965, -0.19224999845027924, 0.5898699760437012, 0.07330100238323212, -0.008520100265741348, 0.7243899703025818, -0.3476400077342987, -0.49987998604774475, 0.34887999296188354, 0.6002200245857239, -0.2404700070619583, -0.016327999532222748, -0.043198999017477036, 0.4138199985027313, 0.021480999886989594, 0.2492399960756302, -0.720740020275116, 0.024178000167012215, -0.6484400033950806, -0.4297899901866913, 0.6016499996185303, 0.7875300049781799, 0.2632400095462799, -0.14651000499725342, 0.5756999850273132, 0.41385000944137573, -0.4472300112247467, -0.1906300038099289, -0.07212799787521362, -0.10728000104427338, 0.4005100131034851, 0.038888998329639435, 0.6488699913024902, 0.13537000119686127, -0.6677200198173523, -0.4441399872303009, -0.28784000873565674, -0.16766999661922455, -0.0024546999484300613, -0.542900025844574, 0.2476000040769577, 0.02450999990105629, 0.030036000534892082, -0.06919199973344803, -0.581309974193573, -0.4201500117778778, -0.43766000866889954, -0.25540998578071594, 0.2597300112247467, -0.05141700059175491, 0.2580200135707855, -0.4119200110435486, -0.08453600108623505, -0.4854600131511688, 0.2526099979877472, -0.09655400365591049, 0.130840003490448, -0.06131000071763992, -0.8082399964332581, -0.10687000304460526, 0.13707999885082245, -0.43841999769210815, 0.18197999894618988, 0.7392799854278564, 0.558709979057312, -0.8188599944114685, 0.18740999698638916, -0.9320999979972839, 0.669160008430481, 0.0786219984292984, 0.5651100277900696, 0.43292999267578125, 0.3589800000190735, -0.13267000019550323, -0.3027600049972534, 0.2846600115299225, 0.0870710015296936, -0.08517400175333023, -0.1692200005054474, 0.09904500097036362, -0.366349995136261, -0.25523000955581665, -0.22595000267028809, 0.44262000918388367, -0.3771800100803375, 0.07201399654150009, 0.18565000593662262, 0.07628799974918365, -0.055702000856399536, -0.04382700100541115, -0.07082200050354004, -0.5291600227355957, 0.06921699643135071, 0.879830002784729, 0.4251199960708618, 0.061918001621961594, 0.05029800161719322, -1.5305999517440796, -0.30375999212265015, 0.5049099922180176, -0.17907999455928802, 0.1058100014925003, 0.09501200169324875, 0.47152000665664673, -0.2362699955701828, 0.0521089993417263, -0.07584100216627121, -0.13086000084877014, -0.2760300040245056, 0.5535699725151062, 0.3061999976634979, 0.25859999656677246, 0.27107998728752136, -0.5536400079727173, 0.43953999876976013, -0.22273999452590942, -0.311710000038147, -0.05524900183081627, 0.008963599801063538, -0.70346999168396, 0.19930000603199005], u'toppled': [-0.05917700007557869, -0.6508200168609619, 0.0747859999537468, -0.5095000267028809, 0.1926400065422058, 0.34446001052856445, -0.5439199805259705, -0.013926000334322453, 0.4473299980163574, -0.33028000593185425, -0.5573499798774719, 0.7644400000572205, 0.2945699989795685, -0.3208000063896179, -0.3888300061225891, 0.77947998046875, 0.6909700036048889, -0.2660300135612488, -0.03606399893760681, -0.12134999781847, 0.2863500118255615, 0.014182999730110168, 0.9580699801445007, -0.36406001448631287, 0.06066200137138367, -0.06650599837303162, -0.5738400220870972, 0.06176299974322319, 0.10486999899148941, 0.389849990606308, -0.398389995098114, 0.08459199965000153, 0.3415299952030182, -0.22981999814510345, 0.41734999418258667, -0.4151900112628937, 0.24299000203609467, -0.4973199963569641, 0.5865700244903564, 0.17374999821186066, 0.25137001276016235, -0.4201500117778778, -0.40358999371528625, -0.07457900047302246, 0.06709499657154083, 
-0.3142699897289276, 0.30212000012397766, -0.20609000325202942, -0.015196999534964561, -0.25461000204086304, -0.10379999876022339, -0.14003999531269073, 0.22442999482154846, -0.4516499936580658, 0.7843800187110901, -0.03776799887418747, 0.35085999965667725, -0.16176000237464905, -0.11469999700784683, -0.15902000665664673, -0.42572999000549316, 0.4196400046348572, -0.8962299823760986, -0.04911499843001366, -0.41703999042510986, -0.22746999561786652, 0.021929999813437462, 0.7942600250244141, -0.6626899838447571, -0.1280899941921234, 0.0022700000554323196, 0.37053999304771423, -0.921970009803772, 0.06797099858522415, 0.27052998542785645, 0.5807300209999084, -0.413129985332489, -0.27006998658180237, -0.4177800118923187, -0.36733999848365784, 0.32350000739097595, -0.35280001163482666, -0.30421000719070435, 0.6610400080680847, 0.07208199799060822, 0.0908140018582344, -0.1440500020980835, 1.0153000354766846, -0.12703000009059906, -0.5354599952697754, 0.6584699749946594, -0.5975000262260437, 0.5422999858856201, -0.43832001090049744, -0.12570999562740326, 0.08389700204133987, 0.4480000138282776, -0.27156001329421997, 0.4679799973964691, 1.1220999956130981, 0.22506999969482422, 0.5259900093078613, 0.5715299844741821, -0.44846999645233154, 0.3291800022125244, 0.45983999967575073, 0.11631999909877777, -0.3219600021839142, -0.11635000258684158, -0.6709399819374084, -0.6258900165557861, -0.6126199960708618, 0.07269400358200073, -0.018177999183535576, 0.3485400080680847, 0.6053799986839294, 0.1590700000524521, -0.32732000946998596, -0.10964000225067139, -0.6139199733734131, 0.4863399863243103, -0.4715699851512909, 0.09915799647569656, -0.03550200164318085, -0.3980399966239929, -0.4332599937915802, -0.17504000663757324, 0.5989000201225281, -0.18807999789714813, 0.4045099914073944, -0.6259300112724304, 1.3539999723434448, -0.3387100100517273, 0.09819100052118301, 0.3061999976634979, -0.3225800096988678, 0.015433999709784985, 0.17996999621391296, 0.32429999113082886, -0.324970006942749, -0.3080799877643585, 0.06214600056409836, -0.016757000237703323, 0.8181399703025818, -0.018783999606966972, -0.26401999592781067, 0.19822999835014343, 0.19296999275684357, -0.05305999889969826, -0.4731700122356415, 0.6621400117874146, -0.6193400025367737, -0.24427999556064606, 0.5626599788665771, 0.08354099839925766, 0.5564900040626526, 0.946340024471283, 0.07220499962568283, -0.04542599990963936, 0.25077998638153076, -0.12947000563144684, 0.2817699909210205, 0.028750000521540642, 0.3656199872493744, 0.4743100106716156, -0.4457800090312958, 0.8660200238227844, -0.5877699851989746, 0.06779000163078308, -0.0592540018260479, -0.08640699833631516, -0.12078999727964401, -0.21355000138282776, -0.632070004940033, -0.07773400098085403, 0.2816300094127655, 0.027070000767707825, 0.6226000189781189, -0.36250001192092896, 0.15302999317646027, -0.09756699949502945, 0.053679000586271286, 0.5696399807929993, -0.08245900273323059, -0.1621900051832199, -0.12303999811410904, -0.15219999849796295, 0.6043699979782104, 0.5919899940490723, -0.09039799869060516, -0.006600699853152037, -0.5172399878501892, -0.05281800031661987, 0.14997999370098114, 0.05378099903464317, 0.11540000140666962, -0.8686100244522095, 0.4849399924278259, 0.029405999928712845, 0.11868999898433685, 0.30094000697135925, -0.07302100211381912, -0.1752299964427948, -0.7566999793052673, -0.274370014667511, -0.15970000624656677, 0.9558200240135193, 0.44550999999046326, -0.352539986371994, 0.518339991569519, -0.06050100177526474, -0.2967599928379059, 0.5143300294876099, 
-0.2004999965429306, 0.3791100084781647, 0.31782999634742737, 0.1571200042963028, -0.6561599969863892, -0.6918100118637085, 0.3073599934577942, 0.7281699776649475, -0.41534000635147095, -0.35863998532295227, -0.5341299772262573, 0.20352999866008759, 0.3812899887561798, 0.10888999700546265, -0.01575000025331974, -0.22339999675750732, 0.015483999624848366, -0.025599999353289604, -0.002606299938634038, -0.24236999452114105, 0.46024999022483826, 0.3584800064563751, -0.3383899927139282, 0.23627999424934387, 0.13328999280929565, 0.8365799784660339, -0.7434800267219543, -0.11512000113725662, -0.05778900161385536, -0.3031899929046631, -0.18154999613761902, 0.1414799988269806, -0.40057000517845154, 0.7005599737167358, -0.05257600173354149, 0.6065899729728699, 0.4649899899959564, 0.4915499985218048, -0.016061000525951385, -0.24356000125408173, -0.7138199806213379, 0.9844800233840942, -0.1585800051689148, 0.8453599810600281, -0.2547599971294403, -0.25624001026153564, -0.3405500054359436, -0.750220000743866, 0.33000001311302185, -0.22356000542640686, -0.249439999461174, 0.3174000084400177, -0.4995900094509125, -0.16372999548912048, -0.39500999450683594, 0.4156099855899811, 0.08071400225162506, -0.7939599752426147, -0.1754699945449829, 0.4205400049686432, -0.31457000970840454, -0.0786919966340065, 0.17357000708580017, -0.7615399956703186, -0.20916999876499176, -0.1360899955034256, -0.06976799666881561, 0.2916499972343445, -0.23517000675201416, 0.24584999680519104, -0.36462000012397766, -0.025313999503850937, 0.19057999551296234, -0.7814099788665771, 0.0459819994866848, -0.2886500060558319, -0.05769500136375427, 0.693880021572113, 0.208639994263649, 0.850570023059845, 0.3315100073814392, 0.45570001006126404, 0.7593299746513367, 0.5968499779701233, -0.8145599961280823, -0.07997599989175797, -0.29660001397132874], u'weathered': [-0.2621000111103058, -0.21702000498771667, -0.48427000641822815, -0.35060998797416687, 0.027400000020861626, -0.2971700131893158, -0.614329993724823, -0.11603999882936478, 0.44308000802993774, -0.302700012922287, 0.14681999385356903, 0.2660300135612488, -0.026002999395132065, -0.18004000186920166, -0.4032999873161316, -0.5024099946022034, -0.4607599973678589, 0.07077699899673462, 0.11168000102043152, -0.2703799903392792, -0.16313999891281128, 0.27327999472618103, 0.4699400067329407, -0.12392999976873398, -0.5652599930763245, -0.05915699899196625, -0.08835700154304504, 0.07076700031757355, -0.5226399898529053, 0.5127300024032593, 0.29778000712394714, 0.6202600002288818, -0.44787999987602234, -0.28349998593330383, -0.14986999332904816, 0.03440700098872185, -0.6213799715042114, -0.5773500204086304, 0.5567600131034851, 0.10420999675989151, 0.11044000089168549, 0.19088000059127808, 0.2952300012111664, -0.49970999360084534, 0.1548900008201599, 0.4020499885082245, -0.23718999326229095, -0.9081500172615051, -0.18345999717712402, 0.02135300077497959, -0.4187900125980377, -0.05267399922013283, 0.3241899907588959, 0.1457899957895279, 0.5646200180053711, -0.35131001472473145, -0.12551000714302063, 0.09412100166082382, 0.15421000123023987, -0.11135999858379364, 0.326229989528656, 0.12285000085830688, -0.018974000588059425, -0.1695999950170517, 0.011075999587774277, -0.28606998920440674, -0.12058000266551971, -0.1357399970293045, -0.1188800036907196, -0.5275800228118896, 0.07319200038909912, 0.047554001212120056, -0.5012999773025513, 0.011442000046372414, 0.11049000173807144, -0.3286600112915039, -0.5153800249099731, -0.5989000201225281, 0.05640200152993202, -0.18571999669075012, 
-0.33847999572753906, 0.1325799971818924, -0.03023100085556507, 0.12408000230789185, 0.1492300033569336, 0.5081599950790405, 0.06467799842357635, 0.20333999395370483, -0.30375999212265015, 0.22392000257968903, 0.004708699882030487, 0.35390999913215637, -0.004608300048857927, 0.1379999965429306, -0.22980999946594238, 0.23859000205993652, -0.05701100081205368, 0.10036999732255936, 0.6804199814796448, 0.43821001052856445, 0.10817000269889832, 0.5697500109672546, -0.32120001316070557, -0.32677000761032104, 0.32381001114845276, 0.15900999307632446, -0.195250004529953, 0.18794000148773193, 0.07353799790143967, -0.6650300025939941, 0.03494900092482567, -0.46094998717308044, -0.2920899987220764, -0.07414399832487106, 0.0013729999773204327, 0.05723100155591965, 0.07005199790000916, 0.2614699900150299, -0.17330999672412872, -0.25672000646591187, 0.15634000301361084, -0.5087900161743164, -0.25123000144958496, 0.6720100045204163, -0.06378600001335144, 0.6935200095176697, -0.9570299983024597, 0.3840799927711487, -0.14308999478816986, -0.12257999926805496, 0.0412600003182888, 0.05514900013804436, -0.25404998660087585, -0.1904900074005127, 0.3039099872112274, -0.10286000370979309, -0.33507999777793884, 0.6501500010490417, 0.40195000171661377, 0.18961000442504883, 0.22111999988555908, 0.5486099720001221, -0.47714000940322876, 0.7097200155258179, 0.37880000472068787, 0.380840003490448, -0.11880999803543091, 0.2982200086116791, -0.0640920028090477, -0.1384200006723404, -0.290910005569458, -0.2901799976825714, -0.2430099993944168, 0.13950000703334808, -0.2508699893951416, 0.35113000869750977, -0.10779999941587448, -0.11433999985456467, 0.04546000063419342, 0.6573699712753296, -0.21150000393390656, -0.10819000005722046, -0.3814600110054016, 0.571399986743927, -0.21967999637126923, 0.09407400339841843, 0.02390800043940544, -0.08772499859333038, 0.5117300152778625, -0.09766799956560135, -0.3165600001811981, 0.7846999764442444, -0.06339100003242493, -0.48708999156951904, -0.2853899896144867, -0.0018687000265344977, -0.06116100028157234, 0.06207000091671944, 0.3561899960041046, -0.21817000210285187, -0.10176999866962433, 0.033649999648332596, 0.1103999987244606, -0.23127000033855438, -0.25001999735832214, -0.2942500114440918, 0.25870001316070557, 0.24360999464988708, -0.27013999223709106, 0.2183700054883957, -0.01831899955868721, -0.1866299957036972, 0.21607999503612518, -0.3704099953174591, 0.24026000499725342, -0.11477000266313553, 0.4121600091457367, -0.26954999566078186, -0.5900999903678894, 0.03590400144457817, 0.4875600039958954, -0.384770005941391, 0.07730700075626373, -0.10786999762058258, 0.20784999430179596, -0.36906999349594116, 0.30932000279426575, 0.04787300154566765, -0.6533899903297424, -0.0838489979505539, 0.15518000721931458, 0.39871999621391296, -0.30000999569892883, -0.5285599827766418, 0.47475001215934753, -0.003792600007727742, 0.3065299987792969, -0.3873499929904938, -0.15769000351428986, 0.08149799704551697, 0.6575400233268738, 0.40860000252723694, 0.32795000076293945, -0.23442000150680542, -0.1336899995803833, 0.27608999609947205, -0.013381999917328358, -0.11885999888181686, -0.292930006980896, 0.13579000532627106, 0.1875399947166443, 0.30737000703811646, -0.3933500051498413, -0.07467400282621384, -0.5189700126647949, 0.3405599892139435, 0.3004299998283386, -0.6077499985694885, 0.10347999632358551, 0.2010200023651123, -0.05909299850463867, 0.4779599905014038, -0.6585699915885925, -0.6749100089073181, -0.48969998955726624, -0.296860009431839, -0.03590700030326843, 0.18832999467849731, 
0.1216999962925911, 0.32552000880241394, -0.18714000284671783, -0.354779988527298, 0.4465799927711487, -0.020301999524235725, 0.45796000957489014, -0.08963699638843536, 0.14094999432563782, 0.23005999624729156, -0.21344999969005585, -0.018341999500989914, -0.5889400243759155, 0.5313699841499329, -0.06330099701881409, 0.14946000277996063, 0.11217000335454941, 0.20642000436782837, 0.12088999897241592, -0.7514299750328064, 0.1688700020313263, 0.2604700028896332, -0.241689994931221, 0.7544699907302856, -0.022971000522375107, -0.4276300072669983, 0.1413699984550476, -0.23357999324798584, 0.039301998913288116, 0.021541999652981758, -0.5016800165176392, 0.15846000611782074, 0.07354599982500076, 0.002975800074636936, -0.13274000585079193, 0.2644200026988983, 0.24177999794483185, -0.12223999947309494, -0.2676500082015991, 0.3426699936389923, 0.5304200053215027, -0.42763999104499817, 0.1685200035572052, 0.5045999884605408, -0.19682000577449799, 0.07256799936294556, 0.4308899939060211, 0.2700200080871582, -0.00031570999999530613, -0.014689000323414803, 0.008294500410556793, 0.009020400233566761], u'murky': [0.3206599950790405, -0.4352799952030182, 0.6713200211524963, 0.08953599631786346, 0.19273999333381653, 0.012167000211775303, -0.008598599582910538, -0.0007618900272063911, 0.41510000824928284, -0.7739099860191345, -0.017224999144673347, 0.08097399771213531, -0.06494099646806717, -0.21100999414920807, -0.31595999002456665, -0.33618998527526855, -0.6271899938583374, 0.39879000186920166, 0.21731999516487122, 0.7505599856376648, -0.21785999834537506, 0.636929988861084, -0.027501000091433525, 0.08060000091791153, -0.22020000219345093, -0.2838999927043915, 0.19109000265598297, -0.013326999731361866, -0.4710899889469147, 0.31836000084877014, 0.21223999559879303, -0.024335000663995743, 0.031105000525712967, -0.04228400066494942, 0.483599990606308, 0.6211000084877014, 0.47819000482559204, -0.259880006313324, -0.5062000155448914, 0.01235199999064207, 0.37342000007629395, 0.426690012216568, 0.25262001156806946, 0.5932400226593018, 0.4872699975967407, -0.1542000025510788, -0.21036000549793243, -0.20273999869823456, -0.4150499999523163, 0.2505899965763092, -0.09416600316762924, 0.2354699969291687, 0.518779993057251, -0.7449300289154053, 0.04975999891757965, -0.3609200119972229, 0.06578200310468674, 0.20378999412059784, 0.20277999341487885, 0.20016999542713165, 0.16670000553131104, 0.029695000499486923, -0.2866100072860718, 0.08812999725341797, 0.2819499969482422, -0.16543999314308167, 0.304610013961792, -0.02229500003159046, 0.04971500113606453, -0.061292000114917755, 0.2927199900150299, 0.42486000061035156, -0.33678001165390015, -0.2160699963569641, 0.06418400257825851, 0.022554000839591026, -0.10281000286340714, 0.5124099850654602, 0.2887899875640869, -0.07952900230884552, 0.40389999747276306, -0.2839699983596802, 0.3512899875640869, 0.18877999484539032, 0.4650999903678894, 0.041802000254392624, 0.3646700084209442, 0.011233000084757805, 0.055417001247406006, 0.3735499978065491, -0.625029981136322, 0.37797001004219055, 0.054134998470544815, 0.2535000145435333, 0.00401319982483983, 0.6550700068473816, -0.0261049997061491, 0.33362001180648804, 0.18750999867916107, -0.6616600155830383, 0.12228000164031982, -0.04847000166773796, -0.20183999836444855, 0.6075900197029114, -0.2888700067996979, -0.0951709970831871, 0.4617300033569336, 0.2981500029563904, 0.04309700056910515, 0.36597999930381775, 0.1231900006532669, -0.17417000234127045, 0.11112000048160553, 0.01730700023472309, 0.5419099926948547, 
-0.3782700002193451, 0.4519299864768982, -0.24089999496936798, 0.19044999778270721, -0.35701000690460205, 0.26743000745773315, -0.01930299960076809, 0.48155999183654785, 0.3739199936389923, 0.10651999711990356, 0.2618499994277954, -0.16675999760627747, 0.019618000835180283, -0.4371899962425232, -0.21154999732971191, -0.23091000318527222, 0.4534899890422821, 0.3000899851322174, 0.1639699935913086, -0.2404399961233139, 0.050269998610019684, 0.24106000363826752, -0.11868000030517578, -0.4672900140285492, 0.06284800171852112, 0.14122000336647034, 0.30803999304771423, -0.149959996342659, -0.20749999582767487, -0.4884200096130371, -0.49390000104904175, -0.038759998977184296, 0.39173001050949097, 0.29319000244140625, 0.4433000087738037, -0.5313500165939331, -0.31766000390052795, -0.042573001235723495, -0.22971999645233154, 0.19349999725818634, -0.19076000154018402, 0.42767998576164246, -0.3445200026035309, 0.196150004863739, 0.537090003490448, -0.5610100030899048, -0.009526499547064304, 0.6973099708557129, 0.1028899997472763, 0.06503599882125854, -0.19760000705718994, -0.13282999396324158, -0.028284000232815742, -1.1013000011444092, 0.10200999677181244, 0.3032299876213074, 0.31428998708724976, -0.6410999894142151, -0.21152999997138977, -0.3033599853515625, -0.2606799900531769, 0.11793000251054764, -0.19682000577449799, -0.11585000157356262, -0.17133000493049622, 0.17451000213623047, 0.8406199812889099, 0.09110800176858902, 0.27303001284599304, 0.24558000266551971, -0.03612999990582466, 1.0601999759674072, -0.34213000535964966, 0.05584200099110603, -0.22317999601364136, -0.0011490000179037452, -0.5101100206375122, -0.4035300016403198, -0.2349800020456314, 0.009324699640274048, -0.38784000277519226, -0.5708799958229065, 0.15248000621795654, -0.6861299872398376, -0.152879998087883, 0.4106000065803528, 0.3394100069999695, -0.0063403998501598835, -0.738510012626648, 0.13416999578475952, -0.7339000105857849, 0.1370999962091446, 0.10191000252962112, -0.13471999764442444, 0.37049001455307007, 0.20186999440193176, 0.6244000196456909, 0.28147000074386597, 0.06727000325918198, -0.11490000039339066, -0.05934600159525871, 0.6137499809265137, -0.00602689990773797, 0.1075500026345253, 0.6745100021362305, 0.7728000283241272, -0.3946399986743927, 0.3655099868774414, 0.1609800010919571, -0.03249799832701683, -0.04827199876308441, -0.43599000573158264, -0.35300999879837036, -0.053070999681949615, 0.132860004901886, 0.32714998722076416, 0.21803000569343567, 0.034956999123096466, 0.25999999046325684, -0.25769999623298645, 0.2889299988746643, 0.2640700042247772, -0.36212998628616333, -0.5405700206756592, -0.35503000020980835, -0.0020244999323040247, 0.022053999826312065, 0.398250013589859, -0.30013999342918396, -0.8770400285720825, -0.654990017414093, 0.7044900059700012, 0.021036000922322273, -0.2730900049209595, 0.6829599738121033, -0.28303998708724976, -0.21276000142097473, 0.369159996509552, -0.08546099811792374, -0.1892700046300888, -0.18172000348567963, -0.17294000089168549, 0.46140000224113464, -0.2016099989414215, 0.7850199937820435, -0.11823000013828278, -0.1674399971961975, -0.5763599872589111, -0.38576000928878784, 0.09894800186157227, 0.509909987449646, -0.3487800061702728, -0.24318000674247742, -0.682449996471405, -0.2884800136089325, 0.05921600013971329, 0.29256999492645264, -0.2092200070619583, 0.9050899744033813, 0.273389995098114, 0.5126399993896484, 0.18292999267578125, -0.2558499872684479, 0.4087100028991699, -0.4097500145435333, 0.09512200206518173, 0.5059400200843811, 0.41907998919487, 
-0.3529700040817261, 0.8125, -0.7599999904632568, 0.02911899983882904, 0.5961199998855591, 0.0856349989771843, 0.09440799802541733, 0.0433490015566349, 0.5099300146102905, 0.2475699931383133, 0.5202999711036682, 0.16416999697685242, -0.348470002412796, -0.0007087800186127424, 0.9274299740791321, -0.5356699824333191, 0.6256999969482422], u'damp': [-0.35126999020576477, 0.0006216799956746399, -0.6144400238990784, -0.1772499978542328, -0.010160000063478947, -0.22524000704288483, 0.3825699985027313, 1.0570000410079956, 0.5297600030899048, -0.5682700276374817, -0.06798200309276581, 0.13079999387264252, 0.23366999626159668, -0.651889979839325, -0.2888300120830536, -0.4187299907207489, -0.7974399924278259, -0.06738399714231491, 0.07227899879217148, 0.17951999604701996, -0.37022000551223755, 0.1049100011587143, -0.1580599993467331, -0.12826000154018402, -0.5160700082778931, -0.8441500067710876, 0.9424499869346619, -0.3765299916267395, -0.11959999799728394, -0.09953100234270096, 0.062066998332738876, -0.20215000212192535, -0.4153499901294708, -0.36191999912261963, -0.4111599922180176, 0.13796000182628632, -0.4845699965953827, 0.0012701000086963177, -0.4497799873352051, 0.4007999897003174, -0.018021000549197197, 0.272599995136261, -0.002948800101876259, -0.2718200087547302, 0.9998900294303894, 0.3424000144004822, 0.26767998933792114, 0.49503999948501587, -0.51705002784729, -0.2679100036621094, -0.06632100045681, -0.3340100049972534, 0.10701999813318253, -0.6165900230407715, 0.3717299997806549, 0.2567799985408783, -0.3566400110721588, -0.0963279977440834, 0.3127099871635437, 0.3508400022983551, 0.4324699938297272, -0.524399995803833, 0.36037999391555786, 0.5037099719047546, -0.5607200264930725, -0.13266000151634216, 0.4318099915981293, 0.04879499971866608, 0.1002499982714653, -0.024112999439239502, 0.12443999946117401, 0.2501699924468994, -0.4558500051498413, -0.3195599913597107, -0.5303300023078918, 0.14191000163555145, 0.016100000590085983, 0.2761099934577942, -0.0431860014796257, -0.13714000582695007, 0.2700600028038025, -0.27072998881340027, -0.43121999502182007, 0.1284399926662445, -0.1766899973154068, 0.4676800072193146, 0.07349099963903427, 0.1440100073814392, -0.15974999964237213, 0.19032999873161316, 0.3315800130367279, -0.0066581000573933125, 0.7309399843215942, 0.1408499926328659, -0.35067999362945557, 0.2646400034427643, 0.4996599853038788, 0.47130000591278076, -0.18730999529361725, -0.21277999877929688, 0.2934499979019165, -0.4866499900817871, -1.0283000469207764, 0.4915199875831604, 0.016491999849677086, 0.3522599935531616, -0.39302998781204224, 0.3651899993419647, -0.39667999744415283, -0.19022999703884125, -0.21876999735832214, -0.36131998896598816, -0.1161699965596199, -0.06780300289392471, -0.16776999831199646, -0.19226999580860138, 0.0964839980006218, 0.5875899791717529, -0.07430399954319, -0.36741000413894653, -0.182669997215271, -0.5720099806785583, 0.23457999527454376, 0.8097599744796753, 0.2139499932527542, 0.7749099731445312, -0.2153400033712387, -0.6716399788856506, 0.5508300065994263, -0.4632300138473511, -0.27535000443458557, 1.2619999647140503, 0.48881998658180237, -0.132750004529953, 0.3065899908542633, -0.251910001039505, -0.16633999347686768, 0.5954999923706055, -0.34255000948905945, -0.040546998381614685, 0.38631001114845276, -0.5064600110054016, -0.08930400013923645, -0.2126999944448471, -0.1260800063610077, -0.1438400000333786, -0.5283200144767761, -0.0670970007777214, 0.07824499905109406, -0.7170000076293945, -1.0851999521255493, -0.1332699954509735, 
-0.3050200045108795, -0.4109399914741516, 0.08494500070810318, -0.15609000623226166, -0.1277499943971634, -0.2432900071144104, 0.20821000635623932, 0.31272000074386597, -0.8521599769592285, -0.5373600125312805, -0.05422800034284592, 0.38861000537872314, 0.6118699908256531, -0.14990000426769257, 0.631089985370636, 0.09213700145483017, 0.17110000550746918, -0.017952999100089073, 0.01020899973809719, 0.27129998803138733, 0.05405300110578537, 0.3595699965953827, 0.08774299919605255, 0.18929000198841095, -0.003578400006517768, 0.237419992685318, 0.20403000712394714, -0.22071999311447144, 0.3592900037765503, -0.12076999992132187, 0.19749000668525696, 0.010823000222444534, 0.23750999569892883, 0.5334200263023376, 1.4325000047683716, -0.5363399982452393, -0.33945998549461365, 0.3522399961948395, 0.4790300130844116, 0.1256600022315979, -0.021379999816417694, -0.33202001452445984, -0.12376999855041504, -0.06839600205421448, -0.13537999987602234, 0.022128000855445862, -0.7252200245857239, -0.006065499968826771, -0.588670015335083, -0.03206000104546547, 0.027744000777602196, 0.548770010471344, 0.21646000444889069, -0.32343000173568726, -0.007517899852246046, -0.24094000458717346, -0.35607999563217163, -0.6580299735069275, -0.3960300087928772, 0.06733500212430954, 0.036421999335289, -1.0263999700546265, -0.19253000617027283, 0.06083200126886368, 0.0036353999748826027, -0.4690699875354767, 0.37560001015663147, -0.1699499934911728, 0.6080499887466431, 0.09324900060892105, -0.5845900177955627, 0.2508400082588196, -0.10232000052928925, 0.28832000494003296, -0.056147001683712006, -0.42803001403808594, 0.052223000675439835, -0.4414600133895874, 0.6827899813652039, -0.29322999715805054, -0.25655001401901245, -0.09956999868154526, -0.2790200114250183, -0.10961999744176865, -0.2695100009441376, 0.17062999308109283, -0.5376899838447571, -0.3434399962425232, -0.04294700175523758, -0.16958999633789062, -0.2680799961090088, 0.1331699937582016, -0.7021899819374084, 0.2835800051689148, 0.7021600008010864, 0.036386001855134964, -0.36994999647140503, -0.5469899773597717, -0.09925699979066849, -0.06539300084114075, 0.8657600283622742, -0.2915099859237671, 0.7955399751663208, 0.3927899897098541, 0.11706999689340591, -0.14504000544548035, -0.03181099891662598, 0.4585399925708771, 0.2701900005340576, -0.7235599756240845, -0.9078099727630615, 0.4874899983406067, 0.054294999688863754, 0.21071000397205353, -0.2940399944782257, 0.4993099868297577, 0.42381998896598816, -0.6208299994468689, 0.17899000644683838, 0.019728999584913254, -0.5537700057029724, 0.1675499975681305, -0.5845500230789185, -0.1712699979543686, 0.07278700172901154, 0.30237001180648804, -0.21908000111579895, 0.3407000005245209, -0.7448400259017944, 0.4785600006580353, -0.17951999604701996, 0.6304299831390381, -0.27142998576164246, -0.007223700173199177, 0.22688999772071838, 0.092958003282547, -0.16391000151634216, 0.15873999893665314, -0.14057999849319458, 0.4981600046157837, -0.4235199987888336, 0.5484899878501892, 0.15951000154018402, 0.2556299865245819, 0.10260000079870224, 0.9950100183486938, -0.7751299738883972, 0.5012400150299072], u'tiny': [-0.5124800205230713, 0.07902800291776657, -0.32315000891685486, -0.21187999844551086, 0.08389399945735931, 0.08426299691200256, -0.07468699663877487, -0.1402300000190735, 0.7714999914169312, -1.0180000066757202, -0.3325600028038025, 0.06422500312328339, -0.06452500075101852, 0.2696300148963928, 0.4171299934387207, 0.7300199866294861, -0.14667999744415283, -0.039719000458717346, -0.007759499829262495, 
-0.2533699870109558, 0.24381999671459198, 0.46845000982284546, 0.3018200099468231, 0.49560999870300293, -0.42493999004364014, -0.3663899898529053, 0.024337999522686005, -0.08187399804592133, -0.22078000009059906, -0.039680998772382736, -0.2868399918079376, 0.150969997048378, -0.5763099789619446, 0.7287099957466125, -0.05861499905586243, 0.5063400268554688, -0.19682000577449799, 0.3239699900150299, 0.0987749993801117, 0.08326700329780579, -0.3729400038719177, -0.2002899944782257, 0.10672000050544739, 0.18934999406337738, 0.30867999792099, -0.37213000655174255, -0.1460999995470047, -0.20103999972343445, 0.20206999778747559, 0.3695699870586395, 0.28238001465797424, 0.27279001474380493, 0.6245599985122681, 0.03748700022697449, -0.2579300105571747, -0.31707999110221863, -0.1403300017118454, -0.35690000653266907, 0.6080800294876099, -0.26627999544143677, 0.09121900051832199, -0.2940399944782257, 0.7275599837303162, 0.1532299965620041, 0.6008999943733215, 0.07089599967002869, 0.10829000174999237, 0.5397300124168396, -0.07285799831151962, 0.06689299643039703, -0.12010999768972397, 0.12256000190973282, 0.11852999776601791, 0.013120000250637531, -0.36107000708580017, 0.10399000346660614, -0.31022998690605164, -0.5346400141716003, 0.06081400066614151, -0.478410005569458, -0.05378299951553345, 0.13639000058174133, -0.2747400104999542, 0.3927299976348877, 0.08556299656629562, 0.05681899935007095, 0.1636900007724762, -0.04482100158929825, -0.3421500027179718, -0.1995600014925003, 0.265390008687973, 0.036931999027729034, -0.527679979801178, -0.35117000341415405, 0.24111999571323395, -0.11004000157117844, 0.3346500098705292, 0.2026199996471405, -0.14563000202178955, 0.12634000182151794, 0.3429900109767914, 0.4027999937534332, -0.1912200003862381, -0.7516099810600281, -0.5005199909210205, 0.25001001358032227, 0.4622800052165985, -0.2900499999523163, 0.21549999713897705, -0.020143000409007072, -0.20103000104427338, 0.3635300099849701, 0.1699499934911728, 0.012817000038921833, -0.08401499688625336, -0.21549999713897705, 0.13041000068187714, 0.5611199736595154, 0.2243500053882599, 0.3696799874305725, 0.13503000140190125, -0.013757999986410141, 0.4484499990940094, 0.10454999655485153, -0.1756500005722046, 0.31839001178741455, -0.2199299931526184, 0.3320600092411041, 0.09164299815893173, 0.20821000635623932, -0.008315499871969223, -0.24038000404834747, -0.17790000140666962, 0.43865999579429626, 0.462660014629364, -0.12456999719142914, 0.4791499972343445, 0.4058299958705902, -0.188960000872612, -0.5514799952507019, 0.24605000019073486, 0.1782499998807907, -0.2804499864578247, -0.11215999722480774, -0.542680025100708, -0.1867399960756302, 0.1761299967765808, 0.007350000087171793, 0.10086999833583832, -0.18438999354839325, 0.06794200092554092, -0.5788099765777588, -0.227510005235672, -0.36190998554229736, 0.629010021686554, 0.21813000738620758, -0.3139599859714508, 0.48454999923706055, -0.18616999685764313, 0.07162299752235413, 0.29886001348495483, 0.3124600052833557, 0.670490026473999, -0.13280999660491943, -0.021575000137090683, -0.3851499855518341, 0.21373000741004944, -0.05884600058197975, 0.16820000112056732, -0.1462700068950653, -0.19912999868392944, -0.2563900053501129, -0.018120000138878822, 0.17871999740600586, -0.4075999855995178, -0.4130299985408783, -0.13880999386310577, 0.645829975605011, 0.21177999675273895, -0.037494998425245285, 0.22982999682426453, -0.2195499986410141, 0.26368001103401184, 0.259770005941391, 0.39673998951911926, -0.33967000246047974, 0.7720100283622742, -0.18488000333309174, 
-0.2044299989938736, -0.1530199944972992, 0.5836399793624878, 0.05836600065231323, 0.0960180014371872, 0.05722200125455856, 0.38534998893737793, 0.3483699858188629, 0.45100998878479004, -0.05692800134420395, -0.5913100242614746, 0.15082000195980072, 1.2242000102996826, 0.11168999969959259, -0.012513999827206135, 0.3767299950122833, -0.002835199935361743, -0.5667499899864197, -0.2824999988079071, -0.021463999524712563, -0.36796000599861145, 0.3892099857330322, 0.13610999286174774, 0.409060001373291, 0.3090200126171112, 0.06171499937772751, 0.3733600080013275, 0.31641000509262085, 0.9152100086212158, -0.45712000131607056, -0.5378000140190125, 0.3538300096988678, 0.34540998935699463, 0.20523999631404877, -0.30246999859809875, 0.08977200090885162, 0.2764100134372711, -0.3171299993991852, -0.04040199890732765, -0.17497999966144562, -0.327349990606308, -0.06767100095748901, 0.20987999439239502, -0.6288599967956543, -0.2008499950170517, 0.09164000302553177, 0.3767699897289276, 0.19249999523162842, 0.2765600085258484, -0.020732000470161438, 0.05051799863576889, -0.3760400116443634, 0.2409600019454956, -0.2518100142478943, 0.14538000524044037, 0.5299699902534485, -0.5080599784851074, -0.737529993057251, 0.3245899975299835, 0.2047400027513504, -0.6376799941062927, -0.3987500071525574, -0.24060000479221344, 0.04120299965143204, 0.022269999608397484, -0.47780001163482666, 0.6542199850082397, 0.5955600142478943, 0.10780999809503555, -0.148499995470047, -0.23894000053405762, 0.04747999832034111, 0.36680999398231506, -0.17454999685287476, 0.0031739999540150166, -0.3650299906730652, 0.4756700098514557, -0.37338000535964966, -0.11851000040769577, 0.20499999821186066, -0.12709000706672668, 0.5150399804115295, -0.33278998732566833, 0.5073999762535095, -0.16797000169754028, -0.17333999276161194, -0.44538000226020813, 0.25084999203681946, -1.767300009727478, 0.22277000546455383, -0.9117400050163269, 0.04096899926662445, -0.25262001156806946, 0.8083699941635132, -0.35043999552726746, -0.06772200018167496, -0.3207699954509735, -0.33750998973846436, -0.1057400032877922, 0.053787000477313995, -0.4064899981021881, -0.052218999713659286, 0.09694399684667587, -0.22996999323368073, 0.16529999673366547, 0.08282700181007385, -0.718280017375946, 0.7595700025558472, 0.3504199981689453, 0.03890800103545189, -0.05213300138711929, -0.2072400003671646], u'grimy': [0.0019964000675827265, -0.15526999533176422, -0.25606998801231384, -0.4662500023841858, -0.3706499934196472, 0.11467999964952469, -0.2882300019264221, 0.033684998750686646, 0.15581999719142914, 0.48949000239372253, -0.4161800146102905, -0.33305999636650085, -0.11204999685287476, 0.4613899886608124, -0.0313819982111454, -0.36177998781204224, -0.6395599842071533, -0.049525998532772064, 0.07457000017166138, -0.04346200078725815, 0.027780000120401382, -0.07780800014734268, -0.07606600224971771, 0.4759800136089325, 0.1036200001835823, -0.39972999691963196, 0.6317700147628784, 0.0699789971113205, -0.1815599948167801, 0.22488999366760254, 0.4396499991416931, -0.08446799963712692, -0.5964999794960022, -0.06382600218057632, 0.6457099914550781, 0.5857999920845032, -0.38514000177383423, -0.1365099996328354, -0.2085999995470047, -0.27862000465393066, 0.11779999732971191, 0.059843000024557114, -0.11811000108718872, -0.0917460024356842, 0.28115999698638916, 0.47530001401901245, 0.2679400146007538, 0.03467300161719322, -0.17856000363826752, -0.4130200147628784, 0.18979999423027039, -0.2363699972629547, 0.3724899888038635, 0.09185999631881714, 0.49577999114990234, 
-0.07678599655628204, 0.05526699870824814, -0.04129000008106232, -0.02565399929881096, -0.02933499962091446, 0.10417000204324722, -0.6805099844932556, -0.042010001838207245, -0.08790799975395203, -0.14780999720096588, 0.5825300216674805, 0.1585800051689148, 0.12105000019073486, 0.332830011844635, -0.4985699951648712, 0.25766000151634216, -6.577299791388214e-05, -0.12939000129699707, -0.2685700058937073, 0.061312999576330185, -0.026534000411629677, -0.7476900219917297, 0.046147000044584274, 0.2706100046634674, -0.10461000353097916, -0.05530799925327301, -0.31571000814437866, 0.20467999577522278, -0.27566999197006226, 0.226610004901886, -0.21161000430583954, 0.3051300048828125, -0.24931000173091888, 0.14420999586582184, 0.5732799768447876, 0.26725998520851135, 0.1309400051832199, 0.3972100019454956, 0.019607000052928925, -0.34586000442504883, -0.052053000777959824, 0.4184899926185608, -0.16711999475955963, 0.47512000799179077, -0.003980699926614761, 0.5282400250434875, 0.16011999547481537, -0.4745199978351593, 0.27445998787879944, -0.22931000590324402, -0.06835000216960907, 0.13120999932289124, 0.20369000732898712, -0.23886999487876892, 0.08767800033092499, -0.5739099979400635, -0.11877000331878662, -0.491239994764328, -0.34373000264167786, -0.11788000166416168, 0.06367900222539902, 0.1587499976158142, 0.11069999635219574, -0.38047999143600464, -0.18212999403476715, 0.21562999486923218, -0.19585999846458435, -0.0878629982471466, 0.6292499899864197, 0.33772000670433044, 0.42735999822616577, -0.21875999867916107, -0.46136000752449036, 0.2864300012588501, -0.0583450011909008, 0.3167000114917755, -0.21875999867916107, 0.19933000206947327, -0.04798299819231033, -0.3606399893760681, -0.08240900188684464, 0.31828001141548157, 0.35436999797821045, 0.263619989156723, -0.30629000067710876, 0.30177000164985657, -0.0701960027217865, 0.04332200065255165, 0.29537999629974365, -0.08124600350856781, 0.062070999294519424, 0.031922999769449234, 0.40946999192237854, -0.1509000062942505, -0.2550800144672394, -0.5267000198364258, -0.03126699849963188, 0.048245999962091446, -0.11721999943256378, 0.2222999930381775, 0.2567099928855896, -0.17621000111103058, -0.0219969991594553, 0.7765899896621704, 0.17670999467372894, -0.5278699994087219, 0.18170000612735748, 0.187950000166893, -0.25148001313209534, 0.1827400028705597, -0.13447999954223633, 0.0846090018749237, 0.1024399995803833, 0.08153100311756134, -0.36695000529289246, -0.3930099904537201, 0.3389599919319153, -0.018022999167442322, -0.1360899955034256, -0.5100799798965454, -0.18121999502182007, -0.050579000264406204, 0.2588599920272827, -0.1280899941921234, -0.8151999711990356, 0.033723000437021255, -0.05351800099015236, 0.24031999707221985, 0.2540299892425537, 0.32728999853134155, 0.1671299934387207, 1.0490000247955322, -0.13307000696659088, 0.06979300081729889, -0.034999001771211624, 0.516759991645813, -0.6758700013160706, -0.20991000533103943, -0.03751299902796745, 0.4422999918460846, -0.07169599831104279, -0.8487200140953064, 0.3836199939250946, -0.37428998947143555, 0.33614999055862427, 0.17110000550746918, -0.046943001449108124, 0.4257200062274933, 0.11653999984264374, -0.21458999812602997, -0.23074999451637268, 0.23229999840259552, -0.3616499900817871, -0.0966159999370575, -0.09881100058555603, -0.6422899961471558, 0.41442999243736267, -0.5642099976539612, 0.3188199996948242, 0.362309992313385, 0.226610004901886, 0.46261999011039734, -0.19412000477313995, -0.49786999821662903, 0.08812999725341797, 0.08641599863767624, 0.1427299976348877, 
0.19302000105381012, 0.6150199770927429, 0.1449500024318695, -0.11020000278949738, 0.03234799951314926, -0.4338499903678894, -0.09269800037145615, -0.21178999543190002, 0.21455000340938568, -0.30608999729156494, -0.1634099930524826, -0.1538199931383133, 0.18950000405311584, 0.3554700016975403, 0.27487999200820923, 0.2662299871444702, -0.385809987783432, 0.07788799703121185, 0.34571000933647156, -0.21544000506401062, -0.3181999921798706, 0.016460999846458435, -0.03818599879741669, -0.09752999991178513, -0.1832199990749359, -0.0988290011882782, -0.17934000492095947, -0.0916299968957901, 0.024562999606132507, -0.13878999650478363, 0.4838300049304962, -0.24413999915122986, 0.06755699962377548, 0.1887200027704239, -0.23545999825000763, 0.1851699948310852, -0.1796800047159195, 0.18779000639915466, 0.10752999782562256, 0.06946499645709991, 0.33427000045776367, 0.3099299967288971, 0.4679799973964691, 0.33535999059677124, 0.043223001062870026, -0.11210999637842178, -0.3964200019836426, -0.13178999722003937, -0.4618299901485443, 0.2391899973154068, -0.1386300027370453, 0.07191299647092819, -0.38008999824523926, 0.019891999661922455, 0.1895499974489212, 0.28589001297950745, -0.3335300087928772, 0.36024001240730286, -0.04623999819159508, 0.3255299925804138, -0.3119499981403351, -0.10538999736309052, 0.20298999547958374, 0.23096999526023865, 0.2612999975681305, 0.3600800037384033, -0.20970000326633453, -0.3273400068283081, -0.13112999498844147, 0.575760006904602, -0.08953800052404404, 0.2657400071620941, 0.09938099980354309, -0.12105000019073486, 0.2276799976825714, 0.14320999383926392, -0.1395999938249588, 0.6225799918174744], u'viscous': [-0.012016000226140022, 0.38032999634742737, 0.742169976234436, -0.11530999839305878, -0.24112999439239502, -0.644320011138916, 0.5855000019073486, 0.02223300002515316, 0.5372700095176697, -0.2632499933242798, 0.33901000022888184, 0.08056700229644775, -0.2063799947500229, -0.7435899972915649, -0.6353499889373779, 0.07789699733257294, -0.8103200197219849, -0.18815000355243683, -0.29493001103401184, -0.16674000024795532, -0.43970000743865967, 0.07786200195550919, -0.20714999735355377, 0.07633999735116959, -0.5499399900436401, -0.02575100027024746, 0.32603999972343445, 0.17281000316143036, -0.12370999902486801, -0.057263001799583435, 0.04303999990224838, 0.12524999678134918, -0.04842400178313255, -0.4456399977207184, 1.333899974822998, 0.7993599772453308, 0.4553700089454651, 0.23096999526023865, 0.5553200244903564, 0.5588600039482117, -0.4644300043582916, 0.09018100053071976, -0.10063000023365021, -0.847000002861023, 0.7127599716186523, -0.2799299955368042, -0.023271000012755394, -0.5715100169181824, 0.1249300017952919, 0.5085999965667725, 0.18942999839782715, -0.06276000291109085, -0.13380999863147736, -0.4567300081253052, 0.27393999695777893, -0.32596999406814575, -0.47720998525619507, -0.6187499761581421, 0.4281199872493744, 0.7856600284576416, 0.09574799984693527, -0.34490999579429626, 0.7013900279998779, 0.3882899880409241, 0.2602500021457672, 0.1483200043439865, -0.10317999869585037, -0.1280599981546402, -0.013038000091910362, 0.7678400278091431, 0.2204200029373169, -0.22427000105381012, -0.1506499946117401, 0.3366599977016449, 0.014979000203311443, 0.28009000420570374, 0.5968300104141235, -0.6120399832725525, -0.23377999663352966, -0.9478700160980225, 0.2394700050354004, -0.24128000438213348, -0.04904099926352501, -0.7946100234985352, 0.013117000460624695, 0.021251000463962555, 1.246399998664856, 0.3658300042152405, 0.01862799935042858, 0.0984250009059906, 
-0.5565800070762634, 0.24952000379562378, 0.2987399995326996, 0.06260800361633301, -0.3067399859428406, 0.16399000585079193, -0.10181999951601028, 0.17378999292850494, 0.31567999720573425, 0.3464899957180023, 0.20773999392986298, 0.6307600140571594, -0.0971279963850975, 0.21112999320030212, -0.6550899744033813, 0.720990002155304, -0.3967300057411194, 0.44071999192237854, -0.2635999917984009, -0.3968699872493744, -0.11324000358581543, 0.5296199917793274, -0.11224000155925751, 0.10379000008106232, -0.2728999853134155, 0.5996599793434143, -0.011334000155329704, 0.03202499821782112, 0.09058000147342682, -0.24334999918937683, 0.6358500123023987, -0.3508400022983551, -0.019951000809669495, 0.40070000290870667, 0.1311199963092804, 0.21926000714302063, 0.9142600297927856, -0.3767299950122833, -0.022182999178767204, 0.6657800078392029, 0.07564699649810791, 0.5864599943161011, -0.522130012512207, -0.21950000524520874, -0.41418999433517456, -0.41425999999046326, 0.19495999813079834, -0.6094499826431274, -0.33557000756263733, -0.12206999957561493, -0.07070700079202652, 0.16982999444007874, -0.42570000886917114, -0.6214600205421448, 0.40380001068115234, -0.2529299855232239, -1.0236999988555908, -0.6972399950027466, -0.13638000190258026, 0.5248299837112427, -0.13610999286174774, -0.29725000262260437, -0.6103000044822693, 0.5773000121116638, 0.6071299910545349, -0.19585999846458435, 0.4375799894332886, -0.5407400131225586, 0.27211999893188477, -0.2767300009727478, -0.45151999592781067, -0.8443700075149536, 0.3072499930858612, -0.48506999015808105, -0.3094399869441986, -0.27382001280784607, 0.043591998517513275, -0.07336799800395966, 0.28290998935699463, -0.11072999984025955, 0.7582299709320068, 0.42399999499320984, 0.4686700105667114, 0.24683000147342682, -0.706250011920929, -0.8338900208473206, -0.18585999310016632, -0.5248399972915649, 0.2568199932575226, -0.10580000281333923, -0.13826000690460205, 0.023031000047922134, -0.1286499947309494, 0.3048799932003021, -0.1709499955177307, -0.40880000591278076, 0.9891300201416016, 0.20161999762058258, -0.6914200186729431, -0.3114199936389923, -0.36504998803138733, 0.6486799716949463, -0.06663600355386734, 0.011486999690532684, 0.31407999992370605, 0.24502000212669373, -0.20996999740600586, 0.18821999430656433, -0.31905999779701233, -0.26594001054763794, -0.010222000069916248, -0.04594999924302101, -0.0991860032081604, 0.10932999849319458, 0.7103999853134155, 0.5509099960327148, 0.017090000212192535, -0.07906100153923035, -0.28088998794555664, -0.015564000234007835, -0.39068999886512756, 0.1134599968791008, 0.2617399990558624, -0.0631830021739006, 0.4250600039958954, 0.3528200089931488, 0.53889000415802, -0.12362000346183777, 0.13335999846458435, 0.3477799892425537, -0.49601998925209045, 0.6546000242233276, 0.051575999706983566, 0.2986699938774109, -0.631600022315979, -0.11663000285625458, 0.16857999563217163, 0.19758999347686768, -0.04711199924349785, -0.3654100000858307, 0.33709999918937683, -0.3524700105190277, 0.728410005569458, -0.46553999185562134, 0.35148000717163086, -0.4244599938392639, 1.031000018119812, -0.3733699917793274, -0.07102300226688385, 0.05209999904036522, 0.23342999815940857, 0.1171099990606308, 0.15497000515460968, -0.6897199749946594, -0.12129999697208405, -0.3992699980735779, 0.9369300007820129, 0.19257000088691711, 0.2945300042629242, 0.10685999691486359, -0.158160001039505, -1.1131000518798828, -0.02899700030684471, -0.3479900062084198, -0.3574199974536896, -0.10857000201940536, -0.8297899961471558, -0.5253099799156189, 
0.20432999730110168, -0.02966099977493286, -0.692300021648407, -0.31255000829696655, 0.0286289993673563, -0.06920800358057022, -0.27008000016212463, -0.34384000301361084, 0.08771000057458878, -0.2429800033569336, 0.17913000285625458, 0.4898500144481659, 0.2722100019454956, -0.14928999543190002, -0.261709988117218, 0.05178999900817871, -0.2655700147151947, 0.5923699736595154, 1.045799970626831, 0.17079000174999237, -1.0699000358581543, 0.1261499971151352, 0.33678001165390015, -0.20980000495910645, 1.3348000049591064, -0.11230000108480453, 0.07121200114488602, -0.6809899806976318, -0.245619997382164, -0.16829000413417816, 0.2235500067472458, -0.06634200364351273, -0.43865999579429626, -0.2205899953842163, 0.23468999564647675, 0.7768099904060364, 0.6315500140190125, -0.002058400074020028, 0.19160999357700348, -0.09983199834823608, -0.14780999720096588, -0.3234100043773651], u'empty': [0.1783200055360794, 0.4011099934577942, -0.4459100067615509, -0.22748999297618866, 0.012694000266492367, 0.13301999866962433, 0.14531999826431274, -0.24171000719070435, 0.028807999566197395, -0.5825499892234802, -0.45945000648498535, -0.33118000626564026, -0.7542799711227417, -0.13107000291347504, -0.22846999764442444, 0.22879000008106232, -0.30542999505996704, 0.3982900083065033, 0.02027199976146221, 0.16888999938964844, 0.11823000013828278, 0.14226999878883362, 0.3559100031852722, -0.10628999769687653, -0.3342300057411194, -0.07937400043010712, 0.23255999386310577, 0.21815000474452972, 0.6110000014305115, 0.019021999090909958, 0.08918900042772293, -0.05686600133776665, -0.05260299891233444, 0.26137998700141907, -0.6862800121307373, 0.38778001070022583, -0.32879000902175903, -0.12936000525951385, -0.3481000065803528, 0.7654100060462952, -0.15921999514102936, -0.026270000264048576, -0.4210500121116638, 0.16277000308036804, -0.043786998838186264, 0.051368001848459244, 0.2031400054693222, 0.16520999372005463, -0.193900004029274, 0.28356999158859253, 0.3250899910926819, -0.08907099813222885, -0.07999899983406067, -0.05616400018334389, 0.1684499979019165, 0.16824999451637268, 0.1518000066280365, 0.13211999833583832, 0.20135000348091125, 0.5544700026512146, -0.11641000211238861, 0.01272599957883358, -0.17868000268936157, -0.16162000596523285, -0.06916200369596481, -0.4411799907684326, -0.08553899824619293, -0.15695999562740326, 0.12502999603748322, -0.043505001813173294, -0.25975000858306885, -0.27737000584602356, 0.15896999835968018, 0.3424299955368042, -0.08558399975299835, -0.05421200022101402, 0.3715200126171112, 0.25001999735832214, 0.28365999460220337, -0.056738998740911484, -0.011606999672949314, 0.312610000371933, -0.673259973526001, 0.49219000339508057, -0.022506000474095345, -0.0612649992108345, -0.0014979999978095293, -0.15696999430656433, -0.42890000343322754, 0.06271299719810486, 0.6863499879837036, 0.028013000264763832, -0.14770999550819397, -0.2558000087738037, -0.16368000209331512, 0.0839489996433258, 0.16134999692440033, 0.24718999862670898, 0.5512499809265137, -0.6045600175857544, 0.2772200107574463, 0.28727999329566956, 0.03283900022506714, -0.061507999897003174, -0.40529999136924744, -0.35927000641822815, 0.08311299979686737, -0.43755999207496643, -0.3012099862098694, 0.2468000054359436, -0.9840400218963623, -0.077845998108387, 0.060123998671770096, 0.21131999790668488, -0.669510006904602, 0.2440200001001358, 0.4200100004673004, 0.3619399964809418, 0.21817000210285187, 0.004279899876564741, 0.07360800355672836, -0.3434999883174896, 0.11102999746799469, 1.0780999660491943, -0.12323000282049179, 
-0.06786499917507172, 0.1311900019645691, -0.32280001044273376, -0.4532099962234497, 0.11316999793052673, 0.23889000713825226, 0.0740320011973381, -0.030851999297738075, 0.18313999474048615, 0.05844099819660187, 0.20928999781608582, 0.3929100036621094, 0.21622000634670258, -0.2020300030708313, -0.16554999351501465, -0.06233600154519081, -0.03686999902129173, -0.5653600096702576, 0.36629000306129456, -0.6222900152206421, -0.15846000611782074, 0.23816999793052673, 0.6546400189399719, -0.4267500042915344, -0.17735999822616577, -0.2610900104045868, 0.3542500138282776, -0.5670999884605408, -0.3950499892234802, 0.3469499945640564, -0.08939400315284729, 0.10372000187635422, 0.03263700008392334, 0.460889995098114, 0.16122999787330627, 0.5404800176620483, -0.2308499962091446, 0.7088599801063538, 0.11801999807357788, 0.24639999866485596, 0.09329599887132645, -0.14810000360012054, 0.20813000202178955, -0.03448399901390076, -0.39923998713493347, 0.015270999632775784, 0.44343000650405884, 0.22360999882221222, -0.14248999953269958, 0.07535199820995331, -0.280349999666214, -0.4066999852657318, 0.23172999918460846, -0.08386299759149551, -0.7204499840736389, 0.06125200167298317, 0.06366299837827682, 0.5512800216674805, 0.2595899999141693, 0.3965800106525421, -0.044589001685380936, 0.7660300135612488, -0.2730099856853485, 0.28804999589920044, 0.43108001351356506, 0.9266600012779236, -0.3902600109577179, 0.3035300076007843, -0.07004900276660919, 0.1060200035572052, 0.041572000831365585, -0.9336199760437012, 0.22633999586105347, -0.17353999614715576, 0.45735999941825867, 0.44179001450538635, -0.24627000093460083, -0.13492999970912933, -0.25593000650405884, 0.01929900050163269, -0.7587199807167053, 0.1348399966955185, -0.5712500214576721, 0.049355000257492065, -0.49154001474380493, -0.22960999608039856, -0.0126740001142025, -0.582069993019104, 0.24806000292301178, -0.1341799944639206, 0.08921799808740616, -0.14215999841690063, -0.47218000888824463, -0.16484999656677246, 0.0442189984023571, 0.6217700242996216, 0.06485100090503693, 0.12467999756336212, -0.031484998762607574, -0.3064500093460083, 0.14201000332832336, 0.38826000690460205, -0.2685000002384186, 0.3666299879550934, -0.599399983882904, 0.22450999915599823, 0.22857999801635742, -0.3038100004196167, -0.14532999694347382, 0.0759269967675209, 0.8286200165748596, -0.2738400101661682, 0.18559999763965607, -0.6794000267982483, 0.144679993391037, -0.23503999412059784, 0.1320900022983551, -0.09014300256967545, 0.44982999563217163, -0.1568399965763092, -0.5352399945259094, 0.6652299761772156, 0.28971999883651733, -0.48883000016212463, 0.09801699966192245, 0.6065899729728699, 0.4771600067615509, 0.31775999069213867, -0.3855400085449219, -0.13312000036239624, -0.09733600169420242, -0.5842499732971191, -0.39412999153137207, 0.3074699938297272, -0.01055699959397316, -0.307779997587204, -0.26236000657081604, 0.009162100031971931, 0.04321800172328949, 0.2274399995803833, -0.5664100050926208, 0.34825000166893005, -0.43421998620033264, -0.058139000087976456, 0.34373000264167786, -0.24226999282836914, -0.0011881999671459198, -0.26159000396728516, -0.253930002450943, -0.03936599940061569, -0.293830007314682, -1.9585000276565552, 0.6626099944114685, 0.23322999477386475, -0.07777199894189835, -0.4111199975013733, 0.03878600150346756, -0.34325000643730164, -0.32429999113082886, 0.10864999890327454, 0.7098900079727173, 0.39465999603271484, 0.5443900227546692, -0.11796999722719193, 0.3312000036239624, 0.1863500028848648, 0.18398000299930573, 0.13083000481128693, 
0.6655200123786926, -0.13407999277114868, 0.17816999554634094, 0.2646099925041199, -0.18167999386787415, -0.32565999031066895, -0.052769001573324203], u'scratched': [0.055792998522520065, 0.17569999396800995, -0.8767200112342834, 0.30649998784065247, -0.06575000286102295, 0.033341001719236374, -0.4231100082397461, -0.10397999733686447, 0.20502999424934387, 0.22353999316692352, 0.0729840025305748, -0.28731998801231384, -0.4742099940776825, -0.3783400058746338, -0.4296799898147583, 0.2715199887752533, 0.11076000332832336, 0.7771099805831909, -0.17956000566482544, -0.21243000030517578, -0.15512000024318695, -0.04329200088977814, -0.2003300040960312, -0.2046699970960617, -0.004031499847769737, -0.0952640026807785, 0.3792699873447418, 0.05966600030660629, 0.06626100093126297, 0.27884000539779663, 0.2498600035905838, 0.07879699766635895, -0.014980999752879143, -0.07182300090789795, -0.7054200172424316, 0.16664999723434448, -0.5077400207519531, -0.2167699933052063, -0.07465200126171112, 0.32405000925064087, 0.052737001329660416, 0.2947399914264679, 0.00484029995277524, -0.6975200176239014, 0.4303700029850006, 0.5776299834251404, -0.016580000519752502, 0.26583999395370483, -0.15862999856472015, -0.1826999932527542, 0.0945110023021698, -0.3082999885082245, 0.07659199833869934, 0.041763000190258026, -0.3632499873638153, 0.1310500055551529, -0.08730900287628174, -0.13033999502658844, 0.189860001206398, 0.40542998909950256, 0.01039700023829937, 0.3841699957847595, -0.616320013999939, 0.1124500036239624, 0.00276309996843338, -0.20816999673843384, 0.21149000525474548, -0.28519999980926514, 0.4293000102043152, -0.5019599795341492, 0.5859900116920471, 0.265720009803772, 0.3234499990940094, 0.37477999925613403, 0.4500100016593933, -0.6055300235748291, 0.08129599690437317, 0.1381399929523468, 0.3668299913406372, 0.06925100088119507, -0.248089998960495, -0.0001846999948611483, -0.12052000313997269, -0.2691099941730499, -0.4411500096321106, -0.16394999623298645, -0.14805999398231506, -0.1805099993944168, 0.33427000045776367, 0.25374001264572144, -0.10818000137805939, 0.16089999675750732, -0.09666399657726288, -0.5519599914550781, 0.18216000497341156, 0.017551999539136887, -0.24257999658584595, -0.20311999320983887, -0.31679001450538635, -0.1818699985742569, -0.13745999336242676, -0.28637999296188354, -0.09840500354766846, 0.30862998962402344, 0.26043999195098877, 0.4325000047683716, 0.09707000106573105, -0.630299985408783, -0.6037899851799011, 0.449290007352829, -0.3292199969291687, -0.13367000222206116, -0.7388200163841248, 0.017105000093579292, -0.575410008430481, 0.005414300132542849, -0.7552400231361389, -0.18643000721931458, 0.17204000055789948, -0.398389995098114, -0.19654999673366547, -0.6878899931907654, -0.7472800016403198, 0.5038899779319763, -0.10074999928474426, 0.3083899915218353, -0.5711399912834167, -0.5342400074005127, 0.20266999304294586, 0.14846999943256378, 0.03228599950671196, -0.1542000025510788, 0.3598000109195709, 0.4049200117588043, -0.2538299858570099, -0.1864600032567978, 0.5710399746894836, -0.03677000105381012, 0.34584999084472656, 0.38054999709129333, 0.3176099956035614, 0.7414699792861938, -0.1271599978208542, 0.16767999529838562, 0.45392000675201416, 0.09112299978733063, -0.04626400023698807, 0.2795400023460388, -0.0020481001120060682, -0.19054000079631805, 0.25029000639915466, -0.21276000142097473, -0.03966899961233139, -0.32829999923706055, -0.3491300046443939, -0.409170001745224, 0.07867299765348434, -0.26815998554229736, 0.015324000269174576, 0.417820006608963, 
-0.3808099925518036, -0.30285000801086426, -0.929830014705658, 0.3052600026130676, 1.0893000364303589, 0.38791000843048096, 0.21379999816417694, 0.1443299949169159, 0.13210000097751617, -0.4568899869918823, -0.34178999066352844, -0.08152099698781967, 0.48758000135421753, -0.5628100037574768, -0.6002399921417236, -0.20720000565052032, 0.45590999722480774, 0.2540000081062317, 0.11394999921321869, -0.12773999571800232, 0.005075199995189905, 0.6150100231170654, 0.02361300028860569, -0.019300000742077827, -0.4934999942779541, -0.2642099857330322, 0.6944800019264221, -0.4984099864959717, 0.2680000066757202, 0.1775600016117096, -0.03454200178384781, -0.09089499711990356, -0.052553001791238785, -0.020073000341653824, 0.32611000537872314, 0.11993999779224396, -0.2599700093269348, -0.043820999562740326, -0.038130998611450195, -0.05715800076723099, 0.5460600256919861, -0.4847100079059601, -0.0034364000894129276, -0.2117999941110611, 0.3811500072479248, -0.13312000036239624, 0.12910999357700348, -0.2640399932861328, 0.010147999972105026, 0.35447999835014343, 0.20300999283790588, -0.23704999685287476, 0.012920999899506569, -0.36855998635292053, 0.09010999649763107, 0.27584999799728394, 0.17329999804496765, -0.10243000090122223, 0.2052599936723709, 0.15202000737190247, 0.05720699951052666, 0.3269299864768982, -0.1226700022816658, -0.3509500026702881, 0.5402699708938599, 0.23177999258041382, 0.11802999675273895, -0.17979000508785248, -0.054510001093149185, 0.35583001375198364, 0.08555900305509567, -0.23622000217437744, -0.6547899842262268, -0.2819899916648865, -0.03558899834752083, -0.05448399856686592, 0.009893300011754036, -0.25231000781059265, 0.1256999969482422, -0.45263001322746277, 0.2656500041484833, -0.21597999334335327, -0.27757999300956726, -0.03341500088572502, 0.3161599934101105, -0.14876000583171844, 0.06259699910879135, 0.6144400238990784, 0.15971000492572784, 0.6241000294685364, 0.1515199989080429, -0.02941099926829338, 0.3596700131893158, -0.19713999330997467, 0.2280299961566925, 0.23872999846935272, 0.3625200092792511, 0.06732700020074844, -0.0818680003285408, 0.21940000355243683, -0.2496200054883957, 0.7796099781990051, -0.2768799960613251, 0.17297999560832977, 0.07907599955797195, 0.022254999727010727, 0.28501999378204346, 0.036458998918533325, -0.32708001136779785, 0.7202799916267395, -0.23016999661922455, -0.5081899762153625, 0.17045000195503235, -0.34558001160621643, -0.4874899983406067, 0.28560999035835266, -0.18253999948501587, -0.1750199943780899, -0.33788999915122986, -0.009437699802219868, 0.019352000206708908, 0.18264999985694885, -0.10073000192642212, 0.6393300294876099, -0.2206999957561493, 0.24653999507427216, 0.08288899809122086, -0.39386001229286194, 0.17997999489307404, -0.3540700078010559, 0.058051999658346176, -0.04868999868631363, -0.09416700154542923, 0.41363000869750977, -0.4344500005245209, -0.5980799794197083, 0.42961999773979187, -0.10958000272512436, -0.02828899957239628, 0.21730999648571014], u'painted': [0.09827099740505219, 0.08364000171422958, -0.436599999666214, -0.35335999727249146, 0.0764629989862442, 0.4095599949359894, -0.6532700061798096, -0.02299400046467781, -0.4695500135421753, -0.7430999875068665, -0.07309900224208832, -0.06191200017929077, 0.25301000475883484, 0.4149799942970276, 0.30292999744415283, 0.1892700046300888, 0.40242999792099, -0.11445000022649765, -0.24873000383377075, -0.22384999692440033, -0.1949400007724762, 0.5077999830245972, 0.72257000207901, 0.05707700178027153, 0.04020899906754494, -0.5655699968338013, -0.13964000344276428, 
-0.30783000588417053, 0.06130700185894966, 0.48342999815940857, 0.8298299908638, 0.6000300049781799, -0.6261600255966187, 0.08101800084114075, 0.14247000217437744, 0.9586099982261658, -0.6230199933052063, -0.795710027217865, -0.18004000186920166, -0.2542499899864197, -0.08688300102949142, -0.10056000202894211, -0.38477998971939087, -0.3391200006008148, 0.1859399974346161, 0.39800000190734863, -0.07513400167226791, -0.13122999668121338, 0.10839000344276428, -0.495959997177124, -0.342960000038147, 0.2932800054550171, 0.7237600088119507, 0.1896599978208542, 0.15629999339580536, 0.012388000264763832, -0.31112998723983765, 0.03822999820113182, 0.5317299962043762, -0.15994000434875488, -0.022060999646782875, -0.1997700035572052, 0.3155600130558014, -0.26194998621940613, 0.5076799988746643, -0.49985000491142273, -0.3118799924850464, -0.7376599907875061, 0.5996699929237366, -0.6805199980735779, -0.41130000352859497, -0.4296500086784363, -0.2991099953651428, -0.0656369999051094, -0.20986999571323395, -0.10318999737501144, 0.4685400128364563, 0.6021199822425842, 0.10892000049352646, -0.2571699917316437, -0.3754900097846985, -0.11455000191926956, -0.6425099968910217, -0.22221000492572784, -0.0487309992313385, 0.5625100135803223, 0.2838299870491028, 0.4368700087070465, -0.37171000242233276, 0.4470899999141693, 0.6687800288200378, -0.17520999908447266, 0.06668499857187271, -0.16353000700473785, 0.019984999671578407, 0.2901799976825714, 0.10251999646425247, -0.5002700090408325, 0.20938999950885773, -0.4599500000476837, -0.1141199991106987, -0.0451899990439415, 0.1488800048828125, 0.26346999406814575, -0.13463999330997467, 0.3908100128173828, 0.0034984999801963568, -0.07513400167226791, -0.0349700003862381, -0.1825300008058548, -0.09586700052022934, 0.006965000182390213, 0.26809000968933105, 0.06910700350999832, -0.050018999725580215, 0.007995099760591984, -0.19128000736236572, 0.8888099789619446, 0.10982999950647354, -0.32232001423835754, -0.1753000020980835, -0.2558499872684479, -0.10426999628543854, 0.6226199865341187, -0.19088000059127808, 0.23128999769687653, -0.6088799834251404, 0.26493000984191895, 0.02145100012421608, 0.28540998697280884, 0.01191799994558096, 0.07838299870491028, -0.04975299909710884, 0.29774999618530273, -0.527209997177124, -0.12873999774456024, 0.15680000185966492, 0.527899980545044, 0.3285900056362152, -0.10932999849319458, 0.18609000742435455, 0.8507000207901001, -0.5814599990844727, -0.3644700050354004, 0.08756399899721146, 0.20389999449253082, 0.26767000555992126, 0.17798000574111938, 0.0680759996175766, -0.19327999651432037, 0.19833000004291534, 0.40665000677108765, -0.32396000623703003, -0.4069899916648865, -0.28933000564575195, 0.11588999629020691, -0.21453000605106354, 0.3246299922466278, 0.295740008354187, 0.10871999710798264, -0.3660599887371063, 0.31481000781059265, 0.06256599724292755, 0.21599000692367554, 0.4210599958896637, 0.11368999630212784, -0.3905799984931946, 0.506600022315979, 0.10548000037670135, -0.15227000415325165, 0.20691999793052673, -0.1174900010228157, 0.12589000165462494, -0.033541999757289886, 0.11694999784231186, -0.21258999407291412, 0.05466800183057785, -0.03751000016927719, -0.044555000960826874, -0.7578399777412415, -0.35097000002861023, 0.492900013923645, 0.17509999871253967, 0.2937900125980377, 0.02316400036215782, -0.8973399996757507, -0.01743300072848797, 0.1631699949502945, 0.657010018825531, 0.49327000975608826, 0.5159599781036377, -0.05886299908161163, 0.001689799944870174, 0.23486000299453735, 0.119889996945858, -0.31679001450538635, 
-0.11648999899625778, 0.42583000659942627, -0.3655799925327301, -0.36820998787879944, 0.6301500201225281, -0.28641000390052795, -0.10583999752998352, -0.3375000059604645, 0.8719499707221985, -0.31224000453948975, -0.16872000694274902, 0.40568000078201294, -0.5260499715805054, -0.058844998478889465, 0.687690019607544, 0.00011950000043725595, 0.07012499868869781, -0.778219997882843, 0.6301900148391724, 0.08089400082826614, 0.7409999966621399, -0.3038400113582611, 0.12317000329494476, -0.010397999547421932, 1.0073000192642212, 0.37856000661849976, 0.36507999897003174, -0.5227500200271606, -0.15629999339580536, -0.16325999796390533, -0.09424199908971786, 0.0018016999820247293, -0.1397400051355362, -0.6391400098800659, -0.4660100042819977, -0.17497000098228455, -0.23208999633789062, -0.11027000099420547, -0.04583299905061722, 0.04197800159454346, -0.3403100073337555, -0.054441001266241074, -0.5481799840927124, -0.14981000125408173, -0.4440299868583679, -0.08458399772644043, -0.5004799962043762, 0.564740002155304, -0.32763001322746277, -0.08906599879264832, -0.11954999715089798, -0.6657000184059143, -0.07044800370931625, 0.6366099715232849, -0.1265300065279007, -0.4875600039958954, 0.6215500235557556, -0.16550999879837036, 0.9951599836349487, -0.11396999657154083, 0.13409000635147095, 0.0849360004067421, 0.17007000744342804, 0.3957799971103668, -0.210439994931221, -0.11009000241756439, 0.6281599998474121, 0.06082899868488312, 0.17803999781608582, -0.23836000263690948, -0.2051600068807602, -0.2816300094127655, -0.43143999576568604, -0.1435600072145462, 0.10332000255584717, -0.14305999875068665, 0.2790200114250183, -0.12026000022888184, -0.06215199828147888, 0.3334299921989441, -1.0379999876022339, -0.52360999584198, -0.38743001222610474, -0.27219000458717346, 0.19840000569820404, -0.1550299972295761, -0.3455199897289276, 0.14673000574111938, 0.18647000193595886, 0.6245399713516235, 0.11437000334262848, 0.2669300138950348, 0.2824699878692627, -0.1289999932050705, -0.07017800211906433, 0.010684000328183174, -0.09247100353240967, 0.2035199999809265, -0.4122999906539917, -0.1638599932193756, 0.7753400206565857, 0.18616999685764313, -0.00937539990991354, 0.11241000145673752], u'pierced': [-0.18466000258922577, -0.42610999941825867, -0.25488999485969543, -0.22342999279499054, 0.17359000444412231, 0.26816999912261963, 0.2160000056028366, 0.008522200398147106, -0.3134300112724304, -0.15715999901294708, -0.29497000575065613, 0.27667999267578125, 0.2111400067806244, 0.08559100329875946, -0.01116899959743023, 0.5413399934768677, -0.03890800103545189, 0.0075622000731527805, -0.31007999181747437, 0.44422000646591187, -0.0728290006518364, 0.6169999837875366, 0.379040002822876, -0.37342000007629395, 0.3907899856567383, -0.10047999769449234, 0.27632999420166016, 0.32385000586509705, -0.4556800127029419, -0.07819200307130814, 0.625469982624054, 0.8508899807929993, 0.09972500056028366, 0.458189994096756, 0.45837000012397766, -0.19431999325752258, -0.3043400049209595, -0.3684700131416321, 0.53125, 1.0533000230789185, 0.541379988193512, 0.14764000475406647, -0.37975001335144043, -0.2941800057888031, 0.09756399691104889, 0.7520700097084045, 0.020330000668764114, 0.009584399871528149, 0.154789999127388, -0.48614999651908875, -0.00920020043849945, 0.2990800142288208, 0.35447001457214355, -0.23339000344276428, -0.3326700031757355, -0.35569998621940613, -0.20181000232696533, -0.1795399934053421, 0.36719000339508057, -0.014019000343978405, 0.3964399993419647, -0.10329999774694443, -0.18622000515460968, 
0.36327001452445984, -0.07953599840402603, -0.2565400004386902, 0.40446001291275024, -0.09622299671173096, 0.649370014667511, -0.06547299772500992, 0.09716299921274185, -0.026789000257849693, 0.48072001338005066, 1.0601999759674072, 0.4386399984359741, 0.02749899961054325, 0.5618500113487244, -0.3123700022697449, -0.6308799982070923, -0.37946999073028564, -0.16011999547481537, 0.1613599956035614, 0.4683400094509125, 0.3623799979686737, -0.7125599980354309, 0.6033999919891357, -0.41822001338005066, -0.4377399981021881, -0.3172599971294403, 0.21491000056266785, 0.3461199998855591, 0.1603499948978424, -0.23176999390125275, 0.1612599939107895, -0.19468000531196594, 0.07935000211000443, -0.30298998951911926, 0.416920006275177, 0.214819997549057, 0.2696300148963928, -0.4611699879169464, 0.01897300034761429, 0.28769999742507935, -0.36221998929977417, 0.24838000535964966, -0.044874999672174454, -0.031140999868512154, -0.25881001353263855, -0.3983199894428253, 0.14722000062465668, -0.06023300066590309, 0.6414399743080139, 0.19547000527381897, -0.6972399950027466, -0.0875220000743866, -0.2251099944114685, -0.7579299807548523, 0.46345001459121704, 0.18272000551223755, -0.015130000188946724, -0.018331000581383705, -0.33204999566078186, -0.22904999554157257, 0.8624200224876404, -0.6113799810409546, -0.2479500025510788, -0.33469998836517334, -0.1078300029039383, -0.16448000073432922, -0.4212000072002411, 0.20204000174999237, 0.10465999692678452, 0.344650000333786, -0.24827000498771667, -0.0031477001029998064, 0.4702399969100952, -0.28885000944137573, -0.11020000278949738, 0.5316500067710876, 0.04328399896621704, -0.010925999842584133, 0.49919000267982483, -0.16405999660491943, -0.1496099978685379, 0.04466300085186958, -0.01784300059080124, 0.8093500137329102, -0.1912499964237213, 0.1687300056219101, -0.12921999394893646, -0.09175200015306473, -0.042167000472545624, 0.11037000268697739, 0.11088000237941742, 0.6654999852180481, -0.2612900137901306, 0.16166000068187714, -0.3296099901199341, 0.03200700134038925, 0.3045699894428253, -0.5521299839019775, 0.20205999910831451, 0.14892999827861786, -0.035663001239299774, 0.5272600054740906, 0.19035999476909637, -0.16211000084877014, 0.25731998682022095, 0.19530999660491943, -0.8571400046348572, -0.24355000257492065, 0.7067099809646606, 0.2573699951171875, -0.21619999408721924, -0.478769987821579, -0.0516510009765625, -0.44571998715400696, 0.3248000144958496, 0.22166000306606293, -0.5069800019264221, 0.344760000705719, -0.012670000083744526, 0.4406999945640564, -0.5024899840354919, 0.09582199901342392, -0.11386000365018845, 0.9744799733161926, 0.42438000440597534, -0.07456400245428085, 0.0562409982085228, -0.42405998706817627, -0.5570499897003174, -0.4070099890232086, 0.025374000892043114, 0.05849500000476837, 0.403439998626709, -0.3730500042438507, -0.2627899944782257, -0.07631199806928635, 0.4763999879360199, 0.3811100125312805, -0.11918000131845474, -0.058371998369693756, -0.21536000072956085, -0.2628600001335144, 0.40257999300956726, -0.28224000334739685, 0.2958900034427643, -0.4812699854373932, 0.06254500150680542, 0.6200100183486938, -0.1282700002193451, -0.4650900065898895, -0.2713199853897095, 0.10356000065803528, -0.4726099967956543, 0.8733000159263611, -0.1842299997806549, -0.22081999480724335, 0.2512100040912628, 0.25977998971939087, 0.10683000087738037, 0.46011999249458313, -0.1349399983882904, -0.1999800056219101, 0.25922998785972595, -0.6039800047874451, -0.701449990272522, 0.1893399953842163, -0.31139999628067017, 0.25314998626708984, 
0.3465900123119354, -0.43709999322891235, -0.3341499865055084, -0.5534600019454956, 0.07331400364637375, -0.018503999337553978, -0.3479999899864197, -0.4853299856185913, -0.5130699872970581, 0.49713999032974243, -0.8154500126838684, -0.011795000173151493, -0.4299499988555908, -0.022935999557375908, -0.2527500092983246, -0.041032999753952026, -0.08618099987506866, 0.2760300040245056, -0.701960027217865, -0.23172999918460846, -0.14857999980449677, 0.08417999744415283, -1.017699956893921, -0.2702699899673462, 0.35040000081062317, 0.04826200008392334, -0.2993600070476532, -0.4752199947834015, 0.6262900233268738, -0.22950999438762665, -0.08324100077152252, 0.0746999979019165, -0.006308699958026409, -0.14764000475406647, 0.7234699726104736, -0.2771199941635132, -0.06765799969434738, -0.4646399915218353, 0.24792000651359558, -0.10758999735116959, -0.6436700224876404, 0.26962000131607056, -0.3442099988460541, -0.6744899749755859, -0.042897000908851624, -0.1586800068616867, -0.6807900071144104, -1.2503999471664429, 0.2762500047683716, -0.31953001022338867, -0.04716299846768379, -0.4147399961948395, 0.19840000569820404, -0.4199399948120117, 0.5058500170707703, 0.38207998871803284, -0.18862999975681305, 0.24677999317646027, -0.27090001106262207, 0.07745800167322159, -0.007953999564051628, -0.13278000056743622, 0.04938799887895584, 0.23827999830245972, -0.5420799851417542, -0.15068000555038452, -0.03177599981427193, -0.05890800058841705, 0.10294000059366226], u'draped': [-0.1932699978351593, -0.3521299958229065, 0.3307499885559082, -0.254720002412796, -0.2659299969673157, 0.2247599959373474, -0.4267500042915344, -0.22450999915599823, -0.1641799956560135, 0.32166001200675964, -0.4142400026321411, 0.02124599926173687, -0.6119800209999084, -0.19472000002861023, 0.15746000409126282, 0.685670018196106, -0.5203400254249573, 0.272599995136261, -0.047724999487400055, -0.20227999985218048, -0.12058000266551971, -0.5664299726486206, 0.4279800057411194, 0.08070400357246399, -0.1480100005865097, -0.4737200140953064, -0.09064500033855438, 0.2214300036430359, 0.20970000326633453, -0.1348699927330017, 0.2498299926519394, -0.21051999926567078, -0.23097999393939972, 0.524619996547699, 0.05746300145983696, 0.41402000188827515, -0.18569999933242798, -0.6124600172042847, 0.33980000019073486, -0.033351000398397446, -0.364300012588501, -0.9481499791145325, -0.4273099899291992, -0.27706000208854675, 0.4092699885368347, -0.15524999797344208, 0.5085700154304504, -0.18339000642299652, 0.3274799883365631, 0.0765409991145134, -0.7838799953460693, -0.2805500030517578, 0.17590999603271484, -0.6443300247192383, -0.7729399800300598, -0.27529001235961914, -0.3574399948120117, -0.4735200107097626, -0.0036611000541597605, 0.7525500059127808, 0.6362599730491638, -0.08443699777126312, 0.20117999613285065, 0.0861470028758049, -0.1060900017619133, -0.4916900098323822, -0.17630000412464142, 0.27849000692367554, 0.5818399786949158, 0.07282400131225586, -0.5225099921226501, 0.11439000070095062, -0.5715699791908264, -0.45625001192092896, -0.2526000142097473, 0.5594800114631653, 0.28745999932289124, -0.08981099724769592, 0.15940000116825104, -0.11467999964952469, -0.3884600102901459, 0.003492099931463599, -0.3429099917411804, 0.14717000722885132, -0.24252000451087952, 0.5364199876785278, 0.11779999732971191, 0.5335400104522705, -0.23431000113487244, 0.18914000689983368, 0.8062199950218201, -0.7493299841880798, 0.3487600088119507, 0.3732300102710724, -0.19485999643802643, -0.2542099952697754, 0.22643999755382538, 0.27748000621795654, 
-0.20531000196933746, 0.36127999424934387, 1.0598000288009644, 0.042479000985622406, -0.0026114999782294035, 0.10829000174999237, 0.3797700107097626, 0.1936500072479248, 0.6205800175666809, 0.07559700310230255, -0.1376899927854538, -0.29941999912261963, -0.8480799794197083, 0.8325499892234802, -0.3551900088787079, -0.1023700013756752, 0.14799000322818756, -0.09973999857902527, -0.039367999881505966, 0.7017899751663208, -0.04295700043439865, -0.8641999959945679, -0.3105199933052063, -0.23371000587940216, 0.9836699962615967, 0.7777100205421448, -0.31314998865127563, -0.04054899886250496, -0.442440003156662, -0.1177000030875206, -0.24624000489711761, 0.12996000051498413, -0.018451999872922897, 0.209989994764328, 0.0027054999954998493, 0.20615999400615692, -0.07684600353240967, 0.10176999866962433, -0.19990000128746033, -0.1354600042104721, -0.04997200146317482, 0.3425700068473816, -0.03568999841809273, -0.2044599950313568, -0.06822700053453445, -0.15772999823093414, -0.4756599962711334, 0.2693899869918823, 0.03783699870109558, -0.2772899866104126, -0.19338999688625336, -0.21875999867916107, 0.08938899636268616, 0.0009676400222815573, 0.038252998143434525, -0.266620010137558, 0.12083999812602997, 0.19603000581264496, -0.29264000058174133, -0.21400000154972076, 0.2605299949645996, 0.455049991607666, -0.36032000184059143, -0.12714999914169312, -0.2191700041294098, 0.64205002784729, 0.31154999136924744, -0.2604300081729889, -0.16534000635147095, 0.7585700154304504, 0.08002100139856339, -0.38857999444007874, -0.8036800026893616, 0.018682999536395073, 0.45458999276161194, -0.07515600323677063, 0.08399800211191177, -0.7797999978065491, -0.4195599853992462, 0.7277500033378601, 0.03341300040483475, -0.8661500215530396, 0.38335999846458435, -0.2726599872112274, 0.7341300249099731, 0.20815999805927277, 0.4684399962425232, -0.22384999692440033, 0.05894799903035164, -0.3312099874019623, -0.06618700176477432, 0.2802099883556366, 0.4667400121688843, -0.6595600247383118, -0.04626699909567833, -0.09490600228309631, 0.2822299897670746, 0.37450000643730164, -0.07790900021791458, 0.3207699954509735, -0.4073199927806854, 0.021598000079393387, 0.4031899869441986, -0.8073300123214722, -0.13222000002861023, 0.2465900033712387, 1.087399959564209, -0.5960900187492371, 0.2699800133705139, 0.32065001130104065, 0.08441299945116043, -0.02251799963414669, 0.43998000025749207, 0.35357001423835754, -0.3243100047111511, -0.01115499995648861, 0.4347499907016754, 0.005315899848937988, 0.6792600154876709, -1.1406999826431274, -0.24316999316215515, -0.04244700074195862, 0.6126999855041504, -0.14122000336647034, 0.4925999939441681, -0.27074000239372253, -0.20733000338077545, 0.5482699871063232, 0.13659000396728516, -0.35940998792648315, -0.0990540012717247, -0.6011099815368652, 0.5580199956893921, -0.3519900143146515, 0.0054259998723864555, 0.2898400127887726, 0.4839800000190735, 0.28630000352859497, -0.31220999360084534, -0.6830099821090698, -0.5715000033378601, 0.08013399690389633, 0.1173200011253357, 0.2614699900150299, -0.3167000114917755, 0.3712500035762787, 0.09046100080013275, 0.22481000423431396, 0.2681399881839752, -0.07522399723529816, -0.2490600049495697, 0.19559000432491302, -0.24740000069141388, 0.1427599936723709, 0.5180000066757202, -0.8582800030708313, 1.156000018119812, -0.31970998644828796, -0.36421000957489014, 0.18116000294685364, -0.1358799934387207, -0.29287999868392944, 0.3737899959087372, -0.26969000697135925, 0.6833400130271912, 0.735319972038269, -0.12342000007629395, 0.16046999394893646, 
0.015352999791502953, -0.07906000316143036, -0.4102199971675873, -0.6922000050544739, 0.10774999856948853, -0.45135998725891113, 0.2984200119972229, 0.16669000685214996, -0.44214001297950745, -0.13770000636577606, -0.42629000544548035, -0.20239999890327454, -0.4263400137424469, -0.19062000513076782, 0.13919000327587128, -0.030587999150156975, -0.11939000338315964, -0.45903000235557556, 0.045896999537944794, 0.6673600077629089, 0.15824000537395477, 0.39282000064849854, 0.1642799973487854, -0.45386001467704773, 1.3295999765396118, 0.22620999813079834, -0.0356689989566803, 0.8477500081062317, -0.4685499966144562, -0.10259000211954117, 0.9244300127029419, 0.5844500064849854, -0.3212999999523163, -0.3601300120353699], u'loose': [-0.43884000182151794, -0.37283000349998474, 0.05142800137400627, -0.40821000933647156, 0.00959550030529499, -0.3765600025653839, 0.21921999752521515, 0.44516998529434204, 0.6433299779891968, -0.8225200176239014, 0.3134300112724304, 0.0976559966802597, -0.3722600042819977, 0.11270999908447266, -0.507830023765564, -0.20723000168800354, 0.2763899862766266, 0.32965001463890076, 0.46327000856399536, 0.9818599820137024, 0.39184001088142395, 0.3720499873161316, 0.4276899993419647, -0.09592799842357635, -0.3761500120162964, -0.162540003657341, -0.162090003490448, -0.2815900146961212, -0.17207999527454376, -0.10591000318527222, 0.3160800039768219, 0.2312300056219101, 0.6747499704360962, 0.7858200073242188, -0.5503799915313721, 0.1144300028681755, 0.5459499955177307, 0.6373299956321716, 0.20956000685691833, 0.2656799852848053, -0.707099974155426, 0.03874199837446213, -0.06333699822425842, -0.47484999895095825, 0.3200800120830536, 0.09262800216674805, -0.12304999679327011, -0.10696999728679657, -0.3773899972438812, -0.41284000873565674, 0.04252700135111809, 0.07717099785804749, -0.3774299919605255, -0.3357599973678589, -0.022123999893665314, 0.06031300127506256, -0.06504800170660019, -0.1811400055885315, -0.09034299850463867, -0.2740199863910675, 0.4139600098133087, 0.16440999507904053, -0.188060000538826, -0.2436400055885315, 0.011068000458180904, -0.44550999999046326, -0.045155998319387436, -0.07810100167989731, 0.3937700092792511, 0.46581000089645386, -0.372979998588562, 0.19255000352859497, 0.17056000232696533, 0.042426999658346176, 0.12536999583244324, 0.23247000575065613, 0.6741499900817871, -0.3066900074481964, -0.09453500062227249, -0.47369998693466187, 0.012582999654114246, -0.6134999990463257, 0.3176400065422058, -0.039942000061273575, -0.08276499807834625, -0.034991998225450516, 0.2593800127506256, 0.15494999289512634, -0.7721899747848511, 0.022384999319911003, 0.25446000695228577, 0.21191999316215515, -0.2565700113773346, -0.023547999560832977, -0.11924999952316284, -0.3180299997329712, -0.11264999955892563, 0.159620001912117, 0.043434999883174896, -0.49004998803138733, 0.2067900002002716, 0.0006949100061319768, -0.07971200346946716, 0.15998999774456024, -0.611050009727478, 0.10035999864339828, 0.35034000873565674, 0.13460999727249146, -0.181659996509552, -0.0663129985332489, 0.3956100046634674, -0.4122700095176697, -0.02864699997007847, 0.15737000107765198, 0.04095400124788284, 0.680400013923645, -0.2544200122356415, 0.3003099858760834, 0.43641000986099243, -0.6561999917030334, 0.02943599969148636, -0.34272000193595886, 0.8726699948310852, 0.45715999603271484, 0.22147999703884125, 0.49279001355171204, -0.6058700084686279, 0.6827399730682373, 0.24542999267578125, -0.19444000720977783, 0.26034000515937805, -0.061597999185323715, 0.041120000183582306, 
-0.44106000661849976, 0.5417799949645996, 0.23206999897956848, -0.12472999840974808, -0.209539994597435, -0.20340000092983246, -0.9033899903297424, -0.37275999784469604, 0.7630900144577026, -0.3034200072288513, 0.09843900054693222, -0.17403000593185425, -0.032561998814344406, -0.3695400059223175, 0.09335900098085403, 0.5278699994087219, 0.14379000663757324, 0.045524001121520996, -0.4854399859905243, -0.32339999079704285, -0.3665100038051605, 0.5495200157165527, -0.32315000891685486, 0.37358999252319336, 0.28185999393463135, 0.3677000105381012, 0.5407900214195251, 0.07761000096797943, -0.164560005068779, -0.045577000826597214, -0.3130800127983093, 0.17093999683856964, -0.5870500206947327, -0.09158799797296524, -0.0044153002090752125, 0.5620599985122681, 0.07941800355911255, -0.022053999826312065, 0.16767999529838562, -0.043244000524282455, 0.5655099749565125, 0.46417000889778137, -0.14584000408649445, -0.07386499643325806, 1.0441999435424805, 0.151869997382164, -0.4203599989414215, 1.0216000080108643, -0.8083099722862244, 0.6356899738311768, -0.7268900275230408, -0.2694399952888489, 0.23503999412059784, 0.09272900223731995, -0.5756700038909912, 0.3040800094604492, -0.23779000341892242, 0.5671799778938293, 0.30235999822616577, 0.32276999950408936, -0.03140600025653839, 0.19212999939918518, -0.5821800231933594, -0.6245200037956238, -0.1952199935913086, -0.17478999495506287, 0.4348300099372864, 1.291200041770935, 0.5976499915122986, 0.7386299967765808, 0.30504000186920166, 0.2863599956035614, -0.0298870000988245, 0.23928000032901764, -0.035725999623537064, -0.8786600232124329, 0.43577998876571655, -0.23417000472545624, 0.07124099880456924, 0.645110011100769, 0.6164399981498718, -0.04495000094175339, -0.21879999339580536, 0.230320006608963, -0.29677000641822815, -0.3193100094795227, 0.1812800019979477, -0.010544000193476677, 0.24864999949932098, 0.4393500089645386, -0.42076000571250916, -0.1218700036406517, 0.15644000470638275, 0.49358001351356506, -0.08716200292110443, 0.26050999760627747, 0.28130000829696655, 0.5920600295066833, -0.2658199965953827, -0.10356999933719635, -0.011943000368773937, 0.05550599843263626, 0.14194999635219574, -0.5235400199890137, -0.3545199930667877, -0.3878900110721588, 0.1820400059223175, 0.20419000089168549, -0.09446500241756439, -0.46487000584602356, -0.6187999844551086, -0.660260021686554, -0.04412899911403656, 0.10503000020980835, 0.4573099911212921, -0.03817199915647507, -0.0587569996714592, -0.07651399821043015, -0.5858200192451477, 0.1067499965429306, -0.422870010137558, 0.6490600109100342, 0.08288100361824036, 0.011365000158548355, -0.17952999472618103, -0.005516699980944395, 0.6928200125694275, 0.036552999168634415, -0.2392899990081787, -0.5177599787712097, 0.27173998951911926, -0.09750799834728241, -0.19492000341415405, -0.06196499988436699, 0.21800999343395233, -0.4413299858570099, 0.20016999542713165, -0.09873899817466736, 0.17252999544143677, -0.14632000029087067, 0.07597800344228745, -0.4701499938964844, -0.11186999827623367, -0.5659599900245667, 0.35920000076293945, -0.45813998579978943, -0.31272000074386597, 0.10025999695062637, -0.10369999706745148, 0.7967000007629395, 0.03639800101518631, -0.25372999906539917, -0.11225999891757965, 0.008486299775540829, -0.20048999786376953, 0.09582299739122391, -0.5155199766159058, -0.36221998929977417, -0.3783800005912781, -0.49891000986099243, 0.49077001214027405, 0.008483000099658966, 0.559909999370575, 0.03344700112938881, -0.14791999757289886, 0.22924000024795532, 0.3699699938297272], u'browned': 
[0.8735100030899048, 0.27351999282836914, 0.07877200096845627, 0.3834399878978729, -0.5583199858665466, -0.5151299834251404, 0.02184399962425232, 0.2384600043296814, 1.2259000539779663, -0.16896000504493713, -0.617169976234436, 0.5988900065422058, 0.670009970664978, 0.9340699911117554, -0.7557399868965149, 0.626800000667572, -0.09500200301408768, -0.11969999969005585, 0.57819002866745, 0.1770700067281723, 0.14315000176429749, 0.8917199969291687, 0.11789000034332275, -0.4089600145816803, 0.2671099901199341, 0.6289799809455872, 0.21442000567913055, 0.43116000294685364, -0.5143799781799316, 0.4346599876880646, -0.4607599973678589, 0.7082399725914001, 0.0905120000243187, -1.042099952697754, -0.3889699876308441, -0.40248000621795654, 0.43549999594688416, 0.7800899744033813, -0.5193099975585938, 0.11612000316381454, 1.2882000207901, 0.4174000024795532, -0.917169988155365, -0.471670001745224, 0.852590024471283, 0.5977699756622314, 1.0113999843597412, 0.5464400053024292, -0.44130998849868774, 1.4082000255584717, 0.28273001313209534, 0.21607999503612518, 0.41464999318122864, -0.7986299991607666, -0.08166699856519699, -0.1271200031042099, -0.42563000321388245, -0.10724999755620956, -0.6036700010299683, 0.760919988155365, 0.4141699969768524, -0.8418499827384949, 0.3053799867630005, -0.4056600034236908, 0.09812100231647491, -0.34523001313209534, 0.8776800036430359, 0.27730000019073486, -0.6752499938011169, 0.08583799749612808, -0.014813999645411968, 0.0877159982919693, 0.6836599707603455, 0.19370999932289124, -0.15277999639511108, 1.0657000541687012, 1.436900019645691, 0.7004200220108032, -0.4338400065898895, -0.8631700277328491, 0.30820000171661377, -0.6946300268173218, 0.4779700040817261, -0.27654001116752625, -0.36656999588012695, 0.3558399975299835, -0.644760012626648, 0.0863180011510849, -1.0627000331878662, -0.12873999774456024, -0.3585200011730194, 0.3357299864292145, -0.15410999953746796, 0.9977700114250183, -1.0723999738693237, 0.5777999758720398, -0.7890300154685974, 0.9480000138282776, -0.3995000123977661, 0.7210299968719482, -0.18358999490737915, -0.3904600143432617, -0.2532599866390228, -0.9884200096130371, -0.4529300034046173, -0.002723699901252985, 0.5943899750709534, -0.21563999354839325, 0.6072499752044678, 0.021258000284433365, 0.42612001299858093, 0.6094599962234497, 0.45688000321388245, 0.033296000212430954, -0.8698300123214722, 0.299019992351532, -1.0613000392913818, 0.06437700241804123, 0.43988001346588135, 0.5724700093269348, -0.13824999332427979, 0.13583000004291534, 0.13343000411987305, 0.9340699911117554, -0.9150800108909607, 0.5935199856758118, -0.38743001222610474, 0.5033299922943115, -0.8632799983024597, 0.5299500226974487, -0.22086000442504883, 1.145400047302246, 0.3980199992656708, 0.44530999660491943, -0.663349986076355, -0.6476799845695496, 0.6221699714660645, 0.6083800196647644, 0.06095699965953827, 0.02833000011742115, 0.5249500274658203, 0.4015200138092041, -0.8994799852371216, 0.6025699973106384, -0.04124100133776665, 0.021832000464200974, -0.5932599902153015, 0.4345499873161316, 0.11298000067472458, 0.011227999813854694, -0.026983000338077545, -0.2886900007724762, -0.9895099997520447, -0.47064000368118286, -0.12884999811649323, -0.16549000144004822, 1.2243000268936157, -1.128999948501587, -0.6761900186538696, 0.5282599925994873, -0.2919299900531769, -0.607509970664978, 0.03955800086259842, -1.5643999576568604, 0.4947499930858612, 0.1438799947500229, -0.07771900296211243, 0.4818499982357025, 0.7727599740028381, -0.6364700198173523, -0.6223400235176086, 
-0.17437000572681427, -0.2917799949645996, -0.6494500041007996, -0.6337100267410278, -0.45590001344680786, -0.3957099914550781, -1.3387999534606934, 0.3036800026893616, -0.8321800231933594, -1.1813000440597534, -0.4683000147342682, 0.8422399759292603, -0.18667000532150269, -0.36500000953674316, -0.1887200027704239, 1.0921000242233276, -0.4333699941635132, -0.6693500280380249, 0.1348699927330017, -0.10626000165939331, 0.06474900245666504, 0.020478999242186546, 0.8679699897766113, 0.6367999911308289, -0.6543200016021729, 0.036687999963760376, 0.2959499955177307, -0.10095000267028809, -0.4912700057029724, 0.015416000038385391, -0.1835000067949295, 0.3985599875450134, -0.08235500007867813, 0.3002200126647949, 0.22287000715732574, 0.9281100034713745, -0.4429199993610382, -0.34228000044822693, -0.5994099974632263, 1.191100001335144, -0.21593999862670898, -0.6978800296783447, -0.06807699799537659, 0.008585699833929539, 0.2176699936389923, 0.30149000883102417, -0.7658399939537048, -0.7878900170326233, 0.3613699972629547, -0.21453000605106354, 0.03750399872660637, -0.2710599899291992, -0.25586000084877014, 0.12330000102519989, -1.105299949645996, -0.12284000217914581, 0.8231800198554993, 0.5052599906921387, 0.2643499970436096, -0.14962999522686005, -0.018045000731945038, 0.3959999978542328, -0.6531800031661987, 0.42638999223709106, 0.9783599972724915, 0.12297999858856201, 0.15945999324321747, -1.1445000171661377, -0.6407300233840942, 0.12092000246047974, -0.06970299780368805, 0.05146700143814087, 0.006768899969756603, -0.0007617900264449418, 0.706250011920929, 0.8620399832725525, -0.7349500060081482, -0.3700000047683716, -0.9976599812507629, -0.32475000619888306, -1.4217000007629395, 0.4335399866104126, -0.23548999428749084, 0.23265999555587769, 1.1238000392913818, 0.4168800115585327, -0.10033000260591507, -0.6019099950790405, 1.1670000553131104, 0.07371699810028076, -0.10769999772310257, -0.9971399903297424, 0.2722800076007843, -0.2165600061416626, -0.0937659963965416, -0.13274000585079193, -0.2015099972486496, 0.518779993057251, -0.274370014667511, -0.24603000283241272, -0.13359999656677246, -0.28648999333381653, 1.1747000217437744, -0.36157000064849854, 0.3756999969482422, 0.838699996471405, -0.5888500213623047, -1.127500057220459, -0.3121100068092346, 0.48728999495506287, -0.3930000066757202, -1.2538000345230103, 0.33011001348495483, -0.4252299964427948, 0.1763100028038025, 0.8350600004196167, 0.011986000463366508, 0.11208000034093857, 0.13389000296592712, 0.24297000467777252, -0.2750900089740753, -0.13574999570846558, -0.3199999928474426, -0.5287600159645081, -1.4424999952316284, -0.9900799989700317, -0.4917300045490265, -0.2331800013780594, 0.6391500234603882], u'foggy': [0.1920499950647354, -0.44808998703956604, -0.7609599828720093, -0.22559000551700592, -0.2908799946308136, -0.060899000614881516, 0.3211199939250946, 0.8578199744224548, 0.2827799916267395, 0.0018999000312760472, -0.013008000329136848, -0.10444000363349915, 0.010576999746263027, -0.11456000059843063, -0.328220009803772, -0.10158000141382217, 0.35708001255989075, -0.46070998907089233, 0.33296999335289, 0.24199999868869781, 0.4187000095844269, 0.3922699987888336, -0.16203999519348145, 0.14201000332832336, -0.7420399785041809, -0.7402399778366089, 0.33610999584198, 0.2478799968957901, 0.010433999821543694, -0.19833999872207642, 0.8184000253677368, -0.27702999114990234, -0.035732999444007874, -0.16029000282287598, 0.16006000339984894, 0.24597999453544617, -0.10980000346899033, -0.04018799960613251, -0.39939001202583313, 
-0.0144640002399683, 0.1588599979877472, 0.9081699848175049, -0.14135999977588654, -0.122359998524189, 0.20430999994277954, -0.007774699945002794, 0.24404999613761902, 0.16253000497817993, -0.6618099808692932, -0.38168999552726746, -0.061539001762866974, -0.4624499976634979, 0.4497700035572052, -0.2057799994945526, -0.40163999795913696, 0.5601400136947632, -0.14395999908447266, -0.43599000573158264, -0.02244899980723858, 0.5482100248336792, 0.13492000102996826, -0.42416998744010925, 0.013821999542415142, 0.3014799952507019, -0.38631001114845276, 0.17114000022411346, 0.3465000092983246, 0.32385000586509705, -0.08792699873447418, -0.4438300132751465, -0.21080000698566437, 0.3555000126361847, -0.8550199866294861, 0.08095899969339371, -0.8232100009918213, -0.21737000346183777, -0.09657300263643265, 0.4867599904537201, -0.10515999794006348, -0.3980900049209595, -0.2781600058078766, -0.02382799983024597, 0.056012000888586044, 0.24108999967575073, -0.4527300000190735, -0.06790799647569656, 0.33406999707221985, 0.33066999912261963, -0.11254999786615372, 0.39906999468803406, 0.1979999989271164, 0.13673999905586243, -0.2756600081920624, 0.10588999837636948, -0.22176000475883484, 0.7641400098800659, 0.627269983291626, 0.4507099986076355, -0.4154199957847595, -0.10801000148057938, 0.24447999894618988, -0.24638999998569489, 0.16267000138759613, 0.6016499996185303, -0.453220009803772, 0.270440012216568, 0.2274799942970276, 0.023809000849723816, -0.14334000647068024, 0.02387000061571598, -0.1737699955701828, -0.6927099823951721, 0.2736800014972687, -0.14680999517440796, 0.329259991645813, -0.47227999567985535, 0.2547599971294403, -0.6102200150489807, -0.14485999941825867, 0.00938310008496046, 0.20305000245571136, -0.7452999949455261, 0.22869999706745148, 0.03332199901342392, -0.17889000475406647, -0.5324100255966187, -0.01153900008648634, -0.5480700135231018, 0.25328999757766724, -0.17893999814987183, -0.01799199916422367, 1.1038999557495117, 0.5671799778938293, 0.38874998688697815, 0.06275799870491028, -0.24683000147342682, -0.11326000094413757, 0.6088399887084961, -0.4463199973106384, 0.08059000223875046, -0.053164999932050705, 0.09143999963998795, -0.12055999785661697, -0.2846499979496002, -0.6037600040435791, -0.38523998856544495, -0.12809999287128448, 0.10623999685049057, 0.20362000167369843, 0.19001999497413635, -0.41854000091552734, -0.18731999397277832, 0.44993001222610474, 0.0374549999833107, 0.21515999734401703, -0.42423999309539795, 0.41593000292778015, -0.12029000371694565, 0.4593200087547302, 0.5239499807357788, -0.6253499984741211, -1.1883000135421753, 0.15557999908924103, -0.7656400203704834, 0.35965999960899353, -0.42056000232696533, -0.009497099556028843, -0.0642160028219223, -0.18986999988555908, -0.41655001044273376, -0.1365399956703186, 0.11739999800920486, 0.26774999499320984, -0.5538399815559387, -0.3175100088119507, -0.3403100073337555, -0.339029997587204, -0.32332998514175415, -0.43202000856399536, -0.2731899917125702, 0.3128199875354767, 0.8273299932479858, 0.4622499942779541, -0.1206900030374527, 0.40151000022888184, 0.17106999456882477, 1.0241999626159668, -0.5577399730682373, -0.3907899856567383, -0.1629599928855896, 0.05680999904870987, -0.010095000267028809, -0.10779999941587448, -0.36768999695777893, -0.1610099971294403, -0.18943999707698822, -0.6578500270843506, -0.10480000078678131, -0.5941799879074097, 0.1923000067472458, 0.21788999438285828, -0.28227001428604126, 0.2930600047111511, -0.039000000804662704, 0.07778099924325943, -0.40557000041007996, -0.3912700116634369, 
-0.16410000622272491, -0.0322050005197525, -0.4102799892425537, -0.04393099993467331, 0.3045800030231476, -0.2567099928855896, -0.35012999176979065, -0.6296600103378296, -0.016945000737905502, 0.24940000474452972, -0.47196000814437866, 0.7145500183105469, 0.14037999510765076, 0.26892000436782837, 0.23680999875068665, 0.03850499913096428, 0.6157299876213074, 0.17795999348163605, 0.39364999532699585, 0.07385600358247757, -0.45656999945640564, 0.21660000085830688, -0.05530700087547302, -0.1291700005531311, -0.16639000177383423, -0.20611999928951263, -0.5526000261306763, 0.20167000591754913, -0.37713000178337097, -0.42197999358177185, 0.09210100024938583, 0.0593549981713295, -0.11429999768733978, 0.20723000168800354, -0.08421699702739716, -0.36524999141693115, -0.05537699908018112, -0.20340999960899353, -0.1958799958229065, -0.21780000627040863, 0.09577299654483795, 0.10153999924659729, -0.30303001403808594, -0.6665499806404114, 0.1585099995136261, 0.7972000241279602, 0.03154800087213516, -0.17979000508785248, 0.11991000175476074, 0.32163000106811523, 0.050436001271009445, -0.037987999618053436, 0.43000999093055725, -0.19934000074863434, 0.34233999252319336, -0.1242000013589859, 0.41804999113082886, 0.3709999918937683, 0.054228998720645905, -0.04266799986362457, -0.28321999311447144, 0.5960599780082703, 0.022505000233650208, 0.3235799968242645, -0.010817999951541424, -0.4487999975681305, 0.4029499888420105, 0.19493000209331512, 0.019034000113606453, 0.6373900175094604, -0.28338998556137085, 0.13981999456882477, 0.3032599985599518, -0.7367500066757202, 0.3300800025463104, 0.02949400059878826, -0.4720599949359894, 0.10745999962091446, 0.06429100036621094, -0.17798000574111938, 0.043494001030921936, -0.002572299912571907, -0.2418700009584427, 0.12479999661445618, -0.14458000659942627, -0.40529999136924744, 0.2917799949645996, 0.03389500081539154, -0.42467001080513, 0.31714001297950745, 0.6227700114250183, -0.5037699937820435, 0.5270900130271912], u'brushed': [0.13954000174999237, -0.07866699993610382, -0.5022600293159485, 0.16447000205516815, 0.3174000084400177, -0.7925599813461304, -0.3911300003528595, -0.22689999639987946, 0.34558001160621643, -0.6677799820899963, 0.2757900059223175, 0.15588000416755676, 0.3786199986934662, 0.047766998410224915, 0.09138700366020203, 0.36456000804901123, 0.1125200018286705, 0.12563000619411469, 0.3138299882411957, -0.2305999994277954, 0.34871000051498413, 0.334199994802475, -0.07136499881744385, -0.28536999225616455, -0.4962100088596344, 0.1123799979686737, 0.1964299976825714, 0.5842000246047974, -0.07354799658060074, 0.035975001752376556, 0.28621000051498413, -0.031197000294923782, 0.1839199960231781, -0.3317900002002716, -0.7240300178527832, -0.05497400090098381, -0.23135000467300415, 0.23555000126361847, 0.22842000424861908, 0.48256000876426697, 0.05604900047183037, 0.10869999974966049, -0.040856000036001205, -0.19050000607967377, 0.3297800123691559, 0.45572999119758606, -0.5095999836921692, -0.3471899926662445, -0.24776999652385712, 0.5306500196456909, -0.20347000658512115, 0.21032999455928802, 0.515209972858429, 0.12886999547481537, 0.4579299986362457, -0.08200100064277649, -0.31040000915527344, -0.07321999967098236, 0.3562999963760376, 0.17141999304294586, 0.2072799950838089, 0.19304999709129333, 0.012896000407636166, 0.13891999423503876, 0.07138500362634659, -0.2958900034427643, 0.07981500029563904, 0.06630399823188782, -0.03167000040411949, -0.6541799902915955, 0.31092000007629395, -0.47870999574661255, 0.6204000115394592, -0.18831999599933624, 
0.3388200104236603, 0.08119700103998184, -0.2696700096130371, 0.17824000120162964, 0.13197000324726105, 0.17743000388145447, 0.19099000096321106, 0.3486599922180176, -0.07537899911403656, 0.12695999443531036, 0.23691000044345856, -0.021196000277996063, -0.47953999042510986, 0.1125200018286705, 0.14133000373840332, 0.2420099973678589, 0.17410999536514282, -0.06509999930858612, 0.03813000023365021, -0.14817999303340912, -0.40035998821258545, 0.03578300029039383, -0.31314998865127563, 0.4222300052642822, -0.09985599666833878, 0.26159000396728516, 0.42836999893188477, 0.14059999585151672, 0.010111999697983265, -0.22269000113010406, 0.552299976348877, -0.005577700212597847, -0.21538999676704407, -0.1998700052499771, -0.44929999113082886, 0.23122000694274902, 0.24216000735759735, 0.2380100041627884, -0.3162199854850769, -0.08183500170707703, 0.18459999561309814, 0.6019200086593628, 0.007736300118267536, 0.18758000433444977, -0.21945999562740326, -0.5058900117874146, -0.17574000358581543, -0.2170500010251999, -0.5418499708175659, 0.0917849987745285, -0.41370999813079834, 0.30744001269340515, -0.6054400205612183, 0.037776000797748566, 0.002914499957114458, 0.15150000154972076, -0.2265699952840805, 0.1629199981689453, -0.22503000497817993, 0.16006000339984894, 0.19262999296188354, 0.19269999861717224, 0.17021000385284424, 0.47314000129699707, 0.20796999335289001, -0.4228399991989136, 0.7288500070571899, 0.432559996843338, 0.47613999247550964, -0.15298999845981598, -0.014053000137209892, 0.7924000024795532, 0.40836000442504883, -0.34915998578071594, 0.04157700017094612, -0.6896100044250488, 0.35078001022338867, -0.3225899934768677, -0.05848199874162674, 0.41666001081466675, -0.2947799861431122, -0.1608400046825409, 0.09184599667787552, -0.4282200038433075, 0.1358100026845932, 0.2866399884223938, 0.4937399923801422, -0.25372999906539917, -0.08024399727582932, -0.4217599928379059, 0.5554599761962891, 0.023250000551342964, 0.09004499763250351, 0.31723999977111816, 0.7531099915504456, -0.8209800124168396, -0.5594599843025208, -0.09074600040912628, -0.0690469965338707, -0.5347899794578552, -0.02853200025856495, -0.4035300016403198, 0.25710999965667725, 0.40766000747680664, 0.33274999260902405, -0.31095001101493835, -0.21315999329090118, 0.04692399874329567, 0.46726998686790466, -0.17788000404834747, -0.4720599949359894, 0.06422500312328339, 0.19892999529838562, 0.24778999388217926, 0.2978000044822693, 0.4970099925994873, -0.06865400075912476, -0.1481200009584427, -0.26752999424934387, 0.10862000286579132, -0.16017000377178192, 0.08053400367498398, 0.38593000173568726, -0.02408899925649166, -0.3412100076675415, -0.4513700008392334, 0.2796899974346161, -0.35512998700141907, -0.0038614000659435987, -0.24320000410079956, -0.193900004029274, -0.04079100117087364, 0.06522200256586075, -0.4021500051021576, -0.3566800057888031, 0.18916000425815582, -0.04039900004863739, 0.3876200020313263, 0.2024500072002411, 0.14837999641895294, 0.36581000685691833, -0.4935399889945984, 0.4506799876689911, -0.8060600161552429, 0.29521000385284424, -0.24607999622821808, 0.8088899850845337, 0.03404900059103966, 0.09428299963474274, -0.2731199860572815, 0.1879899948835373, 0.025962000712752342, 0.6382799744606018, -0.023086000233888626, -0.0058651999570429325, -0.3033300042152405, 0.20496000349521637, -0.3079800009727478, 0.3383300006389618, 0.01863200031220913, 0.27584999799728394, -0.2420700043439865, -0.015432000160217285, -0.22812999784946442, -0.11148999631404877, -0.26052001118659973, -0.05091699957847595, 
0.030331000685691833, -0.8456699848175049, 0.12161000072956085, -0.03299900144338608, -0.18809999525547028, 0.016659999266266823, -0.397599995136261, -0.1903499960899353, 0.02274000085890293, -0.027677999809384346, -0.2456900030374527, 0.45938000082969666, -0.019437000155448914, 0.19584999978542328, -0.44290998578071594, -0.3844900131225586, 0.23487000167369843, -0.7052599787712097, 0.057043999433517456, -0.29017001390457153, -0.4092099964618683, -0.27904000878334045, 0.2527500092983246, 0.12791000306606293, 0.1837099939584732, -0.33037999272346497, -0.1846799999475479, -0.27803000807762146, 0.21087999641895294, 0.1517000049352646, 0.05944500118494034, -0.3933899998664856, -0.33649998903274536, -0.42699000239372253, 0.4369400143623352, -0.0259380005300045, -0.2810699939727783, -0.07350300252437592, -0.15196000039577484, 0.5099800229072571, 0.07492099702358246, -0.22394999861717224, 0.35613998770713806, 0.023382000625133514, -0.0445530004799366, 0.6392300128936768, -0.18463000655174255, 0.3089199960231781, 0.34544000029563904, -0.19683000445365906, 0.5128399729728699, 0.46441999077796936, 0.3277300000190735, -0.1862799972295761, 0.4585599899291992, -0.5186499953269958, -0.3045499920845032, -0.03577199950814247, 0.16335999965667725], u'dull': [0.18584999442100525, 0.02883799932897091, -0.14858999848365784, -0.22972999513149261, 0.3456900119781494, 0.01106099970638752, -0.22700999677181244, 0.6614099740982056, 0.3231399953365326, -0.5746200084686279, -0.3912700116634369, 0.03861900046467781, -0.425819993019104, -0.005762199871242046, -0.008040999993681908, -0.4318400025367737, -0.39980000257492065, -0.010863999836146832, 0.15473000705242157, -0.5899199843406677, -0.28571999073028564, 0.5085899829864502, 0.1268800050020218, 0.25571998953819275, -0.2371399998664856, 0.003424100112169981, 0.5954200029373169, -0.811460018157959, 0.11712999641895294, -0.16241000592708588, -0.4702500104904175, 0.13087999820709229, 0.06576500087976456, 0.10633999854326248, -0.7402200102806091, 1.1837999820709229, -0.13808999955654144, 0.04258599877357483, -0.12647999823093414, -0.062334999442100525, 0.8489000201225281, 0.4855400025844574, -0.05917000025510788, -0.8351200222969055, 0.9609400033950806, 0.19380000233650208, -0.37196001410484314, -0.45813000202178955, 0.23026999831199646, -0.24143999814987183, 0.2606799900531769, 0.0992640033364296, 0.8162999749183655, -0.21157999336719513, 0.155689999461174, 0.23781999945640564, -0.06647200137376785, -0.3992699980735779, 0.34356001019477844, 0.29631999135017395, 0.050296999514102936, -0.7691400051116943, 0.27132999897003174, 0.005022699944674969, 0.15995000302791595, -0.3725599944591522, 0.6954900026321411, -0.2714900076389313, 0.7671499848365784, -0.31248998641967773, -0.02417300082743168, 0.04933999851346016, 0.4163700044155121, 0.48737001419067383, 0.12679000198841095, 0.15205000340938568, -0.46852999925613403, 0.6898199915885925, 0.22303999960422516, 0.2725200057029724, 0.05376499891281128, 0.3765900135040283, 0.03622899949550629, -0.2557699978351593, 0.2806600034236908, 0.14330999553203583, 0.5565299987792969, 0.03005800023674965, -0.02542800083756447, 0.6120100021362305, 0.24624000489711761, 0.3362399935722351, 0.4717099964618683, -0.420879989862442, -0.1775200068950653, 0.5474900007247925, 0.2267799973487854, 0.440530002117157, 0.4054499864578247, 0.17615999281406403, -0.07519800215959549, -0.1251399964094162, -0.35815000534057617, 0.37909001111984253, -0.3764300048351288, -0.4201900064945221, -0.05858499929308891, -0.2045000046491623, 0.002391000045463443, 
0.2379000037908554, -0.4523099958896637, 0.14079000055789948, -0.01721400022506714, -0.4280500113964081, -0.0017450000159442425, -0.0641150027513504, 0.2573699951171875, 0.23465999960899353, 0.126010000705719, -0.20562000572681427, -0.2805500030517578, -0.5196099877357483, -0.3946099877357483, 0.793690025806427, 0.032260000705718994, 0.6100500226020813, 0.2788200080394745, 0.3039099872112274, -0.00590580003336072, -0.10569000244140625, -0.47543999552726746, 0.0899059996008873, 0.03222699835896492, 0.2519499957561493, -0.5217000246047974, 0.08195699751377106, -0.013701999559998512, 0.27663999795913696, 0.42100998759269714, 0.4839499890804291, 0.7553899884223938, 0.21821999549865723, 0.02720700018107891, -0.25512999296188354, -0.01998100057244301, 0.09703700244426727, 0.6378200054168701, 0.032315000891685486, -0.5253300070762634, -0.024903999641537666, -0.6669099926948547, 0.1571200042963028, -0.11687000095844269, -0.46004000306129456, 0.335099995136261, -0.2608799934387207, 0.35304999351501465, -0.6343899965286255, 0.294979989528656, 0.3474999964237213, -0.7785999774932861, -0.13716000318527222, 0.6595699787139893, -0.0787770003080368, 0.30382001399993896, 0.40873000025749207, -0.4047999978065491, -0.15910999476909637, -0.17305999994277954, -0.1850699931383133, -0.20111000537872314, -0.2959200143814087, -0.6208299994468689, 0.16301000118255615, -0.24863000214099884, -0.14523999392986298, 0.09615399688482285, -0.31038999557495117, 0.10097000002861023, -0.17601999640464783, 0.11708000302314758, -0.1514900028705597, -0.40128999948501587, 0.27741000056266785, 0.06046200171113014, -0.16909000277519226, 1.1644999980926514, 0.08109699934720993, -0.4767000079154968, 0.06322299689054489, 0.3812600076198578, -0.331930011510849, 0.051513999700546265, -0.2894499897956848, 0.2728300094604492, 0.10232000052928925, -0.7619199752807617, 0.033094000071287155, -0.10322000086307526, -0.0023203000891953707, 0.22520999610424042, -0.26120999455451965, -0.012089000083506107, 0.15616999566555023, 0.22653000056743622, -0.21112999320030212, 0.1836400032043457, 0.08416400104761124, -0.06900099664926529, -0.04487600177526474, 0.045899998396635056, -0.21466000378131866, -0.5708000063896179, 0.19111000001430511, -0.23819999396800995, -0.06490100175142288, 0.5831800103187561, -0.049031998962163925, -0.0895489975810051, -0.35512998700141907, -0.2781299948692322, -0.2983100116252899, -0.30608001351356506, 0.0824970006942749, -0.6245399713516235, -0.03314099833369255, 0.39228999614715576, -0.3050599992275238, 0.1174200028181076, -0.2079399973154068, 0.30028000473976135, 0.08245600014925003, 0.15031999349594116, -0.8282999992370605, 0.26875999569892883, 0.3484500050544739, -0.20258000493049622, 0.42298999428749084, -0.23836000263690948, -0.15376000106334686, 0.5131099820137024, -0.48596999049186707, -0.4549500048160553, 0.08403400331735611, -0.3254300057888031, 0.23142999410629272, 0.06543999910354614, -0.20879000425338745, -0.11269000172615051, -0.34213000535964966, 0.2945399880409241, -0.08821400254964828, 0.09436099976301193, 0.20590999722480774, 0.0162540003657341, -0.19850000739097595, 0.4143100082874298, -0.24580000340938568, -0.35148999094963074, 0.2091899961233139, -0.022881999611854553, -0.2630699872970581, 0.12815000116825104, -0.15279999375343323, 0.18203000724315643, 0.5323799848556519, 0.1923999935388565, -0.23503999412059784, 0.011156000196933746, 0.49625998735427856, -0.09220000356435776, -0.14100000262260437, 0.1565600037574768, -0.08679600059986115, -0.39395999908447266, 0.36383000016212463, 
-0.34784001111984253, -0.1234700009226799, -0.020201999694108963, 0.23336000740528107, -0.11094000190496445, 0.6462500095367432, -0.05222100019454956, 0.039632998406887054, 0.5599799752235413, 0.17499999701976776, -0.1996700018644333, 0.20687000453472137, -0.4896099865436554, 0.016242999583482742, 0.5666999816894531, 0.6817799806594849, -0.6073099970817566, 0.37685999274253845, -0.2147900015115738, -0.25325000286102295, -0.37432000041007996, 0.19086000323295593, 0.17282000184059143, 0.29221999645233154], u'wide': [-0.45120999217033386, 0.06519900262355804, -0.07132399827241898, -0.3358500003814697, 0.42524999380111694, 0.6678100228309631, -0.1014999970793724, -0.25652000308036804, 0.041138000786304474, -1.2138999700546265, 0.356550008058548, 0.9125400185585022, -0.37985000014305115, -0.031401000916957855, 0.10429999977350235, -0.11485999822616577, -0.3640100061893463, 0.5606799721717834, 0.008043100126087666, 0.5273000001907349, 0.5118700265884399, 0.3109300136566162, -0.21176999807357788, -0.17482000589370728, -0.30136001110076904, 0.4031299948692322, -0.15434999763965607, -0.6145399808883667, -0.012741000391542912, -0.003686700016260147, -0.06703799962997437, 0.19200000166893005, 0.017035000026226044, 0.05317400023341179, -0.9898999929428101, -0.09114400297403336, -0.4139400124549866, -0.069022998213768, -0.03223299980163574, -0.15636999905109406, -0.7103400230407715, 0.20167000591754913, 0.28512001037597656, -0.05289199948310852, -0.06861399859189987, 0.5667700171470642, 0.3297100067138672, -0.29850998520851135, 0.02566700056195259, -0.02413100004196167, -0.2413800060749054, -0.03793900087475777, 0.2360299974679947, -0.28227999806404114, -0.05395599827170372, -0.5365800261497498, 0.3521600067615509, -0.5562899708747864, -0.046163998544216156, -0.09248600155115128, 0.04679099842905998, -0.08770299702882767, -0.25738000869750977, -0.09344100207090378, 0.17655999958515167, -0.6270700097084045, 0.421779990196228, 0.07002600282430649, 0.18624000251293182, -0.22779999673366547, -0.015771999955177307, 0.13197000324726105, 0.3914499878883362, 0.07135999947786331, 0.4372499883174896, 0.19446000456809998, 0.31237998604774475, 0.14067000150680542, 0.05547399818897247, 0.2248000055551529, 0.1469700038433075, -0.48399001359939575, -0.17624999582767487, -0.26743000745773315, 0.032944999635219574, 0.2147900015115738, 0.7063199877738953, -0.263839989900589, 0.10214000195264816, 0.15365000069141388, -0.1027899980545044, 0.12306000292301178, -0.3452000021934509, -0.12432999908924103, -0.3199700117111206, -0.34112998843193054, 0.1612900048494339, -0.2512100040912628, 0.3395799994468689, -0.46970000863075256, -0.2528400123119354, -0.3594000041484833, -0.025141999125480652, -0.23284000158309937, -0.44165998697280884, 0.2951200008392334, 0.4241099953651428, 0.23756000399589539, -0.19554999470710754, -0.03553999960422516, 0.008429300040006638, -0.10002999752759933, 0.6079999804496765, -0.4519999921321869, 0.1361600011587143, -0.016002999618649483, -0.18945999443531036, 0.3930700123310089, 0.1856900006532669, 0.05595000088214874, -0.25044000148773193, -0.23694999516010284, 0.39989998936653137, -0.10057999938726425, 0.40261998772621155, 0.14722999930381775, 0.07740499824285507, 0.8052600026130676, -0.15851999819278717, -0.006513500120490789, -0.2501800060272217, -0.2532399892807007, -0.5436000227928162, 0.3422600030899048, 0.34762001037597656, -0.591159999370575, 0.13107000291347504, -0.07986299693584442, -0.2805100083351135, 0.12498000264167786, -0.047988999634981155, -0.015402999706566334, 
0.39517998695373535, -0.4146899878978729, -0.7235299944877625, 0.4734500050544739, -0.38095998764038086, -0.4769900143146515, 0.17034000158309937, -0.24539999663829803, 0.22237999737262726, 0.16006000339984894, 0.10632000118494034, -0.4997999966144562, 1.0799000263214111, 0.33066999912261963, 0.08892100304365158, -0.03309199959039688, 0.1663299947977066, -0.14169000089168549, -0.06741099804639816, -0.2500300109386444, 0.5012400150299072, -0.0665069967508316, -0.09261900186538696, -0.4097200036048889, 0.06714800000190735, 0.7380499839782715, -0.12571999430656433, 0.27557000517845154, -0.13221000134944916, -0.3892199993133545, -0.40821000933647156, 0.024032000452280045, 0.3443000018596649, 0.3266099989414215, -0.3999600112438202, 0.18803000450134277, 0.1994899958372116, -0.4398599863052368, 0.06222100183367729, -0.5560299754142761, 0.4400700032711029, 0.2711699903011322, -0.030691999942064285, -0.846589982509613, 0.08955900371074677, 0.1305299997329712, 0.3818899989128113, 0.8453400135040283, 0.008801200427114964, 0.6435800194740295, -0.26875999569892883, 0.19584999978542328, 0.06049500033259392, -0.21461999416351318, 0.44470998644828796, -0.22436000406742096, 0.44426000118255615, 0.2999599874019623, 0.5945600271224976, 0.46492999792099, -0.25964999198913574, 0.01797099970281124, 0.13381999731063843, -0.09787599742412567, -0.42879998683929443, -0.3097200095653534, 0.18592999875545502, 0.16985000669956207, 1.0795999765396118, -0.14941999316215515, -0.040546998381614685, 0.2897700071334839, -0.09529399871826172, 0.21980999410152435, -0.23869000375270844, -0.06202799826860428, 0.2706100046634674, -0.5857300162315369, 0.8098499774932861, -0.40577998757362366, -0.0320810005068779, -0.17194999754428864, 0.542900025844574, 0.6728000044822693, 0.1436000019311905, -0.23939000070095062, 0.0794999971985817, -0.06754600256681442, 0.3359200060367584, 0.32708999514579773, -0.1342500001192093, 0.39609000086784363, -0.029892999678850174, -0.1269499957561493, -0.1124500036239624, -0.21589000523090363, -0.5635300278663635, 0.08305300027132034, -0.018225999549031258, 0.13068999350070953, 0.02495099976658821, -0.11315999925136566, -1.0578999519348145, 0.2329699993133545, -0.21991999447345734, 0.2306700050830841, 0.3049199879169464, 0.6161800026893616, 0.17213000357151031, -0.07189299911260605, 0.38117000460624695, -0.9196500182151794, -0.10749000310897827, 0.21784000098705292, -0.25297001004219055, -0.028023000806570053, -0.23537999391555786, 0.4410899877548218, 0.24031999707221985, -0.2740600109100342, 0.49441999197006226, -0.47216999530792236, -0.487060010433197, -0.22246000170707703, -0.49022001028060913, 0.33809998631477356, 0.33493998646736145, -0.26017001271247864, 0.15421999990940094, -0.11270000040531158, 0.13371999561786652, 0.3950299918651581, -0.09216099977493286, -0.20535999536514282, -1.891700029373169, 0.4166800081729889, -0.14847999811172485, -0.3845599889755249, -0.13985000550746918, -0.11449000239372253, 0.4321100115776062, 0.16958999633789062, 0.1505099982023239, 0.0715939998626709, -0.8312900066375732, -0.04621899873018265, 0.4296799898147583, -0.03534799814224243, 0.2073799967765808, 0.30278998613357544, -0.15098999440670013, 0.30121999979019165, 0.2910600006580353, 0.7753700017929077, -0.4822100102901459, -0.06549999862909317, 0.034143999218940735, -0.7738900184631348], u'winding': [-0.16143999993801117, -0.17175999283790588, 0.44203999638557434, 0.5006200075149536, -0.10292000323534012, 0.07138299942016602, -0.010772000066936016, -0.050269998610019684, 0.5128899812698364, 
-0.33719000220298767, -0.5076000094413757, -0.1876000016927719, -0.11326000094413757, -0.6269800066947937, -0.4005799889564514, 0.21726000308990479, -0.613070011138916, -0.6958100199699402, 0.5583599805831909, 0.2457199990749359, -0.31867000460624695, -0.05759900063276291, -0.1837099939584732, 0.3487899899482727, -0.22811000049114227, -0.2791700065135956, 0.2678700089454651, -0.7391999959945679, -0.06328299641609192, 0.6096299886703491, 0.5703200101852417, 0.14263999462127686, 0.28700000047683716, 0.12447000294923782, -0.49004998803138733, 0.6540600061416626, -0.32409000396728516, -0.44958001375198364, -0.23375000059604645, -0.19881999492645264, -0.4292899966239929, 0.04081299901008606, -0.259660005569458, 0.035252999514341354, -0.06926199793815613, 0.16439999639987946, 0.7162899971008301, 0.7671800255775452, -0.09631799906492233, -0.32378000020980835, -0.47846001386642456, 0.3795500099658966, 0.4572499990463257, 0.18118999898433685, 0.3994799852371216, -0.053415000438690186, 0.04441099986433983, -0.12230999767780304, 0.21897999942302704, 0.5827500224113464, 0.178739994764328, -0.23351000249385834, 0.7782999873161316, 0.1597999930381775, -0.009454299695789814, 0.22242000699043274, -0.09848500043153763, 0.45622000098228455, 0.29585000872612, -0.39243000745773315, 0.06812600046396255, 0.30171000957489014, 0.09155400097370148, 0.31112998723983765, 0.5073800086975098, 0.3744100034236908, 0.05374399945139885, -0.05392500013113022, 0.34992000460624695, -0.4348300099372864, 0.167480006814003, -0.16068999469280243, -0.04196099936962128, 0.18328000605106354, -0.00044517999049276114, -0.08443000167608261, 0.1897200047969818, 0.17496000230312347, -0.06956499814987183, 0.4628799855709076, 0.6558099985122681, -0.17515000700950623, 0.09093999862670898, -0.3731499910354614, -0.23826000094413757, -0.015845999121665955, 0.01463400013744831, -0.06509199738502502, 0.2888599932193756, 0.0719119980931282, -0.2896699905395508, 0.27390000224113464, -0.1623699963092804, 0.16801999509334564, -0.5184599757194519, -0.21897999942302704, 0.49667999148368835, -0.32565999031066895, -0.42649999260902405, -0.09298799932003021, -0.4438900053501129, -0.2634199857711792, -0.012914000079035759, 0.11326999962329865, 0.03668700158596039, 0.31749001145362854, 0.3179300129413605, 0.04684299975633621, -0.1787700057029724, 0.07107900083065033, 0.06544200330972672, -0.5736200213432312, 0.3100000023841858, -0.12132000178098679, 0.16814999282360077, 0.25867000222206116, 0.13699999451637268, -0.0022108000703155994, -0.5400300025939941, -0.1719599962234497, 0.15945999324321747, 0.5127099752426147, -0.051867999136447906, 0.11235000193119049, 0.06235399842262268, 0.425790011882782, 0.20886999368667603, 0.328220009803772, -0.030841000378131866, -0.5344499945640564, 0.1586800068616867, -0.2809399962425232, -0.17467999458312988, 0.15553000569343567, -0.4383400082588196, 0.1584099978208542, 0.12048999965190887, -0.5007799863815308, 0.4301399886608124, 0.5738099813461304, -0.023632999509572983, -0.019488999620079994, -0.5479900240898132, -0.21060000360012054, 1.07260000705719, -0.17521999776363373, 0.43050000071525574, 0.002440399955958128, 0.11223000288009644, 0.356469988822937, -0.3718999922275543, -0.0483269989490509, -0.3628300130367279, -0.10301999747753143, 0.43542999029159546, 0.5466600060462952, 0.36901000142097473, -0.11919999867677689, -0.03416400030255318, -0.2476699948310852, -0.18862000107765198, -0.03972399979829788, 0.5114700198173523, -0.15850000083446503, -0.17734000086784363, 0.11534000188112259, -0.6032400131225586, 
0.11441999673843384, -0.05931999906897545, -0.05646099895238876, -0.24132999777793884, 0.11007999628782272, -0.25523000955581665, 0.31630998849868774, -0.1383800059556961, -0.12456999719142914, -0.15244999527931213, -0.5750499963760376, -0.11221999675035477, 0.015908999368548393, -0.11387000232934952, 0.2608399987220764, -0.22082999348640442, -0.03252999857068062, 0.2542499899864197, 0.04151900112628937, -0.38938000798225403, -0.20827999711036682, 0.15223999321460724, -0.16625000536441803, 0.10384000092744827, 0.5586000084877014, 0.34751999378204346, 0.5608500242233276, 0.60794997215271, 0.39100998640060425, -0.5064100027084351, -0.5483099818229675, 0.2730399966239929, 0.341839998960495, -0.08987899869680405, 0.1880200058221817, 0.44405001401901245, -0.3013800084590912, -0.351639986038208, 0.05383700132369995, -0.013953999616205692, -0.45664000511169434, 0.13474999368190765, -0.06553000211715698, 0.49939000606536865, -0.4682900011539459, -0.46650001406669617, -0.05999099835753441, 0.19352999329566956, 0.06631000339984894, 0.00031341001158580184, -0.049199000000953674, -0.4242599904537201, -0.42083001136779785, -0.3697200119495392, -0.21146999299526215, 0.24403999745845795, -0.04330800101161003, 0.1708499938249588, 0.2932499945163727, 0.23190000653266907, -0.5338199734687805, 0.2300799936056137, -0.03809700161218643, 0.524619996547699, 0.43935999274253845, 0.07876300066709518, -0.05877299979329109, -0.26183998584747314, -0.11846999824047089, 0.1468600034713745, 0.5202299952507019, 0.028084000572562218, 0.1737300008535385, 0.3602699935436249, -0.23723000288009644, -0.13931000232696533, -0.9176599979400635, 0.5181999802589417, 0.012129999697208405, -0.2812199890613556, 0.4586699903011322, -0.4437299966812134, -0.09870000183582306, 0.04066700115799904, -0.03921100124716759, 0.2504200041294098, -0.2593100070953369, 0.5504299998283386, 0.11219000071287155, 0.16701999306678772, -0.45517000555992126, 0.6432600021362305, -0.4597199857234955, 0.05745299905538559, 0.15095999836921692, 0.26197999715805054, 0.2767300009727478, 0.1743600070476532, 0.3111500144004822, -0.48087000846862793, -0.05780800059437752, 0.5390899777412415, 0.10488999634981155, 0.34707000851631165, -0.4662500023841858, -0.02337699942290783, -0.32256999611854553, -0.33855998516082764, 0.17428000271320343, 0.09445200115442276, 0.23677000403404236, -0.28363001346588135, -0.1995600014925003, 0.11855000257492065, 0.45765000581741333, -0.05752300098538399, 0.642009973526001, -0.2015099972486496, 0.8726400136947632, -0.11902999877929688, 0.6165199875831604, -0.15143999457359314, -0.02578599937260151], u'frozen': [0.07587900012731552, 0.25369998812675476, 0.37575000524520874, -0.055431999266147614, 0.2945899963378906, -0.32067999243736267, 0.46432000398635864, -0.0036973999813199043, -0.19314000010490417, -1.1694999933242798, -0.4463900029659271, -0.7726399898529053, -0.27674999833106995, -0.607699990272522, -0.34564000368118286, -0.02418600022792816, -0.020287999883294106, -0.160180002450943, -0.27605998516082764, 1.0261000394821167, -0.03417599946260452, 0.22279000282287598, 0.16840000450611115, -0.2537199854850769, -0.5106899738311768, 0.5115500092506409, -0.43285998702049255, -0.32350000739097595, 0.3586300015449524, -0.020766999572515488, -0.4193600118160248, -0.10870999842882156, 0.268669992685318, -0.06802400201559067, 0.11914999783039093, 0.4412499964237213, 0.07993800193071365, 0.7500900030136108, -0.27483999729156494, 0.5049499869346619, -0.46643999218940735, -0.05189099907875061, -0.2462799996137619, 0.8567500114440918, 
-0.39660000801086426, -0.0259380005300045, -0.23774999380111694, 0.17735999822616577, -0.10762999951839447, 0.4309700131416321, -0.30875998735427856, 0.1396999955177307, 0.28525999188423157, -0.0315140001475811, -0.35909000039100647, -0.05594399943947792, 0.8547000288963318, -0.0723470002412796, 0.37147998809814453, 0.2071399986743927, 0.15769000351428986, 0.5688199996948242, -0.10001000016927719, -0.2103700041770935, -0.5521900057792664, -0.19867999851703644, -0.46584999561309814, -0.0995749980211258, -0.2836399972438812, 0.2903600037097931, 0.5253099799156189, 0.788670003414154, 0.04072500020265579, 0.026768000796437263, 0.06382600218057632, -0.056657999753952026, 0.5696899890899658, -0.39702001214027405, 0.4217199981212616, -0.008147199638187885, -0.15112000703811646, -0.004907799884676933, -0.3365899920463562, 0.6466100215911865, -0.09740900248289108, -0.8722800016403198, -0.20542000234127045, 0.10055000334978104, -0.5379300117492676, -0.35148999094963074, -0.1858299970626831, -0.09597799926996231, -0.38464999198913574, -0.01190400030463934, 0.010936000384390354, 0.2535499930381775, -0.4515500068664551, -0.10762999951839447, 0.14153000712394714, 0.07518500089645386, 0.3086099922657013, -0.15259000658988953, 0.3262700140476227, -0.3104900121688843, -0.6045600175857544, -0.23026999831199646, 0.17906999588012695, 0.6757100224494934, -0.3552899956703186, -0.18648000061511993, -0.19268999993801117, -0.32319000363349915, 0.11354000121355057, -0.34637001156806946, -0.4662100076675415, -0.7745000123977661, 0.23240000009536743, 0.01634100079536438, 0.16670000553131104, 0.3326199948787689, -0.20282000303268433, -0.37003999948501587, -0.22468000650405884, 0.38253000378608704, 0.0039311000145971775, 0.48225998878479004, -0.467849999666214, 0.37275001406669617, 0.1380700021982193, -0.022064000368118286, -0.4654900133609772, 0.8111299872398376, 0.17903000116348267, 0.12913000583648682, -0.40931999683380127, 0.1291400045156479, 0.18950000405311584, 0.2776699960231781, -0.609969973564148, -0.09055399894714355, 0.19892999529838562, -0.5876299738883972, -0.3178600072860718, -0.3596000075340271, -0.33719998598098755, -0.020524999126791954, -0.3599900007247925, -0.04706500098109245, -0.07538300007581711, -0.5600200295448303, -0.43650999665260315, 0.05698399990797043, -0.17757999897003174, 0.09262499958276749, 0.35767000913619995, -0.1820400059223175, -0.5436400175094604, -0.7774199843406677, 0.06318099796772003, 0.2555199861526489, -0.08567000180482864, 0.333189994096756, -0.14114999771118164, 0.7617999911308289, 0.28016000986099243, 0.159170001745224, 0.8454399704933167, -0.32649001479148865, -0.1025099977850914, -0.37130001187324524, 0.47889000177383423, 0.37376001477241516, -0.48548999428749084, -0.06286100298166275, -0.03560199961066246, 0.23734000325202942, 0.38929998874664307, -0.23356999456882477, 0.2462099939584732, 0.030473999679088593, 0.38332998752593994, -0.5684099793434143, 0.3461099863052368, 0.03613400086760521, -0.21604999899864197, -0.17800000309944153, 0.8453400135040283, -0.6884599924087524, 0.04306799918413162, -0.4694100022315979, 0.24782000482082367, 0.7042099833488464, -0.19808000326156616, -0.2942500114440918, -0.4216499924659729, -0.26888999342918396, -0.45625999569892883, 0.004610300064086914, -0.17041000723838806, 0.4266200065612793, 0.8109700083732605, -0.3591499924659729, 0.4918000102043152, 0.4611999988555908, -0.010999999940395355, -0.5588499903678894, 0.34341999888420105, -0.02009199932217598, -0.32879000902175903, 0.015269000083208084, 0.023242000490427017, 
0.1014299988746643, -0.5371000170707703, 0.05004800111055374, -0.2917900085449219, -0.0015213999431580305, 0.7375800013542175, -0.4875499904155731, -0.06169300153851509, 0.7496399879455566, 0.9489200115203857, -0.1649399995803833, 0.24683000147342682, -0.5043399930000305, 0.06172500178217888, 0.19633999466896057, -0.014582999981939793, 0.2067600041627884, -0.5167099833488464, -0.13142000138759613, 0.04201199859380722, -0.05884300172328949, 0.3364900052547455, -0.22281000018119812, 5.558400062000146e-06, 0.41923999786376953, -0.10644999891519547, 0.4005900025367737, -0.2370699942111969, -0.36858999729156494, -0.6717600226402283, -0.4097599983215332, 0.4347200095653534, -0.1883700042963028, -1.0771000385284424, -0.10978999733924866, 0.4881899952888489, 0.3393099904060364, -0.3332099914550781, -0.8123499751091003, 0.4552200138568878, 0.39921000599861145, 0.09402400255203247, 0.06826300173997879, 0.0766490027308464, -0.22291000187397003, -0.20207999646663666, 0.17555999755859375, -0.07166600227355957, 0.6143500208854675, 0.4148299992084503, 0.18355000019073486, -0.21743999421596527, -0.027612000703811646, 0.4424000084400177, -0.2994700074195862, 0.16259999573230743, 0.5188999772071838, 0.05053900182247162, -0.27456000447273254, -0.38214999437332153, 0.345550000667572, -0.008546999655663967, 0.3525800108909607, 0.6261399984359741, 0.2054000049829483, -1.019700050354004, -0.6795799732208252, -0.40018001198768616, -0.42173001170158386, -0.1491899937391281, -0.04976100102066994, 0.016330000013113022, -0.5316600203514099, -0.3441999852657318, -0.006844600196927786, 0.22382999956607819, 0.2970300018787384, 0.3915199935436249, 0.42100998759269714, -0.028849000111222267, -0.49164000153541565, 0.17964999377727509, 0.08798299729824066, -0.5353400111198425, -0.3065299987792969, 0.9651600122451782, -0.06115800142288208, -0.07816799730062485, -0.1784300059080124], u'straight': [0.022686999291181564, 0.29695001244544983, -0.2200700044631958, -0.11010999977588654, -0.10238999873399734, 0.09401199966669083, -0.44881999492645264, -0.1159299984574318, 0.3345000147819519, -0.7172300219535828, 0.29322001338005066, 0.13605999946594238, -0.20242999494075775, 0.034508999437093735, -0.36142998933792114, -0.4166499972343445, -0.1396300047636032, 0.22386999428272247, 0.48649001121520996, -0.3097600042819977, -0.32284000515937805, -0.42587000131607056, -0.05627899989485741, 0.1747400015592575, 0.4581199884414673, -0.4005900025367737, 0.3454599976539612, -0.4135800004005432, 0.1606599986553192, 0.25995999574661255, -0.03979500010609627, 0.28181999921798706, 0.2261199951171875, -0.46717000007629395, -1.8819999694824219, 0.16151000559329987, -0.255840003490448, -0.333840012550354, -0.06818900257349014, 0.4807800054550171, -0.11118000000715256, 0.47832998633384705, -0.32304999232292175, -0.21642999351024628, -0.011744000017642975, 0.4431299865245819, -0.15790000557899475, -0.023699000477790833, -0.39403998851776123, 0.10920999944210052, -0.019791999831795692, 0.2991600036621094, 0.516260027885437, 0.08876799792051315, -0.16280999779701233, -0.24730999767780304, -0.025731999427080154, -0.049915000796318054, -0.16095000505447388, 0.08058799803256989, -0.1733900010585785, -0.3992699980735779, -0.11918000131845474, 0.20872999727725983, -0.12839999794960022, -0.4701699912548065, -0.08325599879026413, 0.20909999310970306, 0.2984600067138672, -0.26353999972343445, 0.05584599822759628, -0.03706900030374527, -0.3727099895477295, 0.2442999929189682, 0.2438800036907196, -0.26006001234054565, -0.13485999405384064, -0.3073999881744385, 
-0.038672998547554016, -0.30239999294281006, -0.12557999789714813, 0.11819999665021896, 0.3788299858570099, 0.13221000134944916, -0.2331400066614151, -0.029733000323176384, -0.09860700368881226, -0.035023000091314316, 0.4549799859523773, -0.1289599984884262, 0.9165999889373779, 0.4334999918937683, -0.2758300006389618, -0.4065600037574768, -0.12026000022888184, 0.33153998851776123, -0.7650200128555298, 0.40985000133514404, -0.18911999464035034, -0.6794999837875366, -0.01553099974989891, 0.29910001158714294, -0.04772400110960007, -0.37119999527931213, -0.14305000007152557, -0.02316099964082241, 0.1497499942779541, -0.4450100064277649, -0.38995999097824097, 0.007939600385725498, -0.213469997048378, -0.3151499927043915, -0.2599300146102905, -0.7043099999427795, -0.2617399990558624, 0.48987001180648804, -0.007559699937701225, -0.047228001058101654, -0.20027999579906464, -0.13479000329971313, -0.011009999550879002, -0.048813000321388245, 0.21772000193595886, -0.06869100034236908, -0.018654000014066696, 0.3159399926662445, -0.0948370024561882, -0.14792999625205994, 0.1975499987602234, -0.06429900228977203, -0.1546500027179718, 0.47095000743865967, -0.1408900022506714, 0.24130000174045563, -0.18604999780654907, 0.544439971446991, 0.19662000238895416, 0.5259900093078613, -0.008229799568653107, 0.4359099864959717, 0.33331000804901123, 0.2155900001525879, 0.09027499705553055, 0.450190007686615, -0.5750200152397156, 0.7978699803352356, -0.04209600016474724, 0.015052000060677528, 0.1388999968767166, 0.18504999577999115, 0.4518600106239319, 0.28630000352859497, -0.49399998784065247, 0.13646000623703003, 0.5453799962997437, -0.22126999497413635, -0.39774999022483826, -0.6904100179672241, -0.016112999990582466, -0.09537900239229202, -0.6011099815368652, -0.05463999882340431, -0.3136900067329407, -0.6043000221252441, -0.18799999356269836, 0.5095199942588806, 0.040373001247644424, 9.096899884752929e-05, -0.10145000368356705, 0.09342999756336212, 0.16311000287532806, -0.33702999353408813, -0.3016499876976013, -0.08643700182437897, -0.26513999700546265, 0.07641399651765823, 0.19818000495433807, 0.6949300169944763, -0.19377000629901886, 0.27987998723983765, 0.49588000774383545, 0.13760000467300415, 0.4047499895095825, 0.20675000548362732, -0.35743001103401184, 0.08738499879837036, -0.07846000045537949, 0.1439100056886673, 0.06793099641799927, 0.6019600033760071, -0.44218000769615173, 0.27015000581741333, -0.06887199729681015, 0.11145000159740448, 0.004039600025862455, -0.1613599956035614, -0.37617000937461853, -0.22051000595092773, 0.06437499821186066, 0.33228999376296997, 2.140700101852417, -0.24042999744415283, 0.058299001306295395, -0.5954700112342834, -0.3781900107860565, -0.1740099936723709, -0.37397998571395874, -0.07274500280618668, -0.0556580014526844, 0.292169988155365, -0.516290009021759, 0.4361700117588043, -0.015716999769210815, 0.3374600112438202, 0.22846999764442444, -0.1266299933195114, 0.3160499930381775, -0.36559998989105225, 0.4081999957561493, -0.7359099984169006, 0.41091999411582947, -0.11757999658584595, -0.026045000180602074, 0.1279900074005127, -0.5730000138282776, -0.24818000197410583, 0.23627999424934387, -0.22142000496387482, 0.30507999658584595, 0.21963000297546387, -0.08028200268745422, -0.032030001282691956, 0.20689000189304352, -0.43007001280784607, -0.08452799916267395, -0.01740800030529499, -0.09818899631500244, 0.18485000729560852, -0.27737998962402344, 0.5682399868965149, 0.22381000220775604, 0.018566999584436417, -0.33788999915122986, 0.00023690999660175294, 
-0.060426998883485794, -0.41084998846054077, 0.021882999688386917, 0.712660014629364, 0.19979999959468842, -0.09876900166273117, 0.20257000625133514, -0.38822999596595764, 0.29761001467704773, -0.3632600009441376, -0.13061000406742096, -0.4618400037288666, 0.059992000460624695, 0.24753999710083008, -0.6529899835586548, 0.09479600191116333, 0.0250489991158247, -0.29012998938560486, -0.12032999843358994, 0.0974849984049797, -0.4753499925136566, 0.16116000711917877, 0.1149900034070015, -0.436379998922348, -0.13152000308036804, 0.23231999576091766, 0.06580699980258942, -0.19554999470710754, 0.013799999840557575, 0.263700008392334, -0.02533400058746338, -0.442330002784729, -1.1081000566482544, 0.10327000170946121, -0.09800200164318085, 0.0843920037150383, -0.1727299988269806, 0.07960999757051468, 0.08456599712371826, 0.15592999756336212, -0.4507899880409241, 0.3256700038909912, 0.045754000544548035, -0.07719100266695023, -0.16898000240325928, 0.26973000168800354, 0.09831999987363815, 0.4173400104045868, -0.6349700093269348, 0.4119099974632263, 0.43342000246047974, 0.569570004940033, -0.12110999971628189, -0.6228700280189514, 0.37964001297950745, -0.1475600004196167], u'smooth': [0.24142999947071075, -0.3982200026512146, 0.5451499819755554, -0.5755800008773804, -0.8050600290298462, -0.20399999618530273, 0.3181000053882599, 0.5865100026130676, 0.5510299801826477, -1.8502000570297241, -0.5545899868011475, -0.1311199963092804, 0.19790999591350555, -0.10475999861955643, -0.3700000047683716, 0.3217799961566925, -0.31929999589920044, 0.5432599782943726, -0.08385899662971497, 0.47141000628471375, -0.6614400148391724, 0.16920000314712524, -0.40404000878334045, 0.18918000161647797, -0.5499299764633179, 0.12884999811649323, -0.18520000576972961, 0.022709999233484268, -0.07941699773073196, 0.0801210030913353, -0.33809998631477356, 0.5795400142669678, -0.33882999420166016, 0.14603999257087708, -0.46129000186920166, 1.05239999294281, 0.17292000353336334, -0.221220001578331, -0.16833999752998352, 0.330020010471344, -0.8710600137710571, 0.11503999680280685, -0.19946999847888947, -0.514490008354187, 0.1959100067615509, 0.4448699951171875, 0.039333000779151917, 0.6333600282669067, -0.2833099961280823, 0.07165300101041794, -0.36805999279022217, -0.08939500153064728, -0.17437000572681427, 0.0834140032529831, 0.07066000252962112, 0.4351300001144409, -0.38238999247550964, -0.06428200006484985, 0.08211900293827057, 0.5648499727249146, 0.5326799750328064, 0.4923500120639801, 0.39100000262260437, -0.2819400131702423, -0.13568000495433807, 0.25582998991012573, 0.7018100023269653, 0.46522000432014465, 0.5315600037574768, 0.1667100042104721, -0.17448000609874725, 0.2814899981021881, -0.05717200040817261, 0.6387100219726562, 0.3421199917793274, 0.12848000228405, -0.34578999876976013, 0.25624001026153564, 0.4602000117301941, -0.17677000164985657, -0.193790003657341, 0.17117999494075775, -0.011133000254631042, -0.12031000107526779, 0.47516998648643494, 0.15372000634670258, 0.09124699980020523, -0.15183000266551971, -0.4743799865245819, -0.04283500090241432, -0.2315800040960312, 0.21504999697208405, -1.0439000129699707, -0.378030002117157, -0.3499799966812134, -0.935949981212616, 0.07454600185155869, 0.2752000093460083, 0.6502900123596191, 0.5494199991226196, -0.19791999459266663, -0.13808000087738037, -0.019478000700473785, -0.5733100175857544, -0.2616499960422516, 0.6136400103569031, -0.4493800103664398, -0.37676000595092773, 0.08503799885511398, -0.0178849995136261, 0.13912999629974365, 0.1510699987411499, 
0.32168999314308167, -0.19643999636173248, -0.26017001271247864, 0.10639999806880951, -0.427839994430542, 0.029593000188469887, -0.41903001070022583, 0.1331000030040741, -0.03430600091814995, -0.3642500042915344, 0.5367199778556824, -0.6318399906158447, -0.35813000798225403, 0.2088800072669983, 0.3059700131416321, 1.0987999439239502, -0.4088500142097473, 0.3949899971485138, -0.5407900214195251, 0.27689000964164734, -0.3144400119781494, 0.36041000485420227, -0.7649400234222412, -0.2603900134563446, -0.21720999479293823, -0.3502799868583679, 0.4953500032424927, 0.14830000698566437, 0.6323699951171875, 0.29701000452041626, -0.40841999650001526, -0.26030999422073364, 0.10321000218391418, 0.13975000381469727, -0.24518999457359314, -0.5580199956893921, 0.36090001463890076, 0.43314000964164734, -0.6275799870491028, 0.08314000070095062, -0.42085000872612, -0.07521700114011765, 0.2363400012254715, -0.7329300045967102, -0.22488999366760254, -0.3559400141239166, 0.09523999691009521, 0.05760100111365318, -0.6392499804496765, -0.38106998801231384, -0.5220000147819519, -0.19627000391483307, 0.057962000370025635, 0.1763100028038025, 0.430869996547699, 0.14630000293254852, 0.11789999902248383, 0.12925000488758087, 0.22770999372005463, -0.01854199916124344, -0.2728700041770935, -0.005569600034505129, -0.1291700005531311, -0.2692500054836273, 0.09411799907684326, -0.30331000685691833, -0.3495500087738037, -0.4392000138759613, -0.41304001212120056, -0.144569993019104, -0.266510009765625, 0.16965000331401825, -0.49039000272750854, -0.3837699890136719, 0.7988499999046326, -0.016896000131964684, -0.870169997215271, 0.4388999938964844, 0.45186999440193176, 0.8279500007629395, -0.27897000312805176, 0.22934000194072723, 0.7653899788856506, 0.306549996137619, 0.03505700081586838, 0.21713000535964966, 0.033048998564481735, -0.18876999616622925, 1.159000039100647, 0.11035999655723572, 0.7443100214004517, 0.5020700097084045, 0.11738000065088272, -0.4300900101661682, 0.11962000280618668, -0.14114999771118164, -0.06982800364494324, 0.22543999552726746, 0.37567999958992004, -0.19616000354290009, -0.23548999428749084, -0.0398159995675087, -0.2957899868488312, -0.31396999955177307, 0.060759998857975006, -0.7837299704551697, -0.20543000102043152, -0.24553999304771423, 0.4101699888706207, -0.2505199909210205, 0.3104400038719177, -0.500190019607544, -0.6230199933052063, -0.15275000035762787, 0.19452999532222748, -0.17733000218868256, -0.09669800102710724, -0.04294800013303757, 0.02462499961256981, 0.32036998867988586, 0.21720999479293823, -0.5174300074577332, 0.07434500008821487, 0.2035199999809265, -0.11482000350952148, -0.07954099774360657, -0.5839999914169312, -0.35001999139785767, 0.46994999051094055, -0.20753000676631927, -0.4397900104522705, 0.4833599925041199, -0.6843100190162659, 0.29440999031066895, 0.4869999885559082, 0.30601999163627625, 0.033070001751184464, -0.3951599895954132, 0.12161999940872192, -0.41762998700141907, 0.07517000287771225, 0.06356900185346603, -0.17148999869823456, -0.08851899951696396, 0.24851000308990479, -0.07467400282621384, 0.1553100049495697, 1.0227999687194824, -0.2577199935913086, -0.2413800060749054, 0.27932000160217285, 0.4486199915409088, -0.2278600037097931, 0.13181999325752258, 0.7501699924468994, -0.6841899752616882, 0.4287300109863281, 0.1304599940776825, 0.010564000345766544, -0.25902000069618225, 0.24461999535560608, -0.007364099845290184, 0.6541100144386292, 0.5917999744415283, -0.4576300084590912, 0.03750300034880638, -0.2528400123119354, -0.19726000726222992, 
0.2031800001859665, -0.03218499943614006, 0.19912999868392944, 0.8309599757194519, -0.24201999604701996, -0.2250099927186966, 0.1645199954509735, 0.3417400121688843, 0.29649001359939575, -0.04292700067162514, 0.13176999986171722, 0.7107499837875366, -0.14529000222682953, 0.48100998997688293, 0.5354800224304199, 0.9853500127792358, -0.23702000081539154, 0.053070999681949615, -0.1846199929714203, 0.24044999480247498], u'worn': [-0.2008100003004074, -0.2331800013780594, -0.2216300070285797, -0.1306300014257431, -0.14986999332904816, -0.04550199955701828, -0.2194499969482422, 0.2834799885749817, -0.0767270028591156, -1.0013999938964844, 0.4783799946308136, 0.06791099905967712, -0.2863599956035614, 0.18964000046253204, -0.3642300069332123, -0.40634000301361084, 0.07452300190925598, 0.17233000695705414, -0.29833000898361206, -0.688730001449585, -0.03677599877119064, -0.2246900051832199, 0.10333000123500824, -0.07874900102615356, -0.5344700217247009, -0.4420599937438965, 0.08697500079870224, -0.31490999460220337, 0.4474300146102905, 0.714460015296936, 0.8500099778175354, 0.16290000081062317, -0.43873998522758484, 0.05167299881577492, -0.6727399826049805, 0.4690600037574768, -0.07526999711990356, -0.22362999618053436, 0.72434002161026, 0.44736000895500183, -0.357369989156723, -0.6448000073432922, -0.4601700007915497, -0.6219499707221985, 0.23237000405788422, 0.17663000524044037, 0.2105100005865097, -0.3678799867630005, 0.013287999667227268, -0.1746399998664856, -0.416130006313324, -0.35293999314308167, 0.30362001061439514, 0.05057799816131592, 0.09421399980783463, -0.4333600103855133, -0.15106000006198883, -0.5412700176239014, 0.03771600127220154, 0.12615999579429626, 0.3459100127220154, -0.5698099732398987, 0.13183000683784485, 0.1264200061559677, -0.04909199848771095, -0.639490008354187, -0.1990099996328354, 0.09013599902391434, 0.2016800045967102, 0.5217499732971191, 0.22304999828338623, 0.09433399885892868, -0.06864599883556366, 0.09748200327157974, 0.21480999886989594, -0.04447999969124794, 0.4879400134086609, 0.02130500040948391, -0.24894000589847565, -0.27397000789642334, 0.1649799942970276, 0.3379899859428406, -0.44238001108169556, -0.03790799900889397, -0.03334200009703636, 0.08891300112009048, 0.2209099978208542, 0.08610299974679947, -0.39737001061439514, 0.7678899765014648, 0.016519999131560326, 0.5947800278663635, 0.09813199937343597, 0.1410900056362152, -0.16080999374389648, 0.14657999575138092, 0.41888999938964844, 0.06750400364398956, 0.4881399869918823, -0.05338900163769722, 0.44815999269485474, 0.631879985332489, -0.3393999934196472, 0.02944299951195717, -0.04537700116634369, 0.01190400030463934, -0.11514999717473984, 0.20645000040531158, -0.055080998688936234, -0.14462999999523163, -0.3322100043296814, 0.6635900139808655, 0.15262000262737274, -0.5115600228309631, -0.7095999717712402, 0.06048800051212311, 0.11417999863624573, 0.3495199978351593, 0.290039986371994, -0.619700014591217, 0.27761998772621155, -0.17121000587940216, 0.5562899708747864, 0.04103099927306175, -0.0484439991414547, 0.6003900170326233, -0.22283999621868134, -0.00034381001023575664, 0.28068000078201294, -0.024450000375509262, 0.05834000185132027, -0.265720009803772, 0.1915699988603592, -0.2697499990463257, -0.4075799882411957, -0.07997100055217743, -0.17609000205993652, 0.3166399896144867, 0.1137000024318695, -0.09419699758291245, 0.24220000207424164, 0.15102000534534454, 0.023256000131368637, 0.04947499930858612, -0.02510800026357174, 0.2548600137233734, -0.3244900107383728, 0.3309899866580963, 
0.37880998849868774, -0.0781790018081665, 0.14188000559806824, -0.5621799826622009, -0.3540000021457672, -0.5283100008964539, 0.16461999714374542, -0.21331000328063965, -0.028586000204086304, -0.35899001359939575, 0.37042000889778137, 0.7379000186920166, 0.03517900034785271, -0.4319800138473511, -0.42664000391960144, 0.03521399945020676, 0.5086600184440613, 0.004157400224357843, -0.3292900025844574, 0.3668299913406372, 0.1747799962759018, -0.3547700047492981, -0.2468400001525879, 0.3993299901485443, -0.015169999562203884, 0.25854000449180603, 0.022285999730229378, -0.4264400005340576, 0.24541999399662018, 0.9605000019073486, -0.23452000319957733, -0.24493999779224396, -0.20771999657154083, 0.3671000003814697, 0.3399699926376343, 0.04010799899697304, 0.49983999133110046, 0.018391000106930733, 0.4863399863243103, 0.3897800147533417, -0.09081199765205383, -0.3119800090789795, 0.38429000973701477, -0.11368999630212784, 0.18886999785900116, 0.02033500000834465, 0.10779999941587448, 0.02925099991261959, -0.4964100122451782, -0.3486199975013733, 0.15623000264167786, -0.4917300045490265, 0.8873599767684937, 0.08887799829244614, 0.5366799831390381, 0.6075699925422668, 1.0509999990463257, -0.2091200053691864, 0.379720002412796, 0.04439900070428848, -0.5882300138473511, 0.060903001576662064, 0.2990500032901764, 0.0972760021686554, 0.18118000030517578, 0.036708999425172806, 0.7442799806594849, -0.6609200239181519, 0.5646399855613708, -0.6348400115966797, 0.03610600158572197, -0.31547999382019043, 0.18949000537395477, -0.23075999319553375, 0.47404998540878296, -0.16343000531196594, 0.3464600145816803, 0.0383480004966259, 0.10722000151872635, -0.26903000473976135, -0.11563000082969666, -0.19168999791145325, 0.5474900007247925, -0.030163999646902084, -0.22104999423027039, 0.03557400032877922, -0.016742000356316566, -0.06010400131344795, 0.4005599915981293, -0.35097000002861023, -0.28773000836372375, 0.4964999854564667, 0.33842000365257263, 0.1963299959897995, 0.11556000262498856, 0.1056400015950203, -0.2863599956035614, 0.27046000957489014, -0.2325499951839447, -0.36932000517845154, 0.29214999079704285, -0.2821600139141083, 0.09113399684429169, -0.29684001207351685, 0.2213200032711029, -0.7299200296401978, 0.3059599995613098, -0.3357599973678589, 0.2500300109386444, 0.23011000454425812, -0.359609991312027, -0.48840001225471497, -0.3847300112247467, -0.14306999742984772, 0.5923299789428711, 0.2358900010585785, -0.4965299963951111, -0.240339994430542, -0.42882001399993896, 0.09147399663925171, 0.09274999797344208, 0.282039999961853, -0.31995999813079834, -0.14643000066280365, 0.25874999165534973, -0.6088600158691406, -0.894070029258728, 0.4041999876499176, -0.9740999937057495, -0.41843000054359436, -0.4811600148677826, 0.13580000400543213, 0.7236599922180176, -0.04789699986577034, 0.23707999289035797, 0.1106799989938736, -0.35192999243736267, 0.6217399835586548, -0.4171999990940094, 0.287090003490448, 0.02034600079059601, -0.2964099943637848, 0.3406200110912323, -0.04448400065302849, -0.40182000398635864, 1.007200002670288, -0.878570020198822, -0.5429900288581848, 0.2565999925136566, 0.3381099998950958, 0.34968000650405884, -0.5691900253295898], u'melted': [0.05640700086951256, -0.37957999110221863, -0.09575200080871582, 0.16033999621868134, -0.2917900085449219, -0.36976000666618347, 0.4383400082588196, 0.24974000453948975, 0.35262998938560486, -0.7900300025939941, -0.14656999707221985, -0.16836999356746674, -0.0488939993083477, 0.1260800063610077, -0.4142000079154968, 0.07322899997234344, 
0.22614000737667084, 0.08938899636268616, -0.36695998907089233, 0.7232900261878967, 0.051667001098394394, 0.13819000124931335, 0.2380400002002716, 0.10751999914646149, -0.28115999698638916, -0.46911001205444336, -0.2237199991941452, 0.17899000644683838, -0.14842000603675842, -0.5082899928092957, 0.040119998157024384, 0.2830600142478943, -0.21514999866485596, -0.3087199926376343, 0.18978999555110931, 0.2638700008392334, -0.553659975528717, 0.8008599877357483, 0.3849799931049347, 0.5171899795532227, -0.16309000551700592, -0.3853900134563446, 0.13716000318527222, 0.0375249981880188, 0.42056000232696533, 0.07960599660873413, 0.15818999707698822, 0.2163199931383133, 0.1514499932527542, 0.24901999533176422, -0.0207310002297163, 0.03750799968838692, -0.631820023059845, 0.299699991941452, 0.2906999886035919, -0.13928000628948212, 0.2037699967622757, -0.23372000455856323, 0.6827800273895264, 0.9360100030899048, 0.0230260007083416, 0.8820199966430664, -0.17803999781608582, 0.14741000533103943, 0.05628199875354767, -0.11474999785423279, 0.02241400070488453, 0.6340000033378601, -0.3715899884700775, -0.17388999462127686, -0.043191999197006226, -0.20499999821186066, -0.035353001207113266, 0.28154999017715454, -0.009570100344717503, 0.6762199997901917, 0.14234000444412231, -0.6030300259590149, -0.1970899999141693, -0.11440999805927277, -0.03876499831676483, -0.10100000351667404, -0.25725001096725464, -0.08108899742364883, 0.007143999915570021, -0.2720000147819519, -0.4578000009059906, 0.07525999844074249, -0.4837599992752075, 0.05117499828338623, -0.00693049980327487, -0.39208999276161194, 0.17377999424934387, -0.008514399640262127, -0.5875899791717529, -0.16580000519752502, 0.032173000276088715, 0.43062999844551086, -0.13277000188827515, 0.7712100148200989, 0.305649995803833, 0.3829900026321411, 0.1944500058889389, -0.35047999024391174, 0.16186000406742096, -0.2605699896812439, -0.1273999959230423, 0.1685599982738495, -0.28543001413345337, 0.5946999788284302, -0.08851800113916397, -0.07153800129890442, 0.23197999596595764, -0.12623000144958496, -0.43599000573158264, 0.28262001276016235, -0.7100200057029724, 0.44157999753952026, 0.41165998578071594, -0.09820699691772461, -0.151419997215271, -0.7820500135421753, 0.014585999771952629, 0.5024200081825256, -0.31119000911712646, -0.07992800325155258, -0.07607799768447876, -0.023680999875068665, -0.6609899997711182, 0.3333899974822998, -0.5085700154304504, 0.9535300135612488, 0.043136999011039734, 0.667110025882721, 0.2753100097179413, 0.20670999586582184, 0.0698700025677681, 0.12281999737024307, 0.04407300055027008, -0.10992000252008438, 0.26377999782562256, -0.08384999632835388, -0.9329100251197815, 0.6578800082206726, 0.01622699946165085, -0.0009995599975809455, 0.25947999954223633, 0.12246000021696091, 0.18088999390602112, -0.8450300097465515, -0.355569988489151, 0.3645099997520447, -0.11416999995708466, -0.1939300000667572, 0.21544000506401062, -0.44242000579833984, 0.11998999863862991, -0.24759000539779663, -0.2346400022506714, -0.124719999730587, 0.20202000439167023, -0.701259970664978, -0.011416000314056873, -0.026695000007748604, 0.5075799822807312, 0.22161999344825745, 0.5323399901390076, 0.06355500221252441, 0.27035999298095703, -1.1128000020980835, 0.33000001311302185, 0.3880000114440918, 0.22930000722408295, 0.04566900059580803, -0.028152000159025192, -0.42497000098228455, 0.1737300008535385, -0.633840024471283, 0.655210018157959, -0.30386999249458313, -0.2144400030374527, 0.18422000110149384, 0.36772000789642334, -0.6244199872016907, 
-0.21254000067710876, -0.8247100114822388, 1.1438000202178955, 0.16574999690055847, -0.22937999665737152, 0.13630999624729156, 0.4679099917411804, 0.45535001158714294, 0.18520000576972961, 0.07363499701023102, 0.08399800211191177, -0.1954299956560135, -0.3708600103855133, 0.4647200107574463, 0.44850000739097595, -0.032545000314712524, -0.019078999757766724, -0.5341699719429016, 1.0865999460220337, 0.4225099980831146, 0.13631999492645264, -0.3333800137042999, 0.13843999803066254, -0.10135000199079514, -0.6648200154304504, 0.09822999686002731, 0.7721999883651733, 0.21651999652385712, -0.2697199881076813, -0.09648100286722183, 0.05390699952840805, 0.1010499969124794, 0.34292998909950256, -0.574429988861084, 0.12714999914169312, 0.40470001101493835, 0.35420000553131104, 0.41670000553131104, -0.050099000334739685, -0.7800800204277039, -0.11108999699354172, -0.2252800017595291, 0.06662700325250626, -0.044148001819849014, 0.24040000140666962, -0.23443999886512756, 0.29467999935150146, -0.34139999747276306, 0.039073001593351364, -0.5999900102615356, 0.0032832000870257616, -0.18605999648571014, 0.037223998457193375, 0.2091600000858307, -0.3805899918079376, -0.6986600160598755, -0.2121099978685379, -0.27851998805999756, -0.17719000577926636, 0.10989999771118164, -0.5463700294494629, -0.1964299976825714, 0.3660599887371063, -0.09536100178956985, -0.13583999872207642, -0.5411800146102905, 0.39640000462532043, -0.5487099885940552, 0.20895999670028687, -0.32464998960494995, -0.19617000222206116, 0.122359998524189, 0.15729999542236328, -0.11728999763727188, -0.48930999636650085, 0.26260998845100403, -0.4806300103664398, -0.4025599956512451, -0.013137999922037125, 0.5834500193595886, 0.2233400046825409, 0.01929200068116188, -0.35367000102996826, 0.06174600124359131, 0.036354999989271164, -0.1978600025177002, 0.1278200000524521, 0.11203999817371368, 0.00922240037471056, 0.5819600224494934, -0.2567799985408783, 0.24461999535560608, -0.2917500138282776, -0.6431699991226196, -0.7361500263214111, -0.2814300060272217, 0.230430006980896, 0.17437000572681427, -0.16809000074863434, -0.1253100037574768, 0.39743998646736145, 0.5278099775314331, -0.10294999927282333, -0.07281800359487534, 0.2711600065231323, 0.4285300076007843, -0.31422001123428345, 0.29561999440193176, 0.4630900025367737, 0.7299399971961975, 0.19913999736309052, -0.46292001008987427, 0.384909987449646, -0.2530199885368347, -0.32001999020576477, 0.30799001455307007], u'thin': [-0.30489999055862427, -0.14603999257087708, -0.14328999817371368, -0.3572700023651123, 0.2979699969291687, -0.12443999946117401, -0.006986199878156185, 0.2798599898815155, 0.2845599949359894, -1.4534000158309937, 0.008996300399303436, -0.22311000525951385, -0.7560700178146362, 0.21785999834537506, -0.471780002117157, -0.3724699914455414, -0.45914000272750854, 0.5285999774932861, -0.18862999975681305, -0.4004499912261963, -0.1941000074148178, -0.3700000047683716, -0.1412999927997589, 0.04258500039577484, 0.1986899971961975, 0.17034000158309937, 0.6481800079345703, -0.18250000476837158, -0.29969000816345215, 0.12918999791145325, -0.37696999311447144, -0.021734999492764473, -0.3241199851036072, 0.3275200128555298, -0.9185699820518494, 0.3809399902820587, -0.29822999238967896, 0.1237500011920929, 0.5603600144386292, 0.551800012588501, -0.07063700258731842, -0.16165000200271606, 0.10232999920845032, 0.03321399912238121, 0.11595000326633453, 0.33649998903274536, 0.15800000727176666, -0.16120000183582306, -0.0803230032324791, 0.11599999666213989, 0.09505899995565414, 
0.0334630012512207, -0.22095000743865967, 0.3720400035381317, 0.20170000195503235, -0.5026400089263916, -0.28068000078201294, -0.4553599953651428, 0.5567100048065186, 0.34143999218940735, 0.6480600237846375, -0.3382900059223175, 0.6358299851417542, 0.16177000105381012, 0.6495699882507324, -0.32183000445365906, -0.33597999811172485, 0.029743000864982605, 0.2335200011730194, 0.5345500111579895, -0.011580999940633774, -0.07080700248479843, 0.12043999880552292, 0.21199999749660492, 0.45649999380111694, -0.1378999948501587, 0.06564100086688995, 0.3747200071811676, -0.030262000858783722, -0.455020010471344, -0.2147199958562851, -0.023655999451875687, -0.11520999670028687, -0.015375000424683094, -0.10148999840021133, 0.30094000697135925, 0.03627200052142143, 0.0685420036315918, -0.44095999002456665, 0.18371999263763428, -0.4701699912548065, -0.10321000218391418, -0.33750998973846436, -0.3198300004005432, -0.1785299926996231, 0.24501000344753265, -0.3287599980831146, 0.5542700290679932, 0.1671299934387207, 0.29245999455451965, 0.3487800061702728, 0.07585500180721283, -0.13526999950408936, -0.7264599800109863, -0.3077400028705597, 0.14244000613689423, 0.050328999757766724, 0.2607499957084656, -0.5513399839401245, 0.2144699990749359, -0.5765200257301331, 0.37929001450538635, 0.11804000288248062, -1.0429999828338623, -0.0803619995713234, 0.0534139983355999, -0.05460299924015999, 0.7210999727249146, 0.26403000950813293, -0.6032699942588806, -0.2519499957561493, -0.44297000765800476, 0.2885200083255768, 0.13005000352859497, -0.1356000006198883, 0.1928199976682663, 0.14424000680446625, 0.1227400004863739, -0.2870599925518036, 0.008961900137364864, -0.6833199858665466, 0.41611000895500183, -0.602150022983551, 0.6931300163269043, 0.5688700079917908, 0.11858999729156494, 0.06510400027036667, 0.2915799915790558, 0.09470400214195251, 0.224030002951622, 0.9757000207901001, 0.20927000045776367, -0.23284000158309937, -0.5250999927520752, -0.20725999772548676, 0.5779500007629395, -0.2739599943161011, -0.25764000415802, -0.11727999895811081, -0.674839973449707, -0.11334999650716782, -0.1265600025653839, -0.1500300019979477, 0.10806000232696533, 0.641510009765625, -0.15860000252723694, -0.43206000328063965, -0.3678700029850006, 0.26524999737739563, 0.542739987373352, -0.7814499735832214, -0.5216400027275085, 0.5835599899291992, 0.1909099966287613, 0.5566099882125854, 0.15395000576972961, -0.2994599938392639, -0.21323999762535095, -0.39743998646736145, -0.3758000135421753, 0.37610000371932983, 0.02621600031852722, -0.21069000661373138, -0.034147001802921295, -0.6523000001907349, -0.14486999809741974, 0.3913399875164032, 0.37435001134872437, 0.6059899926185608, -0.8157100081443787, 0.29093998670578003, -0.2045699954032898, -0.21323999762535095, 0.20010000467300415, -0.08331900089979172, -0.3842200040817261, 0.6794000267982483, -0.36800000071525574, 0.2696399986743927, 0.46873998641967773, 0.3509800136089325, 0.8999699950218201, 0.4436100125312805, 0.35701999068260193, -0.020927999168634415, 0.16368000209331512, -0.5918400287628174, -0.29719001054763794, -0.13077999651432037, 0.14756999909877777, 0.652679979801178, 0.18456000089645386, 0.35583001375198364, 0.5500199794769287, 0.18779000639915466, 0.11094000190496445, 0.6742299795150757, -0.10712999850511551, 0.06652399897575378, 0.04892300069332123, 0.21634000539779663, -0.018605999648571014, 0.3733200132846832, -0.12567999958992004, -0.03543800115585327, -0.06493999809026718, 0.8510599732398987, -0.5980100035667419, -0.016673000529408455, -0.34002000093460083, 
0.7684100270271301, 0.14393000304698944, -0.06837499886751175, 0.42340999841690063, -0.4108099937438965, 0.510640025138855, -0.6370700001716614, -0.3013499975204468, -0.43566998839378357, -0.1556600034236908, 0.5133799910545349, -0.5097299814224243, 0.1531600058078766, -0.45124000310897827, 0.10967999696731567, -0.7389699816703796, -0.22961999475955963, -0.12833000719547272, -0.41032999753952026, -0.31123998761177063, 0.5146899819374084, 0.0015982999466359615, -0.12286999821662903, 0.19050000607967377, -1.2044999599456787, -0.5742700099945068, 0.15625, -0.16200999915599823, 0.15175999701023102, -0.431410014629364, 0.17850999534130096, -0.33774998784065247, 0.3325499892234802, -0.39904001355171204, 0.06760100275278091, 0.29023000597953796, -0.011242999695241451, 0.2973099946975708, -0.3264699876308441, 0.41058000922203064, 0.3417600095272064, -0.17645999789237976, 0.030681999400258064, 0.370279997587204, 0.2795099914073944, -0.12274999916553497, 0.23573000729084015, -0.019497999921441078, 0.3891899883747101, 0.41157999634742737, -0.03151300176978111, 0.24815000593662262, -0.16766999661922455, -0.05474900081753731, -0.8652399778366089, 0.6807100176811218, -0.5615000128746033, -0.2050900012254715, -1.0922000408172607, 0.3107199966907501, 0.3691500127315521, -0.20839999616146088, -0.35558998584747314, 0.07471299916505814, 0.1546200066804886, -0.2010899931192398, 0.09536900371313095, 1.149999976158142, -0.10284999758005142, 0.11191999912261963, 0.5137100219726562, 0.30660998821258545, -1.2003999948501587, 0.2906700074672699, -0.14751000702381134, 0.2779099941253662, -0.6337299942970276, -0.12906000018119812, 0.20541000366210938, 0.12967999279499054], u'cracked': [0.22575999796390533, 0.1646600067615509, -0.3442699909210205, -0.46389999985694885, -0.31648001074790955, -0.1277499943971634, -0.3375299870967865, 0.09653600305318832, 0.0985419973731041, -0.4477199912071228, -0.28457000851631165, 0.2046699970960617, 0.25174999237060547, 0.37900999188423157, -0.49950000643730164, 0.30535998940467834, 0.20645000040531158, 0.6007999777793884, -0.28060001134872437, 0.08180899918079376, 0.3806599974632263, 0.5205199718475342, 0.032134998589754105, -0.7040600180625916, -0.921209990978241, 0.12646999955177307, 0.11314000189304352, 0.16155999898910522, -0.3649600148200989, 0.026058999821543694, -0.420199990272522, 0.25095000863075256, -0.24271999299526215, 0.38791999220848083, -0.3463299870491028, 0.09628300368785858, -0.011857000179588795, 0.03197300061583519, 0.1277099996805191, 0.2011999934911728, 0.26319000124931335, 0.29747000336647034, 0.001000500051304698, 0.07572200149297714, -0.13155999779701233, 0.2928699851036072, -0.09213799983263016, 0.37821000814437866, -0.967270016670227, 0.1961899995803833, 0.11014000326395035, 0.18896999955177307, 0.2737799882888794, 0.00488980021327734, 0.6878799796104431, 0.21213999390602112, -0.1345299929380417, -0.2903999984264374, -0.27623000741004944, -0.47036001086235046, 0.4493600130081177, 0.41666001081466675, -0.3957799971103668, 0.06300099939107895, -0.11598999798297882, 0.09327200055122375, 0.10546000301837921, 0.24491000175476074, 0.7094600200653076, -0.08120699971914291, 0.026295000687241554, -0.3364900052547455, 0.07899600267410278, 0.30691999197006226, 0.2676999866962433, -0.044975001364946365, 0.32517001032829285, -0.3778199851512909, -0.2654300034046173, 0.6007500290870667, -0.26642999053001404, -0.12897999584674835, 0.3129499852657318, 0.05264899879693985, -0.1653600037097931, 0.14357000589370728, 0.03901499882340431, 0.2555199861526489, 
-0.18889999389648438, -0.21568000316619873, -0.008182800374925137, 0.43806999921798706, -0.24073000252246857, 0.46351000666618347, 0.06803599745035172, 0.49570998549461365, -0.08592300117015839, 0.05101799964904785, 0.5338000059127808, 0.2666099965572357, 0.46689000725746155, -0.11450999975204468, 0.26677998900413513, -0.44764000177383423, -0.14192000031471252, 0.3443099856376648, 0.12249000370502472, 0.1354299932718277, -0.5213599801063538, -0.14213000237941742, -0.5029399991035461, -0.7477499842643738, 0.3156000077724457, -0.6320099830627441, -0.463809996843338, 0.39994001388549805, -0.40202000737190247, 0.27674999833106995, -0.3227100074291229, -0.37988001108169556, -0.11399000138044357, -0.6238300204277039, -0.28839001059532166, 0.11819999665021896, -0.15846000611782074, -0.186489999294281, 0.07698799669742584, 0.20223000645637512, 0.326200008392334, 0.6712700128555298, -0.010792000219225883, 0.6125400066375732, 0.04642599821090698, 0.5329800248146057, 0.6037700176239014, -0.03715699911117554, 0.1792300045490265, 0.015711000189185143, -0.11097999662160873, -0.09990499913692474, 0.3259499967098236, 0.844219982624054, -0.538070023059845, 0.3918299973011017, -0.2025199979543686, 0.148049995303154, 0.13965000212192535, 0.2715100049972534, 0.2650099992752075, 0.39333000779151917, 0.1343500018119812, 0.09547699987888336, -0.2667900025844574, 0.10852000117301941, 0.06495799869298935, 0.15744000673294067, 0.0031975999008864164, 0.033403001725673676, -0.3913300037384033, 0.30877000093460083, -0.1392900049686432, 0.2983199954032898, -0.047161001712083817, 0.22477999329566956, 0.5677700042724609, -0.10719999670982361, 0.042465001344680786, -0.3813199996948242, 0.6795600056648254, -0.026197999715805054, -0.29297998547554016, 0.07999899983406067, 0.5517399907112122, -0.35069000720977783, 0.25088000297546387, 0.5254700183868408, 0.22457000613212585, 0.3139899969100952, 0.2538500130176544, -0.3690199851989746, -0.1904900074005127, 0.09080199897289276, 0.4562099874019623, -0.3800700008869171, -0.2059600055217743, -0.3267199993133545, 0.8014900088310242, 0.251120001077652, 0.08612699806690216, -0.09132999926805496, 0.330020010471344, -0.07312499731779099, 0.033188000321388245, 0.2030400037765503, 0.42708998918533325, -0.009790199808776379, -0.5067200064659119, -0.1320900022983551, 0.27063000202178955, 0.04326599836349487, 0.5899900197982788, 0.21310000121593475, 0.3953999876976013, -0.23296000063419342, -0.29763999581336975, 0.32078999280929565, 0.354420006275177, -0.26427000761032104, -0.3071799874305725, 0.4531700015068054, 0.10299000144004822, -0.04839000105857849, 0.13922999799251556, 0.12245000153779984, 0.10098999738693237, -0.3355900049209595, -0.2796199917793274, 0.12793999910354614, 0.24829000234603882, 0.7894300222396851, 0.6558799743652344, 0.5575000047683716, 0.3586300015449524, -0.2370000034570694, -0.04442400112748146, 0.2106200009584427, 0.11375000327825546, -0.38752999901771545, -0.3456200063228607, 0.2742899954319, 0.3217099905014038, -0.8353899717330933, -0.3472999930381775, -0.23055000603199005, -0.43241000175476074, 0.12616999447345734, -0.12011999636888504, -0.2258799970149994, -0.14488999545574188, -0.7916499972343445, -0.08445700258016586, -0.32328000664711, -0.4402399957180023, -0.3796299993991852, 0.3966499865055084, -0.353300005197525, -0.34558001160621643, 0.02959899976849556, 0.24060000479221344, -0.08760300278663635, 0.5013399720191956, -0.08540400117635727, 0.457720011472702, -1.080299973487854, 0.563979983329773, 0.13308000564575195, -0.13056999444961548, 
-0.2921600043773651, -0.24244999885559082, 0.340939998626709, -0.6646900177001953, -0.010156000033020973, -0.35387998819351196, -0.06290800124406815, -0.16287000477313995, -0.5129200220108032, -0.23463000357151031, -0.4809800088405609, -0.3853999972343445, 0.04008999839425087, -0.14538000524044037, 0.1010499969124794, 0.0914200022816658, -0.17225000262260437, -0.48080000281333923, 0.3305099904537201, -0.5702800154685974, 0.10700000077486038, -0.46568000316619873, -0.329800009727478, 0.13279999792575836, -0.7267600297927856, 0.6030700206756592, 0.4150499999523163, -0.010080999694764614, 0.21112999320030212, -0.8644599914550781, -0.21031999588012695, 0.7029799818992615, 0.15011000633239746, -0.14642000198364258, -0.3355900049209595, -0.06220899894833565, 0.8187999725341797, -0.23917999863624573, -0.014125999994575977, 0.052003998309373856, -0.8257799744606018, 0.1679999977350235, 0.5234400033950806], u'bent': [-0.05858999863266945, -0.5743299722671509, -0.13122999668121338, -0.21863999962806702, 0.416920006275177, -0.15328000485897064, -0.1234000027179718, 0.19413000345230103, 0.5684300065040588, -0.457940012216568, -0.14927999675273895, 0.38203001022338867, 0.41183000802993774, 0.17573000490665436, -0.6881600022315979, 0.38495999574661255, 0.6352300047874451, 0.2314700037240982, 0.23231999576091766, 0.12981000542640686, 0.4299499988555908, -0.14865000545978546, 0.4059000015258789, -0.07422100007534027, -0.4569700062274933, 0.5433800220489502, -0.0643410012125969, -0.20036999881267548, 0.04615899920463562, 0.36024001240730286, 0.07235600054264069, 0.23770999908447266, 0.3337100148200989, -0.1382399946451187, -0.5942500233650208, 0.2758199870586395, 0.20788000524044037, -0.04610000178217888, 0.15586000680923462, 0.25402000546455383, 0.06630100309848785, 0.19596999883651733, -0.06437599658966064, -0.3439599871635437, 0.20512999594211578, 0.01067699957638979, 0.051284998655319214, 0.09355399757623672, 0.16249999403953552, 0.13673999905586243, -0.08623600006103516, -0.14855000376701355, 0.3880699872970581, -0.12262000143527985, 0.25554999709129333, -0.1970299929380417, 0.09390199929475784, 0.00851960014551878, 0.13808999955654144, -0.002766899997368455, 0.26794999837875366, -0.03365800157189369, -0.012826000340282917, 0.30485999584198, -0.19744999706745148, -0.03464899957180023, -0.27489998936653137, 0.5830900073051453, 0.3101600110530853, 0.02358900010585785, 0.008391600102186203, 0.19181999564170837, 0.04226699844002724, 0.37867000699043274, 0.2681899964809418, 0.26118001341819763, -0.3570899963378906, 0.3741700053215027, -0.8590800166130066, 0.29308000206947327, 0.21528999507427216, -0.05113799870014191, -0.004858000203967094, 0.35286998748779297, -0.02739099971950054, -0.1089399978518486, 0.1074799969792366, 0.21517999470233917, -0.017689000815153122, 0.1848600059747696, 0.05072199925780296, -0.031537000089883804, 0.06210299953818321, 0.08088099956512451, -0.30730000138282776, -0.6896799802780151, 0.2731899917125702, -0.15509000420570374, 0.04168900102376938, -0.032033998519182205, 0.017091000452637672, -0.20237000286579132, 0.15264999866485596, 0.030155999585986137, -0.47464999556541443, 0.06527400016784668, 0.2426699995994568, -0.06742999702692032, 0.007569099776446819, -0.08631200343370438, -0.09858699887990952, 0.2020999938249588, 0.3417600095272064, -0.8214899897575378, -0.0886790007352829, 0.288239985704422, -0.4498800039291382, 0.045093998312950134, -0.32517001032829285, -0.23451000452041626, -0.12197999656200409, -0.4588199853897095, 0.07093200087547302, 0.4067099988460541, 
-0.09077399969100952, -0.2287600040435791, 0.17478999495506287, -0.42792001366615295, 0.22668999433517456, -0.5365399718284607, 0.04271300137042999, 0.5348399877548218, -0.3284200131893158, 0.49439001083374023, -0.24079999327659607, 0.05732399970293045, 0.3280400037765503, 0.296970009803772, -0.2556400001049042, -0.2702299952507019, -0.29982998967170715, 0.28262999653816223, -0.11069999635219574, -0.11896999925374985, -0.3296099901199341, 0.7940800189971924, -0.029241999611258507, 0.12883000075817108, -0.12699000537395477, -0.7144700288772583, 0.38086000084877014, 0.25306999683380127, -1.0270999670028687, -0.3634699881076813, -0.0814879983663559, -0.10642000287771225, 0.2871200144290924, 0.3066299855709076, 0.15023000538349152, 0.34095999598503113, 0.46538999676704407, -0.080485999584198, 0.3542099893093109, -0.5493199825286865, 0.3091700077056885, -0.46167001128196716, -0.16345000267028809, -0.17971999943256378, 0.5355799794197083, -0.25578001141548157, -0.05750399827957153, -0.41815999150276184, 0.19833000004291534, -0.5280399918556213, 0.7720199823379517, 0.2524600028991699, -0.10593999922275543, 0.7081199884414673, -0.27845001220703125, -0.24668000638484955, -0.11406999826431274, -0.6192200183868408, 0.3517099916934967, -0.036277998238801956, 0.12043999880552292, -0.09424100071191788, -0.12472999840974808, -0.5705199837684631, 0.5887899994850159, -0.40542998909950256, -0.19181999564170837, -0.20880000293254852, 0.484279990196228, 0.8394100069999695, -0.17217999696731567, 0.2338400036096573, -0.015232999809086323, -0.6682500243186951, 0.2048099935054779, -0.053339000791311264, 0.8228899836540222, 0.25369998812675476, -0.20492999255657196, -0.14270000159740448, 0.1743299961090088, -0.13760000467300415, 0.0684719979763031, -0.056738998740911484, 0.22250999510288239, 0.11855000257492065, 0.20871999859809875, 0.7244700193405151, 0.5686600208282471, 0.5313400030136108, 0.7139599919319153, 0.08396100252866745, 0.31411001086235046, -0.0630899965763092, -0.2621000111103058, 0.2898699939250946, -0.54830002784729, 0.07806000113487244, -0.1523600071668625, 0.10120999813079834, -0.3522000014781952, -0.0689840018749237, -0.08373299986124039, 0.18637000024318695, 0.1941699981689453, 0.19247999787330627, 0.5887899994850159, 0.10121999680995941, 0.05160199850797653, 0.24568000435829163, -0.04686100035905838, -0.271369993686676, -0.5144500136375427, 0.23962999880313873, -0.47165000438690186, -0.10057000070810318, 0.420199990272522, 0.23703999817371368, -0.33719998598098755, -0.2946400046348572, -0.4918000102043152, -0.0617620013654232, 0.19850000739097595, -0.00042878001113422215, -0.1155100017786026, 0.385809987783432, 0.0549050010740757, 0.2812100052833557, 0.01561800017952919, -0.35133999586105347, 0.4946799874305725, 0.3972100019454956, -0.13016000390052795, -0.8359699845314026, -0.43446001410484314, 0.07370500266551971, 0.2067900002002716, -0.2969900071620941, 0.005889399908483028, 0.12640999257564545, -0.662850022315979, 0.29826998710632324, -0.36388999223709106, -0.1694599986076355, -0.27682000398635864, 0.6686099767684937, -0.09361600130796432, -0.3579300045967102, 0.6222400069236755, 0.2773900032043457, -0.22356000542640686, -0.32106998562812805, -0.42849001288414, -0.4232900142669678, -0.3747999966144562, -0.5849699974060059, 0.4307500123977661, -0.6479700207710266, -0.17297999560832977, 0.2517699897289276, 0.37751999497413635, 0.24048000574111938, -0.15151000022888184, -0.013988999649882317, 0.5276100039482117, 0.06848099827766418, 0.31850001215934753, -0.05014299973845482, 
-0.07982199639081955, 0.48568999767303467, 0.37553998827934265, 0.22770999372005463, -0.061218999326229095, -0.23759999871253967, 0.09156099706888199, 0.24688999354839325], u'ripe': [-0.37779000401496887, -0.0417959988117218, 0.6163399815559387, 0.05130000039935112, 0.7323200106620789, -0.20241999626159668, 0.07568900287151337, 0.10789000242948532, 0.4814999997615814, -0.29183000326156616, 0.7526199817657471, 0.1545500010251999, 0.14274999499320984, 0.40062999725341797, 0.19627000391483307, -0.2779499888420105, -0.45892998576164246, -0.4898799955844879, -0.12077999860048294, 0.42849001288414, -0.47154000401496887, 0.6864699721336365, -0.058793000876903534, -0.2798300087451935, -0.8570299744606018, -0.33893001079559326, 0.07214199751615524, -0.05707700178027153, -0.3428199887275696, -0.22018000483512878, -0.7053200006484985, 0.24820999801158905, -0.7208600044250488, 0.1234700009226799, -0.3969399929046631, 0.7214400172233582, -0.6776800155639648, 0.2063400000333786, 0.2023099958896637, 0.10542000085115433, 0.9295399785041809, -0.01559200044721365, 0.309579998254776, -0.14550000429153442, -0.1062999963760376, 0.4654799997806549, -0.29451000690460205, 0.36100998520851135, -0.17493000626564026, -0.24256999790668488, -0.434689998626709, -0.23345999419689178, -0.10707999765872955, -0.2614400088787079, -0.11862000077962875, -0.6269699931144714, -0.04569299891591072, 0.07754500210285187, 0.5554400086402893, -0.2772200107574463, -0.19328999519348145, -0.5758299827575684, -0.17673000693321228, 0.11191999912261963, -0.5951099991798401, 0.04406600072979927, 0.38686999678611755, -0.5923299789428711, -0.2650200128555298, -0.2836500108242035, 0.006693399976938963, 0.2186799943447113, -0.18690000474452972, 0.2744799852371216, -0.04130600020289421, -0.30292001366615295, 0.4125699996948242, -0.18920999765396118, 0.7084900140762329, 0.32436999678611755, -0.26262998580932617, -0.221670001745224, -0.1515199989080429, 0.013913000002503395, 0.6226199865341187, 0.20029999315738678, -0.8553699851036072, 0.19088000059127808, 0.48186999559402466, 0.246629998087883, -0.0908610001206398, -0.23330999910831451, -0.26780998706817627, -0.4683000147342682, -0.6939299702644348, 0.34081000089645386, -0.22193999588489532, 0.26128000020980835, -0.2824600040912628, 0.040272001177072525, 0.19415999948978424, -0.09954199939966202, -0.016248000785708427, -0.29069000482559204, -0.9442800283432007, 0.35732999444007874, -0.4521400034427643, 0.48721998929977417, -0.1096000000834465, -0.30816999077796936, 0.14300000667572021, 0.23270000517368317, 0.44971999526023865, -0.29416000843048096, 0.4756999909877777, -0.29826998710632324, -0.21030999720096588, 0.5295600295066833, 0.8479099869728088, 0.2081100046634674, -0.34231001138687134, -0.5752099752426147, 0.24557000398635864, 0.32548999786376953, -0.5764399766921997, -0.09084399789571762, -0.046953000128269196, 1.1223000288009644, -0.12231999635696411, 0.11660999804735184, 0.11083000153303146, 0.8925099968910217, -0.4160099923610687, -0.15358999371528625, -0.5637000203132629, 0.1720000058412552, -0.453359991312027, -0.12781000137329102, 0.03889999911189079, -0.2490299940109253, 0.41620001196861267, 0.19905999302864075, -0.30612000823020935, -0.20880000293254852, 0.19122999906539917, -0.15098999440670013, -0.24616999924182892, 0.11057999730110168, 0.1950400024652481, -0.060743000358343124, -1.194200038909912, 0.076323002576828, 0.20841999351978302, 0.02419300004839897, -0.293069988489151, -0.38721001148223877, 0.2401600033044815, -0.2360599935054779, -0.45186999440193176, 
0.5415999889373779, -0.17410999536514282, 0.047724999487400055, 0.010614999569952488, -0.33379998803138733, 0.2902800142765045, -0.7196199893951416, 0.3398500084877014, 0.19099999964237213, 0.006125899963080883, 0.03640799969434738, 0.03547300025820732, 0.2457599937915802, -0.5003899931907654, -0.11807999759912491, 0.18279999494552612, -0.04270299896597862, 0.22235000133514404, 0.04336300119757652, -0.1424500048160553, 0.12971000373363495, -0.10402999818325043, -0.5586000084877014, 0.6128600239753723, -0.8687199950218201, -0.13311000168323517, -0.4999699890613556, 0.48941999673843384, 0.2144699990749359, 0.41005998849868774, 0.3478800058364868, 0.07034599781036377, 0.47843000292778015, -0.20220999419689178, -0.29844000935554504, 0.30827000737190247, -0.3992699980735779, 0.005983499810099602, 0.2672399878501892, -0.14386999607086182, 0.19402000308036804, -0.002934900112450123, -0.6762199997901917, 0.03379499912261963, -0.2665500044822693, -0.0017312000272795558, -0.19391000270843506, -0.049251001328229904, 0.5270000100135803, 0.5687800049781799, -0.32627999782562256, 0.2565700113773346, -0.4787200093269348, -0.3141700029373169, 0.4372900128364563, 0.28009000420570374, -0.280349999666214, 0.6156100034713745, -0.059686001390218735, 0.14361000061035156, 0.021028000861406326, 0.1677200049161911, -0.3811799883842468, -0.2930000126361847, 0.07124000042676926, -1.0139000415802002, 0.23082000017166138, 0.21665999293327332, -0.46164000034332275, -0.23027999699115753, 0.4222699999809265, 0.29846999049186707, 0.3220300078392029, 0.5563200116157532, 0.04975999891757965, 0.07373200356960297, 0.39581000804901123, -0.03550000116229057, -0.3536899983882904, -0.25303998589515686, -0.1646600067615509, -0.04814299941062927, -0.026856999844312668, 0.1035199984908104, -0.18959000706672668, -0.606440007686615, 0.38394999504089355, 0.3212200105190277, 0.336760014295578, 0.14063000679016113, -0.7091400027275085, 0.19381000101566315, 0.9063699841499329, -0.925599992275238, -0.2129800021648407, -0.028245000168681145, 0.08607400208711624, -0.3391200006008148, -0.15491999685764313, 0.012306000106036663, 0.7781299948692322, 0.40400999784469604, -0.143669992685318, 0.19025999307632446, 0.07265099883079529, 0.27008000016212463, -0.16719000041484833, -0.22578999400138855, 0.07980400323867798, 0.22623999416828156, 0.5813500285148621, -0.39930999279022217, 0.516979992389679, 0.1429399996995926, 0.39127999544143677, 0.1471399962902069, 0.17924000322818756, -0.5397400259971619, 0.24292999505996704, -0.05299599841237068, -0.7781500220298767, -0.0298870000988245, -0.19749000668525696, -0.14573000371456146, -0.1704699993133545, 0.11593999713659286, -0.08704700320959091, 0.12647999823093414, 0.1538199931383133, 0.30184999108314514, 0.28282999992370605, -0.05808499827980995, 0.0453450009226799, 0.7099499702453613, -0.1661600023508072, 0.10741999745368958, 0.049539998173713684, -0.598770022392273, -0.07930999994277954, 0.11691000312566757, 0.05235299840569496], u'mossy': [-0.8922200202941895, 0.002853000070899725, 0.0018988000229001045, 0.1407099962234497, -0.25784000754356384, -0.1055700033903122, 0.4760200083255768, -0.22134000062942505, 0.18104000389575958, 0.9843400120735168, -0.11620999872684479, 0.03847600147128105, -0.11789000034332275, -0.4375300109386444, -0.30461999773979187, 0.19899000227451324, -0.9419599771499634, 0.08454100042581558, 0.030076999217271805, 0.50968998670578, -0.07696299999952316, -0.12349999696016312, -0.39079999923706055, -0.2840299904346466, -0.8856199979782104, -0.17759999632835388, 
-0.21634000539779663, -0.4904100000858307, -0.13779999315738678, 0.775950014591217, 0.07889600098133087, -0.10947000235319138, 0.09368500113487244, 0.2801699936389923, 0.49717000126838684, -0.16769999265670776, 0.04872699826955795, -0.4020000100135803, -0.05226600170135498, 0.014507000334560871, -0.46206000447273254, 0.2944500148296356, 0.058837998658418655, -1.1172000169754028, 0.33232998847961426, 0.2593199908733368, 0.2581000030040741, -0.28057000041007996, -0.17378999292850494, -0.2438499927520752, -0.6560999751091003, 0.37297001481056213, -0.3343299925327301, 0.4772000014781952, -0.3531999886035919, -0.2596699893474579, 0.12735000252723694, -0.3702299892902374, 0.21496999263763428, 0.08182699978351593, 0.20002000033855438, -0.4181100130081177, 0.08449199795722961, 0.10221999883651733, 0.10374999791383743, -0.03503900021314621, 0.0082566998898983, 0.3542900085449219, -0.16350999474525452, 0.2824400067329407, -0.5090100169181824, 0.481440007686615, -0.8422300219535828, 0.020385000854730606, -0.3714100122451782, 0.8107100129127502, -0.227960005402565, -0.06003599986433983, -0.376800000667572, 0.15117000043392181, -0.4457100033760071, -0.3536899983882904, -0.05261300131678581, -0.2639000117778778, -0.03779900074005127, 0.06334400177001953, 0.2020999938249588, 0.2662599980831146, -0.10211999714374542, 0.3024600148200989, -0.02147500030696392, -0.48833999037742615, 0.5265200138092041, 0.4441800117492676, -0.22178000211715698, 0.016103999689221382, 0.2926200032234192, 0.03252999857068062, 0.24129000306129456, 0.19780999422073364, 0.19489000737667084, 0.2397499978542328, -0.2879300117492676, 0.43470999598503113, -0.19598999619483948, -0.04196299985051155, 0.3286600112915039, 0.021345000714063644, -0.09303899854421616, -0.029486000537872314, -0.3800300061702728, -0.40178999304771423, 0.6659799814224243, 0.13064000010490417, -0.006889999844133854, -0.5733399987220764, -0.09321899712085724, 0.1033099964261055, -0.054666999727487564, 0.9993199706077576, -0.05313099920749664, -0.03660700097680092, 0.3119699954986572, 0.5438100099563599, 0.25609999895095825, -0.5148000121116638, -0.09289500117301941, -0.3596299886703491, 0.504830002784729, -0.6882200241088867, -0.12050999701023102, 0.20855000615119934, -0.13008999824523926, 0.32780998945236206, 0.3575200140476227, -0.29861998558044434, 0.27632999420166016, 0.46094000339508057, 0.017544999718666077, -0.5461900234222412, 0.7201399803161621, 0.16872000694274902, 0.12268999963998795, -0.11635000258684158, 0.18287000060081482, -0.6614099740982056, -0.7537500262260437, -0.5536999702453613, -0.06605599820613861, -0.4447399973869324, -0.7752799987792969, 0.6306999921798706, -0.42322999238967896, 0.0068061999045312405, -0.08042799681425095, 0.2812899947166443, 0.3087399899959564, 0.0019785999320447445, -0.19120000302791595, 0.4076800048351288, -0.5534800291061401, -0.23533999919891357, 0.31165000796318054, 0.17398999631404877, -0.22912000119686127, 0.638189971446991, 0.3414100110530853, -0.12049999833106995, -0.403439998626709, -0.09234999865293503, 0.619949996471405, 0.8109999895095825, 0.4264099895954132, -0.17323000729084015, -0.3871699869632721, -0.09821700304746628, 0.17691999673843384, 0.3393299877643585, 0.13947999477386475, -0.4697900116443634, -0.2876099944114685, -0.5458700060844421, 0.46581000089645386, -0.16814999282360077, -0.15425999462604523, -0.12430000305175781, 0.4825800061225891, 0.1347000002861023, 0.5105400085449219, -0.029464000836014748, 0.725409984588623, 0.06659000366926193, -0.07009900361299515, -0.5819500088691711, 
0.3067399859428406, 0.05098399892449379, 0.24376000463962555, -0.22925999760627747, -0.11935999989509583, -0.07365400344133377, -0.09195800125598907, -0.3805199861526489, 0.7192999720573425, -0.40397000312805176, -0.11141999810934067, -0.4943099915981293, -0.2192700058221817, 0.7585999965667725, -0.7916200160980225, -0.4466800093650818, 0.11553999781608582, -0.11392000317573547, 0.029241999611258507, -0.4344800114631653, -0.5509799718856812, 0.30994001030921936, 0.6956200003623962, 0.0016209000023081899, -0.004556300118565559, -0.31029000878334045, 0.0017227999633178115, -0.4185999929904938, -0.16259999573230743, -0.42135998606681824, 0.32580000162124634, 0.016471000388264656, -0.13923999667167664, 0.6896899938583374, -0.3456900119781494, 0.21825000643730164, -0.2568100094795227, 0.4609000086784363, -0.12213999778032303, -0.1361899971961975, 0.03121500089764595, 0.2060299962759018, 0.005435400176793337, -0.47690001130104065, 0.3743300139904022, -0.23270000517368317, 0.5138400197029114, 0.05092500150203705, -0.22237999737262726, -0.1870799958705902, -0.22821000218391418, 0.13549000024795532, -0.24942000210285187, -0.38863998651504517, 0.02728700079023838, -0.01896500028669834, -0.062109000980854034, -0.5260000228881836, 0.0349389985203743, -0.465039998292923, 0.12385000288486481, 0.09495799988508224, 0.17419999837875366, 0.4258500039577484, -0.14969000220298767, -0.7078800201416016, 0.189410001039505, 0.21544000506401062, -0.11056999862194061, -0.401529997587204, -0.008231500163674355, 0.15970000624656677, 0.03557800129055977, -0.3147299885749817, 0.06045600026845932, 0.3775700032711029, -0.4736100137233734, -0.02710600011050701, -0.4860999882221222, 0.14444999396800995, -0.3967199921607971, 0.27781999111175537, 1.051900029182434, 0.5049899816513062, -0.33562999963760376, 0.03918299823999405, -0.39375001192092896, 0.325300008058548, -0.24979999661445618, 0.2776400148868561, -0.5475299954414368, 0.08759099990129471, 0.3273099958896637, -0.5456500053405762, 0.08798299729824066, -0.3517799973487854, -0.48089998960494995, 0.013694999739527702, -0.030073000118136406, 0.060756999999284744, 0.16473999619483948, 0.6279900074005127, -0.25911998748779297, 0.7101500034332275, -0.16474999487400055, 0.05113000050187111], u'modern': [0.13252000510692596, -0.3820199966430664, 0.054878998547792435, -0.48726001381874084, 0.2947399914264679, 0.15839999914169312, -0.06145099923014641, 0.05080299824476242, 0.03597300127148628, -1.628100037574768, 0.5833399891853333, 0.360370010137558, 0.38269999623298645, 0.7184799909591675, 0.7989599704742432, -0.6327999830245972, 0.0792199969291687, 0.1556600034236908, 0.07697699964046478, -0.14309999346733093, -0.22768999636173248, 0.3457300066947937, 0.24919000267982483, 1.001099944114685, 0.27807000279426575, 0.06501299887895584, 0.16412000358104706, -0.10890000313520432, -0.04735900089144707, 0.5763700008392334, 0.24578000605106354, 0.637440025806427, -0.6235899925231934, 0.8143100142478943, 0.017487000674009323, 0.5067700147628784, 0.4702399969100952, 0.025337999686598778, 0.007697599940001965, 0.13048000633716583, 0.1909399926662445, 0.00047461999929510057, 0.02802800014615059, -0.217739999294281, 0.6809200048446655, 0.31345999240875244, 0.2750900089740753, 0.25659000873565674, -0.08561000227928162, 0.4060100018978119, -0.0032313999254256487, -0.5059999823570251, -0.04957599937915802, 0.044488001614809036, 0.26385000348091125, -0.10023000091314316, -0.2113099992275238, -0.22197000682353973, -0.11159999668598175, 0.20372000336647034, -0.1253499984741211, 
-0.05216600000858307, 0.5880500078201294, -0.40116000175476074, -0.36243000626564026, 0.579200029373169, 0.07197099924087524, 0.6019600033760071, -0.005964499898254871, 0.1814900040626526, -0.0873199999332428, -0.35148999094963074, -0.061785999685525894, 0.1050100028514862, -0.3666599988937378, 0.347790002822876, -0.5250700116157532, 0.3711099922657013, 0.18121999502182007, -0.06492800265550613, -0.17709000408649445, -0.1182899996638298, -0.47543999552726746, -0.3134700059890747, 0.39252999424934387, -0.04524800181388855, 0.07625000178813934, 0.304720014333725, 0.21768000721931458, 0.35295000672340393, 0.34747999906539917, 0.17338000237941742, 0.0839029997587204, 0.5495100021362305, 0.2790200114250183, -0.1953199952840805, -0.007938000373542309, -0.11230000108480453, 0.2674799859523773, -0.4073899984359741, -0.12084999680519104, 0.5450800061225891, 0.5372099876403809, -0.12233000248670578, -0.4165300130844116, 0.4112800061702728, 0.05913100019097328, -0.0331140011548996, -0.29513001441955566, 0.7320600152015686, 0.2074500024318695, -0.2319599986076355, -0.4523699879646301, -0.13569000363349915, 0.0327330008149147, -0.3353300094604492, -0.18322999775409698, 0.2541700005531311, -0.34286999702453613, -0.46219998598098755, -0.0035677000414580107, 0.31185001134872437, -0.2413100004196167, -0.14945000410079956, -0.26559001207351685, 0.35137999057769775, -0.32592999935150146, 0.5407299995422363, -0.14142000675201416, 0.1286199986934662, 0.13327999413013458, -0.18852999806404114, 0.5399199724197388, 0.05596400052309036, -0.15360000729560852, -0.3643600046634674, 0.014259000308811665, -0.0692100003361702, -0.41065001487731934, 0.38416001200675964, 0.026667000725865364, -0.5091099739074707, 0.12182000279426575, -0.037338998168706894, 0.5055299997329712, -0.20531000196933746, 0.334960013628006, 0.3096500039100647, -0.6407099962234497, 0.6131600141525269, 0.1331000030040741, -0.6039199829101562, 0.08725699782371521, -0.27678000926971436, 0.24244000017642975, 0.3626900017261505, -0.6169099807739258, 0.17448000609874725, 0.2899099886417389, -0.4402500092983246, 0.5961099863052368, 0.4634599983692169, -0.2535800039768219, -0.45155999064445496, -0.33689001202583313, 0.20499999821186066, 0.2976599931716919, 0.12160000205039978, 0.15017999708652496, -0.3887900114059448, 0.07288999855518341, -0.07212399691343307, -0.40000998973846436, 0.4948199987411499, 0.2076600044965744, -0.1165200024843216, 0.007473600097000599, -0.3156200051307678, -0.2430099993944168, -0.37255001068115234, 0.010638999752700329, 0.1785999983549118, 0.13508999347686768, 0.3645699918270111, 0.3437100052833557, 0.04601399973034859, -0.2257400006055832, 0.7379000186920166, 0.15870000422000885, -0.1566700041294098, 0.19523000717163086, 0.11561000347137451, 0.2212900072336197, -0.09227900207042694, -0.47808000445365906, 0.22130000591278076, -0.658270001411438, -0.19288000464439392, 0.04915900155901909, 0.4531700015068054, 0.7912499904632568, -0.35339999198913574, 0.19805000722408295, -0.3999600112438202, 0.07036200165748596, -0.4626699984073639, -0.030943000689148903, 0.2989400029182434, -0.2837800085544586, 0.2625400125980377, -0.12234000116586685, 0.3665800094604492, 0.2308100014925003, 0.02712099999189377, 0.1488800048828125, 0.010885000228881836, -0.5199800133705139, -0.15103000402450562, -0.20960000157356262, -0.37977999448776245, -0.30546998977661133, 0.5326600074768066, -0.13312000036239624, -0.22680999338626862, 0.10328000038862228, -0.655780017375946, -0.018122000619769096, -0.14208999276161194, 0.22075000405311584, 
-0.08541599661111832, 0.15056000649929047, 0.1214200034737587, -0.4458500146865845, 0.47589001059532166, 0.04131900146603584, 0.2932499945163727, -0.42333999276161194, -0.20395000278949738, -0.4083000123500824, 0.272350013256073, -0.014689000323414803, 0.08156400173902512, 0.6524699926376343, 0.643339991569519, -0.7841600179672241, 0.09074699878692627, -0.6505799889564514, 0.14643999934196472, -0.2633199989795685, 0.2835800051689148, -0.0643249973654747, -0.13380999863147736, -0.06481800228357315, 0.4794999957084656, 0.2241699993610382, 0.08106499910354614, -0.018908999860286713, -0.05789300054311752, 0.09888900071382523, -0.33610999584198, -0.20297999680042267, -0.43992000818252563, 0.2619599997997284, -0.2943800091743469, -0.07560399919748306, -0.017392000183463097, -0.4627799987792969, -0.12190999835729599, 0.7332500219345093, 0.1261499971151352, 0.3720000088214874, 0.3434999883174896, 0.3157599866390228, -0.25387999415397644, -0.16912999749183655, 0.27171000838279724, -1.5390000343322754, 0.1457899957895279, 0.19935999810695648, -0.39711999893188477, -0.0444520004093647, -0.39882001280784607, 0.2916499972343445, 0.03629099950194359, 0.48263999819755554, -0.04026300087571144, -0.40022000670433044, 0.10154999792575836, -0.01866699941456318, -0.048927001655101776, -0.004292599856853485, 0.01040400005877018, -0.1805499941110611, 0.12072999775409698, 0.16610999405384064, 0.24309000372886658, 0.05232800170779228, -0.2867799997329712, 0.28696998953819275, 0.38065001368522644], u'raw': [0.2100200057029724, 0.777209997177124, 0.8061699867248535, -0.2309899926185608, -0.36559998989105225, -0.03929400071501732, 0.03553999960422516, -0.14519000053405762, 0.10249000042676926, -1.1324000358581543, 0.2228900045156479, -1.0688999891281128, -0.10328999906778336, 0.3265799880027771, -0.2696700096130371, -0.8722900152206421, -0.40981999039649963, 0.292059987783432, -0.07105699926614761, 0.4623500108718872, -0.4281400144100189, -0.20782999694347382, 0.10292000323534012, -0.15230000019073486, -0.15547999739646912, 0.04537099972367287, 0.12723000347614288, 0.05892600119113922, -0.15919999778270721, 0.3417600095272064, -0.5763400197029114, 0.39958998560905457, -0.708299994468689, 0.24769000709056854, -0.5502200126647949, 0.37509000301361084, 0.03617599979043007, -0.19891999661922455, 0.04375100135803223, -0.4540199935436249, -0.45732998847961426, -0.31735000014305115, 0.4387800097465515, -0.34797000885009766, 0.10736999660730362, 0.10301999747753143, -0.38694000244140625, -0.0639989972114563, 0.07831200212240219, 0.2873300015926361, 0.6743699908256531, 0.6917499899864197, -0.3294000029563904, -0.2907699942588806, -0.07475200295448303, 0.0026978999376296997, 0.3039900064468384, 0.20130999386310577, 0.03578399866819382, -0.31529998779296875, -0.21866999566555023, -0.06143999844789505, -0.1820800006389618, -0.35095998644828796, -0.2665500044822693, -0.18592999875545502, -0.16947999596595764, -0.02718299999833107, 0.3967599868774414, 0.7773000001907349, -0.17520999908447266, 0.611299991607666, -0.4792500138282776, 0.2782300114631653, -0.02786099910736084, 0.1556600034236908, 0.11006999760866165, 0.05732100084424019, -0.4519500136375427, 0.4492500126361847, 0.11494000256061554, -0.5679799914360046, 0.1125200018286705, -0.3287599980831146, -0.024737000465393066, -0.4367299973964691, -0.2480199933052063, -0.0740320011973381, -0.0683170035481453, 0.08447100222110748, 0.09764199703931808, 0.26895999908447266, 0.22397999465465546, -0.1517300009727478, -0.20667999982833862, -0.22869999706745148, -0.5229600071907043, 
-0.46213001012802124, -0.15334999561309814, -0.0885310024023056, -0.2785699963569641, -0.4565100073814392, 0.2611199915409088, -0.447270005941391, -0.27557000517845154, -0.32065001130104065, 0.08236400038003922, 0.1252399981021881, -0.2300799936056137, 0.22754999995231628, 0.8993899822235107, 0.06962399929761887, -0.12301000207662582, -0.44374001026153564, 0.17324000597000122, 0.5067600011825562, 0.7101699709892273, 0.7550299763679504, 0.20438000559806824, -0.45419999957084656, -0.6032199859619141, 0.015845000743865967, -0.22454999387264252, 0.6440600156784058, 0.18694999814033508, 0.31643998622894287, -0.4110899865627289, -0.23124000430107117, -0.15578000247478485, -0.0702660009264946, -0.07556399703025818, 0.6619099974632263, 0.3709700107574463, 0.0641229972243309, -0.39493998885154724, -0.10407999902963638, -0.24416999518871307, 0.2634199857711792, -0.06408499926328659, 0.27393999695777893, 0.5037000179290771, 0.1683499962091446, 0.2984600067138672, -0.37362000346183777, -0.045382000505924225, 0.36500999331474304, 0.3135800063610077, -0.06318999826908112, -0.1190200001001358, -0.10911999642848969, 0.13562999665737152, 0.2659800052642822, -0.0894709974527359, -0.003094600047916174, 0.27213001251220703, 0.05240299925208092, -0.07497800141572952, 0.22678999602794647, 0.12511000037193298, -0.8220599889755249, -0.03414199873805046, -0.3289799988269806, -0.040171001106500626, 0.14334000647068024, 0.374099999666214, -0.13322000205516815, -0.17718000710010529, -0.5971599817276001, -0.4845399856567383, 0.3137499988079071, 0.03727300092577934, -0.05269800126552582, -0.46884000301361084, -0.15530000627040863, 0.09230999648571014, 0.18321000039577484, 0.3905400037765503, 0.2568199932575226, 0.019317999482154846, -0.38613998889923096, 0.060913000255823135, 0.1238899976015091, -0.5660099983215332, -0.7198600172996521, -0.06829400360584259, -0.14074000716209412, 1.2008999586105347, 0.44086000323295593, 0.6536999940872192, 0.04788200184702873, 0.03772899881005287, 1.1258000135421753, -0.1698099970817566, -0.08836200088262558, 0.31400999426841736, 0.39739999175071716, -0.6829000115394592, -0.5135599970817566, -0.29102998971939087, 0.4645400047302246, 0.48083001375198364, -0.46792998909950256, 0.38837000727653503, 0.3265500068664551, -0.3257800042629242, 0.6363499760627747, 0.3931800127029419, 0.8820099830627441, -0.5075200200080872, -0.16418999433517456, 0.0457179993391037, -0.19790999591350555, 0.030702000483870506, 0.04870999976992607, -0.11575999855995178, 0.1694599986076355, 0.6875699758529663, -0.208529993891716, -0.10649000108242035, 0.35179999470710754, -0.40946000814437866, -0.03332100063562393, 0.030400000512599945, -0.5186100006103516, -0.4565599858760834, 0.16006000339984894, -0.03645800054073334, 0.027417000383138657, -0.07037899643182755, -0.1534299999475479, 0.07051499933004379, -0.1530199944972992, -0.14499999582767487, -0.26249000430107117, 0.33577001094818115, 0.4744400084018707, 0.4148400127887726, -0.3735100030899048, -0.7559099793434143, 0.09375400096178055, -0.4543899893760681, 0.25255000591278076, -0.12274999916553497, 0.08462899923324585, -0.7339299917221069, 0.1459999978542328, -0.019682999700307846, -0.28101998567581177, -0.2239599972963333, -1.1061999797821045, 0.669439971446991, -0.10984999686479568, -0.4327000081539154, 0.060724999755620956, -0.2811500132083893, 0.12343999743461609, -0.020329000428318977, 0.044541001319885254, -0.4029900133609772, 0.7916499972343445, -0.06800799816846848, -0.059331998229026794, -0.6583700180053711, -0.2503499984741211, 0.18557000160217285, 
0.546459972858429, -0.25363999605178833, -0.1691499948501587, -0.37623998522758484, -0.42572999000549316, -0.423770010471344, 0.09001900255680084, 0.04984600096940994, 0.058706000447273254, -0.8695899844169617, 0.502839982509613, -1.062000036239624, 0.11838000267744064, -0.22838999330997467, -0.3568600118160248, -0.5143399834632874, -0.27772000432014465, -0.011996000073850155, 0.8253600001335144, 0.38798001408576965, -0.06571999937295914, -0.04089599847793579, -0.3253600001335144, -0.10401999950408936, -0.03536299988627434, -0.14169000089168549, 0.32600998878479004, 0.6867200136184692, 0.07794400304555893, 0.2961300015449524, 0.4802600145339966, 0.3548699915409088, -0.2410999983549118, -0.09715200215578079, -0.37692999839782715], u'lightweight': [0.35916998982429504, 0.05203999951481819, 0.5814899802207947, -0.7731900215148926, -0.011033999733626842, 0.3783099949359894, 0.274399995803833, 0.6372900009155273, 0.0324460007250309, -0.6797699928283691, 0.7037000060081482, -0.04625599831342697, -0.41350001096725464, 0.12439999729394913, -0.4672200083732605, -0.6305699944496155, -0.24247999489307404, -0.019638000056147575, 0.08132000267505646, -0.027739999815821648, 0.41124001145362854, 0.07493100315332413, -0.119159996509552, 0.27546998858451843, 0.08372599631547928, -0.6476699709892273, 0.6217399835586548, 0.29271000623703003, 0.04679499939084053, 0.34303998947143555, -0.04347199946641922, 0.4888400137424469, 0.085316002368927, -0.031585000455379486, -0.2810400128364563, 0.4012500047683716, 0.17159999907016754, -0.23989999294281006, 0.5496600270271301, 1.1411999464035034, -0.3239800035953522, 0.13490000367164612, 0.364439994096756, 0.04641599953174591, -0.031615000218153, -0.026417000219225883, -0.23107999563217163, -0.6031200289726257, 0.31560999155044556, 0.14857999980449677, -0.10514000058174133, -0.007356300018727779, -0.04444799944758415, -0.12637999653816223, -0.3312000036239624, -0.1881600022315979, -0.04394900053739548, -0.5758200287818909, -0.023600000888109207, -0.012446999549865723, 0.11800999939441681, 0.3157300055027008, -0.6067399978637695, 0.2327899932861328, -0.153889998793602, 0.14653000235557556, -0.7190700173377991, 0.6682900190353394, 0.5552300214767456, -0.24269999563694, -0.5492600202560425, 0.7758399844169617, -0.07019700109958649, -0.6886399984359741, -0.08205699920654297, 0.5329300165176392, -0.1641799956560135, -0.3800700008869171, 0.13804000616073608, 0.535040020942688, 0.24028000235557556, 0.5374400019645691, -0.022352000698447227, -0.3827100098133087, -0.9018399715423584, 0.07497099786996841, 0.19056999683380127, 0.3067399859428406, 0.45688000321388245, 0.3120200037956238, 0.6508600115776062, 0.14792999625205994, 0.07024499773979187, -0.5602399706840515, 0.16322000324726105, -0.31887999176979065, -0.4889500141143799, 0.34450000524520874, -0.1766899973154068, -0.38370999693870544, -0.10862000286579132, -0.20261000096797943, 0.09072499722242355, -0.0037446001078933477, 0.18231000006198883, -0.22811000049114227, -0.13145999610424042, -0.1180500015616417, -0.7899199724197388, -0.6004700064659119, -0.31909000873565674, 0.24023999273777008, 0.4838399887084961, -0.24095000326633453, -0.3399600088596344, 0.6844599843025208, -0.09142400324344635, 0.5845900177955627, 0.09865500032901764, 0.2323099970817566, 0.16057999432086945, 0.5242499709129333, 0.38457000255584717, -0.39629998803138733, -0.3460099995136261, 0.1896599978208542, 0.08134599775075912, 0.949429988861084, 0.17750999331474304, 0.10091999918222427, -0.4355199933052063, -0.1132500022649765, 0.4833100140094757, 
[raw word-embedding data: this stretch of the diff is the body of a Python-style dict literal mapping words to long float vectors, with entries u'creased', u'curved', u'huge', u'tight', u'crinkled', u'wilted', u'dented', u'crushed', u'tall', u'short', u'shiny', each several hundred float components. The numeric payload is omitted here; the span opens partway through the vector of a preceding, unnamed entry and breaks off partway through the vector for u'shiny'.]
1.3652000427246094, 0.03701400011777878, 0.05831800028681755, 0.06963100284337997, 0.6496999859809875, -0.08415599912405014, -0.38082998991012573, 0.4914399981498718, 0.14285999536514282, -0.06461600214242935, -0.5382500290870667, 0.0057160998694598675, 0.05046800151467323, 0.2542000114917755, 0.5899500250816345, -0.49974000453948975, 0.0466109998524189, 0.01498199999332428, 0.12483000010251999, -0.22532999515533447, 0.5878599882125854, 0.12679000198841095, -0.5236700177192688, -0.20468999445438385, 0.08881500363349915, 0.10141000151634216, -0.2518500089645386, -0.2424899935722351, 0.16558000445365906, -0.22804999351501465, 0.26715001463890076, -0.2520799934864044, -0.1885399967432022, -0.4092699885368347, 0.11836999654769897, -0.307669997215271, 0.2397100031375885, 0.003465099958702922, -0.6224799752235413, -0.04017699882388115, 0.23253999650478363, 0.09500999748706818, 0.20201000571250916, -0.13660000264644623, 0.13118000328540802, 0.3188999891281128, -0.01671000011265278, -0.4155600070953369, 0.19318999350070953, 0.18129999935626984, 0.26256999373435974, -0.29660001397132874, -0.6294800043106079, -0.43189001083374023, 0.5141800045967102, -0.04567699879407883, -0.4117699861526489, 0.43274998664855957, -0.5585100054740906, 0.04227700084447861, 0.07611799985170364, -0.4537000060081482, 0.07163800299167633, -0.0872109979391098, -0.30347999930381775, -0.2978599965572357, 0.1633100062608719, 0.25999000668525696, 0.13744999468326569, 0.009554600343108177, 0.029815999791026115, -0.008456399664282799, -0.390500009059906, -0.08795899897813797, -0.07837100327014923, -0.24855999648571014, 0.6240699887275696, 0.02490299940109253, 0.20559999346733093, 0.2833999991416931, 0.017194999381899834, -0.23161999881267548, 0.13738000392913818, 0.5001000165939331, 0.008027499541640282, -0.2773500084877014, 0.060554999858140945, -0.16068999469280243, -0.1339000016450882, 0.5945299863815308, -0.1653600037097931, -0.07300200313329697, -0.9456599950790405, -0.08900800347328186, 0.4730300009250641, 0.20045000314712524, -0.07699800282716751, 0.6105899810791016, -0.40511998534202576, 0.47968000173568726, -0.2221899926662445, 0.38335999846458435, -0.14817999303340912, -0.3008800148963928, 0.2608799934387207, 0.27643001079559326, -0.25582998991012573, 0.8455700278282166, -0.17505000531673431, 0.031092999503016472, 0.30404001474380493, 0.029543999582529068, 0.25407999753952026, -0.0033776999916881323], u'clear': [-0.08102300018072128, -0.2917900085449219, 0.052021000534296036, -0.1332399994134903, 0.028162000700831413, -0.003144599962979555, -0.17156000435352325, 0.06332399696111679, 0.16568000614643097, -2.1721999645233154, -0.1412699967622757, 0.08789099752902985, -0.2298000007867813, 0.06901700049638748, 0.216729998588562, 0.36555999517440796, -0.3997899889945984, -0.15505999326705933, 0.09972800314426422, 0.20200000703334808, 0.16989000141620636, 0.14806999266147614, 0.10937999933958054, -0.1714099943637848, -0.7257999777793884, -0.13188999891281128, -0.05276799947023392, -0.2638300061225891, -0.13188999891281128, -0.11407999694347382, 0.08175700157880783, 0.1477299928665161, -0.24342000484466553, 0.007636399939656258, -1.0992000102996826, 0.1366100013256073, 0.19261999428272247, -0.3001199960708618, 0.03152399882674217, 0.11439000070095062, -0.10853999853134155, 0.210889995098114, -0.03736500069499016, 0.23449000716209412, 0.05463799834251404, 0.21504999697208405, 0.02307100035250187, 0.20917999744415283, -0.08606000244617462, -0.07858899980783463, -0.26945000886917114, -0.0408019982278347, -0.042601000517606735, 
-0.12093000113964081, -0.3361400067806244, 0.25624001026153564, -0.35266000032424927, -0.1722400039434433, 0.31018000841140747, 0.6425999999046326, -0.03607200086116791, 0.155799999833107, 0.2660900056362152, 0.17297999560832977, -0.08157999813556671, 0.008563599549233913, 0.13196000456809998, -0.11875999718904495, -0.1920499950647354, -0.32203999161720276, -0.0926939994096756, -0.19273999333381653, 0.005683199968189001, 0.17193999886512756, 0.24010999500751495, 0.014739000238478184, 0.09118799865245819, 0.45903000235557556, 0.004775300156325102, -0.18136000633239746, -0.16434000432491302, 0.012617000378668308, 0.4279100000858307, 0.07531800121068954, -0.042847998440265656, -0.05595200136303902, -0.07189500331878662, 0.08680599927902222, 0.07809200137853622, 0.20169000327587128, -0.34189000725746155, -0.019750000908970833, -0.44578999280929565, -0.09325399994850159, 0.23683999478816986, 0.09807900339365005, -0.0018185999942943454, -0.13012999296188354, 0.05425199866294861, -0.6840800046920776, 0.2137800008058548, -0.0847420021891594, -0.12382999807596207, 0.3664500117301941, -0.4643400013446808, 0.5679900050163269, 0.2234099954366684, 0.31606999039649963, -0.23558999598026276, 0.03388899937272072, 0.06250900030136108, -0.3146800100803375, 0.2768400013446808, -0.13729000091552734, -0.027180999517440796, 0.17143000662326813, -0.35534998774528503, 0.1442600041627884, 0.1413699984550476, -0.27987000346183777, 0.051006998866796494, 0.1688999980688095, 0.48614001274108887, 0.4324699938297272, -0.3101400136947632, -0.2273000031709671, -0.17252999544143677, 0.5022100210189819, -0.29023000597953796, -0.16832999885082245, -0.02758600004017353, 0.25613999366760254, 0.09605100005865097, 0.19144999980926514, -0.1557600051164627, 0.507669985294342, 0.006482699885964394, -0.047304000705480576, 0.473580002784729, -0.02966500073671341, -0.09588199853897095, 0.06457400321960449, 0.12470000237226486, -0.34389999508857727, -0.5959100127220154, -0.1730699986219406, 0.3062700033187866, 0.16350999474525452, -0.21708999574184418, -0.13142000138759613, -0.029781000688672066, 0.07941199839115143, 0.36017999053001404, -0.0687209963798523, 0.367000013589859, 0.2645399868488312, 0.1306000053882599, -0.34602001309394836, 0.22326000034809113, 0.22999000549316406, 0.14122000336647034, -0.3084000051021576, 0.22238999605178833, -0.13700999319553375, 0.2453799992799759, 0.10902000218629837, 0.33083999156951904, 0.05215900018811226, -0.5481699705123901, 0.3292100131511688, 0.3388899862766266, -0.06038200110197067, -0.16610999405384064, -0.26388001441955566, 0.13997000455856323, -0.1548600047826767, -0.05012999847531319, -0.08962800353765488, -0.008095400407910347, 0.13154999911785126, -0.019734999164938927, 0.2575800120830536, 0.37509000301361084, -0.012095999903976917, -0.49246999621391296, 0.13436000049114227, -0.2107200026512146, -0.13763000071048737, 0.2404700070619583, 0.13327999413013458, -0.043418001383543015, 0.007065100129693747, 0.3049600124359131, -0.11184000223875046, 0.6801699995994568, -0.6541699767112732, -0.39197999238967896, 0.07554599642753601, -0.20430000126361847, 0.041099000722169876, 0.8458600044250488, -0.3361000120639801, -0.26385000348091125, -0.39416998624801636, -0.25468000769615173, -0.09534899890422821, 0.19946999847888947, -0.30772000551223755, -0.5384600162506104, 0.18257999420166016, -0.09137900173664093, -0.2718299925327301, 0.10918000340461731, -0.04210200160741806, -0.25613999366760254, -0.03969400003552437, 0.3498699963092804, -0.24526000022888184, -0.011982999742031097, 
-0.024230999872088432, 0.6278499960899353, -0.16640999913215637, 0.02610900066792965, 0.029095999896526337, -0.16936999559402466, 0.25328999757766724, -0.12065999954938889, 0.023087000474333763, 0.16152000427246094, -0.14057999849319458, 0.04484599828720093, 0.45329999923706055, 0.34099000692367554, -0.028432000428438187, -0.39406999945640564, -0.06892400234937668, -0.29128000140190125, -0.012954000383615494, 0.04817600175738335, -0.09045500308275223, -0.0098770996555686, -0.022352000698447227, 0.09153500199317932, -0.08467300236225128, -0.4395500123500824, -0.2523699998855591, 0.7971900105476379, 0.21525999903678894, 0.001963400049135089, -0.10022000223398209, -0.07566899806261063, -0.25113001465797424, -0.12675000727176666, 0.12178999930620193, 0.25892001390457153, 0.026660999283194542, -0.38418999314308167, -0.1856600046157837, -0.15324999392032623, 0.4448400139808655, -0.08881500363349915, 0.1011900007724762, 0.0060883997939527035, 0.2930000126361847, -0.41499999165534973, 0.26712000370025635, 0.03368299826979637, -0.4231700003147125, 0.2202499955892563, -0.027350999414920807, 0.40922999382019043, -0.013338999822735786, -0.2954300045967102, 0.3769899904727936, -0.019656000658869743, -0.08289600163698196, -1.5197999477386475, 0.296099990606308, 0.8126299977302551, -0.18198999762535095, 0.5908200144767761, 0.007938000373542309, 0.23090000450611115, 0.23573000729084015, 0.2494100034236908, -0.1875399947166443, -0.04029000177979469, 0.17258000373840332, 0.19480000436306, 0.13099999725818634, -0.2155199944972992, 0.01635199971497059, 0.6225600242614746, 0.4128299951553345, 0.40386998653411865, -0.06291099637746811, -0.09315899759531021, -0.07813700288534164, -0.30083000659942627, -0.035913001745939255], u'splintered': [0.02184399962425232, -0.5934799909591675, 0.09295199811458588, 0.16787000000476837, 0.7092999815940857, 0.28714999556541443, -0.052685000002384186, 0.15379999577999115, 0.644819974899292, -0.27564001083374023, -0.031346000730991364, -0.06938999891281128, -0.40206998586654663, -0.5226200222969055, 0.034170001745224, -0.09263800084590912, -0.032019998878240585, 0.37408000230789185, 0.2178799957036972, -0.10051999986171722, 0.3564099967479706, 0.6556699872016907, 0.26438000798225403, 0.12312000244855881, -0.2065100073814392, -0.4352099895477295, -0.0680989995598793, -0.11663000285625458, -0.3876599967479706, 0.6505600214004517, 0.04149100184440613, -0.23799000680446625, 0.2145799994468689, 0.012338999658823013, 0.06899400055408478, 0.048907000571489334, 0.45715999603271484, -0.046824000775814056, -0.09761200100183487, 0.17861999571323395, 0.4721300005912781, 0.06840799748897552, 0.07137200236320496, -0.5817700028419495, 0.19673000276088715, 0.23840999603271484, -0.45579999685287476, -0.5134199857711792, -0.20764000713825226, -0.07761000096797943, -0.04556100070476532, -0.07515399903059006, -0.2140199989080429, -0.4353100061416626, 0.4500100016593933, -0.26864001154899597, -0.4828000068664551, -0.017343999817967415, -0.2390500009059906, 0.415149986743927, -0.027615999802947044, 0.18925000727176666, -0.4708099961280823, 0.43233999609947205, -0.1823199987411499, -0.11977999657392502, -0.18456999957561493, 0.5418199896812439, 0.1400499939918518, 0.05395499989390373, -0.16571000218391418, -0.1262899935245514, -0.7482500076293945, 0.12547999620437622, 0.7115300297737122, 0.04285300150513649, 0.031293001025915146, -0.9803400039672852, -0.2787899971008301, -0.03919500112533569, -0.44402000308036804, -0.12392999976873398, -0.048312000930309296, -0.025182999670505524, 
-0.42570000886917114, -0.14875000715255737, 0.08631899952888489, -0.0651869997382164, 0.2786700129508972, 0.596809983253479, 0.2687000036239624, 0.054274000227451324, 0.3396399915218353, -0.4175400137901306, 0.12536999583244324, -0.3784399926662445, 0.04493600130081177, -0.542389988899231, -0.049910999834537506, 0.6714900135993958, 0.031387001276016235, 0.44078999757766724, 0.4074999988079071, -0.2890700101852417, 0.34964999556541443, 0.18693000078201294, -0.12346000224351883, -0.15095999836921692, 0.07266300171613693, -0.004919900093227625, -0.5176399946212769, -0.4018400013446808, 0.09887900203466415, -0.08507099747657776, -0.28828001022338867, -0.16877999901771545, -0.4197799861431122, 0.4085400104522705, 0.24592000246047974, 0.12161999940872192, -0.0023717000149190426, -0.6291499733924866, 0.05758000165224075, 0.42228999733924866, 0.28565001487731934, 0.050560999661684036, -0.40498000383377075, -0.4147599935531616, 0.08022499829530716, 0.24128000438213348, -0.6696799993515015, 0.24344000220298767, -0.09069699794054031, -0.03921100124716759, 0.820609986782074, 0.17568999528884888, -0.057468000799417496, 0.2842499911785126, 0.10175999999046326, -0.8631200194358826, -0.48183000087738037, 0.5329399704933167, -0.4075300097465515, 0.8440799713134766, 0.09320300072431564, 0.04601399973034859, 0.1566700041294098, 0.5199699997901917, -0.013534000143408775, -0.6231399774551392, -0.23544000089168549, -0.23488999903202057, -0.47516000270843506, -0.12116000056266785, -0.33052000403404236, 0.5235900282859802, 0.9366400241851807, -0.22690999507904053, 0.01686199940741062, 0.4924300014972687, -0.09936200082302094, 0.16053999960422516, -0.29715999960899353, 0.6118199825286865, 0.3405599892139435, 0.09903399646282196, -0.22753000259399414, 0.024893000721931458, -0.10552000254392624, -0.5407199859619141, -0.45072999596595764, 0.2798500061035156, 0.4655100107192993, 0.2174600064754486, 0.30959999561309814, 0.026318000629544258, 0.3097600042819977, 0.08135999739170074, 0.23494000732898712, -0.28294000029563904, 0.26886001229286194, -0.014817999675869942, 0.048923999071121216, -0.062345001846551895, 0.19599999487400055, -0.19043999910354614, -0.008719200268387794, 0.2737399935722351, -0.2928299903869629, 0.18482999503612518, -0.45822998881340027, -0.691860020160675, -0.0702579990029335, -0.02796800062060356, 0.6266099810600281, 0.9642199873924255, -0.0934469997882843, 0.5939099788665771, -0.43244001269340515, -0.17061999440193176, -0.2187899947166443, -0.30967000126838684, 0.1782200038433075, -0.6524400115013123, -0.19307999312877655, 0.2344599962234497, 0.48559001088142395, -0.39146000146865845, -0.15147000551223755, 0.43501999974250793, 0.1724099963903427, 0.5806099772453308, 0.2533699870109558, 0.3859800100326538, 0.06715299934148788, 0.3567799925804138, 0.3499999940395355, -0.07796099781990051, 0.3324899971485138, 0.6735600233078003, 0.00021949999791104347, -0.3296099901199341, -0.25957000255584717, -0.47192999720573425, -0.22175000607967377, 0.2212499976158142, -0.0006423500017262995, -0.5164999961853027, -0.35322999954223633, 0.20959000289440155, -0.0401809997856617, -0.37003999948501587, -0.4803900122642517, 0.5178300142288208, 0.4351400136947632, -0.02911200001835823, 0.4063200056552887, -0.1408199965953827, -0.45285001397132874, -0.7658100128173828, 0.37542998790740967, -0.33709999918937683, 0.10293000191450119, 0.012512000277638435, -0.577489972114563, -0.4388499855995178, 0.05674099922180176, 0.7258399724960327, -0.27434998750686646, 0.6647300124168396, 0.08129599690437317, -0.36267000436782837, 
0.14026999473571777, -0.847599983215332, 0.6410800218582153, 0.15666000545024872, -0.1987999975681305, -0.4833100140094757, -0.12921999394893646, 0.5436199903488159, 0.2297399938106537, -0.16746999323368073, -0.3763900101184845, -0.27160999178886414, 0.23582999408245087, -0.0373929999768734, -0.7344499826431274, 0.33807000517845154, 0.1420699954032898, 0.06317800283432007, -0.3020800054073334, 0.20636999607086182, 0.0008364099776372313, -0.5401600003242493, -0.682889997959137, -0.19041000306606293, 0.11642000079154968, 0.15007999539375305, 0.23763999342918396, -0.02845500037074089, -0.05930599942803383, -0.27153000235557556, -0.43907999992370605, 0.12349999696016312, 0.21198999881744385, -0.19245000183582306, -0.11976999789476395, 0.6960200071334839, 0.7171800136566162, 0.11089999973773956, 0.27911999821662903, -0.1341399997472763, 0.3644599914550781, 0.2212499976158142, 0.07973600178956985, 0.35043999552726746, 0.37323999404907227, -0.3127399981021881, 0.04508800059556961, 0.1219400018453598], u'cored': [0.3912599980831146, -0.7712500095367432, -0.2642900049686432, -1.002500057220459, 0.02796800062060356, -0.4409500062465668, 0.16211000084877014, 0.46661001443862915, -0.5052400231361389, 0.8194800019264221, 0.8666800260543823, 0.7601400017738342, 0.6386600136756897, 0.09304100275039673, -0.7213199734687805, 0.22210000455379486, -0.1060900017619133, 0.1498199999332428, -0.5176799893379211, 0.3475799858570099, -0.05955899879336357, 0.0406779982149601, 0.2114800065755844, 1.017199993133545, 0.19255000352859497, -0.21668000519275665, 0.20489999651908875, 0.6017799973487854, 0.639549970626831, 0.1536799967288971, -0.4247100055217743, -0.3011600077152252, 0.38853999972343445, 0.06859900057315826, 0.47846999764442444, 0.3555999994277954, 0.260699987411499, 0.3096100091934204, 0.05330599844455719, -0.9982900023460388, 0.6980599761009216, -0.49077001214027405, -0.2502099871635437, -0.12789000570774078, -0.6524199843406677, 0.21490000188350677, -0.8127099871635437, 0.89410001039505, 0.3695099949836731, -0.045906998217105865, -0.1847500056028366, -0.12083999812602997, 0.09247799962759018, 0.7689499855041504, -0.12451999634504318, -1.4926999807357788, 0.2785300016403198, -0.01522000040858984, 0.10632999986410141, -0.6269099712371826, -0.09266900271177292, -0.08927500247955322, -0.9155499935150146, 0.48409000039100647, -0.3388200104236603, 0.15825000405311584, -0.4756700098514557, 0.44683000445365906, -0.3795199990272522, 0.14045999944210052, 0.05371600016951561, -0.7390599846839905, 0.06629499793052673, 0.555649995803833, -0.8970299959182739, 0.5885099768638611, 0.7092499732971191, 0.3399600088596344, 0.5239400267601013, -1.141700029373169, -0.45972999930381775, -0.4105600118637085, 1.0549999475479126, -0.4413500130176544, -0.7365800142288208, 0.2920199930667877, -0.23623999953269958, 0.4315299987792969, 0.5101699829101562, 0.260670006275177, 0.5705599784851074, 0.5820599794387817, -0.5013399720191956, 0.1990399956703186, -0.8998299837112427, -0.20502999424934387, 0.33017000555992126, 0.7128999829292297, -0.15782000124454498, 0.19286000728607178, -0.1950799971818924, 0.09791500121355057, 0.810509979724884, -1.11080002784729, -1.0694999694824219, 0.79926997423172, -0.17720000445842743, 0.06399500370025635, -0.33726999163627625, -0.29951000213623047, -0.23986999690532684, -0.5848900079727173, 0.9623299837112427, -0.6047700047492981, -0.7932000160217285, -0.020457999780774117, -0.28196001052856445, 0.07498499751091003, 0.9877499938011169, 0.1630299985408783, 0.5226399898529053, -0.46441999077796936, 
-0.5316799879074097, -0.05425800010561943, -0.6535500288009644, -0.16540999710559845, -0.8645300269126892, -0.40494000911712646, -1.0698000192642212, -0.1950799971818924, 0.528689980506897, 0.6302800178527832, -0.04916299879550934, 0.29736998677253723, 0.08417700231075287, 0.11004000157117844, 0.2115200012922287, 0.5511299967765808, 0.2912299931049347, 0.7075899839401245, 0.509909987449646, 0.18100999295711517, 0.3470599949359894, 0.44235000014305115, 0.16349999606609344, 0.22396999597549438, 0.46733999252319336, -0.4189299941062927, 0.9215099811553955, -0.9783400297164917, -0.19986000657081604, -0.2277899980545044, 0.3042599856853485, -0.0841199979186058, -0.3762800097465515, -0.2194100022315979, 0.4938800036907196, -0.03262399882078171, -1.0765999555587769, -0.0961960032582283, -0.42757999897003174, 0.5538899898529053, 1.1694999933242798, -1.0313999652862549, 0.7024000287055969, -0.2380400002002716, -0.4186600148677826, 0.3913100063800812, 0.0325080007314682, -0.4239799976348877, -0.8261500000953674, 0.28641000390052795, 0.4345000088214874, -0.5720900297164917, -0.11788000166416168, -0.906029999256134, 0.36570999026298523, 0.09272400289773941, 0.6400799751281738, -0.6992499828338623, 0.1544100046157837, -0.10287000238895416, 0.47659000754356384, -0.9597600102424622, -0.44029998779296875, -1.5994000434875488, 0.21026000380516052, 1.5254000425338745, 0.8843899965286255, 0.44975998997688293, 0.6934900283813477, 0.8531200289726257, -0.8978700041770935, 0.013616000302135944, 0.7402300238609314, 0.07758700102567673, -0.438510000705719, -0.4433799982070923, 0.7145000100135803, -0.07545100152492523, -0.3448300063610077, 0.3956199884414673, -0.9360499978065491, -0.23691999912261963, -0.03649500012397766, 0.6232399940490723, 0.4434100091457367, 0.00023258000146597624, -0.09995800256729126, -0.07772000133991241, 0.5399199724197388, 0.7082200050354004, -0.1998399943113327, 0.8828399777412415, -0.05993400141596794, 0.028776999562978745, 1.3626999855041504, -0.2817800045013428, 0.7436500191688538, -0.9376000165939331, -0.6131299734115601, 0.8243399858474731, -0.42785999178886414, 0.5022000074386597, -0.5564000010490417, -0.09689299762248993, -0.6695899963378906, -0.4083999991416931, 0.2992100119590759, -0.35491999983787537, -0.1685899943113327, 0.14184999465942383, -0.5776200294494629, -0.7934399843215942, 0.07140299677848816, 0.40713000297546387, 0.1174900010228157, -0.23578999936580658, -0.566569983959198, 0.6563699841499329, 0.14176000654697418, 0.5095999836921692, -0.5834199786186218, -0.26846998929977417, 0.06911300122737885, -0.19122999906539917, -0.3578299880027771, 0.09902399778366089, 0.432559996843338, -0.41523000597953796, 0.7903199791908264, 1.2422000169754028, -1.304900050163269, -0.5973899960517883, -0.6855999827384949, 0.21852999925613403, -0.4481000006198883, 0.08411300182342529, 0.03642300143837929, 0.2586199939250946, 0.19965000450611115, 0.7753000259399414, 0.14425000548362732, 0.2101300060749054, -0.3526799976825714, 0.01744000054895878, -0.5753999948501587, 0.46588999032974243, 0.05481800064444542, 0.21219000220298767, 0.43720000982284546, 1.0342999696731567, -1.225100040435791, 0.5697799921035767, -0.41602998971939087, 0.8475900292396545, 0.902180016040802, -0.6025400161743164, -1.2314000129699707, -0.759880006313324, -0.6635900139808655, 0.7537699937820435, 0.31068000197410583, 0.007109799887984991, 0.4593000113964081, -0.01787799969315529, 0.5504900217056274, 0.5648400187492371, -0.6225699782371521, 0.07521600276231766, -0.039406001567840576, 0.09207600355148315, 
0.7464500069618225, -0.016076000407338142, -0.11468999832868576, -0.08615399897098541, -0.9761199951171875, -0.11195000261068344, 1.1456999778747559, -0.01739799976348877], u'cooked': [0.002950600115582347, 0.5060300230979919, 0.42423000931739807, 0.13815000653266907, 0.0894630029797554, -0.21825000643730164, 0.2632800042629242, 0.38262999057769775, 0.013236000202596188, -0.5251299738883972, 0.27849000692367554, -0.7051699757575989, -0.048948999494314194, 0.4390299916267395, -0.6769899725914001, -0.19620999693870544, -0.27305999398231506, -0.05324599891901016, -0.14191000163555145, 0.40661999583244324, 0.02373499982059002, -0.0992949977517128, 0.257860004901886, -0.06647399812936783, -0.26100000739097595, 0.19676999747753143, -0.5002599954605103, -0.0011810000287368894, -0.033952999860048294, -0.4869000017642975, -0.8672800064086914, 0.519070029258728, -0.5826200246810913, -0.12771999835968018, -0.24613000452518463, 0.8961799740791321, -0.1033099964261055, 0.4461899995803833, -0.22234000265598297, 0.04130899906158447, 0.2679600119590759, -0.20607000589370728, -0.2662700116634369, -0.2353699952363968, 0.27024999260902405, 0.26886001229286194, 0.3704499900341034, 0.21270999312400818, 0.05715399980545044, 0.6140000224113464, 0.06215300038456917, 0.2538500130176544, 0.5369200110435486, -0.38526999950408936, -0.297650009393692, -0.3852800130844116, 0.1332699954509735, -0.38690999150276184, 0.14925000071525574, -0.03937000036239624, 0.2632000148296356, -0.1434900015592575, 0.4133700132369995, 0.01114100031554699, -0.5419700145721436, -0.5706899762153625, 0.01929200068116188, 0.03469099849462509, -0.576229989528656, -0.041735999286174774, 0.46693000197410583, -0.09967999905347824, -0.09896700084209442, -0.07040199637413025, -0.2438499927520752, 0.5158100128173828, 1.1187000274658203, 0.3855299949645996, -0.2842499911785126, -0.15863999724388123, -0.37584999203681946, 0.1374099999666214, 0.2772899866104126, 0.11772000044584274, 0.47293999791145325, -0.4117400050163269, -0.5288100242614746, 0.300029993057251, -0.10159999877214432, 0.06470199674367905, -0.12660999596118927, -0.08420900255441666, -0.08552899956703186, 0.6862300038337708, 0.3045099973678589, 0.11554999649524689, -0.43884000182151794, 0.5001299977302551, -0.11117000132799149, 0.40365999937057495, 0.27360999584198, -0.5352799892425537, 0.3040800094604492, -0.8785600066184998, -0.009310499764978886, 0.1659799963235855, 0.26194000244140625, 0.573930025100708, -0.16949999332427979, 0.12831999361515045, 0.3653700053691864, 0.4476900100708008, 0.007591899950057268, -0.4857200086116791, -0.2696300148963928, -0.19999000430107117, -0.8143200278282166, 0.10453999787569046, 0.4395500123500824, -0.1381700038909912, -0.3827100098133087, -0.4239499866962433, -0.2207300066947937, 0.4509100019931793, -0.2102299928665161, 0.3755500018596649, -0.2858099937438965, 0.24577000737190247, -0.23263999819755554, 0.7732899785041809, 0.1438400000333786, 0.903469979763031, 0.20722000300884247, 0.2844899892807007, -0.3350299894809723, -0.2500999867916107, 0.16840000450611115, -0.33382999897003174, -0.493120014667511, 0.7313500046730042, 0.7634999752044678, 0.14207999408245087, -0.1535400003194809, 0.06297700107097626, -0.8814799785614014, 0.10419999808073044, 0.11112000048160553, 0.07572899758815765, 0.3104499876499176, -0.42059001326560974, -0.7462599873542786, 0.6202300190925598, -0.06604199856519699, 0.18976999819278717, -0.39111998677253723, -0.29030999541282654, 0.0373929999768734, -0.4378899931907654, -0.09396299719810486, -0.18100999295711517, 
-0.3206999897956848, -0.03690300136804581, -0.12872999906539917, -0.08564499765634537, 0.2922700047492981, 0.18205000460147858, -0.07048200070858002, -0.4287700057029724, 0.15557000041007996, -0.5944600105285645, 0.06831199675798416, -0.11512000113725662, -0.20387999713420868, -0.12036000192165375, -0.16203999519348145, -0.15487000346183777, -0.036965999752283096, -0.4143899977207184, 0.46893998980522156, -0.7359099984169006, -0.08721400052309036, 0.15117999911308289, 0.19629999995231628, -0.397489994764328, -0.4138000011444092, 0.3610999882221222, 0.34060999751091003, 0.5287600159645081, 0.6146699786186218, -0.4057300090789795, 0.17100000381469727, 0.927839994430542, -0.5759099721908569, 0.16944000124931335, -0.12178999930620193, -0.15746000409126282, -0.18382999300956726, 0.2922999858856201, -0.4645000100135803, -0.44999998807907104, 0.028286000713706017, -0.2708300054073334, 0.18669000267982483, 0.3604600131511688, 0.6096000075340271, 0.388619989156723, 0.7026299834251404, -0.08224300295114517, -0.3149000108242035, -0.2880200147628784, 0.09455999732017517, -0.6084399819374084, -0.06696200370788574, -0.024098999798297882, 0.34084999561309814, 0.07325799763202667, 0.8287799954414368, -0.8551599979400635, -0.27316999435424805, 0.2756899893283844, 0.6378499865531921, 0.07248999923467636, -0.7058600187301636, -0.9104599952697754, -0.1994599997997284, -0.7653700113296509, 0.21353000402450562, -0.06747400015592575, -0.13373999297618866, 0.08478400111198425, 0.05001400038599968, 0.07335200160741806, -0.2101999968290329, -0.10546000301837921, 0.125900000333786, -0.019378000870347023, 0.24679000675678253, 0.27781999111175537, -0.7709900140762329, -0.4554400146007538, -0.12234999984502792, -0.5432599782943726, -0.1436000019311905, -0.015106000006198883, -0.5230200290679932, -0.0800039991736412, 0.4400300085544586, 0.20983000099658966, 0.050898998975753784, -1.0211000442504883, 0.3035599887371063, -0.0018178999889642, -0.32708999514579773, 0.01619100011885166, -0.153889998793602, 0.20537999272346497, -0.07218900322914124, 0.20135000348091125, -0.42037999629974365, 0.7199900150299072, -0.229980006814003, -0.35657998919487, -0.17506000399589539, -0.19981999695301056, 0.12935000658035278, 0.11333999782800674, -0.22799000144004822, -0.0245789997279644, 0.6243900060653687, -0.20830999314785004, -0.8930400013923645, 0.039347998797893524, 0.47235000133514404, 0.7776399850845337, -0.01664699986577034, 0.32881999015808105, -0.6951299905776978, 0.17809000611305237, -1.201799988746643, -0.9382200241088867, 0.3151400089263916, -0.12777000665664673, -0.04115400090813637, 0.18092000484466553, -0.2016099989414215, 0.25262001156806946, 0.7688199877738953, 0.04885999858379364, 0.29102998971939087, 0.36403998732566833, 0.05232999846339226, -0.03563300147652626, 0.09517999738454819, 0.0009574100258760154, -0.26148998737335205, -1.2085000276565552, -0.039358001202344894, -0.4370400011539459, -0.32332998514175415, -0.16161000728607178], u'clean': [0.2783200144767761, -0.04442400112748146, -0.5575600266456604, -1.0219999551773071, 0.03052300028502941, -0.04794200137257576, 0.24502000212669373, 0.2623000144958496, 0.3472200036048889, -1.5707000494003296, 0.3107700049877167, -0.05641699954867363, -0.1247899979352951, 0.239779993891716, -0.5646399855613708, 0.21052999794483185, -0.08142100274562836, 0.06959199905395508, 0.007063699886202812, 0.0850209966301918, -0.41721001267433167, 0.30548998713493347, 0.07680899649858475, 0.33520999550819397, 0.17608000338077545, -0.12450999766588211, 0.2185100018978119, 
0.22617000341415405, 0.3792699873447418, -0.34338000416755676, 0.013063999824225903, 0.3081800043582916, -0.18570999801158905, 0.11490999907255173, -0.981190025806427, 0.4011000096797943, 0.21710999310016632, -0.3632799983024597, -0.326449990272522, 0.07357999682426453, -0.21644000709056854, -0.10360000282526016, 0.5972099900245667, 0.2101600021123886, -0.0004983000108040869, 0.24706000089645386, 0.06412100046873093, -0.16660000383853912, 0.4308899939060211, -0.03754600137472153, -0.4012199938297272, -0.017376000061631203, 0.4305799901485443, -0.4935300052165985, 0.12905000150203705, 0.23633000254631042, 0.13971999287605286, -0.13806000351905823, -0.39169999957084656, 0.24018999934196472, -0.20607000589370728, -0.14667999744415283, -0.4124999940395355, -0.1580599993467331, -0.18353000283241272, 0.1937199980020523, -0.016468999907374382, -0.27876999974250793, -0.18111999332904816, -0.13078999519348145, 0.005813499912619591, -0.25641998648643494, 0.023144999518990517, 0.40119001269340515, 0.2771500051021576, 0.12328000366687775, -0.0041970000602304935, 0.7121800184249878, 0.32322999835014343, -0.23577000200748444, -0.011481000110507011, -0.15293000638484955, 0.3347100019454956, 0.10010000318288803, 0.4767799973487854, 0.1743900030851364, -0.5059800148010254, 0.46184998750686646, -0.04833900183439255, -0.45712000131607056, -0.12764999270439148, 0.2645300030708313, 0.41655001044273376, -0.15445999801158905, 0.0077817002311348915, -0.38666999340057373, 0.12744000554084778, -0.3421899974346161, 0.4945000112056732, 0.32760998606681824, -0.13154000043869019, -0.29093000292778015, -0.2782900035381317, 0.092739999294281, -0.041367001831531525, 0.23027999699115753, 0.22191999852657318, -0.09614299982786179, -0.41471999883651733, -0.12515999376773834, -0.01334299985319376, -0.7575500011444092, -0.3552600145339966, -0.4725399911403656, -0.30226001143455505, 0.3665199875831604, 0.07861799746751785, -0.2605299949645996, -0.27008000016212463, -0.3153400123119354, -0.31147000193595886, 0.39392998814582825, 0.1603199988603592, 0.23960000276565552, -0.09602800011634827, 0.5193799734115601, -0.5836399793624878, 0.35095998644828796, -0.3799099922180176, 0.40024998784065247, 0.13443000614643097, -0.1297300010919571, 0.40220001339912415, 0.1141899973154068, -0.09274999797344208, 0.16186000406742096, -0.14350999891757965, 0.2780100107192993, 0.40015000104904175, -0.22307999432086945, 0.618690013885498, 0.007821000181138515, 0.4809400141239166, -0.26794999837875366, -0.4489099979400635, 0.3339099884033203, 0.15925000607967377, 0.034595001488924026, 0.444350004196167, 0.7104399800300598, 0.07846300303936005, 0.11503999680280685, -0.21089999377727509, -0.3307499885559082, 0.22694000601768494, -0.004253000020980835, -0.20880000293254852, -0.11467999964952469, 0.09940499812364578, -0.015622000209987164, -0.16353000700473785, -0.5167700052261353, -0.051876001060009, 0.3224399983882904, -0.21926000714302063, -0.238769993185997, 0.812720000743866, 0.002436900045722723, 0.4703899919986725, 0.23683999478816986, -0.14640000462532043, 0.13016000390052795, -0.16196000576019287, 0.07682199776172638, 0.058997999876737595, 0.035725999623537064, 0.44012999534606934, -0.031362999230623245, -0.7252900004386902, -0.20317000150680542, -0.41508999466896057, -0.31000998616218567, 0.5944499969482422, -0.18172000348567963, -0.22224000096321106, 0.06682799756526947, 0.872730016708374, 0.03930699825286865, 0.5479400157928467, -0.04082600027322769, 0.2980000078678131, 0.37130001187324524, -0.6876599788665771, 0.2598699927330017, 
0.40742000937461853, 0.08954600244760513, -0.7065200209617615, -0.024467000737786293, -0.3898400068283081, 0.17991000413894653, 0.29061999917030334, 0.3660599887371063, 0.5552399754524231, -0.0711439996957779, -0.37584999203681946, -0.2981500029563904, 0.3827599883079529, -0.4232400059700012, -0.04602200165390968, 0.164560005068779, -0.6338599920272827, -0.3326199948787689, -0.6178699731826782, 0.3754099905490875, 0.4749299883842468, 0.21398000419139862, -0.09518799930810928, -0.5218200087547302, -0.17333999276161194, -0.06932999938726425, 0.2916199862957001, 0.10620000213384628, 0.02401600033044815, 0.1824900060892105, -0.08026000112295151, -0.3509899973869324, -0.18831999599933624, -0.1906999945640564, -0.00014381000073626637, 0.1697700023651123, 0.465939998626709, 0.41725999116897583, -0.05653500184416771, -0.16141000390052795, -0.025141000747680664, -0.3417400121688843, -0.3761399984359741, -0.03874899819493294, -0.06873399764299393, -0.19686000049114227, 0.2249000072479248, -0.3286600112915039, -0.2172199934720993, 0.42441999912261963, -0.7790899872779846, 0.19085000455379486, 0.5230799913406372, 0.29686999320983887, -0.4746899902820587, -0.48284000158309937, 0.26208001375198364, -0.85698002576828, -0.05196300148963928, 0.12839999794960022, 0.8116099834442139, -0.3655500113964081, -0.08382400125265121, 0.07368700206279755, 0.11878000199794769, 0.3757399916648865, -0.04257100075483322, -0.07650399953126907, -0.7250000238418579, 0.15532000362873077, 0.15873999893665314, 0.45785000920295715, 0.18782000243663788, -0.1417199969291687, 0.2803800106048584, -0.060718998312950134, -0.04270099848508835, 0.019208999350667, -0.5972200036048889, 0.18016000092029572, -0.20252999663352966, 0.09983500093221664, -1.9105000495910645, 0.12475000321865082, 0.329120010137558, -0.502560019493103, -0.041593998670578, 0.18898999691009521, -0.05632900074124336, 1.038100004196167, -0.037911999970674515, 0.43237999081611633, -0.10006999969482422, -0.1074799969792366, -0.1125200018286705, -0.4786199927330017, -0.5037699937820435, 0.41124001145362854, -0.4321100115776062, 0.18820999562740326, 0.4423699975013733, -0.09234800189733505, 0.5383999943733215, -0.33991000056266785, -0.19088000059127808, 0.4124799966812134], u'deflated': [0.48868998885154724, 0.11987999826669693, 0.09869100153446198, -0.05164099857211113, 0.2934899926185608, -0.28540000319480896, 0.2824600040912628, 0.35436999797821045, -0.14305000007152557, 0.2889699935913086, -0.5073999762535095, -0.023920999839901924, 0.05768999829888344, -0.2566699981689453, -0.05392099916934967, 0.24237999320030212, 0.048861999064683914, -0.22397999465465546, 0.3695499897003174, 0.25874000787734985, 0.4005500078201294, 0.06799499690532684, 0.18741999566555023, -0.12276999652385712, -0.19550000131130219, 0.049929000437259674, -0.21309000253677368, 0.21705999970436096, 0.010049000382423401, 0.5700799822807312, 0.10377000272274017, -0.40619000792503357, -0.12658999860286713, 0.10716000199317932, -0.25819000601768494, 0.09481099992990494, -0.32475000619888306, 0.33337000012397766, 0.5849599838256836, 0.6812300086021423, 0.040417999029159546, -0.2642900049686432, -0.18250000476837158, -0.3352400064468384, -0.18129000067710876, 0.19315999746322632, -0.4738300144672394, -0.05762699991464615, -0.19652999937534332, 0.3322100043296814, 0.659500002861023, -0.7608000040054321, -0.08721400052309036, 0.007683999836444855, -0.01271899975836277, -0.1258700042963028, 0.18306000530719757, 0.09472700208425522, -0.044440001249313354, -0.21106000244617462, 0.20600999891757965, 
-0.17361000180244446, 0.18846000730991364, -0.4981600046157837, -0.21232999861240387, 0.30041998624801636, -0.021800000220537186, -0.4893200099468231, 0.11138000339269638, -0.18174000084400177, -0.26607000827789307, 0.03150000050663948, 0.22035999596118927, 0.6697700023651123, 0.4717999994754791, 0.1350100040435791, 0.4742000102996826, -0.18998000025749207, -0.24792000651359558, -0.07294999808073044, -0.2586100101470947, 0.2752000093460083, 0.4744400084018707, 0.20024000108242035, -0.25119999051094055, 0.14358000457286835, 0.08843500167131424, -0.2933200001716614, 0.16752000153064728, 0.5569800138473511, 0.67535001039505, 0.07674799859523773, 0.21236999332904816, 0.0851529985666275, -0.16216999292373657, 0.5125899910926819, -0.15814000368118286, 0.6326799988746643, -0.9574000239372253, 0.20032000541687012, 0.02254600077867508, 0.14700999855995178, -0.08574800193309784, -0.31518998742103577, 0.457040011882782, 0.33956998586654663, -0.4352000057697296, -0.010073999874293804, -0.16110000014305115, 0.29875999689102173, -0.30362001061439514, 0.7728599905967712, 0.06978800147771835, -0.32534000277519226, 0.20719000697135925, -0.16913999617099762, 0.24408000707626343, 0.16737000644207, 0.04023899883031845, -0.4234200119972229, 0.6728000044822693, -1.1655000448226929, 0.18619999289512634, 0.4604800045490265, 0.04763999953866005, -0.0793830007314682, -0.8170499801635742, -0.3546999990940094, -0.008618799969553947, 0.05003499984741211, 0.1428000032901764, 0.5889999866485596, -0.20031000673770905, 0.10313999652862549, 0.2643600106239319, -0.1244800016283989, 0.4691700041294098, -0.44277000427246094, -0.229980006814003, 0.223130002617836, 0.00527910003438592, -0.03729100152850151, -0.08903799951076508, 0.4612799882888794, -0.10683000087738037, 0.33785000443458557, 0.10583999752998352, 0.2917400002479553, 0.14831000566482544, -0.18690000474452972, -0.4059300124645233, -0.350490003824234, 0.06155800074338913, 0.1040399968624115, 0.2580699920654297, 0.30994001030921936, 0.5048800110816956, -0.04506099969148636, -0.27456000447273254, 0.10035999864339828, -0.6400200128555298, -0.25714001059532166, -0.5722100138664246, -0.09252800047397614, 1.2339999675750732, -0.8190900087356567, -0.3944999873638153, 0.3616800010204315, -0.22995999455451965, 0.07539600133895874, -0.19527000188827515, 0.13726000487804413, -0.054885998368263245, -0.1848900020122528, 0.28593000769615173, -0.42052000761032104, 0.030006999149918556, 0.006729099899530411, 0.1921599954366684, 0.18807999789714813, 0.20782999694347382, -0.3463999927043915, 0.284280002117157, -0.18504999577999115, 0.2194499969482422, 0.0824659988284111, 0.31641000509262085, -0.1894499957561493, -0.4649899899959564, 0.11973000317811966, 0.4409799873828888, -0.5841699838638306, -0.27939000725746155, -0.15148000419139862, -0.20769000053405762, 0.16368000209331512, 0.03677599877119064, 0.5505499839782715, 0.008092200383543968, 0.2570599913597107, -0.06428000330924988, -0.20319999754428864, -0.16930000483989716, -0.1252399981021881, -0.1468300074338913, -0.6593499779701233, -0.01953599974513054, 0.027479000389575958, -0.7515299916267395, 0.16539999842643738, -0.42906999588012695, 0.4057300090789795, -0.0504009984433651, 0.09681200236082077, -0.07779300212860107, -0.11655999720096588, 0.2974100112915039, -0.271450012922287, -0.09152799844741821, 0.17887000739574432, 0.6692600250244141, -0.16051000356674194, -0.22744999825954437, -0.19054000079631805, 0.34839001297950745, 0.2958900034427643, -0.40112000703811646, 0.47314000129699707, 0.2267799973487854, 
-0.4166400134563446, 0.19196000695228577, 0.04642599821090698, 0.24504999816417694, -0.2618899941444397, -0.48148998618125916, 0.026771999895572662, -0.17716999351978302, -0.013055000454187393, -0.11277999728918076, -0.1332399994134903, 0.37981998920440674, -0.1357399970293045, -0.4526199996471405, 0.4130600094795227, -0.005553800147026777, -0.46577000617980957, 0.1923000067472458, -0.29041001200675964, 0.2977199852466583, 0.08070400357246399, -0.04410399869084358, -0.5238199830055237, 0.13463999330997467, -0.5358800292015076, -0.38944000005722046, 0.3800800144672394, -0.18970000743865967, 0.16568000614643097, -0.19290000200271606, -0.44892001152038574, -0.18941999971866608, -0.37351998686790466, -0.2050500065088272, 0.3434000015258789, 0.17313000559806824, 0.1447400003671646, 0.07157599925994873, -0.25867998600006104, 0.17358000576496124, 0.3275800049304962, -0.23374000191688538, -0.14921000599861145, 0.26677998900413513, 0.15060000121593475, -0.10943999886512756, -0.15591999888420105, 0.5373700261116028, -0.12928999960422516, 0.17990000545978546, 0.3577199876308441, -0.0992719978094101, -0.04654200002551079, -0.19654999673366547, 0.13233999907970428, 0.2585600018501282, -0.15783999860286713, 0.02881699986755848, 0.0015323000261560082, -0.23237000405788422, -0.0534840002655983, 0.014659999869763851, -0.4417099952697754, 0.15365999937057495, 0.06386400014162064, -0.020653000101447105, -0.7426900267601013, 0.1860799938440323, -0.31264999508857727, -0.2386700063943863, -0.1521800011396408], u'barren': [0.002853600075468421, 0.16272999346256256, -0.27889999747276306, 0.021730000153183937, 0.2180500030517578, -0.05281100049614906, -0.008775399997830391, -0.25703001022338867, 0.2589699923992157, -0.009204699657857418, -0.07027500122785568, -0.10023999959230423, -0.30184999108314514, -0.32197999954223633, -0.2312999963760376, 0.056547001004219055, 0.07387600094079971, -0.0429529994726181, 0.48778998851776123, 0.31279000639915466, -0.3113900125026703, 0.6157199740409851, 0.13694000244140625, 0.008385200053453445, -0.11097999662160873, -0.34845998883247375, 0.22793999314308167, 0.07491400092840195, -0.6456300020217896, 0.09500999748706818, 0.4752900004386902, -0.0027834000065922737, -0.284170001745224, -0.12741999328136444, 0.7234200239181519, 0.6096299886703491, -0.27928999066352844, -0.2037999927997589, 0.2536799907684326, 0.055528998374938965, 0.2573600113391876, 0.3100000023841858, -0.045076001435518265, -0.2828800082206726, 0.5406000018119812, 0.08336800336837769, -0.4720799922943115, 0.2777099907398224, 0.0751660019159317, 0.1909099966287613, -0.1562899947166443, 0.11175999790430069, 0.5418199896812439, -0.1920199990272522, 0.07441899925470352, -0.09205000102519989, 0.10824999958276749, -0.7247899770736694, -0.04551500082015991, 0.13779999315738678, 0.08688399940729141, -0.03715699911117554, 0.5524799823760986, -0.48982998728752136, 0.21749000251293182, -0.011648000217974186, 0.19717000424861908, 0.36340999603271484, 0.1417199969291687, 0.05459899827837944, -0.3948099911212921, 0.23083999752998352, -0.3324500024318695, 0.26399001479148865, -0.7446100115776062, -0.6517599821090698, 0.010463000275194645, -0.24108000099658966, 0.40206000208854675, 0.3418799936771393, -0.075873002409935, 0.384880006313324, -0.4421899914741516, -0.04539500176906586, -0.09967699646949768, 0.2879300117492676, 0.23301999270915985, -0.011102999560534954, 0.46410998702049255, 0.27796998620033264, -0.08517900109291077, 0.026517000049352646, 0.5478000044822693, 0.43953999876976013, -0.3033599853515625, 
0.31887000799179077, 0.06019499897956848, -0.22396999597549438, 0.10481999814510345, 0.46667999029159546, 0.2829299867153168, 0.3093000054359436, -0.5491099953651428, 0.2749899923801422, -1.062000036239624, -0.05841999873518944, -0.12445999681949615, 0.08284299820661545, -0.026693999767303467, 0.057016998529434204, -0.5052099823951721, -0.3492799997329712, 0.058747999370098114, -0.4018400013446808, -0.21852999925613403, -0.19526000320911407, 0.08087000250816345, 0.36616000533103943, 0.6306399703025818, 0.41025999188423157, 0.0861240029335022, -0.07229000329971313, -0.40233999490737915, 0.7247599959373474, 0.07303199917078018, 0.4957900047302246, -0.17478999495506287, -0.036649998277425766, -0.08220499753952026, -0.14386999607086182, 0.2887600064277649, 0.29469001293182373, 0.1444700062274933, 0.707099974155426, 0.04668600112199783, -0.06447699666023254, 0.4479900002479553, 0.47694000601768494, -0.27601999044418335, -0.5641000270843506, 0.3822300136089325, -0.39155998826026917, -0.33809998631477356, -0.2039799988269806, -0.7742199897766113, 0.05801999941468239, 0.45669999718666077, 0.5881400108337402, -0.07427500188350677, -0.60343998670578, -0.7118600010871887, -0.7471699714660645, -1.152500033378601, -0.07665599882602692, 0.11488000303506851, -0.027650000527501106, -0.05909999832510948, 0.15041999518871307, 0.559440016746521, 0.7117199897766113, -0.3391000032424927, -0.47797998785972595, 0.7160900235176086, 0.8845400214195251, -0.06698600202798843, -0.3812499940395355, -0.011637000367045403, -0.3494200110435486, -0.3879300057888031, -0.575410008430481, 0.10715000331401825, 0.022286999970674515, 0.17278000712394714, -0.5533599853515625, -0.20782999694347382, 0.04414299875497818, -0.0026310000102967024, 0.400299996137619, -0.6755099892616272, -0.2719799876213074, -0.3737100064754486, 0.4256100058555603, 0.3149400055408478, -0.008764199912548065, -0.0878790020942688, -0.11043000221252441, 0.7280300259590149, -0.42076998949050903, -0.5861799716949463, 0.18262000381946564, -0.053346000611782074, 0.06521499902009964, 0.49974000453948975, -0.2890399992465973, 0.5809000134468079, 0.6430500149726868, -0.41471999883651733, 0.09320899844169617, -0.13638000190258026, 0.4510200023651123, 0.9355999827384949, -0.5302600264549255, -0.36847999691963196, 0.2671999931335449, 0.14489999413490295, -0.03706999868154526, 0.5864999890327454, -0.5497000217437744, 0.4923099875450134, -0.19046999514102936, -0.6663299798965454, -0.07881899923086166, -0.11113999783992767, -0.38721999526023865, -0.008895300328731537, 0.09501399844884872, 0.05553499981760979, -0.07639200240373611, -0.20841999351978302, -0.13224999606609344, 0.6649900078773499, 0.027132000774145126, -0.002118099946528673, -0.10728000104427338, -0.3972100019454956, -0.31452998518943787, 0.44738999009132385, -0.24202999472618103, -0.39733999967575073, 0.31415000557899475, 0.424560010433197, 0.16133999824523926, -0.6214500069618225, -0.7350599765777588, 0.05576299875974655, 0.3402999937534332, -0.8388400077819824, 0.05178700014948845, -0.16485999524593353, -0.3745799958705902, 0.2348099946975708, 0.3474400043487549, 0.22988000512123108, -0.08785299956798553, -0.3325600028038025, 0.2796899974346161, 0.11925999820232391, 0.3574399948120117, 0.22628000378608704, -0.39278000593185425, 0.0913190022110939, 0.4781999886035919, 0.44986000657081604, -0.7383800148963928, 0.44944998621940613, -0.08138299733400345, -0.5081899762153625, 0.10350000113248825, 0.011532999575138092, 0.566100001335144, 0.4231399893760681, -0.07482600212097168, -0.11343000084161758, 
-0.20576000213623047, -0.18468999862670898, -0.321289986371994, 0.23962000012397766, -0.30136001110076904, 0.5315999984741211, -0.044450998306274414, -0.4993700087070465, -0.06843999773263931, -0.456059992313385, -0.13303999602794647, 0.07673300057649612, 0.14653000235557556, -0.41027000546455383, 0.7758700251579285, 0.055119000375270844, -0.24529999494552612, -0.46709999442100525, -0.1407800018787384, -0.3025299906730652, 0.04288399964570999, -0.08716700226068497, 0.11181999742984772, 0.2637600004673004, 0.3537200093269348, -0.07527299970388412, 0.22799000144004822, 0.30834999680519104, 0.17531000077724457, -0.1291700005531311, 0.6115999817848206, -0.33886000514030457, 0.33858001232147217, 0.5645999908447266, 0.24666999280452728, -0.2121499925851822, -0.2294600009918213], u'fresh': [0.19193999469280243, -0.35721999406814575, 0.05487700179219246, -0.14247000217437744, 0.25613000988960266, -0.49257999658584595, 0.3346099853515625, -0.0925929993391037, 0.5489299893379211, -1.500499963760376, 0.5852400064468384, -0.1373700052499771, 0.20675000548362732, 0.526170015335083, 0.022770000621676445, 0.030021000653505325, -0.617900013923645, 0.17746999859809875, 0.05431000143289566, 0.3825500011444092, -0.5747799873352051, 0.0401029996573925, -0.022590000182390213, -0.3934899866580963, 0.2327200025320053, -0.25328001379966736, 0.028098000213503838, 0.23080000281333923, 0.06789100170135498, 0.1168999969959259, -0.5803200006484985, -0.06251300126314163, -0.3975900113582611, -0.05566899850964546, -0.8806800246238708, 0.4604099988937378, -0.31442001461982727, 0.044725000858306885, 0.16824999451637268, -0.03160399943590164, -0.2037000060081482, -0.0825520008802414, 0.8884199857711792, 0.14424000680446625, 0.09517599642276764, 0.1046300008893013, -0.2371399998664856, 0.013570000417530537, 0.051580000668764114, -0.025426000356674194, -0.14177000522613525, 0.09591200202703476, 0.0525749996304512, 0.302619993686676, 0.05833100154995918, -0.12746000289916992, 0.10604999959468842, 0.17273999750614166, 0.11962000280618668, 0.20969000458717346, -0.15649999678134918, -0.3441399931907654, 0.3769800066947937, 0.2229599952697754, -0.275160014629364, 0.20938000082969666, -0.21521000564098358, -0.7274199724197388, -0.37522000074386597, 0.1457200050354004, -0.025818999856710434, 0.11095999926328659, -0.23917999863624573, -0.04699200019240379, -0.11994999647140503, 0.1035199984908104, 0.43832001090049744, 0.032958999276161194, 0.09805399924516678, -0.06772200018167496, -0.027199000120162964, -0.21283000707626343, 0.10142000019550323, 0.21541999280452728, 0.24582000076770782, 0.12111999839544296, -0.35036998987197876, -0.2667500078678131, -0.05182100087404251, -0.06440400332212448, 0.3186500072479248, -0.45816999673843384, -0.5890899896621704, -0.5154899954795837, 0.1745699942111969, 0.12369000166654587, -0.21408000588417053, -0.3246299922466278, 0.34419000148773193, 0.4272199869155884, -0.19221000373363495, -0.18675999343395233, 0.28387999534606934, -0.3730500042438507, -0.3212299942970276, -0.21973000466823578, 0.07992900162935257, 0.31463998556137085, 0.03294600173830986, -0.41642001271247864, 0.4492399990558624, 0.05191899836063385, -0.28082001209259033, -0.35499998927116394, 0.22762000560760498, 0.0439159981906414, -0.4318700134754181, 0.44018998742103577, 0.4873200058937073, -0.860729992389679, -0.12700000405311584, -0.3106899857521057, -0.2693899869918823, 0.8238400220870972, -0.08789800107479095, 0.1367100030183792, -0.24440999329090118, 0.3667899966239929, 0.032499998807907104, 0.6011599898338318, 
-0.4046599864959717, 0.6090499758720398, -0.2890099883079529, -0.05502700060606003, -0.1368499994277954, 0.014170000329613686, -0.20321999490261078, 0.5735300183296204, -0.02718999981880188, 0.23204000294208527, 0.46845000982284546, 0.3295300006866455, 0.20319999754428864, 0.01583000086247921, -0.8450800180435181, 0.39614999294281006, -0.6039100289344788, -0.2117999941110611, 0.38506999611854553, -0.3509199917316437, -0.2847900092601776, 0.2835099995136261, 0.3197000026702881, 0.6069200038909912, 0.18693000078201294, -0.5572199821472168, -0.056731998920440674, -0.35484999418258667, -0.4047999978065491, 0.267520010471344, 0.2486100047826767, 0.0033581999596208334, -0.18579000234603882, 0.22530999779701233, -0.054113999009132385, 0.12809999287128448, 0.8205400109291077, 0.13459999859333038, -0.17539000511169434, -0.19553999602794647, 0.5865600109100342, 0.10447999835014343, -0.5707100033760071, -0.32864999771118164, 0.4521700143814087, 0.4238399863243103, 0.40070998668670654, -0.2473600059747696, 0.2549799978733063, 0.024380000308156013, -0.22506000101566315, -0.3188000023365021, 0.2535400092601776, -0.5785999894142151, -0.14979000389575958, -0.06769700348377228, 0.4629800021648407, -0.21431000530719757, 0.547029972076416, -0.38708001375198364, -0.1027199998497963, 0.4758099913597107, -0.38451001048088074, 0.06760500371456146, 0.007639199960976839, -0.2469400018453598, -0.0865119993686676, -0.7114999890327454, -0.3016299903392792, 0.7315999865531921, 0.4274199903011322, -0.38196998834609985, -0.07972200214862823, 0.35069000720977783, -0.43588000535964966, 0.07256100326776505, 0.07981599867343903, 0.42621999979019165, -0.099590003490448, 0.20624999701976776, 0.07884199917316437, -0.3804900050163269, 0.07181499898433685, 0.016147000715136528, 0.3558799922466278, -0.558929979801178, 0.8861299753189087, -0.30987998843193054, 0.07470700144767761, -0.16344000399112701, 0.8938500285148621, 0.3528999984264374, -0.10446999967098236, 0.2085999995470047, 0.003992199897766113, -0.11022000014781952, 0.29486000537872314, 0.13592000305652618, 0.15474000573158264, 0.49862000346183777, 0.08034300059080124, 0.16176000237464905, 0.12208999693393707, -0.44332000613212585, 0.8429099917411804, 0.7894200086593628, 0.20003999769687653, -0.20611999928951263, -0.6820499897003174, -0.19524000585079193, -0.4300299882888794, -0.18296000361442566, -0.49970000982284546, -0.08182299882173538, -0.9503499865531921, -0.2126699984073639, 0.5194500088691711, 0.6648499965667725, -0.48938998579978943, -1.1943999528884888, 0.6985499858856201, -0.03613400086760521, 0.2656799852848053, 0.04061700031161308, -0.18208999931812286, -0.17603999376296997, 0.044992998242378235, 0.3267099857330322, 0.023291999474167824, 1.0861999988555908, 0.11350999772548676, -0.13506999611854553, -0.11248999834060669, -0.07234799861907959, 0.12894000113010406, -0.4377399981021881, -0.10874000191688538, 0.3824799954891205, -0.13431000709533691, -0.4534499943256378, -0.0544620007276535, 0.6208500266075134, -0.14797000586986542, 0.011827999725937843, 0.2957800030708313, 0.6198700070381165, -1.7071000337600708, -0.24108999967575073, 0.39500999450683594, -0.5930299758911133, 0.09757299721240997, 0.22703999280929565, 0.43678000569343567, -0.047635000199079514, -0.19584999978542328, -0.25442999601364136, -0.08994299918413162, 0.41909998655319214, 0.15826000273227692, -0.5313599705696106, -0.5766199827194214, 0.07935299724340439, 0.3697499930858612, -0.060210999101400375, -0.19899000227451324, 0.5399399995803833, 0.2136400043964386, -0.3405799865722656, 
-0.0267730001360178, 0.39726999402046204], u'caramelized': [0.05956200137734413, -0.5109599828720093, 0.5732200145721436, 0.09378200024366379, -0.4690000116825104, -0.586430013179779, 0.04416000097990036, 0.04065300151705742, -0.041478000581264496, 0.42559999227523804, -0.1813099980354309, -0.0506649985909462, -0.18539999425411224, 0.3189300000667572, -0.8448500037193298, 0.5692600011825562, -0.40560999512672424, -0.2997399866580963, 0.14187000691890717, 0.7349799871444702, -0.562749981880188, 0.26853999495506287, -0.44192999601364136, -0.2220200002193451, -0.17396999895572662, -0.2942900061607361, -0.13377000391483307, 0.18206000328063965, -0.9232699871063232, -0.17870000004768372, -1.1506999731063843, 0.3995400071144104, -0.15710000693798065, -0.711359977722168, 0.1745299994945526, 0.48607000708580017, 0.01624000072479248, 0.2623000144958496, -0.44721999764442444, 0.026642000302672386, 0.7536299824714661, 0.1474200040102005, 0.2032099962234497, -0.6159600019454956, 0.5329300165176392, 0.6000499725341797, 0.004666000138968229, -0.10452000051736832, -0.6576700210571289, 0.37376001477241516, 0.10832999646663666, -0.18288999795913696, 0.37231001257896423, 0.02317499928176403, -0.35620999336242676, -0.20054000616073608, -0.016774000599980354, 0.20826999843120575, 0.3177100121974945, -0.026173999533057213, 0.13026000559329987, -0.03378000110387802, -0.265639990568161, 0.268779993057251, -0.24365000426769257, -0.27757999300956726, 0.2657800018787384, 0.38060998916625977, -0.11183000355958939, 0.05772000178694725, -0.0076512000523507595, 0.14103999733924866, 0.04018700122833252, 0.2509300112724304, 0.2674899995326996, 0.18363000452518463, 1.131700038909912, -0.46299999952316284, -0.3280700147151947, 0.04955499991774559, 0.11992000043392181, 0.13675999641418457, 0.3777500092983246, -1.1162999868392944, -0.2252500057220459, 0.006415899842977524, -0.40448999404907227, 0.407150000333786, -0.4515799880027771, -0.14217999577522278, -0.0322519987821579, 0.16327999532222748, -0.056168001145124435, 0.8915299773216248, -0.6185399889945984, 0.08663800358772278, -0.12910999357700348, 0.2901799976825714, -0.2177100032567978, 0.663670003414154, 0.10785999894142151, -0.1468300074338913, 0.2676999866962433, -0.20122000575065613, -0.33285999298095703, -0.21281999349594116, 0.14751000702381134, -0.37790000438690186, 0.11864999681711197, 0.1738699972629547, 0.3711499869823456, 0.2252199947834015, 0.2932499945163727, -0.39438000321388245, 0.2792600095272064, 0.01093399990350008, -0.8381100296974182, 0.5834900140762329, 0.2378000020980835, 0.20685000717639923, 0.2358900010585785, -0.8155099749565125, -0.009106099605560303, 0.52156001329422, -0.7259600162506104, -0.395579993724823, 0.04810500144958496, 0.2955099940299988, -0.7823600172996521, 0.8487100005149841, -0.43112999200820923, 0.7455499768257141, 0.5201299786567688, 0.8130199909210205, -0.08693599700927734, 0.04953400045633316, -0.03103099949657917, 0.4066700041294098, -0.016690999269485474, -0.3093099892139435, 0.07331100106239319, 0.5691499710083008, -0.7739700078964233, 0.1215599998831749, 0.7649700045585632, -0.354310005903244, -0.14514000713825226, -0.08902300149202347, 0.9200900197029114, -0.17343999445438385, -0.29791998863220215, 0.24886000156402588, 0.30724000930786133, 0.014922999776899815, -0.4582900106906891, -0.7666500210762024, -0.01463599968701601, -0.3590500056743622, -0.412090003490448, -0.2222599983215332, -0.31463998556137085, 0.25, 0.1028899997472763, -0.5420299768447876, -0.05037299916148186, 0.44273000955581665, -0.31746000051498413, 
-0.11212000250816345, 0.015437000431120396, -0.552869975566864, -0.509660005569458, -0.08525300025939941, 0.07165399938821793, -0.6937599778175354, -0.2347099930047989, -0.340939998626709, -0.33065998554229736, -0.2522900104522705, 0.09813699871301651, -0.9500300288200378, -0.6013799905776978, 0.13199999928474426, 0.18580999970436096, -0.5703099966049194, 0.2856200039386749, -0.2378299981355667, 1.0714000463485718, -0.07173600047826767, -0.7520800232887268, -0.04187700152397156, 0.22053000330924988, 0.728410005569458, -0.199070006608963, -0.2971299886703491, 0.1175599992275238, -0.2920899987220764, -0.3690199851989746, 0.6185399889945984, -0.4946799874305725, -0.39225998520851135, -0.3566400110721588, -0.6976900100708008, -0.15349000692367554, -0.49237000942230225, 0.07394000142812729, 0.44113999605178833, 0.4420500099658966, -0.2500399947166443, -0.21558000147342682, -0.0784360021352768, 0.9256100058555603, -0.3810499906539917, -0.32774001359939575, 0.5161399841308594, -0.8748400211334229, 0.3246400058269501, 0.6775500178337097, -0.8969299793243408, -0.11655999720096588, 0.11783000081777573, 0.3451800048351288, 0.17736999690532684, -0.2345000058412552, 0.06850399821996689, -0.6053400039672852, -0.2872200012207031, -0.21355000138282776, 0.505840003490448, 0.10154999792575836, -0.6596999764442444, 0.19543999433517456, 0.2918899953365326, 0.32870998978614807, -0.39348000288009644, 0.25426000356674194, 0.24267999827861786, 1.4011000394821167, 0.05383500084280968, -0.10683000087738037, 0.06231600046157837, -0.16086000204086304, -0.1852799952030182, -0.14328999817371368, 0.10666000097990036, -0.121069997549057, 0.12310999631881714, -0.07655499875545502, -0.11952999979257584, -0.44453001022338867, -1.010699987411499, -0.11823999881744385, -0.13556000590324402, -0.01268799975514412, 0.4039500057697296, -0.1642400026321411, 0.807449996471405, 0.18368999660015106, 0.31558001041412354, -0.3335399925708771, 0.47095000743865967, -0.3942500054836273, -0.11483000218868256, -0.13338999450206757, 0.5040199756622314, 0.2700200080871582, 0.11518999934196472, 0.259660005569458, -0.8532500267028809, -0.06224599853157997, 0.719730019569397, -0.4469600021839142, 0.02721099928021431, -0.0596730001270771, 0.9594500064849854, -0.1512400060892105, 0.6936699748039246, 0.5910800099372864, -0.342739999294281, -1.3806999921798706, -0.44530001282691956, -0.12482000142335892, -0.17080999910831451, -0.6913599967956543, -0.2002200037240982, 0.23946000635623932, -0.062428999692201614, 0.42215999960899353, 0.1417199969291687, -0.297760009765625, 0.1151600033044815, 0.2655799984931946, -0.40384000539779663, 0.19550000131130219, -0.3483099937438965, -0.701990008354187, -0.9829800128936768, -0.6521099805831909, -0.34757000207901, -0.38837000727653503, -0.07676299661397934]} +objs_dict = {u'lightbulb': [0.39017000794410706, -0.36684998869895935, 0.1300400048494339, 0.19380000233650208, -0.5776399970054626, 0.27017998695373535, -0.7988600134849548, -0.08235999941825867, 0.23645000159740448, 0.1733900010585785, -0.5525500178337097, 0.4142799973487854, -0.14318999648094177, 0.007144200149923563, -0.028845999389886856, 0.13551999628543854, 0.5674499869346619, -0.21085000038146973, -1.063099980354309, 0.3390200138092041, 0.4120999872684479, 0.8149099946022034, 0.135110005736351, 0.33788999915122986, 0.19972999393939972, -0.3126400113105774, -0.18885000050067902, 0.16535000503063202, 0.3117699921131134, -0.097632996737957, 0.1426900029182434, 0.11766999959945679, -0.2812199890613556, 0.13745999336242676, 0.0774179995059967, 
0.13732999563217163, -0.19167999923229218, 0.41762998700141907, 0.6315199732780457, 0.7547199726104736, 0.018557999283075333, 0.018137000501155853, -0.4241200089454651, -0.028156999498605728, -0.6999899744987488, 0.16718000173568726, -0.04638899862766266, -0.40248000621795654, -0.23138000071048737, -0.12861000001430511, 0.14595000445842743, 0.1890600025653839, 0.3117400109767914, -0.04619399830698967, 0.44506001472473145, 0.24942000210285187, -0.11096999794244766, 0.4408099949359894, -0.19471000134944916, 0.731909990310669, -0.15719999372959137, 0.1534699946641922, -0.20674000680446625, 0.20428000390529633, -0.19735999405384064, 0.3512600064277649, 0.12592999637126923, 0.04709799960255623, 0.35806000232696533, -0.27121999859809875, 0.21445000171661377, 0.6810399889945984, 0.09149599820375443, -0.5373799800872803, 0.5092599987983704, 0.63086998462677, -0.5224599838256836, -0.386680006980896, 0.7476999759674072, 0.6594200134277344, -0.6299899816513062, 0.0064603001810610294, -0.4437899887561798, -0.14836999773979187, 0.40689998865127563, -0.15915000438690186, 0.4200499951839447, 0.31779998540878296, -0.1754000037908554, -0.07346100360155106, -0.2871299982070923, -0.11997000128030777, -0.24899999797344208, 0.6814299821853638, 0.04824800044298172, -0.19682000577449799, 0.3444899916648865, -0.49375998973846436, -0.15234999358654022, 0.13875000178813934, -0.007455199956893921, 0.38697999715805054, -0.08914099633693695, 0.47183001041412354, -0.5498600006103516, -0.4045400023460388, -0.5654500126838684, -0.021882999688386917, -0.8551999926567078, 0.3111000061035156, -0.15335999429225922, 0.0587100014090538, 0.15383000671863556, 0.5809999704360962, 0.035016998648643494, 0.3586199879646301, -0.04457699880003929, 0.5574600100517273, -0.0649000033736229, 0.11229000240564346, -0.041402000933885574, -0.32308998703956604, 0.3281700015068054, 0.7347699999809265, 0.38506999611854553, -0.08412999659776688, 0.4682199954986572, 0.3113099932670593, -0.2499299943447113, -0.5948200225830078, 1.0299999713897705, -0.4258100092411041, 0.699180006980896, -0.06161599978804588, -0.039395999163389206, 0.5107700228691101, -0.41225001215934753, 0.013923999853432178, 0.09260500222444534, -0.36204999685287476, -0.16267000138759613, -0.3536500036716461, -0.11341000348329544, -0.21154999732971191, 0.01783600077033043, 0.4005100131034851, 0.27781999111175537, 0.13565999269485474, 0.5120599865913391, -0.10018999874591827, -0.41117000579833984, 0.13854999840259552, 0.13693000376224518, 0.03209799900650978, -0.29809999465942383, 0.21804000437259674, 0.2985999882221222, -0.14616000652313232, -0.389629989862442, -0.051516998559236526, -0.8532000184059143, 0.06994500011205673, -0.1830500066280365, 0.4226199984550476, -0.32989999651908875, 0.3575200140476227, -0.1711300015449524, 0.27667999267578125, -0.0194690003991127, -0.4866600036621094, -0.7117900252342224, 0.10836999863386154, 0.3392300009727478, -0.7555500268936157, -0.3968000113964081, -0.7511799931526184, 0.07553199678659439, 0.43248000741004944, 0.22290000319480896, -0.3882099986076355, 0.31161001324653625, -0.17733000218868256, 0.3491100072860718, 0.3080500066280365, -0.2782599925994873, -0.31810998916625977, 0.6185100078582764, 0.18017999827861786, -0.6533200144767761, -0.3118700087070465, 0.4415299892425537, -0.19760000705718994, 0.7589100003242493, -0.7913200259208679, -0.05096900090575218, 0.6225500106811523, -1.1895999908447266, 0.02027300000190735, 0.024000000208616257, -0.1612900048494339, -0.25044000148773193, 0.2095700055360794, 0.35286998748779297, 
0.0946120023727417, -0.09207499772310257, -0.3238300085067749, 0.3717299997806549, -0.24932999908924103, 0.06595700234174728, -0.4003399908542633, -0.04682600125670433, -0.2981700003147125, -0.06402499973773956, 0.5217099785804749, 0.20072999596595764, 0.9763299822807312, -0.04685100167989731, -0.32427000999450684, -0.10874000191688538, 0.3020699918270111, -0.3841100037097931, 0.7464399933815002, -0.42761000990867615, 0.7340099811553955, -0.24122999608516693, -0.4282500147819519, -0.28064998984336853, 0.20432999730110168, 0.4332300126552582, 0.6038200259208679, 0.18182000517845154, -0.31321999430656433, -0.1927500069141388, -0.654770016670227, -0.29526999592781067, -0.37307998538017273, -0.725600004196167, -0.10050000250339508, 0.19492000341415405, -0.608709990978241, -0.16140000522136688, -0.3903599977493286, -0.9850199818611145, 0.08120600134134293, 0.3489699959754944, -0.930620014667511, -0.1898300051689148, 0.0692100003361702, -0.15218999981880188, -0.334089994430542, -0.06312300264835358, 0.16547000408172607, 0.19426999986171722, -0.648169994354248, 0.1098100021481514, -0.11388999968767166, -0.3305400013923645, 0.2590999901294708, -0.07720399647951126, -0.8772000074386597, -0.09768400341272354, -0.12125000357627869, 0.36511000990867615, -0.13431000709533691, 0.004244800191372633, 0.14496000111103058, -0.3638100028038025, 0.471560001373291, 0.386929988861084, 0.38763999938964844, -0.1603900045156479, 0.4681699872016907, -0.33748000860214233, -0.11840000003576279, -0.27928999066352844, -0.58024001121521, 0.5039899945259094, -0.03803899884223938, -0.3761099874973297, 0.1617799997329712, -0.3414599895477295, 0.18852999806404114, -0.7964000105857849, 0.26203998923301697, -0.2649500072002411, 0.1618099957704544, -0.4299600124359131, -0.5538300275802612, -0.019088000059127808, 0.33772000670433044, 0.09833700209856033, 0.1729699969291687, 0.38273000717163086, -0.13450999557971954, -0.574679970741272, -0.5538600087165833, -0.4624199867248535, 0.28975000977516174, 0.5906299948692322, 0.32332998514175415], u'shoes': [0.046521998941898346, -0.27535000443458557, -0.1372399926185608, -0.08483199775218964, -0.5181000232696533, -0.1770700067281723, 0.10560999810695648, 0.21041999757289886, 0.371069997549057, -1.121399998664856, -0.31126001477241516, -0.028116999194025993, -0.15352000296115875, 0.04626300185918808, 0.0881119966506958, -0.30849000811576843, 0.2911199927330017, 0.26403000950813293, 0.42719000577926636, -0.34049999713897705, 0.1041800007224083, 0.062320999801158905, 0.3102000057697296, -0.1408499926328659, -0.6470400094985962, -0.11184000223875046, -0.3789699971675873, 0.19210000336170197, 0.7177799940109253, 0.5410400032997131, -0.06356599926948547, -0.07503599673509598, -0.42344000935554504, 0.030515000224113464, -1.092900037765503, 0.449290007352829, -0.30285999178886414, -0.05439300090074539, 0.30504000186920166, 0.37779998779296875, -0.15199999511241913, -0.6460199952125549, 0.0035019998904317617, -0.3173699975013733, -0.21862000226974487, -0.15986000001430511, 0.7918099761009216, 0.05972500145435333, -0.1509000062942505, 0.46226999163627625, -0.18327000737190247, -0.28367000818252563, 0.18201999366283417, 0.12201999872922897, -0.005777500104159117, 0.5082899928092957, -0.12477999925613403, -0.1820099949836731, -0.12711000442504883, 0.02223999984562397, -0.043411001563072205, -0.2563900053501129, -0.3502100110054016, -0.11584000289440155, 0.1497800052165985, -0.2808699905872345, -0.6232699751853943, 0.041839998215436935, -0.37849000096321106, 0.13702000677585602, 0.4625000059604645, 
0.31520000100135803, -0.3492699861526489, -0.5148699879646301, 0.47793999314308167, -0.47211000323295593, 0.06896500289440155, 0.04206300154328346, 0.20796999335289001, -0.46062999963760376, -0.07726799696683884, 0.2194100022315979, 0.10565000027418137, 0.008249400183558464, 0.2724500000476837, -0.37880000472068787, 0.18285000324249268, -0.23850999772548676, -0.23803000152111053, 0.5057399868965149, 0.12291999906301498, 0.3009200096130371, 0.04097500070929527, 0.16286000609397888, 0.0921889990568161, 0.10074000060558319, -0.12800000607967377, -0.28922998905181885, 0.030912000685930252, -0.4964599907398224, 0.1638299971818924, 0.5025299787521362, -0.7382400035858154, -0.13186000287532806, -0.35128000378608704, -0.8575000166893005, 0.780269980430603, -0.18528999388217926, 0.2434300035238266, -0.9970300197601318, 0.04215500131249428, 0.2493000030517578, 0.025662999600172043, -0.2630000114440918, -0.06221200153231621, -0.16773000359535217, 0.6916599869728088, 0.011309999972581863, 0.3172900080680847, -0.6394699811935425, -0.10209999978542328, -0.20327000319957733, 0.47415998578071594, -0.1436299979686737, -0.3637300133705139, 0.24241000413894653, -0.05324700102210045, 0.5356199741363525, 0.2931300103664398, -0.11685000360012054, -0.14448000490665436, -0.026388999074697495, 0.19352999329566956, 0.61080002784729, -0.4250600039958954, -0.5867800116539001, -0.1386300027370453, 0.15971000492572784, -0.11920999735593796, 0.17622999846935272, -0.008022700436413288, -0.3856300115585327, 0.4962399899959564, -0.28029999136924744, 0.046500999480485916, 0.19912000000476837, -0.238429993391037, 0.11766999959945679, -0.010824000462889671, 0.013438999652862549, 0.14736999571323395, 0.39594998955726624, -0.3389599919319153, -1.0918999910354614, -0.24478000402450562, -0.3876799941062927, -0.07248000055551529, -0.5232700109481812, 0.6285399794578552, 0.18720999360084534, 0.7936699986457825, -0.5734900236129761, -0.1639000028371811, -0.1250700056552887, 0.47115999460220337, -0.4240100085735321, 0.22926999628543854, 0.7875199913978577, 0.29151999950408936, 0.4116100072860718, 0.004468199796974659, 0.3899799883365631, -0.27469000220298767, 0.17663000524044037, -0.06794200092554092, -0.3611299991607666, 0.17212000489234924, 0.4335100054740906, -0.21216000616550446, -0.5775600075721741, 0.4483500123023987, -0.11044000089168549, 0.2762199938297272, -0.08030200004577637, 0.022432999685406685, -0.1305599957704544, 0.9050700068473816, 0.7572399973869324, 0.44690999388694763, -0.049949001520872116, 0.03932100161910057, -0.2561799883842468, -0.015061999671161175, 0.24647000432014465, -0.12950000166893005, 0.31459999084472656, -0.7015399932861328, -0.3108200132846832, -0.09731300175189972, -0.08591300249099731, 0.7679200172424316, 0.2945899963378906, 0.5856500267982483, 0.5761200189590454, 0.359279990196228, 0.18246999382972717, 0.49358001351356506, 0.3626999855041504, -1.4315999746322632, -0.46276000142097473, -0.032113999128341675, 0.04262800142168999, -0.13220000267028809, 0.5428299903869629, 0.24456000328063965, -0.1214900016784668, 0.3379800021648407, -0.6558399796485901, -0.2924099862575531, -0.6022499799728394, 0.6264500021934509, 0.16839000582695007, 0.1530199944972992, 0.050641000270843506, 0.6398599743843079, 0.25516000390052795, -0.25940001010894775, 0.5928000211715698, -0.17903999984264374, 0.028286000713706017, 0.492000013589859, -0.15595999360084534, -0.15252000093460083, 0.1951099932193756, -0.05823900178074837, 0.14026999473571777, 0.015316000208258629, 0.43463999032974243, -0.38975998759269714, 
0.0024409000761806965, -0.033796001225709915, 0.07804200053215027, -0.7078800201416016, 0.4417400062084198, 0.11490000039339066, -0.014995000325143337, 0.062279000878334045, -0.5150399804115295, 0.3796199858188629, -0.4562000036239624, 0.015585999935865402, 0.03340499848127365, -0.27535998821258545, -0.3035700023174286, 0.3631199896335602, -0.22878000140190125, -0.12161999940872192, 0.133310005068779, -0.015417000278830528, -0.42552000284194946, -0.09953100234270096, -0.7592700123786926, 0.08535800129175186, -0.42236000299453735, 0.12310999631881714, -0.07830899953842163, -0.6050599813461304, 0.22946999967098236, -0.60930997133255, 0.07077299803495407, -0.17032000422477722, 0.14959999918937683, -0.02638299949467182, -0.704990029335022, -0.7353900074958801, -0.11537999659776688, -1.2410999536514282, 0.029536999762058258, -0.7642499804496765, 0.4077099859714508, 0.5358999967575073, -0.12793999910354614, 0.2875800132751465, -0.15876999497413635, -0.45684000849723816, 1.0506000518798828, -0.11748000234365463, 0.5167499780654907, -0.34022000432014465, -0.5885800123214722, 0.23378999531269073, 0.3157399892807007, 0.1094600036740303, 1.0147000551223755, -0.8254799842834473, -0.6924700140953064, 0.21558000147342682, 0.003895200090482831, 0.2953599989414215, 0.10051999986171722], u'deck': [0.69691002368927, -0.4374000132083893, -0.13625000417232513, -0.7106299996376038, -0.914929986000061, 0.5428299903869629, 0.19729000329971313, 0.09845999628305435, 0.0030793999321758747, -0.7409300208091736, 0.05412900075316429, 0.29629001021385193, 0.08075899630784988, -0.6501700282096863, -0.3341600000858307, 0.22018000483512878, 0.46525999903678894, 0.3407999873161316, 0.46261000633239746, -0.20417000353336334, 0.16092999279499054, -0.10333999991416931, -0.11806999891996384, -0.24074000120162964, 0.2425999939441681, 0.3346799910068512, 0.250789999961853, 0.7631499767303467, 0.16992999613285065, 0.40459999442100525, 0.06604199856519699, 0.2781600058078766, 0.29857999086380005, -0.4919399917125702, -0.39059001207351685, 0.17373999953269958, 0.7179099917411804, -0.4869000017642975, 0.040453001856803894, 0.5164700150489807, -0.06567899882793427, -0.09547200053930283, -0.46623000502586365, -0.023152999579906464, 0.12078999727964401, 0.22202999889850616, 1.0042999982833862, -0.325080007314682, 0.14590999484062195, -0.4718500077724457, -0.5068299770355225, 0.2634499967098236, -0.21773000061511993, -0.6363499760627747, -0.28349000215530396, -0.18341000378131866, 0.1960500031709671, 0.015974000096321106, 0.11800000071525574, 0.5454800128936768, 0.02817000076174736, -0.2699199914932251, 0.10920000076293945, 0.1384200006723404, 0.005056200083345175, -0.584089994430542, -0.42236998677253723, -0.10407000035047531, -0.33177998661994934, -0.09125299751758575, 0.29273998737335205, -0.09028299897909164, -0.06210099905729294, -0.0797630026936531, 0.21528999507427216, 0.40342000126838684, -0.12875999510288239, 0.6309199929237366, -0.3145099878311157, -0.3585500121116638, -0.3861599862575531, 0.21863999962806702, 0.14024999737739563, -0.1501300036907196, 0.5015100240707397, -0.5294899940490723, -0.24684999883174896, 0.020258000120520592, -0.08620300143957138, 0.0627020001411438, 0.644070029258728, 0.11206000298261642, 0.25505000352859497, 0.20242999494075775, -0.10660000145435333, 0.49424999952316284, -0.7115700244903564, -0.2545199990272522, 0.6300600171089172, -0.7193199992179871, -0.6070399880409241, 0.43880000710487366, -0.027922000735998154, 0.15880000591278076, 0.10409999638795853, 0.15421999990940094, 0.2420399934053421, 
-0.12806999683380127, -0.2630999982357025, -0.19005000591278076, -0.18519000709056854, 0.30410000681877136, 0.3132700026035309, -0.3409300148487091, -0.7431700229644775, 0.02123899944126606, -0.19077999889850616, -0.08377499878406525, -0.5788599848747253, -0.027431000024080276, -0.24100999534130096, -0.8638100028038025, 0.1374099999666214, 0.6240100264549255, -0.3848699927330017, -0.17685000598430634, 0.40880000591278076, -0.7565699815750122, 0.5967599749565125, 0.1837500035762787, -0.10745999962091446, 0.9901999831199646, 0.2269899994134903, 0.21472999453544617, 1.0563000440597534, 0.1211400032043457, 0.04199400171637535, -0.1426600068807602, -0.29493001103401184, 0.30270999670028687, -0.3529599905014038, 0.3832100033760071, 0.41718000173568726, 0.7058200240135193, -0.055309001356363297, -0.1169700026512146, 0.4711799919605255, 0.43057000637054443, -0.3125799894332886, 0.2535099983215332, -0.11665000021457672, -0.3036699891090393, 0.3680399954319, -0.9888299703598022, 0.47940999269485474, 0.12943999469280243, 0.473470002412796, 0.1965000033378601, -0.26750001311302185, 0.42260000109672546, 0.4845699965953827, -0.08237899839878082, -0.07620400190353394, 0.13961000740528107, 0.5386099815368652, 0.27289000153541565, 0.11027000099420547, 0.5671399831771851, 0.45739999413490295, 0.3138900101184845, -0.3137499988079071, 0.12953999638557434, 0.35791999101638794, -0.34782999753952026, 0.11586999893188477, -0.25905001163482666, -0.4595699906349182, 0.041085001081228256, -0.35653001070022583, -1.281599998474121, 0.5504199862480164, 0.3612399995326996, 0.5407900214195251, 0.3236199915409088, -0.4746299982070923, -0.2639000117778778, 0.8781399726867676, 0.590499997138977, 0.49818000197410583, 0.679610013961792, 0.13700999319553375, 0.6757000088691711, 0.15826000273227692, -0.15910999476909637, 0.16505999863147736, 0.09252800047397614, -0.14408999681472778, 0.1283400058746338, -0.2853899896144867, -0.49939998984336853, 0.5272499918937683, 0.07024700194597244, -0.14131000638008118, -0.5463500022888184, 0.6265900135040283, -0.06199999898672104, 0.2564699947834015, 0.11210999637842178, 0.25900998711586, -0.3294000029563904, 0.17476999759674072, -0.41095998883247375, -0.18583999574184418, -0.5845199823379517, 0.24878999590873718, -0.22044000029563904, -0.10166999697685242, -0.021967999637126923, 0.30601999163627625, -0.22926999628543854, 0.3323099911212921, 0.5666199922561646, 0.2862200140953064, -0.08959700167179108, 0.17831000685691833, 0.4972600042819977, -0.1788100004196167, -0.49803999066352844, 0.2033900022506714, -0.01592000015079975, 0.31327998638153076, -0.1381099969148636, -0.31543999910354614, 0.28547999262809753, 0.4487999975681305, -0.1222200021147728, -0.14895999431610107, -0.05668500065803528, -0.27987000346183777, -0.022300999611616135, 0.3535799980163574, 0.05703600123524666, -0.020421000197529793, -0.0033696999307721853, -0.15573999285697937, -0.6741600036621094, -0.24705000221729279, 0.2989000082015991, 0.04337399825453758, 0.44207999110221863, 0.4908500015735626, -0.37494000792503357, 0.6210600137710571, -0.18344999849796295, 0.07318200170993805, -0.1977899968624115, -0.35951000452041626, -0.3811100125312805, 0.3591800034046173, 0.1415800005197525, -0.37369000911712646, -0.5125799775123596, 0.08153899759054184, -0.5162100195884705, -0.2304600030183792, -0.051472000777721405, -0.3932900130748749, -0.016713999211788177, -0.0037444999907165766, -0.1876000016927719, 0.08778099715709686, -0.57396000623703, 0.3039900064468384, -0.06687399744987488, 0.07144399732351303, -0.36111000180244446, 
-1.0394999980926514, 0.09242600202560425, -0.7790899872779846, -0.03511200100183487, 0.1467600017786026, -0.08915399760007858, -0.5047500133514404, -0.5969300270080566, -0.3001300096511841, -0.25609999895095825, -0.08163800090551376, 0.17563000321388245, 0.18988999724388123, -0.40108001232147217, 0.09762699902057648, -0.12918999791145325, -0.9747499823570251, 0.124719999730587, -0.06618999689817429, 0.43408000469207764, 0.6983699798583984, -0.05463999882340431, -0.07890299707651138, 0.040130000561475754], u'laptop': [-0.3197399973869324, 0.09940899908542633, -0.4389300048351288, -0.6219900250434875, -0.008528199978172779, -0.21390999853610992, -0.12591999769210815, -0.5920799970626831, 0.2059600055217743, -1.097599983215332, -0.055291999131441116, 0.22302000224590302, 0.18996000289916992, -0.5440499782562256, -0.14157000184059143, 0.04588799923658371, -0.4753200113773346, -0.45403000712394714, 0.04497399926185608, -0.5168099999427795, 0.46459001302719116, -0.006091599818319082, -0.43261000514030457, -0.7541199922561646, -0.09027300029993057, -0.6691300272941589, 0.2708199918270111, 0.3124200105667114, 0.9091100096702576, -0.1919099986553192, 0.2019300013780594, 0.2066899985074997, -0.1155799999833107, 0.16946999728679657, -0.2565099895000458, -0.42590999603271484, -0.4595699906349182, -0.5330299735069275, -0.4637799859046936, 0.2397100031375885, 0.0917230024933815, -0.10288000106811523, -0.7892199754714966, 0.3324599862098694, 0.1102600023150444, -0.07122399657964706, 0.6078299880027771, 0.07784900069236755, -0.10509999841451645, -0.1695999950170517, 0.5229300260543823, -0.3479599952697754, -0.23378999531269073, -0.08229699730873108, 0.09516000002622604, -0.3396100103855133, 0.04419099912047386, 0.19259999692440033, 0.06369899958372116, 0.28233999013900757, -0.38982000946998596, 0.18195000290870667, -0.12083999812602997, 1.2324999570846558, -0.020860999822616577, 0.7671899795532227, -0.5358399748802185, 0.15178999304771423, -0.3432300090789795, -0.12007000297307968, -0.09675800055265427, -0.07334599643945694, 0.4356299936771393, -0.14846999943256378, 0.479310005903244, 0.3108600080013275, -0.43070000410079956, -0.3904600143432617, 0.12038999795913696, -0.09047900140285492, -0.3712100088596344, -0.42034000158309937, 0.10134000331163406, -0.0938820019364357, 0.13843999803066254, -0.16446000337600708, -0.6844599843025208, 0.29440999031066895, -0.0011560999555513263, -0.15252000093460083, -0.00358260003849864, 0.10377000272274017, -0.3623799979686737, -0.07077399641275406, 0.3465000092983246, -0.20412999391555786, -0.45486998558044434, -0.39724001288414, -0.0818059965968132, -0.25235000252723694, -0.5283499956130981, 0.6159300208091736, 0.10723000019788742, -0.5950000286102295, -0.032173000276088715, -1.1188000440597534, 0.3410100042819977, 0.22583000361919403, -1.032099962234497, -0.3566100001335144, -0.2975200116634369, 0.024728000164031982, 0.282370001077652, 0.24553999304771423, -0.23187999427318573, 0.19226999580860138, 0.3171600103378296, 0.2989499866962433, 0.14879000186920166, -0.1639299988746643, -0.20702999830245972, -0.5932899713516235, 0.351639986038208, -0.5176799893379211, -0.039131999015808105, -0.09025599807500839, -0.19136999547481537, -0.19857999682426453, -5.1574999815784395e-05, 0.1761299967765808, 0.06115199998021126, -0.11427000164985657, -0.22460000216960907, 0.1594499945640564, 0.6940900087356567, -0.1817999929189682, 0.7967600226402283, 0.6018199920654297, -0.11208000034093857, -0.10676000267267227, 0.0766569972038269, 0.4040200114250183, 0.35694000124931335, 
-0.18156999349594116, -0.16489000618457794, 0.190420001745224, 0.042716000229120255, -0.3733699917793274, -0.09698499739170074, 0.18820999562740326, 0.33096998929977417, 0.17222000658512115, 0.13019999861717224, -0.09499199688434601, -0.373199999332428, 0.15008999407291412, -0.08366599678993225, 0.4581100046634674, -0.27921000123023987, 0.1322699934244156, -0.27678000926971436, -0.17643000185489655, -0.16198000311851501, 0.1289599984884262, 0.4005100131034851, -0.34209001064300537, 0.027540000155568123, 0.34049999713897705, 0.02452000044286251, 0.11129999905824661, -0.5092300176620483, -0.41593998670578003, -0.8233699798583984, 0.5596399903297424, 0.11937999725341797, -1.0306999683380127, -0.11959999799728394, -0.22045999765396118, 0.006281800102442503, -0.946619987487793, 0.37046000361442566, 0.1314300000667572, 0.32989001274108887, -0.06603600084781647, 0.10290999710559845, -0.015588999725878239, 1.093400001525879, 0.09572499990463257, 0.6458100080490112, -0.35951998829841614, 0.21754999458789825, 0.052678000181913376, 0.3529700040817261, 0.27667000889778137, -0.4320800006389618, 0.2940100133419037, -0.5991899967193604, 0.2792400121688843, -0.06321600079536438, -0.08550199866294861, -0.11305999755859375, -0.14008000493049622, 0.3562699854373932, 0.2809999883174896, 0.15004999935626984, -0.585070013999939, -0.24605000019073486, 0.3669799864292145, -0.9398000240325928, -0.49480998516082764, 0.20930999517440796, -0.03752399981021881, -0.1067499965429306, 0.42195001244544983, 0.01066299993544817, -0.0981839969754219, -0.550790011882782, -0.18578000366687775, 0.1588200032711029, 0.6535000205039978, -0.42072001099586487, 0.4550899863243103, -0.40490999817848206, 0.5183899998664856, 0.42772001028060913, 0.9472000002861023, -0.08227299898862839, -0.13679000735282898, 0.1755799949169159, -0.18482999503612518, -0.12331999838352203, -0.21297000348567963, -0.37843000888824463, -0.034637998789548874, 0.29905998706817627, 0.262580007314682, 0.16068999469280243, 0.5089600086212158, -0.3084299862384796, 0.18508000671863556, 0.44137999415397644, -0.021534999832510948, -0.1449899971485138, -0.12835000455379486, 0.049880001693964005, 0.1035500019788742, 0.10927999764680862, -0.40099000930786133, -0.24247999489307404, 0.29774999618530273, -0.13009999692440033, 0.6781499981880188, -0.7126200199127197, 0.11727000027894974, -0.22732999920845032, 0.3942900002002716, 0.4095500111579895, 0.2748900055885315, 0.15384000539779663, 0.07833600044250488, -0.1302500069141388, -0.11405999958515167, -0.6139699816703796, 0.016589000821113586, 0.41130998730659485, -0.03935199975967407, 0.016479000449180603, -0.2939099967479706, 0.1035899966955185, 0.24490000307559967, 0.03974299877882004, 0.2199700027704239, 0.10600999742746353, -0.3029100000858307, -0.21254000067710876, -0.1254200041294098, -1.1943000555038452, 0.5750899910926819, -0.4384100139141083, 0.017083000391721725, 0.2617500126361847, 0.3351899981498718, 0.20221999287605286, 0.2803100049495697, -0.2176399976015091, -0.1438400000333786, 0.09978500008583069, -0.0837009996175766, -0.40358999371528625, -0.17118999361991882, 0.2846499979496002, -0.15994000434875488, -0.06177400052547455, -0.21613000333309174, -0.1444299966096878, 0.11918999999761581, 0.36340999603271484, 0.3137899935245514, -0.06488999724388123, -0.10329999774694443], u'ceramic': [0.29096001386642456, 0.32806000113487244, -0.3333500027656555, -0.8796300292015076, 0.13797999918460846, -0.35710999369621277, -0.22844000160694122, 0.09686099737882614, -0.22282999753952026, -0.49849000573158264, 
-0.27052998542785645, 0.22554999589920044, -0.33945998549461365, 0.4998700022697449, 0.05628599971532822, -0.08721499890089035, -0.3024899959564209, -0.0017562999855726957, -0.29809001088142395, -0.25282999873161316, -0.04520300030708313, -0.17353999614715576, 0.16926999390125275, 0.5368599891662598, -0.2620599865913391, -0.7876200079917908, -0.5418199896812439, 0.03324799984693527, 0.14775000512599945, 0.46959999203681946, 0.1491200029850006, 0.8819000124931335, -0.17856000363826752, -0.10531999915838242, 0.48232999444007874, 0.48899000883102417, 0.06590099632740021, 0.15470999479293823, 0.16227999329566956, 0.41089001297950745, -0.2509799897670746, -0.0540350005030632, 0.15862999856472015, -0.29725000262260437, -0.1667100042104721, 0.4029099941253662, 0.29166001081466675, -0.015351000241935253, -0.41648998856544495, 0.08175799995660782, 0.3480199873447418, 0.2819899916648865, 0.12906000018119812, 0.46977001428604126, 0.06429799646139145, 0.09673299640417099, -0.07455699890851974, 0.20237000286579132, 0.5577700138092041, 0.15835000574588776, -0.459850013256073, -0.21213999390602112, 0.18694999814033508, -0.03374199941754341, 0.5175399780273438, 0.3855699896812439, -0.27028998732566833, 0.15300999581813812, 0.16646000742912292, 0.27206000685691833, -0.31025999784469604, -0.5119600296020508, 0.09119699895381927, -0.11508999764919281, -0.5515599846839905, 0.5489000082015991, 0.007398500107228756, -0.5202500224113464, -0.10723999887704849, -0.3726100027561188, -0.93954998254776, -0.5260699987411499, -0.46977999806404114, -0.5259000062942505, 0.1742199957370758, 0.7323200106620789, 0.5035600066184998, 0.2131199985742569, -0.5149000287055969, -0.2639800012111664, 0.5360599756240845, -0.10221999883651733, 0.025575000792741776, 0.218189999461174, 0.04934300109744072, 0.11561000347137451, 0.05479700118303299, 0.022360000759363174, 0.10363999754190445, 0.20410999655723572, 0.07729999721050262, 0.3136099874973297, 0.003474599914625287, -0.25266000628471375, 0.3030500113964081, -0.35267001390457153, 0.14191000163555145, -0.05992399901151657, -0.6609899997711182, 0.15073999762535095, -0.011672000400722027, -0.03029699996113777, -0.5700899958610535, -0.7286700010299683, -0.1418199986219406, -0.09427200257778168, -0.3732700049877167, 0.2811200022697449, -0.002144899917766452, 0.24060000479221344, 0.4051699936389923, 0.5243300199508667, -0.21367999911308289, 0.26342999935150146, 0.020018000155687332, 0.21369999647140503, -0.02598400041460991, 0.17096999287605286, 0.23779000341892242, 0.258650004863739, 0.06009799987077713, 0.6063699722290039, 0.49351000785827637, 0.2884199917316437, -0.1036200001835823, -0.43274998664855957, -0.21616999804973602, 0.3667199909687042, 0.14562000334262848, 0.05089699849486351, 0.43748998641967773, 0.447519987821579, 0.05122299864888191, -0.9952499866485596, 0.8561099767684937, 0.12032999843358994, -0.5425300002098083, -0.19234000146389008, -0.27087000012397766, -0.4677799940109253, -0.08596699684858322, 0.028870999813079834, 0.029069000855088234, 0.13078999519348145, 0.15060000121593475, 0.18066999316215515, -0.8923500180244446, -0.15672999620437622, 0.08953599631786346, -0.17714999616146088, 0.0954039990901947, 0.21018999814987183, 0.04994799941778183, 0.012415999546647072, 0.6855300068855286, 0.27312999963760376, 0.4864499866962433, 0.3749200105667114, -0.02666199952363968, 0.11751999706029892, -0.037842001765966415, 0.42601001262664795, 0.3291899859905243, 0.11106999963521957, -0.3092699944972992, -0.012272999621927738, -0.593999981880188, 0.078063003718853, 
0.2068299949169159, -0.8553299903869629, -0.17412999272346497, -0.3673799932003021, 0.5479900240898132, -0.23966999351978302, 0.031029000878334045, -0.697700023651123, 0.5946499705314636, 0.23930999636650085, 0.23274999856948853, -0.05335899814963341, 0.8104100227355957, 0.23800000548362732, 0.18842999637126923, 0.744949996471405, -0.15017999708652496, 0.1604599952697754, -0.4240500032901764, 0.5793899893760681, -0.03870600089430809, -0.17430000007152557, -0.05190499871969223, 0.32785001397132874, 0.5059999823570251, -0.11683999747037888, 0.8556100130081177, -0.09983000159263611, 0.22630000114440918, 0.14796000719070435, -0.9079700112342834, -0.13503000140190125, 1.037500023841858, 0.43195000290870667, 0.3591800034046173, 0.08081399649381638, 0.11231999844312668, 0.1244100034236908, 0.2107899934053421, -0.4378499984741211, 0.04831400141119957, 0.258899986743927, 0.24922999739646912, 0.21765999495983124, 0.01648700051009655, -0.3374199867248535, -0.31301000714302063, 0.027674999088048935, -0.0830800011754036, 0.14687000215053558, 0.19317999482154846, 0.12279000133275986, -0.0193060003221035, -0.3012999892234802, -0.005157399922609329, 0.43369001150131226, 0.2845200002193451, 0.049876000732183456, -0.011303000152111053, -0.24264000356197357, 0.01685200072824955, 0.03271299973130226, -0.1256999969482422, -0.292279988527298, -0.5070099830627441, 0.3986800014972687, -0.7585999965667725, 0.3989900052547455, -0.8003100156784058, -0.4430699944496155, 0.05423500016331673, 0.8514500260353088, 0.6157600283622742, -0.6433200240135193, -0.3063899874687195, -0.2411399930715561, 0.11007999628782272, 0.6075199842453003, -0.032311998307704926, -0.1411599963903427, -0.11422999948263168, 0.025666000321507454, -0.20378999412059784, -0.2104800045490265, 0.5684400200843811, -0.19202999770641327, 0.44168999791145325, 0.34751999378204346, -0.10860999673604965, 0.8176400065422058, 0.19077999889850616, 0.07202400267124176, -0.22384999692440033, 0.14685000479221344, 0.655489981174469, 0.4004400074481964, -0.40132999420166016, 0.0806960016489029, -0.37167999148368835, 0.02910199947655201, -1.6109000444412231, -0.03283200040459633, -0.013608000241219997, -0.3787499964237213, -0.14643999934196472, -0.01447100006043911, 0.030779000371694565, 0.41912001371383667, 0.09714800119400024, -0.051725998520851135, 0.24536000192165375, -0.1320199966430664, -0.3731200098991394, 0.18570999801158905, 0.2585600018501282, 0.49818000197410583, 0.38051000237464905, 0.10650999844074249, 0.4054099917411804, -0.9791600108146667, 0.1843699961900711, 0.2357800006866455], u'paper': [-0.009166699834167957, 0.3133299946784973, 0.21573999524116516, -0.4844000041484833, -0.20145000517368317, -0.3103399872779846, -0.12467999756336212, 0.1876399964094162, 0.12060000002384186, -1.7350000143051147, 0.005279500037431717, -0.37244999408721924, 0.36862999200820923, -0.05180500075221062, 0.3111099898815155, -0.17445999383926392, -0.154339998960495, 0.34498000144958496, -0.21369999647140503, -0.493910014629364, 0.12894000113010406, -0.389490008354187, 0.15620000660419464, 0.7556700110435486, 0.025001000612974167, -0.40832000970840454, -0.11947000026702881, -0.21365000307559967, 0.3034999966621399, -0.5856500267982483, -0.1625099927186966, 0.07111000269651413, -0.5045099854469299, 0.31536000967025757, -1.1837999820709229, 0.10942000150680542, -0.38694000244140625, 0.226500004529953, -0.1339299976825714, 0.23969000577926636, -0.5136500000953674, -0.2578299939632416, 0.11444000154733658, 0.3739199936389923, 0.0893860012292862, 0.0463160015642643, 
0.2914600074291229, 0.23055000603199005, -0.19162000715732574, 0.22559000551700592, 0.5584499835968018, 0.13547000288963318, -0.002864199923351407, -0.027580000460147858, 0.14036999642848969, 0.5564000010490417, -0.16137999296188354, 0.11903999745845795, 0.18276000022888184, -0.41297999024391174, 0.49257999658584595, 0.4067699909210205, -0.15057000517845154, -0.0626320019364357, 0.22046999633312225, -0.4693000018596649, 0.17723999917507172, -0.4510500133037567, -0.1375100016593933, -0.13362999260425568, 0.40154001116752625, -0.23718999326229095, -0.1681399941444397, 0.22892999649047852, -0.07190900295972824, 0.5708699822425842, 0.34685999155044556, 0.3931800127029419, -0.30059999227523804, -0.019259000197052956, -0.7989100217819214, -0.5016999840736389, -0.31957998871803284, -0.11919999867677689, 0.05229799821972847, -0.08737500011920929, -0.5333999991416931, 0.08890999853610992, -0.3181299865245819, 0.29559001326560974, -0.1119299978017807, -0.23306000232696533, -0.20031000673770905, -0.0992870032787323, 0.10849999636411667, -0.01115499995648861, -0.3337000012397766, 0.15963999927043915, -0.1243399977684021, -0.9348800182342529, 0.26958000659942627, 0.02748199924826622, 0.16795000433921814, -0.15407000482082367, -0.15403999388217926, -0.10283999890089035, -0.21977999806404114, 0.3467999994754791, -0.19941000640392303, 0.20074999332427979, -0.26499998569488525, 0.12060999870300293, -0.1609800010919571, -0.3847300112247467, 0.12358000129461288, 0.14700999855995178, -0.05960699915885925, 1.0282000303268433, 0.3377000093460083, -0.5563899874687195, 0.16734999418258667, 0.023375999182462692, 0.453110009431839, 0.18964999914169312, -0.08354999870061874, 0.20201000571250916, -0.05205700173974037, -0.35938000679016113, -0.026081999763846397, -0.05106300115585327, -0.07021600008010864, 0.004509900230914354, 0.33006998896598816, 0.004580100066959858, -0.17990000545978546, 0.054917000234127045, -0.3544900119304657, 0.17339999973773956, 0.6235600113868713, -0.11945000290870667, 0.8096500039100647, 0.025310000404715538, -0.0760359987616539, -0.68927001953125, 0.41697999835014343, 0.5502899885177612, 0.11768999695777893, 0.177839994430542, 0.040681999176740646, -0.5024799704551697, 0.3255099952220917, 0.32565000653266907, -0.2141299992799759, -0.6255499720573425, -0.16440999507904053, -0.3764899969100952, -0.35179999470710754, -0.47352999448776245, 0.21554000675678253, -0.18925000727176666, -0.1791200041770935, -0.0038487999700009823, 0.3281500041484833, -0.019485000520944595, -0.027943000197410583, 0.167480006814003, 0.09610799700021744, 0.07386799901723862, -0.21402999758720398, 0.09362500160932541, -0.28029999136924744, -0.5480499863624573, -0.7960100173950195, 0.11749999970197678, 0.01576399989426136, -0.8767399787902832, 0.22468000650405884, 0.35350000858306885, 0.13624000549316406, -0.47124001383781433, 0.6100299954414368, -0.2046000063419342, 0.14500999450683594, 0.2095700055360794, -0.13234999775886536, -0.3615500032901764, -0.03981899842619896, -0.05695899948477745, 0.055355001240968704, 0.2267799973487854, 0.38133999705314636, 0.9799900054931641, 0.500249981880188, 0.050168998539447784, 0.1607699990272522, 0.5623000264167786, -0.21612000465393066, 0.11857999861240387, -0.44071999192237854, -0.0647210031747818, -0.38113000988960266, -0.0520160011947155, -0.2728100121021271, 0.24169999361038208, 0.1281599998474121, -0.46814998984336853, 0.15618999302387238, -0.013542000204324722, -0.3079499900341034, -0.20678000152111053, -0.10942000150680542, -0.08545900136232376, 0.2385299950838089, 
0.08461099863052368, 0.6839900016784668, 0.09225299954414368, -0.2515600025653839, -0.21142999827861786, 0.12976999580860138, 0.22427000105381012, 0.06334800273180008, 0.29589998722076416, -0.39570000767707825, 0.34532999992370605, -0.14253999292850494, 0.061101000756025314, -0.03179299831390381, 0.36392998695373535, 0.1590700000524521, -0.4318999946117401, 0.049157001078128815, 0.03246200084686279, -0.8956900238990784, -0.07560399919748306, 0.3447999954223633, -0.1779700070619583, -0.24866999685764313, -0.47843000292778015, -0.3652400076389313, -0.4975000023841858, -0.06752700358629227, -0.4250200092792511, -0.1118599995970726, -0.24121999740600586, -0.8824499845504761, 0.09088800102472305, 0.21852000057697296, -0.49445000290870667, 0.23729999363422394, 0.29245999455451965, 0.03074900060892105, -0.3163599967956543, 0.14194999635219574, -0.17996999621391296, 1.159000039100647, 0.17678000032901764, 0.005684900097548962, -0.0335719995200634, 0.16925999522209167, -0.55485999584198, 0.1505099982023239, -0.06223899871110916, -0.3303999900817871, -0.2874999940395355, 0.7548699975013733, 0.035760000348091125, -0.17118999361991882, 0.11343000084161758, -0.06799300014972687, 0.15911999344825745, -0.2078700065612793, 0.3206599950790405, 0.06883099675178528, 0.027248000726103783, -0.4615499973297119, 0.036708999425172806, -1.2466000318527222, -0.35089001059532166, 0.06853599846363068, 0.22759999334812164, 0.2426699995994568, -0.14012999832630157, -0.4547100067138672, -0.2965700030326843, 0.36924999952316284, 0.09262699633836746, 0.858240008354187, 0.08398400247097015, -0.12720000743865967, 0.15137000381946564, -0.25259000062942505, 0.2832599878311157, -0.1179099977016449, 0.34191998839378357, 0.2184399962425232, 0.9151399731636047, 0.5141100287437439, 0.007992600090801716, -0.2508400082588196, 0.05672299861907959], u'keyboard': [-0.516510009765625, 0.09091100096702576, -0.15025000274181366, -0.6994400024414062, -0.3595600128173828, -0.20853999257087708, -0.6225600242614746, -0.775629997253418, 0.4004400074481964, -0.45462000370025635, -0.35763999819755554, 0.3844900131225586, -0.017059000208973885, -0.007276200223714113, 0.5527499914169312, -0.29787999391555786, -0.1676200032234192, -0.23332999646663666, 0.13050000369548798, 0.09944900125265121, 0.2721799910068512, 0.06843899935483932, -0.57955002784729, -0.07878699898719788, -0.26993998885154724, 0.49448999762535095, 0.3315100073814392, 0.2828400135040283, 0.27160000801086426, -0.042753998190164566, -0.35784000158309937, 0.20915000140666962, -0.5829799771308899, -0.08330900222063065, -0.3259100019931793, -0.6766899824142456, 0.1771399974822998, -0.6737200021743774, -0.23093999922275543, 1.0160000324249268, -0.05075199902057648, 0.006068299990147352, -0.8216599822044373, -0.20377999544143677, 0.31644999980926514, 0.3520500063896179, 0.605929970741272, 0.00024734000908210874, 0.031222999095916748, -0.06225400045514107, 0.6196100115776062, 0.35060998797416687, 0.20295999944210052, 0.09565400332212448, 0.33212000131607056, 0.007508900016546249, 0.3944999873638153, -0.1041800007224083, 0.058451998978853226, 0.4007900059223175, 0.5192800164222717, 0.3424699902534485, 0.5424900054931641, 0.4711199998855591, 0.28352999687194824, 0.4379200041294098, 0.14219999313354492, -0.08105599880218506, 0.20633000135421753, -0.05450500175356865, 0.298909991979599, 0.04256799817085266, 0.31786999106407166, -0.10892999917268753, -0.0368489995598793, -0.01971299946308136, -0.18352000415325165, 0.6676200032234192, 0.24116000533103943, -0.4736100137233734, 
0.06381300091743469, 0.01809299923479557, -0.5497900247573853, -0.6638299822807312, -0.055344998836517334, -0.007459099870175123, -0.7695199847221375, 0.6425700187683105, -0.1520099937915802, 0.1489199995994568, -0.14122000336647034, 0.6054499745368958, -0.37770000100135803, -0.6582199931144714, 0.6379799842834473, 0.37867000699043274, -0.16266000270843506, -1.108199954032898, 0.37160998582839966, -0.23354999721050262, -0.9902300238609314, 0.25275999307632446, -0.04953400045633316, -0.9568600058555603, -0.3727799952030182, -0.37782999873161316, -0.31992000341415405, 0.2337000072002411, -0.6390699744224548, -0.45124000310897827, 0.34384000301361084, 0.842199981212616, -0.01068700011819601, 0.6490700244903564, -1.2288000583648682, -0.2614299952983856, -0.12338999658823013, 0.1377599984407425, -0.7955099940299988, -0.41909998655319214, -0.31433001160621643, -0.5230600237846375, 0.07140100002288818, -1.2239999771118164, -0.029637999832630157, -0.18559999763965607, -0.37860000133514404, 0.15672999620437622, -0.5562499761581421, 0.16202999651432037, 0.154789999127388, -0.026683000847697258, -0.2501299977302551, -0.5713899731636047, 0.061879001557826996, -0.14319999516010284, 0.782480001449585, 0.09357199817895889, 0.44106000661849976, -0.3117400109767914, -0.03965200111269951, 0.02897699922323227, -0.07759100198745728, -0.32881999015808105, -0.190420001745224, 0.14678999781608582, -0.04083799943327904, -0.04810599982738495, -0.3776400089263916, 0.16027000546455383, -0.6079999804496765, -0.7069299817085266, 0.21772000193595886, -0.5675699710845947, 0.38938000798225403, 0.03588400036096573, 0.04171700030565262, 0.32528001070022583, 0.32368001341819763, 0.38995999097824097, 0.21977999806404114, 0.07033199816942215, -0.1012599989771843, -0.2243800014257431, 0.1130400002002716, 0.028697000816464424, -0.24115000665187836, 0.6093800067901611, -0.025808999314904213, 0.02915799990296364, -1.0017999410629272, -0.17093999683856964, -0.07370500266551971, 0.7739199995994568, -0.2693299949169159, -0.6563699841499329, 0.0644410029053688, -0.31244999170303345, -0.6209400296211243, -0.28909000754356384, -0.4030500054359436, 0.18190999329090118, 0.450080007314682, 0.380840003490448, -0.27423998713493347, -0.7322099804878235, 0.6711900234222412, 0.1812800019979477, 0.17184999585151672, -0.12471000105142593, -0.17047999799251556, 0.223580002784729, 0.6314200162887573, -0.33956000208854675, 0.19803999364376068, 0.14012999832630157, -0.17541000247001648, 0.289110004901886, -0.26006001234054565, -0.6520199775695801, 0.7555400133132935, 0.26982998847961426, 0.511139988899231, 0.07364899665117264, 0.06162000074982643, 0.03284800052642822, -0.35220998525619507, 0.10385999828577042, -0.38850998878479004, -0.11095999926328659, 0.35409998893737793, 0.06132199987769127, 0.7140100002288818, -0.10377000272274017, 0.5850099921226501, 0.3919999897480011, 0.015714000910520554, -0.2078000009059906, -0.4268699884414673, 0.7204499840736389, -0.6637899875640869, 0.4914900064468384, -0.7437999844551086, 0.2843399941921234, 0.038247000426054, -0.01247899979352951, -0.20138999819755554, 0.19133000075817108, 0.10537000000476837, -0.5995100140571594, 0.285970002412796, 0.25238001346588135, -0.34929001331329346, -0.17178000509738922, 0.4633600115776062, 0.7087500095367432, -0.3457599878311157, 0.8426399827003479, -0.8436999917030334, 0.31439998745918274, 0.4044100046157837, -0.14751000702381134, 0.02610900066792965, 0.057263001799583435, -0.47898000478744507, -0.05691799893975258, -0.27792999148368835, 0.1576700061559677, 
-0.24025000631809235, 0.12397000193595886, -0.024519000202417374, -0.19177000224590302, -0.20859000086784363, -0.33855998516082764, -0.5284900069236755, 0.522059977054596, 0.6395000219345093, -0.03539599850773811, -0.01571599952876568, 0.04297100007534027, -0.5798299908638, -0.30349001288414, -0.48969998955726624, 0.1761000007390976, 0.21588000655174255, -0.2731499969959259, -0.07138500362634659, 0.11761999875307083, -0.2807599902153015, -0.0912109985947609, -0.3280099928379059, -0.12910999357700348, 0.19210000336170197, -0.7851999998092651, -0.252020001411438, -0.0347369983792305, -0.6778799891471863, -0.21328000724315643, -0.6976799964904785, 0.1593800038099289, -0.21747000515460968, 0.2808299958705902, -0.5977299809455872, 0.3565100133419037, 0.07142700254917145, -0.5395799875259399, -0.5880299806594849, -0.14239999651908875, -0.6807500123977661, -0.5361199975013733, 0.4502300024032593, -0.41238000988960266, -0.008000900037586689, -0.39638999104499817, -0.16068999469280243, 0.1805499941110611, 0.33899998664855957, 0.17309999465942383, 0.09473499655723572, -0.6293500065803528], u'chair': [0.26287999749183655, -0.34735000133514404, -0.06681100279092789, -0.5413699746131897, -0.1654299944639206, -0.41130000352859497, -0.15191000699996948, -0.7251099944114685, -0.09152600169181824, -1.073099970817566, -0.21671999990940094, 0.18073999881744385, 0.0071319998241961, -0.3741599917411804, 0.4346199929714203, 0.4003700017929077, 0.2278600037097931, 0.6564300060272217, -0.021995000541210175, -0.2340800017118454, 0.011830000206828117, 0.5048400163650513, -0.35343000292778015, -0.04830100014805794, -0.16913999617099762, 0.1082099974155426, 0.2671400010585785, -0.19253000617027283, 0.1493300050497055, -0.3642599880695343, 0.227510005235672, -0.34850001335144043, 0.4112299978733063, 0.4484800100326538, -1.0097999572753906, 0.5613600015640259, 0.2844499945640564, -0.46062999963760376, -0.21184000372886658, 0.2615799903869629, 0.05406000092625618, -0.3945100009441376, -0.7251600027084351, -0.2441900074481964, 0.22812999784946442, -0.04066399857401848, -0.17152999341487885, -0.3496299982070923, 0.23770000040531158, 0.23925000429153442, -0.35809001326560974, 0.2228499948978424, 0.04532900080084801, -0.1647700071334839, -0.16047999262809753, 0.1600400060415268, -0.14869999885559082, -0.2853600084781647, 0.06768900156021118, 0.1628500074148178, 0.16503000259399414, 0.41007000207901, 0.19621999561786652, 0.4339900016784668, -0.17326000332832336, -0.8454700112342834, -0.48517000675201416, 0.1844100058078766, -0.48346999287605286, -0.16788999736309052, -0.393669992685318, 0.22717000544071198, -0.012973000295460224, 0.005787400063127279, -0.11799000203609467, -0.07160300016403198, -0.027664000168442726, 0.6246500015258789, 0.11687999963760376, -0.852150022983551, -0.16933999955654144, -0.061496999114751816, -0.2391200065612793, 0.16617999970912933, 0.2426300048828125, -0.40026000142097473, 0.10967999696731567, 0.9017000198364258, -0.5371400117874146, -0.06190799921751022, 0.27382999658584595, 0.021372999995946884, -0.08977899700403214, -0.1812400072813034, 0.015042000450193882, -0.4366599917411804, -0.04990300163626671, 0.03473899886012077, 0.10565999895334244, -0.06626500189304352, 0.2614299952983856, 0.3048900067806244, 0.09348300099372864, -0.07574799656867981, 0.3256300091743469, -0.055994998663663864, 0.08477400243282318, 0.02731800079345703, -0.2612200081348419, -0.5651500225067139, -0.14590999484062195, 0.03209900110960007, -0.6313300132751465, 0.02594600059092045, -0.24345999956130981, 
0.18219000101089478, -0.12268000096082687, -0.16399000585079193, 0.09029699862003326, 0.3832100033760071, -0.5982699990272522, 0.2570199966430664, -0.41776999831199646, -0.32479000091552734, 0.07399400323629379, -0.2951599955558777, -0.13134999573230743, -0.3954100012779236, 0.1166900023818016, 0.1421400010585785, 0.23186999559402466, -0.298660010099411, 0.08726199716329575, -0.38218000531196594, -0.4986500144004822, -0.2582400143146515, -0.06451600044965744, 0.584309995174408, -0.5666999816894531, 0.08214300125837326, 0.2921999990940094, 0.46549999713897705, 0.22288000583648682, 0.016992000862956047, 0.21743999421596527, 0.356550008058548, 0.06735800206661224, -0.019022999331355095, -0.35231998562812805, -0.12150999903678894, 0.6293100118637085, 0.36017000675201416, 0.5008400082588196, -0.2669599950313568, -0.6836000084877014, 0.25891000032424927, 0.08094999939203262, -0.4337399899959564, 0.05247100070118904, 0.3930499851703644, 0.16621999442577362, 0.717739999294281, 0.11807999759912491, -0.3293200135231018, 0.40801000595092773, 0.006457299925386906, -0.043032001703977585, 0.2934199869632721, 0.2556700110435486, 0.42704999446868896, -0.5355200171470642, 0.6764900088310242, -0.10678000003099442, -0.3977999985218048, -0.12916000187397003, -0.30145999789237976, -0.1408900022506714, 0.9960100054740906, -0.13050000369548798, -0.5565800070762634, 0.17820000648498535, -0.1392499953508377, 0.05777899920940399, 0.4366399943828583, 0.3500100076198578, -0.22999000549316406, -0.6261799931526184, 0.40345001220703125, -0.032496001571416855, -0.022292999550700188, 0.582859992980957, -0.15579000115394592, 0.11271999776363373, -0.2690899968147278, -0.43672001361846924, 0.45899999141693115, -0.08141600340604782, 0.65420001745224, -0.24706999957561493, 0.14470000565052032, 0.8074899911880493, 0.13561999797821045, 0.19220000505447388, 0.03100699931383133, -0.07134100049734116, -0.4036099910736084, 0.4171299934387207, -0.10425999760627747, -0.19787000119686127, -0.022537000477313995, 0.428739994764328, -0.11561000347137451, 0.12082000076770782, -0.13224999606609344, 0.09584800153970718, 0.32152000069618225, -0.597000002861023, -0.7554799914360046, -0.25992000102996826, -0.6354900002479553, -0.3130300045013428, 0.047325000166893005, -0.1944199949502945, 0.11597999930381775, -0.14563000202178955, 0.26155000925064087, -0.18571999669075012, 0.23457999527454376, -0.4477500021457672, -0.03043000027537346, 0.40136998891830444, -0.1163799986243248, -0.15205000340938568, 0.06029000133275986, 0.21924999356269836, -0.06702599674463272, -0.3574100136756897, 0.17170999944210052, -0.33893001079559326, -0.3023200035095215, 0.13413000106811523, 0.09029799699783325, -0.38745999336242676, 0.10395999997854233, -0.21243999898433685, 0.6090800166130066, -0.09574700146913528, -0.26721999049186707, -0.5835599899291992, -0.13920000195503235, -0.27570000290870667, -0.37957999110221863, 0.5087299942970276, -0.5353099703788757, 0.2642599940299988, 0.24383999407291412, -0.5884900093078613, -0.22609999775886536, 0.13120000064373016, -0.06507200002670288, -0.5305799841880798, -0.41470998525619507, 0.37222999334335327, 0.5992900133132935, -0.16639000177383423, -0.8086199760437012, 0.4308600127696991, 0.5467399954795837, 0.0002752399886958301, 0.7196699976921082, -0.6357399821281433, -0.22461000084877014, -0.12460000067949295, -0.353630006313324, -0.12981000542640686, -0.34681999683380127, -1.007699966430664, 0.4916900098323822, 0.16505999863147736, 0.28804001212120056, -0.6623799800872803, -0.42212000489234924, -0.268449991941452, 
-0.5160499811172485, 0.03111799992620945, 0.49397000670433044, -0.03925900161266327, 0.43592000007629395, -0.0017541999695822597, -0.329010009765625, 0.40112999081611633, 0.05714000016450882, -0.22851000726222992, -0.014979000203311443, -0.5059400200843811, 0.6295099854469299, 0.10739000141620636, 0.10382000356912613, -0.21383999288082123, 0.8368399739265442], u'milk': [0.1580599993467331, 0.7882300019264221, -0.05890500172972679, -0.20678000152111053, -0.07413200289011002, -0.33476999402046204, -0.016550999134778976, 0.12451999634504318, -0.04703599959611893, -0.9745699763298035, -0.33827999234199524, -1.354200005531311, -0.11853999644517899, -0.08978699892759323, -0.224140003323555, 0.022633999586105347, -0.023969000205397606, -0.23534999787807465, -0.45862001180648804, 0.46491000056266785, -0.05933700129389763, 0.0012326999567449093, 0.1951500028371811, 0.21275000274181366, -0.6399199962615967, -0.2803899943828583, -0.512470006942749, -0.12139999866485596, -0.4227199852466583, -0.3751800060272217, -1.2879999876022339, 0.45021000504493713, -0.21514999866485596, -0.08137000352144241, -0.1682800054550171, 1.0740000009536743, 0.5863900184631348, 0.2500300109386444, 0.28147000074386597, -0.08528400212526321, -0.672730028629303, -0.2300100028514862, 0.5469899773597717, -0.3288800120353699, -0.07443299889564514, -0.07081999629735947, -0.2245199978351593, 0.2079000025987625, 0.11399000138044357, 0.28248000144958496, 0.23951999843120575, 0.5756199955940247, 0.22811999917030334, 0.021656999364495277, 0.007967500016093254, 0.44137001037597656, 0.3181000053882599, 0.506630003452301, -0.407260000705719, -0.29840999841690063, 0.20155000686645508, 0.3490299880504608, 0.13040000200271606, -0.5171599984169006, -0.004941800143569708, -0.0602400004863739, -0.18689000606536865, -0.4040699899196625, -0.4755899906158447, 0.09739600121974945, 0.16584999859333038, -0.03692600131034851, -0.13439999520778656, -0.30281999707221985, -0.0013096999609842896, -0.19842000305652618, 0.3572799861431122, -0.43209001421928406, -0.33998000621795654, 0.220660001039505, -0.09944400191307068, 0.07326900213956833, -0.2926799952983856, -0.005749300122261047, 0.15147000551223755, -0.2012999951839447, 0.25231999158859253, -0.17282000184059143, -0.29826000332832336, -0.38065001368522644, 0.18448999524116516, -0.07777900248765945, -0.13651999831199646, 0.20938999950885773, -0.40511998534202576, 0.24651999771595, 0.24751000106334686, -0.024102000519633293, -0.5044999718666077, 0.4476099908351898, 0.4109399914741516, 0.18087999522686005, 0.027851000428199768, -0.6047800183296204, -0.10147000104188919, -0.0033811000175774097, -0.2738899886608124, -0.35815000534057617, -0.08317500352859497, 0.7244600057601929, 0.05825100094079971, 0.229980006814003, -0.5896999835968018, -0.1071700006723404, -0.18674999475479126, 0.5962499976158142, 0.2654399871826172, 0.5790899991989136, -0.04557200148701668, -0.6993600130081177, -0.8087599873542786, 0.256630003452301, 0.45245999097824097, 0.6049500107765198, 0.08347400277853012, -0.468860000371933, 0.39125001430511475, 0.1784600019454956, -0.22457000613212585, -0.020476000383496284, 0.19464999437332153, -0.04759500175714493, 0.13402000069618225, 1.0053000450134277, -0.1599700003862381, 0.11009000241756439, -0.33709999918937683, 0.002159199910238385, -0.034609001129865646, 0.7600600123405457, 0.7500500082969666, 0.258109986782074, -0.1798200011253357, -0.4456000030040741, -0.5230100154876709, -0.07207199931144714, -0.09433499723672867, 0.015796000137925148, 0.8754400014877319, -0.2894600033760071, 
-0.7467300295829773, 0.3243899941444397, 0.3559100031852722, 0.4259899854660034, -0.11395999789237976, -0.24609999358654022, -0.35962000489234924, -0.48917001485824585, 0.039691999554634094, -0.49277999997138977, 0.16333000361919403, -0.011978000402450562, -0.08750800043344498, -0.5123999714851379, -0.04473999887704849, -0.0867609977722168, -0.011491999961435795, -0.4089600145816803, -0.3234499990940094, -0.11145000159740448, 0.8706499934196472, 0.7098699808120728, -0.32548999786376953, 0.4471699893474579, -0.26280999183654785, -0.5411800146102905, 0.1359100043773651, -0.7397199869155884, 0.3251599967479706, -0.296750009059906, -0.4438199996948242, 0.5437600016593933, -0.006426199804991484, -0.21708999574184418, 0.08110599964857101, -0.6612799763679504, 0.9485499858856201, -0.42462000250816345, 0.623170018196106, -0.5222200155258179, 0.36017000675201416, 1.1901999711990356, -0.27542001008987427, -0.020097000524401665, -0.5267199873924255, -0.35120999813079834, -0.14163999259471893, -0.15518000721931458, 0.00153929996304214, 0.5573599934577942, 0.3988899886608124, -0.44312000274658203, 0.6738799810409546, 0.284960001707077, -0.17587999999523163, -0.1345600038766861, 0.13233999907970428, 0.3457399904727936, -0.64683997631073, -0.1751600056886673, 0.08346500247716904, -0.09916699677705765, -0.38763999938964844, 0.17204000055789948, -0.5371500253677368, -0.5995000004768372, 0.12377999722957611, -0.7326400279998779, 0.3926999866962433, 0.036986999213695526, 0.11212000250816345, 0.6006500124931335, 0.0413689985871315, -0.7774900197982788, -0.1216999962925911, -0.11048000305891037, 0.05912800133228302, 0.3351399898529053, 0.29354000091552734, 0.18817000091075897, 0.1631699949502945, -0.008710700087249279, -0.6405500173568726, -0.06720700114965439, 0.920989990234375, 0.45796000957489014, 0.3363899886608124, 0.07987499982118607, -0.4713299870491028, -0.5886499881744385, -0.8215399980545044, -0.2997500002384186, -0.26787999272346497, -0.1125900000333786, -1.3270000219345093, 0.022292999550700188, 0.445279985666275, 0.41762998700141907, -0.022009000182151794, -0.16019999980926514, 0.34352999925613403, -0.45142999291419983, 0.2002899944782257, 0.5572900176048279, -0.1722099930047989, 0.062279000878334045, -0.3955000042915344, 0.06017399951815605, 0.6518800258636475, -0.12976999580860138, -0.36844000220298767, -0.20155000686645508, 0.12999999523162842, -0.48708000779151917, 0.34193000197410583, -0.10388000309467316, 0.2318599969148636, -0.5546200275421143, 0.07915499806404114, 0.06605000048875809, -0.17350000143051147, 0.33601999282836914, -0.35161998867988586, -0.000979120028205216, 0.1831900030374527, 0.3419100046157837, -1.281599998474121, -0.38446998596191406, -0.36375001072883606, -0.246629998087883, -0.4165000021457672, -0.29712000489234924, 0.4579800069332123, -0.16561000049114227, -0.20555000007152557, 1.2928999662399292, 0.5939300060272217, -0.4812999963760376, 0.31029999256134033, 0.7236599922180176, -0.3996500074863434, 0.02092299982905388, 0.22832000255584717, 0.043248001486063004, 0.1295900046825409, -0.024810999631881714, 0.2974100112915039, -0.9133099913597107, -0.23362000286579132, 0.24119000136852264], u'roots': [-0.19890999794006348, 0.11631999909877777, -0.35732999444007874, 0.23071999847888947, 0.18400000035762787, -0.44363999366760254, -0.1539199948310852, -0.10354000329971313, 0.44839999079704285, -0.6488800048828125, -0.2261500060558319, 0.5501599907875061, 0.1413400024175644, 0.35385000705718994, 0.11189000308513641, 0.11274000257253647, -0.2631799876689911, 0.19717000424861908, 
0.08624500036239624, 0.2935599982738495, -0.27292001247406006, -0.09246300160884857, 0.036226000636816025, 0.030706999823451042, -0.1370600014925003, -0.1411300003528595, 0.29697999358177185, 0.007127800025045872, -0.4349299967288971, 1.066499948501587, -0.37382999062538147, 0.4985100030899048, -0.8560600280761719, -0.3397600054740906, -0.37918001413345337, -0.11828000098466873, 0.800320029258728, 0.1157900020480156, 0.11356999725103378, -0.00483600003644824, 0.011397000402212143, -0.045513998717069626, 0.6478800177574158, -0.31273001432418823, 0.4712100028991699, 0.00917030032724142, 0.26326000690460205, -0.010001000016927719, -0.28505000472068787, -0.08539900183677673, 0.26756998896598816, 0.2978000044822693, -0.122529998421669, -0.2601200044155121, -0.4005599915981293, -0.10640999674797058, 0.19228999316692352, -0.3370400071144104, 0.058302998542785645, 0.3569900095462799, 0.7301099896430969, 0.008640400134027004, 0.4413599967956543, 0.47418999671936035, -0.13467000424861908, 0.23317000269889832, 0.04541200026869774, 1.0631999969482422, 0.1931699961423874, 0.2529299855232239, -0.0655829980969429, -0.33917000889778137, -0.2384600043296814, 0.6130200028419495, -0.46459001302719116, -0.3643600046634674, 0.22123000025749207, 0.008185399696230888, 0.15564000606536865, 0.22477999329566956, -0.2673400044441223, 0.5306699872016907, 0.3654699921607971, -0.41547998785972595, 0.03828199952840805, 0.22925999760627747, 0.2590399980545044, -0.19784000515937805, -0.33531999588012695, 0.03922000154852867, 0.20262999832630157, -0.10137999802827835, 0.31255000829696655, -0.20111000537872314, 0.02492300048470497, -1.1593999862670898, 0.4219900071620941, 0.05191199854016304, 0.38016000390052795, -0.5419700145721436, -0.1929199993610382, 0.1642400026321411, 0.016398999840021133, -0.21007999777793884, -0.45458000898361206, 0.10569000244140625, 0.42858999967575073, -0.25679001212120056, 0.5730400085449219, 0.09234300255775452, 0.6782699823379517, -0.4771299958229065, 0.23615999519824982, -0.2848399877548218, 0.012253999710083008, -0.26243001222610474, -0.1302099972963333, 0.20337000489234924, -0.07750000059604645, -0.6873900294303894, -0.3562000095844269, -0.19563999772071838, 0.3805299997329712, 0.28979000449180603, 0.21626999974250793, 0.4782100021839142, 0.1712699979543686, 0.44132000207901, -0.09697499871253967, 0.17835000157356262, -0.10046999901533127, 0.3029400110244751, 0.36539000272750854, -0.5506899952888489, -0.01336199976503849, -0.2533099949359894, -0.029222000390291214, 0.6012799739837646, -0.47683000564575195, 0.06351199746131897, 0.49467000365257263, 0.1921599954366684, 0.07274799793958664, -0.30713000893592834, 0.2122800052165985, 0.6492499709129333, -0.5218300223350525, -0.21472999453544617, -0.002536599989980459, 0.5972499847412109, -0.020889999344944954, 0.047745998948812485, -0.32207000255584717, -0.025622999295592308, 0.3583900034427643, 0.34782999753952026, 0.4174399971961975, -0.13597999513149261, 0.5496299862861633, 0.29315000772476196, -0.06792200356721878, 0.4375, -0.40669000148773193, 0.23308999836444855, -0.22046999633312225, 0.029097000136971474, -0.20201000571250916, -0.48927000164985657, 0.05811699852347374, -0.24829000234603882, 0.39864999055862427, -0.16601000726222992, -0.6187000274658203, 0.4637799859046936, 0.6260899901390076, 0.5808799862861633, 0.06145999953150749, -0.03550200164318085, 0.15395000576972961, -0.17118999361991882, -0.05191799998283386, 0.27507999539375305, 0.2469100058078766, 0.23734000325202942, -0.09836400300264359, 0.5217400193214417, 
-0.026667000725865364, 0.3767299950122833, 0.7433500289916992, -0.41479000449180603, -0.478769987821579, -0.1294499933719635, 0.06263300031423569, 0.5688700079917908, 0.11135999858379364, -0.07127600163221359, 0.46560001373291016, -0.2138500064611435, -0.39337998628616333, 0.43536999821662903, 0.825469970703125, -0.04288699850440025, 0.49724000692367554, 0.5552099943161011, 0.08378300070762634, -0.33941999077796936, 0.03849099949002266, 0.6405900120735168, -0.008678499609231949, 0.8011299967765808, -0.4174799919128418, -0.05693599954247475, 0.21875999867916107, 0.34143000841140747, -0.16148999333381653, 0.5150099992752075, 0.544160008430481, 0.23653000593185425, 0.18062999844551086, 0.30234000086784363, 0.4678399860858917, -0.19769999384880066, -0.43494001030921936, -0.4562700092792511, -0.2782300114631653, -0.5544400215148926, -0.050732001662254333, 0.05240900069475174, -0.22339999675750732, 0.3985700011253357, 0.31558001041412354, 0.2779799997806549, -0.02501400001347065, 0.5418300032615662, -0.5199599862098694, -0.3347499966621399, 0.11486999690532684, -0.5906800031661987, -0.6252999901771545, -0.10155999660491943, -0.12281999737024307, -0.4355100095272064, 0.1882600039243698, -0.3259899914264679, -0.8993399739265442, -0.2630299925804138, 0.213809996843338, 0.19683000445365906, -0.35326001048088074, 0.3477500081062317, 0.14704999327659607, 0.4181100130081177, -0.15074999630451202, 0.09931299835443497, 0.3148699998855591, 0.007646599784493446, 0.32541000843048096, -0.22300000488758087, -0.2537800073623657, 0.03277000039815903, 0.5575799942016602, -0.12489999830722809, 0.45614001154899597, 0.007024699822068214, -0.047210998833179474, 0.4726400077342987, -0.23377999663352966, 0.43511998653411865, 0.18233999609947205, 0.5705400109291077, -0.3294200003147125, 0.12908999621868134, -0.2918199896812439, -0.42465999722480774, -0.2038699984550476, 0.08919800072908401, -1.0736000537872314, -0.08311200141906738, 0.8252900242805481, -0.1979999989271164, -0.4365200102329254, -0.41262999176979065, -0.24984000623226166, 0.10764999687671661, -0.4654900133609772, 0.08481500297784805, 0.1354600042104721, 0.32989999651908875, 0.4083000123500824, 0.08812399953603745, 0.19639000296592712, 0.13027000427246094, 0.08715199679136276, 0.11757999658584595, -0.06256700307130814, 0.41593000292778015, -0.07584699988365173, -0.21299000084400177, -0.1878499984741211, 0.09532000124454498], u'carpet': [0.6022999882698059, 0.16408999264240265, 0.3446800112724304, -0.5269200205802917, -0.23120999336242676, -0.30935999751091003, -0.22423000633716583, 0.22099000215530396, 0.2876400053501129, -0.32047000527381897, 0.4043099880218506, -0.01571499928832054, 0.2666800022125244, -0.1313299983739853, 0.021014999598264694, -0.3890100121498108, -0.6150199770927429, 0.3624599874019623, -0.1824900060892105, 0.11221999675035477, -0.20728999376296997, -0.34900999069213867, 0.07086899876594543, 0.28216999769210815, -0.16895000636577606, -0.13650000095367432, -0.36991000175476074, -0.5695800185203552, 0.17007000744342804, 0.360260009765625, 0.05866200104355812, 0.003904900047928095, -0.09133099764585495, 0.08667699992656708, -0.7409899830818176, 0.17188000679016113, -0.1487800031900406, -0.5667399764060974, 0.021389000117778778, -0.11674000322818756, -0.34869998693466187, -0.19102999567985535, 0.31227999925613403, -0.2275400012731552, 0.43891000747680664, 0.1775899976491928, 0.7806299924850464, 0.11221999675035477, -0.16765999794006348, -0.6744999885559082, 0.3827599883079529, -0.31047001481056213, 0.2553499937057495, 0.2753100097179413, 
-0.012098999693989754, -0.02462499961256981, -0.4157699942588806, -0.6743500232696533, 0.7452700138092041, -0.08000999689102173, -0.021150000393390656, -0.5877199769020081, -0.058573998510837555, 0.2610799968242645, 0.6595399975776672, -0.15415999293327332, 0.2992999851703644, 0.16189000010490417, -0.5498300194740295, 0.125450000166893, 0.8374199867248535, 0.19732999801635742, 0.07498499751091003, -0.32135000824928284, 0.51569002866745, 0.5159299969673157, 0.1297599971294403, -0.3421500027179718, 0.6105499863624573, -0.026838000863790512, -0.209989994764328, -0.6682299971580505, -0.12701000273227692, -0.47082000970840454, 0.5005300045013428, 0.10903000086545944, -0.25150999426841736, -0.46992000937461853, -0.25196000933647156, 0.18497000634670258, 0.3126299977302551, -0.3989900052547455, 0.2943800091743469, 0.20728999376296997, -0.018908999860286713, -0.09216699749231339, 0.42364001274108887, 0.008761200122535229, 0.5197399854660034, -0.12981000542640686, 0.5846899747848511, 0.727869987487793, -0.2866100072860718, 0.28407999873161316, 0.12437000125646591, -0.21080000698566437, 0.5133699774742126, -0.13792000710964203, -0.11063999682664871, 0.23047000169754028, 0.22062000632286072, 0.32958000898361206, -0.1376499980688095, -0.15097999572753906, -0.17892000079154968, 0.2944900095462799, 0.15599000453948975, 0.24065999686717987, -0.567359983921051, -0.40692999958992004, 0.24390000104904175, -0.29458001255989075, 0.624970018863678, 0.7133499979972839, 0.610450029373169, 0.4713999927043915, 0.209989994764328, -0.07009000331163406, 0.28446000814437866, -0.043772000819444656, 0.0374470017850399, 0.05378299951553345, -0.11495999991893768, 0.6305099725723267, -0.42239001393318176, -0.20387999713420868, -0.22104999423027039, 0.07767300307750702, -0.3296299874782562, -0.24295000731945038, 0.2937600016593933, 0.336650013923645, 0.28738000988960266, -0.28797999024391174, 0.3401600122451782, 0.29548999667167664, -0.14907999336719513, -0.06903199851512909, 0.030362000688910484, 0.23371000587940216, -0.12841999530792236, -0.05620000138878822, -0.44137001037597656, -0.20882000029087067, 0.16283999383449554, 0.26489999890327454, 0.13604000210762024, -0.1659799963235855, 0.46698999404907227, -0.08998200297355652, 0.2399200052022934, 0.00680350000038743, -0.19674000144004822, 0.12099000066518784, 0.4700999855995178, 0.5191599726676941, 0.49660998582839966, -0.23711000382900238, 0.16627000272274017, 0.47200000286102295, -0.42181000113487244, -0.30208998918533325, 0.5261600017547607, -0.07861199975013733, -0.4461599886417389, -0.013539000414311886, 0.3637099862098694, 0.6146399974822998, -0.054069001227617264, -0.5083199739456177, -0.02906700037419796, -0.022518999874591827, 0.19878000020980835, 0.1324699968099594, -0.08847799897193909, 0.2458599954843521, 0.7593299746513367, 0.43533000349998474, 0.5113800168037415, -0.0031993999145925045, 0.21945999562740326, 0.3021399974822998, -0.4129999876022339, -0.6958600282669067, 0.5581099987030029, 0.0687979981303215, -0.08137699961662292, 0.6065199971199036, -0.040998999029397964, -0.30410000681877136, 0.9818800091743469, -0.22139999270439148, 0.03657799959182739, 0.006023699883371592, 0.923009991645813, 0.18485000729560852, 0.029412999749183655, 0.08692199736833572, -0.5613899827003479, -0.036786001175642014, -0.4990299940109253, 0.4927299916744232, -0.015401000156998634, -0.2510800063610077, 0.4150800108909607, -0.4205699861049652, 0.24658000469207764, -0.5856299996376038, -0.6213499903678894, -0.32798999547958374, 0.8840600252151489, -0.37358999252319336, 
0.017312999814748764, 0.03265099972486496, -0.4531500041484833, -0.298909991979599, 0.3584499955177307, 0.08102499693632126, 0.08153899759054184, -0.33427000045776367, 0.6594700217247009, -0.03332800045609474, 0.4715699851512909, 0.4955199956893921, 0.9209799766540527, 0.021679000928997993, 0.2890399992465973, -0.5443900227546692, 0.2925199866294861, -0.16292999684810638, -0.22620999813079834, -0.12556999921798706, -0.7642599940299988, 0.30629000067710876, 0.022272000089287758, 0.21188999712467194, -0.6499900221824646, -0.22120000422000885, -0.2515299916267395, 0.37703999876976013, 0.43303999304771423, -0.3405500054359436, 0.06711400300264359, -0.4897199869155884, 0.8231599926948547, -0.13526000082492828, -0.21127000451087952, 0.1609800010919571, 0.6484400033950806, -0.14614999294281006, 0.0037754999939352274, -0.2997699975967407, 0.4064199924468994, 0.22283999621868134, 0.5862299799919128, -0.3505699932575226, 0.09504300355911255, 0.3059999942779541, 0.6176400184631348, -0.33809998631477356, 0.17986999452114105, -0.4346599876880646, -0.5706899762153625, -0.4101699888706207, -0.19115999341011047, 0.0701029971241951, -0.7644100189208984, 0.02881000004708767, -0.06799600273370743, 0.333050012588501, -0.5267099738121033, -0.057739000767469406, 0.1544799953699112, 0.2960599958896637, 0.3680199980735779, 0.6439200043678284, 0.3500800132751465, 0.28338998556137085, -0.44176000356674194, -0.06122000142931938, 0.8133999705314636, 0.5333200097084045, -0.16538000106811523, 0.8967999815940857, -0.7287999987602234, 0.2905299961566925, 0.8054500222206116, 0.45085999369621277, -0.08126100152730942, 0.7598299980163574], u'tire': [1.0125000476837158, 0.5006800293922424, -0.3492699861526489, -0.3671700060367584, -0.7460600137710571, -0.4223000109195709, 0.04518499970436096, 0.377020001411438, -0.20502999424934387, -0.5750399827957153, -0.050815001130104065, 0.0347759984433651, 0.07670500129461288, -0.2539899945259094, -0.21156999468803406, -0.2752400040626526, -0.05556600168347359, 0.3274799883365631, 0.3310700058937073, 0.04201500117778778, 0.0819770023226738, 0.7226399779319763, 1.041700005531311, 0.12796999514102936, -0.9150500297546387, 0.04868699982762337, -0.3884600102901459, 0.06117900088429451, -0.0006185999955050647, -0.10372000187635422, -0.123539999127388, 0.5887200236320496, 0.162650004029274, 0.13018999993801117, -0.6390600204467773, 0.1448799967765808, -0.7425199747085571, 0.17587000131607056, 0.33774998784065247, 0.4050000011920929, -0.2577199935913086, -0.05798200145363808, 0.050501998513936996, -0.46386998891830444, -0.15520000457763672, 0.3158999979496002, 0.003759700106456876, -0.24594999849796295, -0.35830000042915344, 0.4684300124645233, 0.19668999314308167, 0.09707300364971161, -0.0605349987745285, 0.36197999119758606, 0.3384700119495392, 0.33623000979423523, 0.19492000341415405, -0.2045699954032898, -0.059856001287698746, 0.10705000162124634, 0.02020999975502491, 0.10786999762058258, 0.30138999223709106, -0.5902199745178223, 0.3542500138282776, 0.15433000028133392, -0.03650199994444847, -0.1783200055360794, -0.16329999268054962, 0.8699100017547607, -0.2739199995994568, -0.13892999291419983, 0.444130003452301, 0.7191399931907654, -0.41589000821113586, 0.24959999322891235, -0.5811899900436401, -0.8042899966239929, -0.11108999699354172, -0.5197799801826477, -0.2754000127315521, 0.22333000600337982, 0.0858369991183281, 0.5522199869155884, -0.9371799826622009, 0.30138999223709106, 0.21414999663829803, -0.01964700035750866, -0.07277899980545044, 1.0382000207901, 0.48923999071121216, 
0.07148200273513794, 0.054680999368429184, -0.09137000143527985, 0.4475800096988678, 0.10036999732255936, -0.11258000135421753, -0.12838999927043915, -0.6423400044441223, -0.7128900289535522, -0.7397400140762329, 0.7812399864196777, 0.068572998046875, -0.6719300150871277, 0.19303999841213226, 0.06831599771976471, -0.4320800006389618, 0.39041000604629517, 0.444599986076355, 0.47995999455451965, 0.31321001052856445, -0.2685199975967407, -0.11685000360012054, -0.278219997882843, -0.2081499993801117, -0.05432099848985672, -0.05465000122785568, 0.24833999574184418, -0.32016000151634216, 0.11269000172615051, 0.17970000207424164, -0.09021099656820297, 0.6362400054931641, -0.26743000745773315, -0.8658699989318848, -0.050999999046325684, -0.5239899754524231, -0.10429999977350235, 0.2712399959564209, 0.08341000229120255, 0.38850000500679016, 0.6070700287818909, 0.34132999181747437, 0.44422999024391174, 0.027025999501347542, 0.5666999816894531, -0.3440200090408325, -0.15916000306606293, 0.16809000074863434, -0.19417999684810638, 0.46417000889778137, -0.14168000221252441, 0.7452800273895264, -0.15202000737190247, -0.08281400054693222, 0.3936299979686737, -0.718779981136322, -0.37891000509262085, 0.241239994764328, 0.3598000109195709, -0.03017299994826317, -0.08826100081205368, 0.03422499820590019, -0.3912000060081482, 0.7809799909591675, -0.5808299779891968, 0.5374699831008911, -0.2707799971103668, 0.1396300047636032, -0.3288300037384033, 0.3068000078201294, -0.8100799918174744, -0.8544800281524658, 0.16548000276088715, 0.7173600196838379, -0.18086999654769897, 0.17145000398159027, 0.7568299770355225, 0.2777400016784668, 0.446370005607605, -0.5047299861907959, 0.3059900104999542, 0.64478999376297, 0.22944000363349915, 0.46535998582839966, 0.06398700177669525, 0.042374998331069946, 0.03903299942612648, 0.562749981880188, 0.3304100036621094, 0.3625499904155731, -0.2406499981880188, -0.16598999500274658, 0.11540000140666962, 0.2773500084877014, -0.5482000112533569, 1.003499984741211, 0.5533499717712402, 0.3804300129413605, -0.027452999725937843, 0.3118399977684021, -0.2501699924468994, -0.7132300138473511, 0.09541799873113632, 0.047495000064373016, 0.21428999304771423, 0.40560999512672424, -0.4190399944782257, 0.3626599907875061, 0.13731999695301056, 0.2573400139808655, 0.20393000543117523, 0.857230007648468, -0.3125399947166443, 0.08967699855566025, 0.43094000220298767, -0.0641620010137558, -0.4518600106239319, -0.41907998919487, 0.0802370011806488, -0.13946999609470367, -0.04527999833226204, 0.41506001353263855, -0.3752399981021881, -0.18443000316619873, -0.13312000036239624, 0.03919500112533569, 0.003468800103291869, -0.005438200198113918, -0.5455800294876099, 0.5348600149154663, -0.06086299940943718, 0.22604000568389893, 0.09922400116920471, 0.6825000047683716, 0.4595000147819519, 0.1602800041437149, 0.44850000739097595, 0.11131999641656876, -0.32714998722076416, 0.6012700200080872, -0.550000011920929, -0.13241000473499298, -0.11080999672412872, 0.1076899990439415, 0.18979999423027039, 0.44117000699043274, -0.25665000081062317, 0.022079000249505043, -0.1032399982213974, 0.0713450014591217, -0.8023099899291992, 0.04469100013375282, -0.06649699807167053, -0.33678001165390015, -0.18588000535964966, 0.09856399893760681, -0.4162899851799011, 0.7382299900054932, 0.13485999405384064, 0.2823599874973297, -0.4436900019645691, -0.2527399957180023, -0.35335999727249146, 0.4942300021648407, 0.03414199873805046, -0.8171899914741516, -0.09656599909067154, -0.21299000084400177, -0.026135999709367752, 
-0.06534399837255478, -0.7734900116920471, -0.3580699861049652, 0.42879998683929443, 0.3815999925136566, -0.006460899952799082, -0.4143899977207184, -0.08447200059890747, 0.17664000391960144, -0.23202000558376312, -0.1962299942970276, -0.04444200173020363, 0.10621999949216843, 0.07670699805021286, -0.32627999782562256, -0.05210699886083603, -0.5983200073242188, 0.08668600022792816, -0.17844000458717346, 0.579990029335022, 0.2175000011920929, -0.11037000268697739, 0.0961180031299591, -0.23670999705791473, 0.12195000052452087, 0.14835000038146973, 0.47986000776290894, -0.8471900224685669, -0.6442300081253052, -0.020161999389529228, 0.46445998549461365, -0.08032199740409851, -0.26280999183654785, -0.10382000356912613, 0.1292400062084198, 0.1009799987077713, -0.25453001260757446, 0.45285001397132874, 0.2258400022983551, -0.27281999588012695], u'sky': [-0.02631399966776371, -0.21511000394821167, 0.7220399975776672, 0.1962299942970276, -0.5379599928855896, 0.367000013589859, -0.5703099966049194, 0.46239998936653137, -0.23777000606060028, -0.8634999990463257, 0.1628900021314621, -0.2936199903488159, 0.3343299925327301, 0.1888899952173233, 0.24337999522686005, 0.7423700094223022, 0.002166799968108535, -0.11714000254869461, 0.23875999450683594, 0.07627800107002258, 0.1057400032877922, 0.4585599899291992, 0.04949000105261803, 0.6210700273513794, 0.5119500160217285, -0.5013999938964844, 0.18643000721931458, 0.02526099979877472, -0.1485999971628189, -0.30722999572753906, 0.3679099977016449, -0.5548800230026245, -0.625819981098175, 0.121069997549057, -0.7511399984359741, 0.41780000925064087, -0.2055799961090088, -0.39239999651908875, -0.12825000286102295, 0.657010018825531, 0.02577199973165989, 0.07030799984931946, -0.09190700203180313, 0.777999997138977, 0.005051900167018175, -0.3305499851703644, 0.21613000333309174, 0.4873200058937073, 0.2708300054073334, -0.9314900040626526, -0.07851800322532654, -0.0009389999904669821, 0.1712699979543686, -0.7330700159072876, -0.5172600150108337, 0.4720500111579895, -0.020462999120354652, 0.008467099629342556, 0.38482001423835754, -0.16966000199317932, -0.5202599763870239, 0.4186300039291382, 0.5118200182914734, 0.09505099803209305, 0.14345000684261322, -0.03013000078499317, 0.15567000210285187, 0.20674000680446625, 0.07333199679851532, -0.32583001255989075, 0.034536998718976974, 0.27261000871658325, -0.44249001145362854, -0.09140300005674362, 0.0635640025138855, 0.5067899823188782, -0.09900099784135818, -0.17885999381542206, 0.18558000028133392, 0.09766100347042084, -0.02224699966609478, 0.3191399872303009, 0.10361000150442123, 0.1964700073003769, -0.16193999350070953, 0.2366500049829483, 0.14650000631809235, 0.2690800130367279, 0.1846500039100647, -0.05592900142073631, 0.259799987077713, -0.013791999779641628, -0.02143700048327446, 0.2925800085067749, -0.2675899863243103, 0.4132699966430664, -0.04095999896526337, 0.06119399890303612, 0.10187000036239624, -0.17990000545978546, 0.23463000357151031, 0.22597000002861023, -0.4406200051307678, 0.6581799983978271, 0.05048400163650513, 0.2943600118160248, 0.08011999726295471, 0.46755000948905945, -0.042562998831272125, -0.09205999970436096, -0.09654500335454941, -0.2983199954032898, 0.9643200039863586, -0.14222000539302826, 0.20348000526428223, -0.25251999497413635, -0.2781200110912323, 0.23176999390125275, 0.0007631899788975716, -0.7354599833488464, 0.03572800010442734, -0.13555000722408295, -0.11455000191926956, 0.8334699869155884, 0.12065999954938889, -0.5934799909591675, -0.35677000880241394, 0.7792400121688843, 
-0.33472999930381775, -0.34393998980522156, -0.3331100046634674, 0.3606700003147125, 0.19912999868392944, 0.6769499778747559, 0.30866000056266785, -0.14143000543117523, -0.15514999628067017, 0.14007000625133514, -0.17817999422550201, -0.24532000720500946, -0.1480100005865097, 0.3624500036239624, -0.10685999691486359, -0.2431900054216385, -0.4590100049972534, -0.031366001814603806, 0.5648400187492371, 0.5408899784088135, -0.2446800023317337, -0.1072700023651123, 0.36726000905036926, 0.39465001225471497, -0.003948899917304516, -0.21515999734401703, 0.45364001393318176, -0.037613000720739365, 0.591159999370575, -0.13030000030994415, -0.22857999801635742, -0.3564999997615814, -0.047717999666929245, -0.7384300231933594, 0.475739985704422, 0.11437000334262848, 0.5913500189781189, -0.013233000412583351, 0.05935300141572952, 0.3376699984073639, -0.43533000349998474, 0.330159991979599, -0.39778000116348267, 0.29732000827789307, 0.3846000134944916, 0.30149999260902405, -0.23589999973773956, 0.1541299968957901, -0.21657000482082367, 0.20455999672412872, 0.08562800288200378, 0.12801000475883484, 0.11283999681472778, 0.20205000042915344, 0.5295799970626831, -0.05208799988031387, -0.09955800324678421, -0.4620699882507324, 0.7719900012016296, -0.13091999292373657, -0.49755001068115234, -0.41971999406814575, 0.08098600059747696, 0.10332000255584717, -0.5033199787139893, -0.33204999566078186, -0.10910999774932861, -0.36438000202178955, -0.42506998777389526, 0.12387999892234802, -0.22226999700069427, 0.13003000617027283, 1.5613000392913818, 0.17709000408649445, -0.4491199851036072, -0.09582400321960449, -0.29488998651504517, 0.2103399932384491, -0.07987000048160553, 0.11031000316143036, 0.20056000351905823, -0.2531900107860565, -0.03889799863100052, 0.2733500003814697, -0.19516000151634216, -0.38569000363349915, -0.10457000136375427, 0.008478599600493908, 0.6632400155067444, 0.030435999855399132, 0.0828080028295517, -0.4027999937534332, 0.863860011100769, 0.14507000148296356, 0.2176699936389923, -0.5141900181770325, -0.5190899968147278, 0.15425999462604523, -0.2540299892425537, -0.35879001021385193, 0.4064500033855438, -0.3332099914550781, 0.34035998582839966, 0.2190299928188324, 0.11383000016212463, -0.4647499918937683, -0.16853000223636627, -0.14775000512599945, -0.39882999658584595, 0.08050200343132019, -0.8762000203132629, -0.49952998757362366, -0.12010999768972397, -0.1274300068616867, -0.47207000851631165, 0.048395998775959015, 0.35975998640060425, 0.18980999290943146, -0.37053000926971436, -0.38286998867988586, -0.10439000278711319, 0.1321299970149994, -0.2382200062274933, 0.5006800293922424, 0.10941000282764435, -0.020653000101447105, -0.25777000188827515, -0.07558099925518036, -0.6736400127410889, -0.07993000000715256, -0.024802999570965767, 0.47363999485969543, 0.3543199896812439, 0.018657000735402107, 0.23295000195503235, 0.003118300111964345, -0.4269599914550781, -0.48510000109672546, 0.03892200067639351, 0.0702190026640892, -0.24286000430583954, 0.34727001190185547, 0.18783999979496002, 0.25900998711586, 0.037498001009225845, -0.22065000236034393, -0.021453000605106354, -0.27417999505996704, -1.270400047302246, 0.024389000609517097, -0.17258000373840332, -0.1375499963760376, -0.22618000209331512, 0.2342499941587448, -0.6434000134468079, -0.2792600095272064, 0.22002999484539032, -0.5743399858474731, -0.34821000695228577, -0.1783200055360794, -0.09125900268554688, -0.04607899859547615, 0.5280500054359436, 0.43915000557899475, -0.8200500011444092, 0.1710900068283081, -0.14970999956130981, 
0.03767399862408638, 0.2010200023651123, -0.03651199862360954, -0.1030300036072731, -0.08137200027704239], u'lake': [-0.4185999929904938, 0.017361000180244446, -0.6574900150299072, -0.3973200023174286, 0.3380799889564514, 0.16595999896526337, 0.48050999641418457, -0.01699800044298172, -0.037675000727176666, -0.7266799807548523, -0.13384999334812164, 0.050641998648643494, -0.22875000536441803, 0.06661900132894516, 0.43024998903274536, 0.13015000522136688, -0.09972299635410309, 0.3322399854660034, 0.38763999938964844, 0.6245499849319458, -1.1957000494003296, -0.023615000769495964, -0.27713000774383545, 0.35051000118255615, 0.41940999031066895, 0.020733999088406563, -0.023298000916838646, -0.19892999529838562, -0.03473600000143051, 0.2347699999809265, 0.5961599946022034, 0.23532000184059143, -0.26969999074935913, 0.17475000023841858, 0.020570000633597374, 0.3936299979686737, 0.14893999695777893, 0.0905890017747879, -0.34314000606536865, -0.5323399901390076, -0.7184600234031677, 0.2424899935722351, 0.516700029373169, 1.0823999643325806, 0.1714099943637848, 0.4431400001049042, 0.6402400135993958, 0.5467399954795837, 0.6172900199890137, 0.8371300101280212, -0.29745998978614807, 0.2861599922180176, -0.3441700041294098, -0.08467800170183182, 0.46050000190734863, -0.1605300009250641, 0.24108999967575073, 0.2749899923801422, 0.4967299997806549, 0.437610000371933, -0.004288400057703257, 0.09171800315380096, 0.5394799709320068, -0.08484599739313126, 0.2718699872493744, -0.49573999643325806, -1.0992000102996826, 0.09044499695301056, -0.45146000385284424, -0.37560999393463135, 0.4055800139904022, -0.8655700087547302, 0.1081399992108345, 0.5349100232124329, -1.0657999515533447, 0.05038600042462349, 0.5169299840927124, 0.569570004940033, 0.15727999806404114, -0.5072799921035767, -0.048990000039339066, 0.3195199966430664, 0.07800199836492538, -0.3591800034046173, 0.49000999331474304, -0.128930002450943, -0.51774001121521, 0.12268999963998795, 0.09758199751377106, -0.6491400003433228, -0.07332699745893478, 0.3867799937725067, 0.7206599712371826, 0.024560000747442245, 0.4801500141620636, 0.38672998547554016, 0.2955299913883209, -0.13659000396728516, 0.07972999662160873, 0.01677200011909008, 0.49632999300956726, 0.18242999911308289, -0.40059998631477356, -0.0671980008482933, -0.23819999396800995, 0.8135700225830078, 0.33087000250816345, -0.018644999712705612, -0.2700200080871582, -0.057714998722076416, 0.060398999601602554, -0.4062899947166443, -0.13616999983787537, -0.4077500104904175, -0.11368999630212784, -0.3425599932670593, 0.26653000712394714, 0.6821200251579285, -0.4092000126838684, 0.543150007724762, -0.24934999644756317, -0.12125000357627869, -0.3596000075340271, 0.05407699942588806, 0.016951000317931175, -0.0633540004491806, 0.5711399912834167, 0.12009000033140182, -0.006327900104224682, -0.47929999232292175, -0.507889986038208, 0.5483999848365784, 0.8992900252342224, -0.3071900010108948, 0.4668799936771393, 0.16783000528812408, -0.03912600129842758, -0.0729999989271164, -0.19085000455379486, -0.036782000213861465, 0.049616001546382904, -0.41214999556541443, -0.3992899954319, -0.32747000455856323, -0.36228999495506287, -0.3013699948787689, 0.8098700046539307, -0.4106999933719635, -0.09696400165557861, -0.006721800193190575, 0.842960000038147, 0.5695599913597107, -0.07679200172424316, -0.3212699890136719, 0.8727499842643738, 0.21797999739646912, 0.4418700039386749, -0.0334319993853569, -0.19130000472068787, 0.08118599653244019, 0.2984200119972229, -0.8860499858856201, 0.1813499927520752, 
0.13923999667167664, -0.46950000524520874, -0.12599000334739685, 0.03966199979186058, -0.17648999392986298, 0.13410000503063202, 0.22144000232219696, -0.4716300070285797, 0.5821499824523926, 0.04631299898028374, 0.24244000017642975, 0.19925999641418457, 0.2951200008392334, 0.5077999830245972, -0.18264000117778778, -0.12926000356674194, 0.2490600049495697, -0.1867700070142746, 0.8432199954986572, -0.033969998359680176, -0.15084999799728394, 0.33215001225471497, -0.7080100178718567, -0.14695000648498535, -0.4000299870967865, -0.05258899927139282, -0.14812999963760376, 0.32179999351501465, 0.5656099915504456, -0.24948999285697937, -0.056386999785900116, -0.3294900059700012, 0.06014300137758255, 0.5208699703216553, -1.1858999729156494, -0.03716300055384636, 0.41005998849868774, 1.307800054550171, -0.6196100115776062, -0.4647600054740906, -0.5207300186157227, -0.40365999937057495, -0.16300000250339508, -0.28839999437332153, -0.13804000616073608, -0.04821600019931793, 0.2408200055360794, 0.028551999479532242, 0.48945000767707825, 0.17155000567436218, -0.6380800008773804, 0.23082999885082245, 0.2552799880504608, -0.04844199866056442, 0.10095000267028809, 0.16175000369548798, -0.9472200274467468, 0.8845400214195251, -0.19548000395298004, -0.024744000285863876, 0.07474499940872192, 0.8663399815559387, 0.05909400060772896, 0.0018780999816954136, 0.10262999683618546, 0.1660500019788742, -0.003298999974504113, 0.357340008020401, -0.20065000653266907, -0.2997100055217743, -0.4977400004863739, -0.04432599991559982, 0.20424999296665192, -0.024900000542402267, 0.14330999553203583, 0.49327000975608826, -0.7701500058174133, 0.2333800047636032, 0.5857599973678589, 0.11857999861240387, -0.37349000573158264, -0.7671999931335449, -0.4333899915218353, 0.40035000443458557, 0.1530500054359436, -0.1282300055027008, -0.1410199999809265, -0.17985999584197998, 0.030215999111533165, 0.5425999760627747, -0.5472000241279602, 0.3329800069332123, -0.10107000172138214, -0.07327000051736832, -0.2788499891757965, -0.06503699719905853, -0.11939000338315964, -0.4634000062942505, -0.3260599970817566, 0.01011900044977665, -0.17725999653339386, -0.5975300073623657, -0.5046399831771851, 1.1511000394821167, -0.060058001428842545, 0.06997100263834, -0.016746999695897102, -0.0444829985499382, 0.07627800107002258, 0.3555299937725067, 1.0546000003814697, 0.9896799921989441, 0.2555899918079376, -1.2197999954223633, 0.2347699999809265, 0.07054000347852707, -0.5222899913787842, -0.14322000741958618, 0.9070500135421753, -0.08330199867486954, -0.6820799708366394, -0.4507400095462799, -0.43283000588417053, 0.26034000515937805, 0.05567900091409683, 0.45612001419067383, 0.08878599852323532, -0.5658000111579895, -0.11924999952316284, -0.2872599959373474, 0.6148499846458435, -0.4973900020122528, -0.40832000970840454, 0.2856299877166748, 0.5578100085258484, -0.14098000526428223, 0.135670006275177], u'sugar': [-0.40450000762939453, 0.2778100073337555, 0.10035999864339828, 0.08349400013685226, -0.3647100031375885, -0.6815199851989746, -0.09159000217914581, 0.024819999933242798, 0.00750999990850687, -0.7480900287628174, -0.49160000681877136, -0.9749900102615356, -0.04872199892997742, 0.4165700078010559, -0.004584699869155884, 0.009519600309431553, -0.7276300191879272, 0.05014000087976456, -0.3006899952888489, -0.1274999976158142, -0.6680300235748291, -0.14262999594211578, 0.7099900245666504, 0.38058000802993774, -0.764930009841919, -0.0011931000044569373, -0.5756700038909912, -0.04459099844098091, -1.253000020980835, -0.14119000732898712, 
-0.6205599904060364, 0.795799970626831, -0.6817799806594849, -0.049003999680280685, -0.40095001459121704, 0.9579799771308899, 0.36535000801086426, 0.35335999727249146, -0.5702900290489197, 0.251910001039505, -0.5676500201225281, -0.6066499948501587, 0.3712100088596344, 0.17272000014781952, 0.2737399935722351, -1.0450999736785889, -0.6182399988174438, -0.4057900011539459, 0.035815998911857605, 0.36879000067710876, 0.6897199749946594, 0.4776900112628937, -0.5020400285720825, 0.21765999495983124, -0.6435700058937073, -0.003627199912443757, -0.16674000024795532, 0.09730999916791916, 0.5424500107765198, -0.3609499931335449, -0.23995999991893768, -0.28745999932289124, -0.06606400012969971, 0.19074000418186188, 0.458979994058609, -0.10777000337839127, -0.32829999923706055, 0.5212399959564209, -0.29951998591423035, -0.13533000648021698, 0.24178999662399292, 0.37435001134872437, -0.8556299805641174, 0.054962001740932465, -0.29728999733924866, 0.04898500069975853, 0.1689399927854538, -0.8374300003051758, -0.007092200219631195, 0.17726999521255493, 0.5040799975395203, 0.39789000153541565, 0.06350400298833847, 0.038109999150037766, 0.2701900005340576, -0.11309999972581863, -0.5763800144195557, -0.18795999884605408, 0.36719998717308044, -0.717519998550415, 0.049323998391628265, -0.2639999985694885, -0.05143800005316734, 0.19922000169754028, -0.4713299870491028, -0.2989000082015991, 0.6114199757575989, 0.1052900031208992, -0.15790000557899475, 0.46911001205444336, 0.14619000256061554, -0.012984000146389008, 0.2721500098705292, -0.6993700265884399, -0.019453000277280807, -0.1309400051832199, -0.23578999936580658, -0.3182399868965149, -0.48041999340057373, 0.36327001452445984, 0.20489999651908875, -0.39921000599861145, 0.24818000197410583, 0.7075200080871582, 0.008642500266432762, 0.16946999728679657, 0.3899500072002411, 0.8349400162696838, 0.1817300021648407, 0.3427700102329254, -0.4681299924850464, -0.05234599858522415, 0.06570500135421753, -0.13395999372005463, -0.1263899952173233, -0.33583998680114746, 0.3023799955844879, 0.38541001081466675, -0.26930001378059387, 0.6486999988555908, -0.030260000377893448, 1.2324999570846558, -0.093299001455307, 0.5858700275421143, -0.22849999368190765, 0.01182899996638298, -0.2899700105190277, -0.1691100001335144, -0.45730000734329224, -0.2377299964427948, 0.4688299894332886, 0.00043814998934976757, -0.8804799914360046, 0.18609000742435455, -0.023336000740528107, 0.42691001296043396, 0.23513999581336975, -0.0743890032172203, 0.6009699702262878, -0.26517999172210693, -0.4715900123119354, 0.3773699998855591, -0.1841599941253662, 0.0267340000718832, -0.10138999670743942, 0.2714900076389313, -0.4145199954509735, -0.08031900227069855, -0.2015099972486496, -0.029992999508976936, 0.673259973526001, -0.4908500015735626, -0.3434300124645233, -0.5859900116920471, -0.3714500069618225, -0.12256000190973282, 0.08431600034236908, 0.06784500181674957, 0.16120000183582306, 0.09379100054502487, 0.21588000655174255, 0.5735499858856201, -0.8880500197410583, 0.35572001338005066, 0.42726999521255493, -0.2285500019788742, -0.025975000113248825, -0.3153400123119354, -0.3382200002670288, -0.26128000020980835, -0.054405998438596725, 0.6937800049781799, 0.1037599965929985, -0.3362799882888794, -0.11653999984264374, -0.7545999884605408, 0.8476399779319763, -0.0357699990272522, 0.13955000042915344, -0.27340999245643616, 0.21834999322891235, 1.2999000549316406, 0.02841299958527088, -0.16136999428272247, -0.6156200170516968, -0.33434000611305237, 0.3795500099658966, -0.29137998819351196, 
0.4105199873447418, -0.3892500102519989, 0.5625399947166443, -0.03929699957370758, 0.5788900256156921, -0.16056999564170837, -0.5616999864578247, -0.3305499851703644, 0.6188200116157532, -0.00811379961669445, -0.0531190000474453, -0.4207499921321869, 0.013922000303864479, -0.25593000650405884, -0.6202300190925598, 0.978950023651123, -0.5017099976539612, 0.11009000241756439, 0.2912200093269348, -0.3125399947166443, 0.13579000532627106, -0.09214600175619125, 0.11328999698162079, -0.23612000048160553, -0.31266000866889954, -0.4435800015926361, -0.21511000394821167, 0.19221000373363495, 0.16325999796390533, 0.8626599907875061, 0.16286000609397888, -0.2507700026035309, 0.17506000399589539, 0.265390008687973, -0.16574999690055847, 0.47727999091148376, 0.5682399868965149, -0.5807899832725525, 0.7470999956130981, -0.1690800040960312, -0.7240800261497498, -0.8362200260162354, -0.6340399980545044, -0.18756000697612762, -0.41571998596191406, 0.24674999713897705, -1.2028000354766846, -0.029221000149846077, 0.48083001375198364, -0.04078099876642227, -0.41670000553131104, -0.7512500286102295, 0.17231999337673187, -0.18097999691963196, -0.20667999982833862, 0.3010900020599365, 0.07049500197172165, 0.13845999538898468, -0.37580999732017517, -0.068790003657341, -0.028699999675154686, 0.04602900147438049, -0.37582001090049744, -0.2510499954223633, 0.014518000185489655, -0.48945000767707825, 0.304610013961792, 0.01991499960422516, -0.15536999702453613, -0.477649986743927, -0.16504999995231628, 0.0580810010433197, -0.46491000056266785, 0.20170000195503235, 0.07062699645757675, 0.29236000776290894, -0.297789990901947, 0.06345000118017197, -1.0634000301361084, -0.34915998578071594, -0.6043499708175659, -0.5004299879074097, -0.4124000072479248, -0.301690012216568, -0.2561100125312805, -0.2320300042629242, 0.15192000567913055, 0.2011599987745285, 0.5486099720001221, -0.7600299715995789, -0.15445999801158905, 0.02697099931538105, -0.6072099804878235, -0.13165999948978424, -0.27851998805999756, -0.5519700050354004, 0.5866600275039673, 0.4135099947452545, -0.9335799813270569, -0.794920027256012, -0.41422998905181885, 0.3686800003051758], u'bush': [-0.21908000111579895, 0.290010005235672, -0.11343000084161758, -0.14796000719070435, -0.14478999376296997, 0.2706499993801117, -0.25655001401901245, 0.04499800130724907, 0.055977001786231995, -1.6960999965667725, -0.0007705899770371616, 0.3699199855327606, -0.15699000656604767, -0.09675300121307373, 0.24677999317646027, 0.4632999897003174, -0.3402099907398224, -0.3410699963569641, 0.3529199957847595, 0.32986998558044434, -0.026227999478578568, -0.2814300060272217, 0.7144700288772583, -0.15894000232219696, 0.03106600046157837, 0.3259600102901459, -0.2974399924278259, 0.22217999398708344, 0.14020000398159027, 0.21558000147342682, 0.4212700128555298, -0.2011599987745285, -0.3144899904727936, -0.12264999747276306, -1.405900001525879, -0.12156999856233597, -0.26458999514579773, -0.5867599844932556, -0.552649974822998, -0.3362500071525574, 0.2589400112628937, 0.3838900029659271, 0.006109099835157394, -0.26684999465942383, -0.47690001130104065, -0.16861000657081604, -0.041843000799417496, -0.6252599954605103, 0.2421800047159195, -0.007134200073778629, -0.3212999999523163, 0.27237001061439514, -0.3314700126647949, 0.5247600078582764, 0.09276799857616425, -0.28073999285697937, -0.23702000081539154, -0.14417999982833862, 0.6414899826049805, 0.31452998518943787, 0.3510499894618988, 0.14529000222682953, 0.4229399859905243, -0.3676699995994568, 0.4182400107383728, -0.5751799941062927, 
0.08245600014925003, 0.4581800103187561, -0.1383100003004074, 0.02452700026333332, 0.01351999957114458, -0.014492000453174114, 0.07130599766969681, 0.2582300007343292, 0.4727100133895874, -0.07401800155639648, -0.06421799957752228, 0.9634299874305725, -0.6310999989509583, 0.11761000007390976, 0.05829999968409538, 0.1004600003361702, 0.4258599877357483, 0.1193699985742569, 0.27752000093460083, 0.021462999284267426, -0.839900016784668, 0.49842000007629395, 0.06832700222730637, -0.40867000818252563, 0.07727299630641937, -0.18672999739646912, -0.4577699899673462, 0.1119299978017807, -0.20062999427318573, -0.361380010843277, -0.34856000542640686, 0.6028599739074707, -0.12351000308990479, -0.39395999908447266, -0.13550999760627747, -0.11901000142097473, 0.19818000495433807, 0.5400300025939941, -0.31457000970840454, 0.21807999908924103, -0.7609500288963318, 0.14988000690937042, 0.512179970741272, -0.13395999372005463, 0.2875399887561798, 0.27605998516082764, -0.07295600324869156, 0.4213100075721741, 0.2594299912452698, -0.035287998616695404, 0.1171799972653389, -0.13782000541687012, -0.1193699985742569, -0.48151999711990356, -0.17072999477386475, -0.3982599973678589, -0.5049399733543396, -0.0698539987206459, -0.09981899708509445, 0.13978999853134155, -0.38367000222206116, 0.04927799850702286, 0.3674499988555908, 0.10101000219583511, -0.45875999331474304, -0.07138899713754654, -0.16628000140190125, -0.08022899925708771, -0.394540011882782, 0.05620799958705902, -0.3018699884414673, 0.2561500072479248, 0.9162499904632568, -0.23781000077724457, 0.12487000226974487, -0.28001001477241516, -0.23246000707149506, -0.18122999370098114, -0.8097900152206421, 0.24536000192165375, -0.26684001088142395, -0.641539990901947, -0.008338799700140953, 0.10598000138998032, 0.18949000537395477, 0.09990300238132477, 0.21337999403476715, 0.16436000168323517, -0.7061399817466736, -0.08276999741792679, 0.4909699857234955, -0.252920001745224, 0.5917500257492065, 0.007123699877411127, 0.8086299896240234, -0.2577199935913086, 0.17117999494075775, 0.5247600078582764, 0.26572999358177185, -0.02971000038087368, -0.3145599961280823, -0.08096300065517426, -0.07880400121212006, 0.025550000369548798, 0.15724000334739685, -0.5020700097084045, -0.4759500026702881, -0.2028300017118454, 0.14390000700950623, -0.10926999896764755, 0.22462999820709229, 0.0781169980764389, 0.5073000192642212, 0.4106299877166748, 0.34599000215530396, 0.11620999872684479, 0.08459100127220154, -0.15571999549865723, -0.5447400212287903, -0.07292799651622772, -0.2953599989414215, -0.2883400022983551, 0.12156999856233597, -0.0019672999624162912, -0.5072299838066101, -0.1229500025510788, 0.1682800054550171, -0.33065998554229736, 0.06265799701213837, 0.28773000836372375, 0.8335099816322327, 0.1896599978208542, 0.33114001154899597, -0.32506999373435974, 0.8929299712181091, -0.3502100110054016, 0.3879300057888031, 0.5267500281333923, -0.21507999300956726, -0.2837499976158142, 0.5434100031852722, -0.20135000348091125, -0.29488998651504517, -0.24244000017642975, -0.09268199652433395, -0.4966999888420105, 0.24616000056266785, 0.05816899985074997, 0.28878000378608704, 0.18024000525474548, -0.4900600016117096, -0.4333699941635132, -0.014348000288009644, 0.2693600058555603, 1.2337000370025635, 0.11196999996900558, 0.2641400098800659, 0.48541000485420227, 0.3101699948310852, -0.07801199704408646, 0.03883500024676323, 0.22324000298976898, -0.5756099820137024, -0.017340999096632004, 0.17970000207424164, -0.3379400074481964, 0.7383700013160706, 0.48697999119758606, 
-0.2666400074958801, -0.16496999561786652, -0.3608900010585785, 0.010335000231862068, 0.9348599910736084, 0.004520699847489595, -0.24449999630451202, 0.03443700075149536, 0.3292199969291687, 0.11657000333070755, -0.5246400237083435, -0.5305200219154358, 0.35835000872612, -0.33570998907089233, 0.36682000756263733, -0.404449999332428, 0.2135400027036667, -0.4437299966812134, 0.24874000251293182, 0.012502999976277351, 0.6342599987983704, -0.1467600017786026, 0.4611000120639801, 0.37393999099731445, 0.0005246200016699731, -0.1507599949836731, 0.1896899938583374, 0.3260200023651123, -0.11450999975204468, 0.2382899969816208, 0.08997400104999542, -0.007820400409400463, 0.18841999769210815, -0.20528000593185425, -0.08384700119495392, -0.08453699946403503, 0.12519000470638275, -0.2644200026988983, 0.16859999299049377, 0.060527998954057693, -0.47606000304222107, -0.41780000925064087, -2.031599998474121, 0.4199199974536896, 1.6030999422073364, -0.7821599841117859, -0.2536900043487549, -0.6892300248146057, 0.26420000195503235, 0.04044400155544281, 0.2144699990749359, 0.3502500057220459, -0.10147999972105026, 0.3278599977493286, 0.46538999676704407, -0.04176799952983856, 0.2886599898338318, 0.47150999307632446, 0.32014000415802, 0.46810999512672424, -0.2481900006532669, -0.010181999765336514, -0.5091699957847595, -0.15886999666690826, -0.8985499739646912, 0.8518999814987183], u'bike': [-0.029796000570058823, -0.43119001388549805, -0.06935500353574753, -0.18479999899864197, 0.07714000344276428, 0.2913599908351898, 0.0770450010895729, 0.12447000294923782, 0.04058799892663956, -0.29912999272346497, 0.32523998618125916, -0.23069000244140625, 0.014662000350654125, 0.38185998797416687, 0.38137999176979065, 0.3201200067996979, 0.18668000400066376, 0.04465299844741821, 0.10176999866962433, 0.15674999356269836, 0.2623400092124939, 0.43724000453948975, 0.1512400060892105, -0.0968180000782013, -0.21859000623226166, -0.622979998588562, -0.10836999863386154, 0.08058200031518936, -0.047129999846220016, 0.0435979999601841, -0.05147299915552139, 0.10206999629735947, 0.618399977684021, -0.041110001504421234, -0.7313100099563599, 0.8008400201797485, -0.8603100180625916, -0.5800999999046326, -0.05719799920916557, 0.32798001170158386, -0.8982700109481812, 0.0764629989862442, -0.5547400116920471, -0.6623200178146362, -0.2862800061702728, 0.39267998933792114, 1.148300051689148, 0.1280599981546402, 0.28951001167297363, 0.010130999609827995, -0.2379399985074997, -0.4232900142669678, 0.5331699848175049, 0.24087999761104584, 0.295879989862442, 0.6086400151252747, -0.12419000267982483, -0.3430899977684021, -0.4422999918460846, 0.01576000079512596, 0.016465000808238983, 0.6015899777412415, -0.09357000142335892, -0.12077999860048294, -0.18458999693393707, 0.2440200001001358, -0.3180899918079376, -0.06169100105762482, -0.09923499822616577, -0.08308599889278412, -0.09708599746227264, 0.29565000534057617, 0.3487499952316284, 0.11366000026464462, -0.03494400158524513, -0.008169399574398994, 0.11138000339269638, -0.11706999689340591, -0.08688200265169144, -0.036146000027656555, -0.05923900008201599, 0.5625900030136108, 0.5179700255393982, -0.034244999289512634, -0.5043900012969971, -0.2553600072860718, 0.03993000090122223, 0.6642400026321411, -0.16898000240325928, 0.6917700171470642, 1.19350004196167, -0.12556999921798706, 0.6662999987602234, -0.7691500186920166, 0.21125000715255737, -0.4758400022983551, 0.4633699953556061, -0.06496799737215042, 0.2252500057220459, 0.02080499939620495, -0.8317199945449829, 0.37240999937057495, 
-0.16690999269485474, -0.25275999307632446, 0.022691000252962112, -0.1720000058412552, 0.5857899785041809, 0.03449099883437157, 0.09302199631929398, -0.2903200089931488, -0.741129994392395, -0.19192999601364136, 0.08716099709272385, -0.37790998816490173, 0.12626999616622925, 0.42851001024246216, 0.38457000255584717, 0.06684499979019165, 0.18174000084400177, 0.825980007648468, 0.04208900034427643, 0.13244999945163727, 0.4571300148963928, -0.5327500104904175, -0.24501000344753265, 0.3345400094985962, -0.038290999829769135, 0.2582699954509735, -0.45291000604629517, 0.2084999978542328, 0.15699000656604767, -0.12097000330686569, 0.264739990234375, 0.6802800297737122, 0.21945999562740326, 0.052230000495910645, -0.02990500070154667, -0.2936300039291382, 0.49129000306129456, -0.5534899830818176, 0.10577999800443649, -0.20603999495506287, 0.4777800142765045, -0.05848899856209755, -0.22529999911785126, -0.02385599911212921, -0.18660999834537506, -0.13289999961853027, 0.16218000650405884, 0.2966499924659729, 0.20545999705791473, 0.49998000264167786, -0.44374001026153564, 0.018828999251127243, 0.2067600041627884, -0.6842899918556213, -0.03416400030255318, 0.22419999539852142, -0.24087999761104584, 0.2315399944782257, 0.3987799882888794, -1.0504000186920166, -0.418040007352829, -0.30979999899864197, 0.4285700023174286, -0.4210300147533417, 0.6469100117683411, 0.3043299913406372, 0.19391000270843506, 0.13739000260829926, -0.3988099992275238, -0.025147000327706337, -0.2266799956560135, 0.09873499721288681, -0.19415999948978424, -0.828719973564148, 0.5539199709892273, 0.03838000074028969, 0.11653000116348267, -0.08775299787521362, -0.1424199938774109, 0.18523000180721283, 0.049911998212337494, 0.27393001317977905, 0.7687000036239624, -0.07566200196743011, 0.6265699863433838, 0.623769998550415, 0.31404998898506165, -0.12931999564170837, -0.15078000724315643, -0.032947998493909836, -0.19001999497413635, -0.2869099974632263, -0.3423900008201599, -0.17241999506950378, -0.3940899968147278, -0.014089999720454216, 0.08963800221681595, -0.37942999601364136, 0.5897899866104126, 0.055309999734163284, 0.47473999857902527, 0.11799000203609467, 0.42983999848365784, 0.5571200251579285, -0.4093700051307678, -0.04685699939727783, -0.36834999918937683, 0.24059000611305237, -0.2462500035762787, -0.08381099998950958, -0.38391000032424927, -0.18082000315189362, -0.5211600065231323, -0.09241099655628204, -0.542169988155365, -0.12075000256299973, -0.09776999801397324, -0.24376000463962555, 0.1931300014257431, -0.18900999426841736, -0.3329299986362457, -0.060384999960660934, 1.2355999946594238, 0.04770800098776817, 0.03451500087976456, -0.051816001534461975, -0.5768799781799316, 0.061406999826431274, -0.12645000219345093, -0.5856599807739258, -0.1387699991464615, -0.772159993648529, 0.7821300029754639, 0.25189998745918274, -0.20294000208377838, 0.28321000933647156, 0.25470998883247375, -0.06075499951839447, 0.29745998978614807, -0.2903900146484375, 0.015884000808000565, -0.21404999494552612, -0.38214999437332153, -0.2939299941062927, -0.33316001296043396, 0.22137999534606934, -0.04992000013589859, 0.1253499984741211, -0.36274001002311707, -0.17098000645637512, 0.09488300234079361, -0.35245999693870544, 0.37053000926971436, -0.10129000246524811, -0.03988099843263626, -0.1382800042629242, 0.06619799882173538, 0.009970299899578094, -0.14020000398159027, -0.3915799856185913, -0.004230800084769726, 0.3801499903202057, -0.08377200365066528, 0.0075806002132594585, -0.3177799880504608, -0.005093399900943041, -0.01969200000166893, 
-0.18193000555038452, -0.26611000299453735, 0.4517500102519989, -0.11488000303506851, 0.026270000264048576, 0.16580000519752502, -0.29927998781204224, -1.3992999792099, -0.2581999897956848, -0.14869000017642975, 0.18688000738620758, 0.16022999584674835, -0.7991799712181091, 0.13460999727249146, 0.2958199977874756, -0.7216299772262573, 0.16078999638557434, 0.05095899850130081, -0.10017000138759613, 0.036368001252412796, -0.4857099950313568, 0.1731799989938736, -0.20611999928951263, -1.121399998664856, 0.1783200055360794, -0.29186001420021057, -0.010370999574661255, -0.2838299870491028, 1.0164999961853027, 0.4701499938964844, -0.17529000341892242], u'fig': [-0.12270999699831009, 0.07804299890995026, 0.09805800020694733, -0.43389999866485596, -0.331059992313385, -0.2435300052165985, -0.7221800088882446, 0.027682000771164894, -0.0099804000928998, 0.09856600314378738, 0.050916001200675964, 0.6344500184059143, -0.30302000045776367, 0.04570600017905235, 0.038839999586343765, 0.7194100022315979, -0.1033099964261055, 0.1527000069618225, -0.6234400272369385, 0.30799001455307007, 0.23778000473976135, 0.11395999789237976, -0.22181999683380127, -0.5233399868011475, 0.25971999764442444, 0.10547000169754028, 0.15730999410152435, -0.5886300206184387, 0.14583000540733337, 0.8184700012207031, 0.0863490030169487, -0.3083699941635132, -0.2544899880886078, 0.11255999654531479, -0.06542400270700455, 0.5412399768829346, 0.29646000266075134, 0.3243899941444397, 0.47071000933647156, -0.7111999988555908, -0.4399600028991699, 0.41508999466896057, 0.17260000109672546, -0.0641620010137558, -0.11535000056028366, 0.0034175999462604523, 0.01550500001758337, 0.12897999584674835, -0.11964000016450882, -0.0753370001912117, 0.3407500088214874, -0.518750011920929, -0.2793000042438507, 0.18873000144958496, -0.20904000103473663, -0.627810001373291, -0.04039100185036659, 0.06909999996423721, 0.551360011100769, 0.14074000716209412, 0.6201199889183044, 0.024570999667048454, 0.050374001264572144, 0.26298001408576965, -0.42236000299453735, -0.17818999290466309, -0.11031000316143036, 0.4473100006580353, -0.4410000145435333, 0.01845799945294857, -0.7580500245094299, 0.44495999813079834, -1.1892999410629272, 0.021611999720335007, -1.011299967765808, 0.21991999447345734, 0.32611000537872314, 0.4353199899196625, -0.047387998551130295, 0.5217000246047974, -0.1799200028181076, 0.4125399887561798, -0.03298399969935417, -0.7134600281715393, -0.19099000096321106, 0.01344200037419796, -0.3065600097179413, 0.2215700000524521, -0.2722499966621399, -0.3679099977016449, -0.9896299839019775, -0.3688200116157532, 0.16576999425888062, -0.3625899851322174, 0.2616899907588959, 0.16332000494003296, -0.1320600062608719, -0.15307000279426575, -0.2663699984550476, 0.58815997838974, 0.648360013961792, 0.037129998207092285, 0.22206999361515045, -0.32965999841690063, -0.5860700011253357, -0.4280700087547302, -0.3278700113296509, -0.8241099715232849, -0.13436999917030334, 0.028084000572562218, 0.2331400066614151, 0.3683899939060211, -0.2632899880409241, -0.0691400021314621, -0.0367249995470047, 0.322160005569458, -0.5042600035667419, 0.4146299958229065, -0.3216800093650818, 0.22463999688625336, -0.6372799873352051, 0.013683999888598919, 0.9792900085449219, -0.0006413999944925308, -0.32510000467300415, 0.09183699637651443, -0.21695999801158905, 0.9944999814033508, -0.0070814997889101505, 0.07033900171518326, -0.7610899806022644, -0.13308000564575195, 0.3665100038051605, 0.5422099828720093, 0.5267999768257141, 0.026605000719428062, 0.39122000336647034, 
-0.9233700037002563, -0.6154000163078308, -0.3410699963569641, 0.513979971408844, 0.7303000092506409, -0.21201999485492706, -0.24178999662399292, 0.13888999819755554, -0.21435000002384186, 0.13033999502658844, -0.5455800294876099, 0.4950999915599823, -0.17461000382900238, -0.4072999954223633, 0.19160999357700348, -0.11573000252246857, -0.058014001697301865, -0.3139599859714508, -1.052299976348877, 0.11879000067710876, -0.004033899866044521, -0.06697200238704681, 0.3881100118160248, -0.15758000314235687, 0.3373500108718872, 0.7082599997520447, 0.24105000495910645, -0.363209992647171, -0.5336599946022034, -0.15639999508857727, 0.5153800249099731, -0.0759890004992485, -1.0789999961853027, -0.1687300056219101, -0.42546001076698303, 0.22734999656677246, 0.12533999979496002, 0.7747600078582764, 0.1742600053548813, 1.0062999725341797, 0.33945998549461365, 0.20985999703407288, 0.05042000114917755, 0.17946000397205353, 0.4034999907016754, 0.02195099927484989, -0.2560200095176697, 0.09057100117206573, -0.04645499959588051, 0.7434399724006653, -0.12067999690771103, 1.0295000076293945, -0.015440000221133232, 0.07715000212192535, 0.3813300132751465, 0.29954999685287476, -0.11349000036716461, 0.06161399930715561, 0.023932000622153282, 0.6867200136184692, 0.37327998876571655, -0.31735000014305115, 0.13589000701904297, 0.100040003657341, -0.43952998518943787, -0.17003999650478363, -0.321260005235672, -0.31066998839378357, 0.23454999923706055, 0.2335900068283081, -0.20106999576091766, 0.6071100234985352, 0.07596000283956528, 0.048468999564647675, -0.5053899884223938, -0.09065999835729599, 0.012907999567687511, 0.05458199977874756, -0.054878998547792435, 0.07045300304889679, -0.406360000371933, -0.40900999307632446, -0.04399200156331062, 0.35763999819755554, -0.48941999673843384, -0.23699000477790833, 0.30974000692367554, -0.46241000294685364, 0.36287999153137207, -0.39945998787879944, -0.1814199984073639, -0.008555700071156025, 0.035371001809835434, -0.12624000012874603, 0.21990999579429626, 0.5753399729728699, -0.30855000019073486, 0.36862000823020935, 0.149959996342659, -0.039751000702381134, -0.3905999958515167, -0.19537000358104706, -0.6911699771881104, -0.01066100038588047, -0.6654300093650818, -0.3297399878501892, -0.12342999875545502, -0.6621099710464478, 0.6137999892234802, -0.2551800012588501, 0.7578399777412415, -0.1514900028705597, -0.3266400098800659, 0.43241000175476074, 1.2418999671936035, 0.6049500107765198, -0.4894599914550781, -0.0307180006057024, 0.04542100057005882, -0.2042199969291687, -0.6118999719619751, 0.9125499725341797, 0.08146899938583374, -0.37610000371932983, 0.2040500044822693, 0.3174099922180176, -0.3471499979496002, 0.15408000349998474, 0.3805600106716156, -0.24320000410079956, 0.3467499911785126, -0.024538999423384666, -0.07591000199317932, -0.5169199705123901, 0.33948999643325806, -0.20385000109672546, 0.8934500217437744, 0.30531999468803406, 0.6686999797821045, -0.08830200135707855, -0.24886000156402588, -0.21984000504016876, -0.06476899981498718, -0.44218000769615173, 0.33160001039505005, -0.4510299861431122, -0.09206099808216095, -0.2127400040626526, 0.09550800174474716, 0.43487998843193054, 0.03488900139927864, -0.44753000140190125, 0.06630799919366837, 0.17207999527454376, 0.3790299892425537, 0.8129299879074097, 0.44589000940322876, 0.34902000427246094, -0.40178999304771423, -0.3420099914073944, 0.6049200296401978, -0.045850999653339386, -0.4782699942588806], u'orange': [-0.24775999784469604, -0.12358999997377396, 0.20985999703407288, -0.15834000706672668, 
-0.158270001411438, -0.9011600017547607, -0.09570199996232986, -0.2300499975681305, 0.27094000577926636, -0.18885000050067902, -0.6093999743461609, -0.2914600074291229, -0.14546999335289001, -0.17565999925136566, 0.7756999731063843, 0.23427000641822815, -0.7333999872207642, -0.3403399884700775, -0.5699700117111206, -0.046918001025915146, 0.024907000362873077, -0.1827400028705597, 0.14791999757289886, 0.18594999611377716, -0.0466650016605854, -0.5565699934959412, -0.3818199932575226, 0.21154999732971191, -1.138800024986267, 0.2463199943304062, -0.27489998936653137, -0.21952000260353088, -0.6977099776268005, 0.05891000106930733, -0.3705799877643585, 1.0525000095367432, -0.1956299990415573, -0.1786399930715561, -0.24309000372886658, 0.021317999809980392, -0.05516500025987625, -0.27761000394821167, -0.217849999666214, 0.5232300162315369, 0.16325999796390533, -0.5455700159072876, 0.0014318999601528049, -0.08784600347280502, 0.6144300103187561, -0.725820004940033, -0.06120600178837776, 0.3518199920654297, 0.5500100255012512, -0.6703799962997437, -0.06033900007605553, -0.0018221000209450722, -0.28547000885009766, -0.36713001132011414, 0.6633399724960327, -0.8300399780273438, -0.26006001234054565, -0.3976300060749054, -0.387719988822937, 0.4136100113391876, 0.22863000631332397, -0.6484000086784363, -0.14765000343322754, -0.13465000689029694, -0.18153999745845795, -0.548550009727478, -0.07490000128746033, 0.21008999645709991, 0.05589799955487251, 0.5410699844360352, -0.5395600199699402, -0.289110004901886, -0.06219499930739403, 0.03349599987268448, -0.27441999316215515, -0.24379000067710876, 0.6012300252914429, 0.3373500108718872, -0.45934000611305237, 0.22089000046253204, 0.28856000304222107, -0.5455099940299988, 0.03418299928307533, 0.48993000388145447, 0.038961999118328094, -0.04527299851179123, 0.15440000593662262, -0.3797999918460846, -0.11647000163793564, 0.061434999108314514, -0.4374299943447113, 0.7728899717330933, 0.3194200098514557, -0.0697380006313324, 0.023406999185681343, 0.10732000321149826, -0.08948899805545807, -0.10486000031232834, 0.04256900027394295, -0.026283999904990196, 0.3334299921989441, 0.212349995970726, 0.07399000227451324, 0.1830500066280365, -0.5078700184822083, 0.11003000289201736, -0.41797998547554016, 0.10016000270843506, 0.5114499926567078, 0.03635700047016144, 0.04974700137972832, -0.25303998589515686, -0.11242000013589859, 0.8452399969100952, -0.09808299690485, -0.44179001450538635, 0.35958001017570496, -0.6562299728393555, 0.39122000336647034, -0.35646000504493713, -0.7998700141906738, -0.055275000631809235, -0.038644999265670776, 0.4357199966907501, 0.07128500193357468, 0.4419200122356415, -0.40933001041412354, 0.026579000055789948, -0.5595099925994873, 0.18181000649929047, -0.16561000049114227, -0.17276999354362488, -0.20476000010967255, -0.47508999705314636, 0.19046999514102936, 0.02533699944615364, 0.5610899925231934, 0.23522000014781952, -0.048615001142024994, 0.02029399946331978, 0.1580200046300888, 0.45451000332832336, -0.23931999504566193, 0.06862200051546097, 0.5082100033760071, -0.24706000089645386, 0.8049299716949463, 0.05498899891972542, 0.46608999371528625, -0.1808300018310547, 0.7378299832344055, -0.27206000685691833, 0.2641800045967102, -0.4946100115776062, -0.024972999468445778, 0.1933099925518036, -0.3761500120162964, 0.3346799910068512, -0.2435300052165985, 0.18171000480651855, 0.04149699956178665, -0.06338799744844437, 0.03601500019431114, 0.2895199954509735, -0.5293499827384949, -0.06390299648046494, 0.692110002040863, 0.12150000035762787, 
-0.6273099780082703, 0.2662700116634369, 0.36438000202178955, -0.7710800170898438, 0.40560999512672424, -0.3077999949455261, 0.5591899752616882, -0.32124000787734985, -0.1427599936723709, 0.07034599781036377, 0.19113999605178833, -0.3928999900817871, 0.004515999928116798, -0.7387099862098694, 0.8073999881744385, 0.13539999723434448, -0.12414000183343887, -0.7956799864768982, 0.058827001601457596, 0.48552000522613525, -0.29019999504089355, -0.1130099967122078, 0.018115000799298286, -0.16891999542713165, -0.4266299903392792, -0.2062000036239624, -0.21115000545978546, 0.4212599992752075, 0.9180200099945068, 0.0701799988746643, -0.01013300009071827, -0.16455000638961792, 0.21174000203609467, 0.14026999473571777, 0.5228899717330933, 0.759190022945404, 0.05075199902057648, 0.7546700239181519, -0.007933000102639198, 0.23611000180244446, -0.6852200031280518, -0.42566999793052673, 0.3409099876880646, 0.02098499983549118, 1.051800012588501, -0.03134499862790108, -0.08606100082397461, -0.5776799917221069, 1.3026000261306763, -0.37066999077796936, -0.35319000482559204, 0.05228099972009659, -0.16381999850273132, -0.46568000316619873, -0.10995999723672867, -0.4207099974155426, -0.5061600208282471, -0.5033699870109558, -0.3158299922943115, 0.4721600115299225, -0.053557999432086945, 0.15372000634670258, 0.31000998616218567, -0.10349000245332718, 0.1790899932384491, -0.32471001148223877, -0.16193999350070953, -0.5347899794578552, -0.13763999938964844, -0.7111799716949463, 0.18694999814033508, 0.1693200021982193, -0.7379900217056274, 0.03736399859189987, 1.020900011062622, -0.42802000045776367, 0.13443000614643097, -0.35760998725891113, 0.3981899917125702, 0.4380199909210205, -0.1639699935913086, -0.15302999317646027, 0.271369993686676, -0.5281000137329102, 0.31446999311447144, -0.11778999865055084, 0.275299996137619, -0.20476999878883362, 0.19547000527381897, 0.1483200043439865, 0.7838000059127808, -0.347460001707077, -0.5382000207901001, -0.7468400001525879, -0.30636999011039734, -0.23923000693321228, -0.16769999265670776, 0.30750998854637146, -0.11027999967336655, -0.007679900154471397, -0.2139499932527542, -0.055121999233961105, -0.4410400092601776, 0.13752000033855438, -1.3947999477386475, -0.11890999972820282, -0.36035001277923584, 0.05143199861049652, 0.008985799737274647, -0.029621999710798264, -0.2038400024175644, -0.23499999940395355, -0.06545300036668777, 0.5482800006866455, -0.331959992647171, -0.11157000064849854, -0.005047900136560202, 0.13196000456809998, 0.3384400010108948, -0.15592999756336212, -0.10277000069618225, -0.08295299857854843, 0.43792998790740967, -0.22457000613212585, 0.31512999534606934, 0.0797170028090477, 0.23864999413490295, -0.014213000424206257], u'furniture': [0.2879300117492676, 0.10170000046491623, -0.6632099747657776, -0.3328799903392792, -0.07068400084972382, -0.11704999953508377, 0.5600500106811523, -0.16865000128746033, -0.18546999990940094, -1.080899953842163, -0.27191001176834106, -0.10354000329971313, 0.07688099890947342, 0.2826499938964844, 0.3684200048446655, -0.5241900086402893, 0.14218999445438385, 0.3217099905014038, 0.24828000366687775, -0.11710000038146973, 0.5079699754714966, 0.4826900064945221, 0.401529997587204, -0.01259199995547533, -0.08412999659776688, -0.24316999316215515, -0.7035700082778931, -0.06500499695539474, 0.13815000653266907, 0.5220500230789185, 0.11556000262498856, 1.1104999780654907, -0.16440999507904053, 0.45882999897003174, -0.6487399935722351, 0.2875800132751465, -0.34904998540878296, -0.5207300186157227, 0.21358999609947205, 
-0.38530001044273376, -0.43884000182151794, -0.4136900007724762, 0.04776399955153465, -0.29374998807907104, -0.013895000331103802, 0.027101000770926476, 0.39524999260902405, -0.21509000658988953, -0.06926900148391724, -0.2100200057029724, 0.16830000281333923, -0.020069999620318413, 0.012252000160515308, 0.031222999095916748, -0.3190000057220459, -0.07209599763154984, -0.46764999628067017, -0.17855000495910645, -0.08872800320386887, -0.04049599915742874, 0.06234600022435188, 0.07220099866390228, 0.07961200177669525, -0.1767600029706955, 0.1073400005698204, -0.2025900036096573, -0.5039700269699097, -0.0743589997291565, -0.15817999839782715, -0.3865799903869629, -0.3092400133609772, -0.6970999836921692, -0.29938000440597534, -0.07273200154304504, 0.015367000363767147, 0.3233799934387207, -0.3306399881839752, -0.05425899848341942, -0.007138700224459171, -0.6454200148582458, -0.3633599877357483, 0.07767099887132645, -0.25786998867988586, -0.17529000341892242, 0.43018999695777893, -0.18831999599933624, -0.3616900146007538, -0.31226998567581177, -0.34060999751091003, 0.6136699914932251, 0.531499981880188, -0.16121000051498413, 0.20061999559402466, 0.08767899870872498, 0.30441999435424805, -0.1761299967765808, -0.07031600177288055, -0.769070029258728, 0.017416000366210938, -0.2869099974632263, -0.02006799913942814, 0.4056200087070465, 0.06114500015974045, -0.20272000133991241, -0.049977000802755356, -0.725629985332489, 0.4398399889469147, -0.30893000960350037, 0.3125999867916107, -0.3421100080013275, -0.0024248999543488026, -0.3373599946498871, -0.4771699905395508, -0.12862999737262726, 0.10608000308275223, 0.04122300073504448, 0.08205799758434296, 0.31283000111579895, -0.23095999658107758, -0.026496000587940216, 0.45989999175071716, 0.015236999839544296, 0.14052000641822815, -0.03949899971485138, -0.1439799964427948, 0.5381100177764893, -0.059140998870134354, 0.14681999385356903, 0.24955999851226807, 0.3969399929046631, 0.48927000164985657, 0.3030399978160858, 0.4773100018501282, -0.12054000049829483, 0.31661999225616455, -0.004521199967712164, 0.16571000218391418, 0.4026300013065338, -0.32308000326156616, 0.26061001420021057, 0.2379699945449829, 0.18127000331878662, 0.7674599885940552, 0.25508999824523926, 0.7516999840736389, 0.32378000020980835, 0.12818999588489532, 0.5608400106430054, -0.6273599863052368, -0.7227799892425537, 0.19092999398708344, 0.14030000567436218, -0.3764599859714508, -0.7795299887657166, -0.7971699833869934, 0.2634199857711792, -0.44020000100135803, -0.1787700057029724, 0.07543099671602249, -0.09569600224494934, 0.5834500193595886, 0.28150999546051025, -0.21028000116348267, 0.11694999784231186, 0.8996400237083435, 0.2953200042247772, 0.7457600235939026, 0.1073400005698204, 0.41356998682022095, 0.6133300065994263, -0.24570000171661377, 0.17076000571250916, -0.40648001432418823, 0.3628700077533722, -0.0021244999952614307, 0.1559700071811676, -0.1982399970293045, 0.21893000602722168, -0.19559000432491302, -0.5217800140380859, -0.08135999739170074, -0.7869300246238708, 0.057353999465703964, 0.029014000669121742, 0.21755999326705933, -0.18748000264167786, 0.25196000933647156, 0.2852500081062317, 0.41260001063346863, 0.3366999924182892, 0.19394999742507935, 0.004717899952083826, -0.4910599887371063, 0.04746200144290924, 0.15294000506401062, 0.2169400006532669, -0.588919997215271, 0.8276100158691406, -0.523360013961792, 0.09288500249385834, 0.16729000210762024, -0.7281200289726257, 0.020344000309705734, -0.260560005903244, 0.41418999433517456, -0.24379000067710876, 
0.36237001419067383, 0.062164001166820526, -1.1054999828338623, -0.3285500109195709, -0.14291000366210938, 0.3442400097846985, 0.31244999170303345, -0.11661999672651291, 0.4118199944496155, 0.5152599811553955, 0.5363799929618835, -0.062401000410318375, -0.10243000090122223, -0.2621999979019165, 0.5264300107955933, -0.5538300275802612, -0.5491899847984314, -0.49323999881744385, -0.4120599925518036, 0.5520600080490112, 0.03526899963617325, 0.6246100068092346, 0.5922600030899048, -0.009066100232303143, 0.14834000170230865, -0.2620300054550171, -0.04692300036549568, 0.507319986820221, 0.6991699934005737, 0.08202499896287918, 0.05177599936723709, -0.14871999621391296, -0.9417099952697754, 0.037241000682115555, 0.2484399974346161, -0.04810300096869469, -0.5167400240898132, 0.517769992351532, -0.12557999789714813, 0.14374999701976776, -0.2851699888706207, 0.07683400064706802, -0.024692000821232796, 0.41495001316070557, 0.3191800117492676, -0.14151999354362488, -0.031151000410318375, -0.7999600172042847, 1.0684000253677368, 0.2757300138473511, -0.6056200265884399, 0.16350999474525452, 0.6867899894714355, 0.03789500147104263, 0.06038700044155121, -0.7957500219345093, -0.49632999300956726, -0.14061999320983887, 0.2075899988412857, -0.3343299925327301, 0.14994999766349792, 0.04329700022935867, 0.6432899832725525, -0.005141799803823233, -0.17663000524044037, 0.2701900005340576, -0.07544399797916412, -0.570930004119873, -0.5136799812316895, 0.11924999952316284, -0.9453200101852417, 0.4372600018978119, -1.1604000329971313, -0.26078000664711, -0.30682000517845154, -0.16857999563217163, 0.15252000093460083, -0.5585600137710571, 0.537090003490448, 0.7450799942016602, 0.17818999290466309, 0.3912999927997589, -0.11304999887943268, -0.2856200039386749, 0.2904199957847595, -0.11935000121593475, 0.10604999959468842, 0.2775000035762787, -0.0371600016951561, 0.19202999770641327, 0.3248800039291382, -0.04304900020360947, 0.22250999510288239, 0.5167800188064575], u'sauce': [-0.4912799894809723, 0.08678100258111954, 0.45028001070022583, -0.2935999929904938, -0.2700999975204468, -0.9139900207519531, -0.3487299978733063, 0.3296799957752228, 0.439520001411438, -0.20037999749183655, 0.6023899912834167, -0.5339699983596802, -0.15737999975681305, 0.7094500064849854, -0.04077399894595146, -0.16850000619888306, -0.2849400043487549, 0.4675300121307373, -0.3402400016784668, 0.011261999607086182, -0.2944500148296356, 0.15622000396251678, -0.390610009431839, -0.06457699835300446, -0.08048400282859802, 0.007212100084871054, 0.2456900030374527, 0.3045499920845032, -0.695930004119873, -1.0498000383377075, -1.2414000034332275, 0.35879001021385193, -0.5421599745750427, -0.1022299975156784, -0.08626999706029892, 0.8935400247573853, 0.06863900274038315, 0.24318000674247742, 0.32008999586105347, -0.17941999435424805, 0.28591999411582947, -0.44200000166893005, 0.24247999489307404, 0.4338099956512451, 0.4815100133419037, -0.08582700043916702, 0.6686199903488159, 0.7315300107002258, -0.5763900279998779, 0.4736199975013733, 0.16269999742507935, -0.24821999669075012, 0.5798799991607666, -0.350380003452301, -0.7595999836921692, 0.005692299921065569, -0.34953999519348145, -0.4933199882507324, 0.652999997138977, -0.11203999817371368, 0.4807800054550171, -0.21683000028133392, 0.4894300103187561, -0.15936000645160675, -0.045361001044511795, -0.5789300203323364, -0.572950005531311, 0.4389300048351288, 0.03179600089788437, -0.2590000033378601, -0.24964000284671783, -0.3422900140285492, 0.058038000017404556, -0.13325999677181244, 
-0.5112299919128418, -0.257779985666275, 0.9112899899482727, 0.38433000445365906, -0.626579999923706, -0.6764000058174133, 0.10444000363349915, 0.6102399826049805, -0.0011901999823749065, -0.4240399897098541, 0.5981600284576416, -0.616159975528717, -0.24862000346183777, 0.5285500288009644, -0.3306899964809418, -0.674780011177063, -0.028737999498844147, 0.13300999999046326, -0.02261500060558319, 0.13062000274658203, 0.001590500003658235, 0.3195500075817108, 0.22934000194072723, 0.8839399814605713, 0.2747099995613098, 0.6643099784851074, 0.5510299801826477, 0.08673900365829468, 0.17294000089168549, -0.43015000224113464, -0.37143000960350037, -0.6429299712181091, -0.19731000065803528, -0.016047000885009766, -0.5163300037384033, 0.7140899896621704, 0.7308499813079834, 0.3958800137042999, -0.6976799964904785, -0.7676699757575989, -0.20976999402046204, -0.4062899947166443, -1.1658999919891357, 0.6307700276374817, 0.5982699990272522, -0.17903000116348267, -0.3435699939727783, -0.24639999866485596, 0.21332000195980072, -0.24350999295711517, -0.05949300155043602, -0.569100022315979, 0.026984000578522682, 0.3686800003051758, -0.3999600112438202, 1.5508999824523926, -0.08703599870204926, 0.9812399744987488, -0.2776300013065338, 0.9264000058174133, 0.1600400060415268, -0.495959997177124, -0.4697900116443634, -0.042472999542951584, -0.018066000193357468, 0.1626099944114685, 0.44604000449180603, 0.3743799924850464, -0.4851599931716919, -0.15355999767780304, -0.239329993724823, 0.1617099940776825, -0.686020016670227, 0.2845500111579895, -0.04755600169301033, -0.13213999569416046, -0.9258099794387817, 0.5341500043869019, -0.2056799978017807, -0.5968199968338013, -0.1954600065946579, -0.9164000153541565, 0.16881999373435974, -0.6952000260353088, 0.2980499863624573, -0.04011800140142441, 0.41479000449180603, -0.0297279991209507, -0.2014700025320053, -0.3555000126361847, 5.430699820863083e-05, -0.6569799780845642, -0.15669000148773193, 0.40939000248908997, -0.30101001262664795, -1.0252000093460083, 0.19512000679969788, 0.12865999341011047, -0.5149199962615967, -0.0520550012588501, -0.28077998757362366, -0.3441999852657318, -0.2556900084018707, -0.4133000075817108, 0.8656299710273743, -0.9536399841308594, 0.26576000452041626, 0.5355100035667419, -0.20667999982833862, -0.37786999344825745, 0.2021699994802475, -0.6403200030326843, 0.8085200190544128, 0.12176000326871872, -0.4898500144481659, 0.04730900004506111, 0.2693299949169159, 0.8985400199890137, -0.5151500105857849, 0.3726600110530853, 0.20855000615119934, -0.26232999563217163, -0.40217000246047974, 0.4501500129699707, -0.6263499855995178, 0.09183800220489502, 0.2302200049161911, 0.10803999751806259, 1.1482000350952148, 0.7099999785423279, -0.3960399925708771, 0.797510027885437, 0.3500399887561798, 0.05378499999642372, -0.2218800038099289, -0.4264400005340576, 0.48840999603271484, -0.4711500108242035, -0.1723800003528595, 0.2607100009918213, -0.49031999707221985, -0.027354000136256218, 0.5867999792098999, -0.48747000098228455, 0.0322830006480217, 0.39166998863220215, -0.027691999450325966, 0.15365999937057495, -0.7025399804115295, -0.7036299705505371, -0.3439300060272217, -0.6671000123023987, 0.5698400139808655, -0.0014612999511882663, -0.25411999225616455, -0.2315800040960312, -0.3188300132751465, -0.16444000601768494, 0.30737000703811646, 0.2574700117111206, 0.9469500184059143, 0.15188999474048615, 0.23837999999523163, 0.37345999479293823, -0.25169000029563904, 0.2633500099182129, -0.6885799765586853, -0.18345999717712402, -0.13102999329566956, 
0.06240199878811836, -0.6977400183677673, 0.17291000485420227, 0.6440600156784058, -0.018814999610185623, -0.22711999714374542, -1.2973999977111816, 0.5178599953651428, -0.4300999939441681, 0.08817099779844284, 0.6949800252914429, -0.7227200269699097, 0.17862999439239502, -0.8801299929618835, 0.32853999733924866, 0.05117600038647652, 0.8358500003814697, -0.07459600269794464, 0.048020001500844955, -0.011309999972581863, -0.33858001232147217, 0.31286001205444336, -0.16674000024795532, -0.6057199835777283, -0.06600400060415268, 0.42937999963760376, -0.10734999924898148, -0.16854999959468842, -0.4920800030231476, 0.25916001200675964, 0.8906300067901611, 0.47238001227378845, 0.8729900121688843, -0.47464001178741455, -0.19812999665737152, -1.246500015258789, -0.49066999554634094, 0.3214600086212158, 0.4853399991989136, -0.025888999924063683, -0.2016099989414215, -0.3012999892234802, 0.3393700122833252, 0.8394299745559692, -0.2210099995136261, 0.5507100224494934, 0.6546400189399719, -0.37185999751091003, 0.46553000807762146, 0.6421899795532227, -0.09417299926280975, -0.2672399878501892, -0.8180400133132935, 0.01742600090801716, -0.6424700021743774, -0.24448999762535095, 0.1559700071811676], u'persimmon': [0.5388299822807312, 0.06907899677753448, 0.8176800012588501, 0.25977998971939087, -0.5052800178527832, -0.5225800275802612, 0.19272999465465546, 0.3228600025177002, 0.3251599967479706, 1.1461999416351318, -0.3867399990558624, 0.3168500065803528, 0.1064700037240982, 0.23552000522613525, 0.19652000069618225, -0.5372800230979919, 0.08672299981117249, -0.0880960002541542, -0.19821999967098236, -0.5305299758911133, -0.13681000471115112, -0.018720999360084534, -0.11999999731779099, 0.03800800070166588, -0.41391000151634216, -0.7345600128173828, -0.22367000579833984, -0.07356099784374237, -0.07690399885177612, 0.2043199986219406, -0.5019500255584717, 0.22495000064373016, -0.15987999737262726, -0.1141899973154068, 0.33208999037742615, 0.4683699905872345, -0.32774001359939575, -0.4298500120639801, -0.1190200001001358, 0.328359991312027, 0.03308900073170662, -0.14565999805927277, -0.11358000338077545, -0.4480699896812439, -0.13888999819755554, -0.34911999106407166, 0.1032399982213974, -0.08347900211811066, 0.3709299862384796, -0.46046000719070435, 0.2327899932861328, 0.12442000210285187, 0.3513199985027313, 0.30024001002311707, -0.07081300020217896, -0.19333000481128693, 0.13490000367164612, -0.22300000488758087, 0.012381999753415585, 0.7963100075721741, 0.28453001379966736, 0.09931699931621552, 0.0926709994673729, 0.42594000697135925, 0.044374000281095505, 0.12375999987125397, -0.32475000619888306, 0.17663000524044037, -0.0490260012447834, -0.4278799891471863, 0.05437399819493294, 0.3470599949359894, -0.9222000241279602, 0.2944900095462799, -0.17069000005722046, 0.3909200131893158, -0.07920800149440765, 0.07957600057125092, -0.547760009765625, 0.12512999773025513, -0.16112999618053436, 0.22301000356674194, 0.16118000447750092, -0.3452399969100952, 0.05602499842643738, -0.1022299975156784, -0.7440800070762634, -0.08133599907159805, 0.21006999909877777, -0.20464999973773956, 0.11534000188112259, -0.5141199827194214, 0.4448699951171875, -0.17746999859809875, 0.3719399869441986, 0.04616999998688698, 0.23362000286579132, -0.21067999303340912, -0.13169999420642853, 0.09418900310993195, -0.16103999316692352, 0.0844929963350296, -0.12247999757528305, 0.08784700185060501, 0.3268299996852875, 0.2643899917602539, 0.14143000543117523, 0.010023999959230423, -0.4302299916744232, -0.1852799952030182, 
0.21483999490737915, 0.02617100067436695, 0.06086000055074692, -0.02127699926495552, 0.28418999910354614, -0.0855410024523735, -0.42517000436782837, -0.04405000060796738, 0.34299999475479126, -0.028630999848246574, -0.09972099959850311, -0.29269999265670776, 0.5571600198745728, 0.1599700003862381, -0.32023999094963074, -0.24728000164031982, -0.29482999444007874, 0.23205000162124634, 0.41837000846862793, -0.46154001355171204, -0.6409000158309937, 0.7137399911880493, 0.5252400040626526, 0.8786100149154663, -0.4032900035381317, 0.04155600070953369, -0.7378900051116943, -0.45451998710632324, -0.3264099955558777, -0.011126999743282795, 0.9007400274276733, 0.24121999740600586, 0.334879994392395, 0.019504999741911888, 0.8051900267601013, 0.3768700063228607, -0.2604900002479553, -0.42893001437187195, -0.36748000979423523, -0.18977999687194824, -0.8392000198364258, 0.38012000918388367, 0.25918999314308167, 0.009673899970948696, -0.1481499969959259, -0.4976300001144409, 0.24303999543190002, -0.7193800210952759, 0.10847999900579453, 0.5359299778938293, -0.11716999858617783, 0.3841499984264374, -0.1173200011253357, 0.0270409993827343, -0.13892999291419983, 0.01867399923503399, -0.10842999815940857, 0.058035001158714294, -0.19915999472141266, -0.8992599844932556, -0.0014615999534726143, -0.21145999431610107, 0.2074500024318695, 0.5023999810218811, 0.15785999596118927, -0.05256899818778038, -0.7759000062942505, -0.11588999629020691, 0.9560999870300293, 0.1415800005197525, -0.2098899930715561, 0.00268030003644526, 0.4878099858760834, -0.09927300363779068, 0.11638999730348587, -0.6862999796867371, 0.5311499834060669, 0.21671999990940094, -0.2698499858379364, 0.31700000166893005, -0.7327799797058105, -0.08250000327825546, 0.2362699955701828, -0.047850001603364944, 0.12794999778270721, 0.1869100034236908, 0.07333000004291534, 0.4612100124359131, -0.28738000988960266, -0.05543600022792816, -0.1655299961566925, -0.5201900005340576, -0.2951900064945221, 0.2632000148296356, -0.1622599959373474, 0.9936800003051758, 0.19461999833583832, 0.49900001287460327, -0.23206999897956848, 0.054340001195669174, -0.3763200044631958, -0.24908000230789185, -0.13253000378608704, -0.4971199929714203, 0.33608001470565796, 0.12724000215530396, 0.4248499870300293, -0.10256999731063843, 0.3515999913215637, 0.012613000348210335, 0.4472399950027466, -0.7318699955940247, -0.544160008430481, 0.10498999804258347, -0.5730500221252441, 0.33733001351356506, -0.15487000346183777, 0.23844000697135925, 0.4164699912071228, 0.03070100024342537, -0.4444499909877777, -0.055642999708652496, -0.4697999954223633, 0.5534600019454956, 0.1848199963569641, 0.5357999801635742, 0.1265300065279007, -0.6107800006866455, -0.11766000092029572, -0.16870999336242676, 0.014475000090897083, 0.10367000102996826, -0.43035000562667847, -0.12658999860286713, -0.02590099908411503, 0.5347499847412109, -0.0698930025100708, 0.26864999532699585, 0.6594499945640564, 0.027816999703645706, -0.08632700145244598, 0.3519800007343292, -0.26969000697135925, -0.8441699743270874, -0.16514000296592712, -0.1564600020647049, -0.19674000144004822, 0.39785999059677124, 0.4241200089454651, 0.3946099877357483, 0.2715199887752533, 0.2309200018644333, -0.45892998576164246, -0.18564000725746155, 0.13991999626159668, 0.13634000718593597, 0.05447499826550484, 0.060113999992609024, 0.43522000312805176, 0.39289000630378723, 0.0044339001178741455, 0.1550000011920929, 0.16210000216960907, -0.2179500013589859, -0.13404999673366547, -0.23518000543117523, 0.9390299916267395, -0.09654200077056885, 
-0.16381999850273132, -0.1266999989748001, -0.3238399922847748, 0.15549999475479126, -0.08449900150299072, -0.1628199964761734, -0.3609299957752228, 0.45399999618530273, 0.2907100021839142, -0.5715000033378601, 0.29875001311302185, 0.1463399976491928, 0.20223000645637512, -0.3928999900817871, 0.7607399821281433, 0.28075000643730164, -0.042118001729249954, 0.07041700184345245, -0.04766400158405304, 0.049010999500751495, 0.3722499907016754, 0.4234499931335449], u'boulder': [-0.462909996509552, 0.4040699899196625, -0.18230000138282776, 0.05449200049042702, 0.47565001249313354, -0.2337300032377243, 0.36063000559806824, -0.3552800118923187, 0.2973099946975708, 0.22642000019550323, -0.11151000112295151, 0.1633400022983551, 0.16539999842643738, 0.05408100038766861, 0.22529000043869019, 0.39544999599456787, 0.2738400101661682, 0.14128999412059784, 0.404229998588562, 0.04613799974322319, 0.027977000921964645, 0.24381999671459198, 0.06596799939870834, 0.23492999374866486, -0.49314001202583313, -0.1261499971151352, -0.09884600341320038, 0.3693700134754181, -0.586899995803833, -0.23690000176429749, 0.44780999422073364, -0.09896399825811386, -0.16372999548912048, 0.00389509997330606, 0.04602399840950966, -0.019990000873804092, -0.40832000970840454, -0.04343299940228462, 0.02221900038421154, -0.5066099762916565, -0.3183099925518036, 0.52947998046875, 0.37988001108169556, 0.21455000340938568, 0.3077999949455261, 0.1728300005197525, 0.3396100103855133, -0.07902099937200546, 0.42500001192092896, -0.34512999653816223, 0.24887999892234802, -0.30052998661994934, -0.2485799938440323, 0.21714000403881073, 0.31602001190185547, 0.3878200054168701, -0.2721500098705292, -0.3355099856853485, -0.07605700194835663, -0.22120000422000885, -0.040883999317884445, 0.21407000720500946, 0.6119700074195862, 0.17095999419689178, 0.016032999381422997, -0.1927500069141388, -0.41822001338005066, 0.42214998602867126, -0.5049200057983398, -0.39921998977661133, -0.13199999928474426, 0.2785399854183197, -0.5737000107765198, 0.4606199860572815, -0.034157998859882355, 0.24911999702453613, 0.11108999699354172, 0.3399699926376343, -0.31540998816490173, -0.06936000287532806, -0.06837199628353119, -0.02835099957883358, -0.1216999962925911, 0.19021999835968018, -0.16220000386238098, -0.44541001319885254, -0.01674100011587143, 0.4728899896144867, -0.4151400029659271, -0.043115999549627304, 0.22458000481128693, 0.45028001070022583, 0.6726300120353699, 0.199070006608963, 0.15620000660419464, 0.1877100020647049, -0.16925999522209167, -0.18039999902248383, -0.1243399977684021, -0.22612999379634857, -0.0685880035161972, 0.1832900047302246, -0.4517099857330322, -0.11347000300884247, -0.3103100061416626, -0.007965600118041039, 0.6881099939346313, 0.4037800133228302, 0.14213000237941742, -0.489190012216568, -0.7330499887466431, -0.46900999546051025, -0.10033000260591507, -0.26662999391555786, 0.12195999920368195, 0.2624399960041046, 0.20467999577522278, -0.5083699822425842, 0.4168500006198883, 0.2150000035762787, 0.6952400207519531, 0.5321999788284302, 0.26868999004364014, -0.03927600011229515, -0.5188500285148621, -0.2998200058937073, 0.3374199867248535, -0.2962000072002411, -0.2776600122451782, 0.13008999824523926, -0.19422000646591187, 0.4113599956035614, -0.023684000596404076, 0.6358000040054321, 0.1092199981212616, -0.09667400270700455, -0.09474000334739685, 0.3543500006198883, 0.1552799940109253, -0.335999995470047, 0.3984900116920471, 0.12002000212669373, -0.2882300019264221, -0.08698900043964386, 0.3434999883174896, -0.32280001044273376, 
0.3201099932193756, 0.5331199765205383, 0.520550012588501, 0.10044000297784805, 0.5682799816131592, 0.05578799918293953, -0.20770999789237976, 0.10198000073432922, 0.26120999455451965, 0.38839998841285706, 0.5492299795150757, -0.5568400025367737, -0.7876899838447571, -0.11067000031471252, 0.08299700170755386, -0.34132999181747437, 0.06640499830245972, -0.47672000527381897, 0.7374899983406067, -0.3418999910354614, 0.45921000838279724, -0.47211000323295593, -0.30667999386787415, -0.2128099948167801, 0.10233999788761139, 0.003959100227802992, 0.20291000604629517, -0.17384999990463257, -0.03916199877858162, 0.24557000398635864, 0.28248998522758484, -0.05982999876141548, -0.1530199944972992, -0.3911600112915039, 0.1962900012731552, 0.06789799779653549, 0.4160600006580353, 0.03341799974441528, 0.15851999819278717, -0.6410599946975708, 0.18310000002384186, 0.18314999341964722, 0.3375900089740753, -0.25290998816490173, 0.25547000765800476, 0.02691599912941456, -0.1459999978542328, 0.5723000168800354, -0.3961000144481659, 0.18452000617980957, -0.10752999782562256, 0.06859900057315826, -0.24434000253677368, -0.020795000717043877, 0.4782100021839142, -0.21873000264167786, -0.6026300191879272, -0.047221001237630844, -0.19444000720977783, 0.028471000492572784, -0.0563029982149601, 0.2775000035762787, -0.010963000357151031, 0.05010300129652023, -0.0031647998839616776, 0.6249399781227112, -0.0008415200281888247, -0.3543199896812439, -0.025640999898314476, 0.42497000098228455, -0.8644800186157227, -0.2463199943304062, 0.03673100098967552, -0.29495999217033386, 0.03465000167489052, 0.2984299957752228, -0.1615000069141388, -0.06697999686002731, -0.14966000616550446, -0.07495799660682678, -0.5328199863433838, 0.02314699999988079, -0.10490000247955322, -0.4016900062561035, 0.10266000032424927, 0.12033999711275101, -0.0722000002861023, -0.6792100071907043, 0.044964998960494995, -0.05635000020265579, -0.2359900027513504, -0.0715700015425682, 0.38036999106407166, -0.6448900103569031, -0.7831299901008606, 0.4019800126552582, -0.12960000336170197, -0.2061000019311905, -1.117300033569336, -0.1176299974322319, 0.0035602001007646322, -0.058538999408483505, 0.1761000007390976, 0.8149099946022034, 0.27312999963760376, 0.024491000920534134, 0.23722000420093536, -0.7752900123596191, 0.6043199896812439, 0.29085999727249146, 0.6669899821281433, -0.5301799774169922, -0.026094000786542892, 0.18690000474452972, 0.022272000089287758, 0.09227199852466583, 0.5166699886322021, -0.29719001054763794, 0.16402000188827515, -0.15789000689983368, 0.4948900043964386, 0.6660900115966797, 0.2371699959039688, 0.6636300086975098, -0.645039975643158, -0.3431299924850464, -0.4151900112628937, 0.135670006275177, 0.3352000117301941, -0.1768999993801117, -0.251010000705719, 0.6211599707603455, -0.27612999081611633, 0.4160900115966797, -0.6760799884796143, 0.05117600038647652, -0.09780199825763702, -0.1678600013256073, -0.5386800169944763, -0.29166001081466675, -0.061785999685525894, -0.11475999653339386, 0.17755000293254852, -0.16154000163078308, 0.34922000765800476, 0.14535999298095703, -0.14922000467777252, 0.2320300042629242, 0.06279700249433517, 0.19293999671936035, 0.35620999336242676, 0.2804499864578247, 0.09851600229740143, 0.4085400104522705], u'plate': [0.22996999323368073, 0.46397000551223755, 0.0487309992313385, -0.28001001477241516, 0.09095600247383118, -0.3223699927330017, -0.6196200251579285, -0.3269200026988983, 0.4125500023365021, -0.8782100081443787, -0.2747499942779541, 0.41284000873565674, -0.7196000218391418, 
-0.007927199825644493, -0.53302001953125, -0.3446600139141083, -0.4163300096988678, 0.05273300036787987, 0.07143499702215195, -0.36719998717308044, 0.07962600141763687, 0.012903000228106976, -0.36539000272750854, -0.37470000982284546, -0.02250799909234047, -0.5860900282859802, -0.00031448001391254365, 0.4810500144958496, 0.17981000244617462, -0.6441900134086609, 0.31233999133110046, 0.7260500192642212, -0.0738229975104332, -0.1527000069618225, -0.8216400146484375, 0.15288999676704407, 0.0005836099735461175, 0.44251999258995056, -0.5082799792289734, 0.6480799913406372, -0.21101999282836914, 0.18111999332904816, -0.040862999856472015, -0.33550000190734863, 0.46386000514030457, 0.3365600109100342, 0.13620999455451965, -0.11591000109910965, -0.013919999822974205, 0.2960900068283081, 0.16875000298023224, 0.05621400102972984, 0.17653000354766846, 0.41154998540878296, -0.2856299877166748, 0.12939000129699707, -0.0519150011241436, -0.3217799961566925, -0.028023000806570053, 0.057496000081300735, 0.4810299873352051, -0.12076999992132187, 0.4929800033569336, -0.08502600342035294, 0.27882999181747437, 0.1863899976015091, -0.09891100227832794, 0.07700599730014801, -0.06128700077533722, 0.038600001484155655, -0.045586999505758286, 0.2882100045681, 0.37288999557495117, 0.16695000231266022, -0.6886100172996521, -0.17276999354362488, 0.24244000017642975, -0.23023000359535217, -0.3651300072669983, -0.4601399898529053, 0.03825400024652481, -0.10386999696493149, -0.5133900046348572, -0.3217799961566925, -0.398470014333725, -0.28411000967025757, -0.230880007147789, -0.19565999507904053, -0.29510998725891113, -0.0211970005184412, 1.0119999647140503, 0.044523000717163086, -0.7592800259590149, 0.08661100268363953, -0.2557600140571594, 0.3678100109100342, -0.4096199870109558, -0.16050000488758087, 0.06298799812793732, -0.0339290015399456, -0.06460899859666824, 0.09211599826812744, 0.21504999697208405, -0.20806999504566193, 0.5728099942207336, 0.17002999782562256, 0.45186999440193176, 0.4485200047492981, -0.13165999948978424, 0.24150000512599945, 0.08731900155544281, -0.0225249994546175, -0.18494999408721924, -0.8463799953460693, -0.7262399792671204, -0.35989001393318176, -0.8489099740982056, -0.33698999881744385, 0.10025999695062637, -0.24688999354839325, -0.2723900079727173, 0.05665599927306175, -0.06994199752807617, 0.025676000863313675, -0.029691999778151512, -0.42113998532295227, 0.3765900135040283, -0.5653499960899353, -0.028137000277638435, 0.5461099743843079, -0.01093399990350008, 0.7123900055885315, 0.4671100080013275, 1.095900058746338, 0.10728999972343445, 0.32058000564575195, 0.20038999617099762, 0.271450012922287, -0.29614999890327454, -0.10916999727487564, -0.21634000539779663, 0.16755999624729156, 0.0713379979133606, -0.2964499890804291, -0.03911300003528595, 0.05911099910736084, -0.3959900140762329, -0.08687900006771088, -0.11808999627828598, -0.8177099823951721, 0.3661699891090393, 0.1730400025844574, 0.221670001745224, -0.29113999009132385, 0.15686999261379242, -0.9015100002288818, -0.24005000293254852, -0.613510012626648, -0.06704600155353546, -0.48236000537872314, 0.028432000428438187, -0.030920999124646187, -0.39267000555992126, -0.07135199755430222, 0.680649995803833, -0.3089199960231781, -0.29826000332832336, 0.1732500046491623, 0.2641200125217438, -0.3480600118637085, -0.08890300244092941, -0.0738380029797554, 0.652180016040802, -0.9208199977874756, 0.2956700026988983, -0.45535001158714294, -0.2921999990940094, 0.18973000347614288, -0.08141600340604782, -0.8208000063896179, 
0.6550400257110596, -0.21310999989509583, 0.41262000799179077, 0.20167000591754913, -0.2824000120162964, -0.36208999156951904, 0.3218800127506256, 0.3283900022506714, 0.052682001143693924, -0.028542999178171158, -0.19399000704288483, 1.1923999786376953, 0.33847999572753906, 0.7335299849510193, 0.6387199759483337, 0.4166699945926666, 0.3130500018596649, -0.18716999888420105, -0.6244400143623352, -0.06630299985408783, 1.25600004196167, -0.23559999465942383, 0.1802700012922287, -0.01635199971497059, 1.0326000452041626, 0.2404100000858307, 0.7878900170326233, -0.3090200126171112, 0.022861000150442123, 0.23323999345302582, -0.07205300033092499, -0.5209699869155884, 0.22487999498844147, 0.12037999927997589, -0.11823000013828278, -0.16132000088691711, -0.4834100008010864, -0.7554000020027161, 0.19968999922275543, 0.5443300008773804, -0.1458899974822998, 0.28233999013900757, 0.4460200071334839, -0.4038200080394745, -0.19175000488758087, -0.0643870010972023, 0.1740799993276596, 0.11693999916315079, 0.361380010843277, -0.34630000591278076, -0.7174100279808044, -0.019415000453591347, -0.5091400146484375, -0.005256799980998039, 0.27004000544548035, -0.42195001244544983, -0.14305999875068665, 0.3917900025844574, 0.123259998857975, 0.10919000208377838, -0.29350998997688293, 0.23645000159740448, -0.09896499663591385, 0.26444000005722046, -0.8996000289916992, -0.23207999765872955, -0.4567300081253052, -0.14708000421524048, 0.16579000651836395, -0.061650000512599945, 0.2840000092983246, -0.35460999608039856, 0.4699699878692627, -0.04667700082063675, 0.08284799754619598, 0.02938299998641014, 0.017597999423742294, -0.19598999619483948, -0.6678900122642517, 0.010618999600410461, -0.41850998997688293, -0.40766000747680664, 0.49007999897003174, 0.8429800271987915, 0.04131900146603584, 0.3179599940776825, -0.24973000586032867, 0.5012900233268738, 0.5120099782943726, 0.15557999908924103, -0.09777999669313431, -0.5386300086975098, -0.14619000256061554, 0.3721199929714203, 0.2412099987268448, 0.098191998898983, -1.1331000328063965, 0.27338001132011414, -0.694570004940033, 0.4390299916267395, -0.2557699978351593, 0.10986000299453735, 0.0818289965391159, -0.1289999932050705, -0.7240300178527832, 0.14067000150680542, 0.380950003862381, -0.49518001079559326, 0.40762999653816223, -0.06102300062775612, 0.006929200142621994, 0.08942600339651108, -0.16360999643802643, 0.11082000285387039, -0.053380001336336136, 0.1334100067615509, 0.6803500056266785, -0.5078799724578857, 0.5844699740409851, -0.10628999769687653], u'coffee': [-0.3468100130558014, 0.5370200276374817, 0.17779000103473663, 0.38572001457214355, -0.4906199872493744, 0.5654399991035461, 0.1420699954032898, -0.12881000339984894, 0.32262998819351196, -0.7728700041770935, -0.1995999962091446, -0.6172000169754028, -0.2544099986553192, -0.061365000903606415, -0.3371799886226654, -0.23259000480175018, -0.08780000358819962, -0.2569200098514557, -0.7098699808120728, 0.009064500220119953, -0.17636999487876892, 0.7657999992370605, 0.45509999990463257, 0.02457600086927414, -0.8898800015449524, -0.16298000514507294, -0.021198999136686325, -0.2335900068283081, -0.5103800296783447, -0.3519099950790405, -0.46494999527931213, 0.5050899982452393, -0.2670600116252899, 0.2824999988079071, -0.8600599765777588, 0.7673599720001221, -0.23765000700950623, -0.4856800138950348, -0.2890399992465973, -0.33779001235961914, -0.8194599747657776, -0.136230006814003, -0.4148600101470947, -0.05334100127220154, 0.3806900084018707, -0.4436100125312805, 0.6518999934196472, -0.17736999690532684, 
-0.05151800066232681, 0.3444899916648865, 0.46792998909950256, 0.024908000603318214, -0.005197300110012293, 0.08449900150299072, -0.18591000139713287, 0.4365200102329254, -0.16502000391483307, 0.35260000824928284, 0.03734099864959717, -0.3621799945831299, -0.3727400004863739, -0.6844300031661987, -0.1810699999332428, 0.13037000596523285, -0.5474900007247925, 0.30074000358581543, -0.05525900050997734, 0.4341700077056885, -0.5560200214385986, -0.39169999957084656, 0.40705999732017517, -0.060440000146627426, -0.11410000175237656, -0.3169400095939636, -0.4650300145149231, -0.669950008392334, 0.526390016078949, -0.6205099821090698, -0.742579996585846, -0.5906800031661987, -0.279449999332428, -0.1883399933576584, -0.15939000248908997, -0.011927000246942043, 0.34325000643730164, -0.3537999987602234, -0.17462000250816345, -0.006110699847340584, -0.10886000096797943, -0.43320000171661377, 0.153779998421669, -0.692520022392273, -0.09012500196695328, -0.20115000009536743, 0.1386300027370453, 0.5563399791717529, 0.04183100163936615, -0.453220009803772, -0.09133400022983551, 0.1266299933195114, -0.06559500098228455, -0.11974000185728073, -0.02257700078189373, -0.5090399980545044, -0.3653300106525421, -0.16644999384880066, -0.05056599900126457, 0.3029699921607971, -0.19582000374794006, 0.30390000343322754, -0.2041500061750412, -0.3171499967575073, 0.02098100073635578, -0.15164999663829803, 0.4764699935913086, 0.17773999273777008, -0.2156900018453598, -0.09064500033855438, -0.12728999555110931, 0.005275100003927946, -0.5076799988746643, 0.42706000804901123, 0.3704800009727478, 0.04797599837183952, -0.25808998942375183, -0.052730999886989594, 1.1952999830245972, 0.14158999919891357, 0.6507899761199951, 0.0801210030913353, -0.09184599667787552, 1.1764999628067017, -0.24573999643325806, -0.16606000065803528, -0.1759600043296814, -0.11052999645471573, -0.3586600124835968, 0.8274700045585632, -0.05659399926662445, -0.001865800004452467, 0.1569499969482422, 0.26311999559402466, 0.04719499871134758, 0.4426800012588501, 0.13415999710559845, 0.059971000999212265, 0.15809999406337738, -0.26664999127388, 0.456930011510849, -0.7328699827194214, -0.43999001383781433, 0.5022600293159485, 0.4971599876880646, 0.24583999812602997, -0.4971599876880646, -0.057725001126527786, -0.3832300007343292, -0.6013000011444092, 0.5894299745559692, -0.29102998971939087, 0.7193599939346313, -0.13576999306678772, -0.07969699800014496, -0.07197699695825577, -0.16380000114440918, -0.520579993724823, 0.2570900022983551, -0.01755799911916256, 0.19234000146389008, 0.15386000275611877, -0.27851998805999756, 0.4191800057888031, -0.6672000288963318, 0.43612998723983765, 0.3616900146007538, -0.4888699948787689, 0.508870005607605, 0.2955699861049652, 0.16021999716758728, -0.14515000581741333, -0.38231000304222107, 0.3613100051879883, -0.027483999729156494, -0.37362000346183777, 0.2564600110054016, 0.056488998234272, 0.9410499930381775, 0.3788900077342987, 0.17239999771118164, -0.5256199836730957, 0.40191999077796936, 0.6528300046920776, -0.06520800292491913, 0.052955999970436096, -0.7592999935150146, -0.23736999928951263, -0.4041900038719177, 0.14212000370025635, -0.6594899892807007, 0.6136900186538696, 0.2742699980735779, -0.2680799961090088, 0.3306500017642975, 0.44764000177383423, 0.01825300045311451, -0.5714499950408936, 0.33156999945640564, 0.3542799949645996, -0.4913400113582611, -0.7191699743270874, 0.010734999552369118, -0.4503600001335144, -0.05372000113129616, 0.22210000455379486, 0.07198099792003632, 0.2849400043487549, 
0.2805599868297577, -0.32148000597953796, 0.13151000440120697, 0.3542900085449219, 0.7686499953269958, -0.36581000685691833, -0.26934999227523804, 0.1004600003361702, -0.21607999503612518, 0.5572699904441833, -0.1684899926185608, 0.5522900223731995, -0.014996999874711037, 0.3045400083065033, 0.14036999642848969, 0.15790000557899475, -0.4514699876308441, 0.8884999752044678, 0.5707600116729736, 0.17141999304294586, 0.2805100083351135, -0.33809998631477356, -0.44110000133514404, -0.58433997631073, -0.5178200006484985, -0.002272099955007434, -1.0285999774932861, -0.6263899803161621, -0.6028100252151489, 0.06740900129079819, -0.0890130028128624, 0.21807000041007996, -0.16535000503063202, -0.23286999762058258, 0.8490099906921387, -0.22221000492572784, 0.2612000107765198, 0.012125000357627869, 0.14645999670028687, 0.6585400104522705, -0.046188000589609146, 0.2547999918460846, 0.3788299858570099, -0.05311800166964531, -0.001664399984292686, 0.3487600088119507, 0.26844000816345215, -0.33862999081611633, 0.005444900132715702, -0.31810998916625977, -0.20492999255657196, -0.0982310026884079, 0.264849990606308, -0.2850100100040436, -0.6182100176811218, 0.14268000423908234, 0.25881001353263855, -0.09572800248861313, -0.2813900113105774, 0.038995999842882156, -1.373900055885315, 0.24184000492095947, -0.5359799861907959, -0.06731899827718735, -1.0226000547409058, -0.3121500015258789, -0.08405400067567825, -0.8276900053024292, 0.21254999935626984, 0.10762999951839447, 0.8076900243759155, -0.4056299924850464, 0.08019699901342392, 0.5660499930381775, -0.11186999827623367, 0.48743999004364014, -0.6557000279426575, 0.06653100252151489, 0.2431199997663498, 0.38499999046325684, -0.1532900035381317, -0.5012800097465515, -0.4274199903011322, 0.601170003414154], u'handle': [-0.6350399851799011, 0.25558000802993774, 0.27101001143455505, -0.27967000007629395, -0.021522000432014465, 0.26151999831199646, 0.47936001420021057, 0.5839800238609314, -0.004087099805474281, -1.6094000339508057, -0.21633000671863556, 0.5021799802780151, 0.2329300045967102, -0.5065900087356567, -0.12039999663829803, -0.5412099957466125, -0.5085200071334839, -0.02715199999511242, 0.17560000717639923, 0.164110004901886, 0.03818399831652641, -0.16960999369621277, 0.10706000030040741, -0.3789899945259094, -0.3346000015735626, 0.8201799988746643, -0.16030000150203705, 0.22023999691009521, 0.008441399782896042, 0.06574299931526184, 0.01903199963271618, 0.15272000432014465, -0.04164300113916397, -0.28692999482154846, -0.6916000247001648, 0.008434800431132317, -0.2329699993133545, -0.4049699902534485, -0.18231000006198883, 0.1915699988603592, -0.5305299758911133, -0.1987600028514862, 0.2065799981355667, -0.4400700032711029, 0.058469001203775406, 0.21457000076770782, -0.23037000000476837, -0.09228099882602692, 0.23555999994277954, 0.7229499816894531, -0.09337600320577621, 0.15223999321460724, -0.6073600053787231, -0.30195000767707825, -0.08992599695920944, -0.05100800096988678, -0.22857999801635742, 0.450190007686615, -0.14904999732971191, -0.15498000383377075, 0.3719800114631653, -0.11683999747037888, -0.1871899962425232, 0.5112800002098083, 0.05845300108194351, -0.17605000734329224, -0.4953399896621704, 0.4288100004196167, -0.1837500035762787, -0.07569000124931335, 0.07360299676656723, 0.1844799965620041, -0.017477000132203102, 0.4407300055027008, 0.374889999628067, -0.29100000858306885, -0.44550999999046326, 0.37880000472068787, 0.1274999976158142, -0.5751299858093262, -0.3618600070476532, 0.06709899753332138, -0.06709600239992142, 
-0.5415300130844116, -0.08138500154018402, -0.03377300128340721, -0.22269000113010406, -0.04294800013303757, -0.3645800054073334, 0.10789000242948532, -0.319350004196167, 0.6587799787521362, -0.7722799777984619, -0.586929976940155, -0.14975999295711517, -0.4672299921512604, -0.3415299952030182, 0.18682000041007996, 0.35892000794410706, -0.22056999802589417, -0.24062000215053558, -0.24237999320030212, -0.4008600115776062, -1.0168999433517456, 0.264710009098053, -0.08926700055599213, 0.40650999546051025, 0.09264100342988968, -0.09869299829006195, 0.17815999686717987, -0.12067999690771103, -0.11885000020265579, -0.7850599884986877, -0.2972300052642822, -0.15508000552654266, -0.10123000293970108, -0.0009703100076876581, 0.2433300018310547, 0.026819000020623207, 0.20468999445438385, 0.07687199860811234, -0.30188998579978943, 0.13313999772071838, -0.13932999968528748, -0.09742099791765213, 0.21769000589847565, 0.01958400011062622, 0.4927699863910675, -0.01698100008070469, -0.17130999267101288, 0.48388999700546265, -0.19483999907970428, -0.03627299889922142, -0.02980799973011017, 0.22131000459194183, -0.11347000300884247, -0.36333000659942627, -0.19470000267028809, -0.30028998851776123, 0.16868999600410461, 0.11595000326633453, -0.13530999422073364, -0.2130800038576126, -0.17880000174045563, -0.33880001306533813, 0.42539000511169434, 0.03415900096297264, -0.4572399854660034, 0.4518199861049652, 0.1301400065422058, 0.4900600016117096, -0.14218999445438385, -0.0363599993288517, 0.1586800068616867, 0.353659987449646, 0.19483999907970428, -0.13268999755382538, -0.15080000460147858, -0.0896259993314743, -0.1260800063610077, 0.42412999272346497, 0.03244100138545036, -0.06661099940538406, -0.20079000294208527, 0.04473400115966797, -0.006118500139564276, 0.5240700244903564, 0.09656800329685211, 0.04802300035953522, 0.20646999776363373, -0.03967199847102165, 0.7225300073623657, -0.6320499777793884, -0.36743998527526855, 0.14670999348163605, -0.49351000785827637, 0.0009926300263032317, -0.7007200121879578, -0.0323369987308979, -0.5657200217247009, 0.26493000984191895, -0.28720998764038086, 0.14591999351978302, 0.20587000250816345, -0.3561199903488159, -0.06518100202083588, 0.4660699963569641, 0.3758600056171417, 0.2165900021791458, -0.04248199984431267, 0.0865660011768341, 0.12093999981880188, 0.2123900055885315, 0.3001999855041504, 0.4444499909877777, -0.053346000611782074, -0.14268000423908234, 0.3468799889087677, -0.36855998635292053, -0.302839994430542, 0.1516599953174591, -0.09013299643993378, 0.5504400134086609, -0.09816700220108032, 0.1338600069284439, 0.12031999975442886, 0.22641000151634216, 0.049279000610113144, 0.03812199831008911, -0.19720999896526337, 0.14573000371456146, 0.20492999255657196, 0.3987700045108795, -0.06722100079059601, -0.00036460001138038933, -0.9944599866867065, -0.10197000205516815, 0.08546499907970428, -0.3834500014781952, 0.22054000198841095, -0.04915900155901909, 0.045524999499320984, 0.4301699995994568, 0.5743799805641174, -0.021385999396443367, -0.6912199854850769, 0.04809999838471413, 0.24295000731945038, -0.29346001148223877, -0.08190199732780457, -0.03821299970149994, -0.17111000418663025, -0.18560999631881714, -0.20344999432563782, -0.14172999560832977, 0.8076900243759155, -0.2328599989414215, 0.04054199904203415, -0.17190000414848328, 0.07736799865961075, 0.05012499913573265, -0.15334999561309814, 0.1742600053548813, -0.31189000606536865, -0.2279299944639206, -0.2636600136756897, -0.09070699661970139, 0.04577599838376045, 0.1362999975681305, 0.06323900073766708, 
-0.0033666000235825777, 0.1399099975824356, 0.09754899889230728, -0.2567099928855896, 0.16345000267028809, -0.2222999930381775, -0.36098000407218933, -0.25321999192237854, -0.1263200044631958, 0.18671999871730804, -0.24653999507427216, -0.3219900131225586, -0.08645500242710114, 0.19750000536441803, 0.2914400100708008, -0.23571999371051788, -0.005081899929791689, -0.31134000420570374, -0.13816000521183014, -0.17786000669002533, 0.07928899675607681, 0.2300100028514862, 0.2985199987888336, 0.26954999566078186, 0.14499999582767487, -0.055011000484228134, -1.5063999891281128, 0.30542999505996704, -0.10802999883890152, 0.20830999314785004, -0.1183599978685379, 0.005499300081282854, 0.46465998888015747, 0.31207001209259033, 0.08366599678993225, -0.23604999482631683, 0.16725000739097595, 0.3367899954319, -0.4495700001716614, -0.0477680005133152, 0.11952000111341476, -0.01782199926674366, 0.227960005402565, -0.4270699918270111, 0.4380599856376648, 0.28327998518943787, -0.14788000285625458, -0.06647700071334839, 0.14156000316143036, -0.10936000198125839], u'garden': [-0.3815099895000458, -0.37696999311447144, 0.2855300009250641, -0.22542999684810638, 0.10474000126123428, 0.3144899904727936, 0.22423000633716583, -0.15057000517845154, 0.1826300024986267, -0.1294800043106079, 0.23156000673770905, 0.21854999661445618, -0.1908400058746338, 0.36695998907089233, 0.03639400005340576, 0.05786500126123428, -0.32440000772476196, -0.24842999875545502, 0.30524998903274536, 0.43773001432418823, -0.4292599856853485, 0.3825699985027313, -0.20917999744415283, 0.3992300033569336, -0.027998000383377075, -0.05781500041484833, -0.5520600080490112, -0.12752999365329742, -0.125900000333786, 0.6894400119781494, 0.857990026473999, 0.03945999965071678, -0.0792979970574379, 0.02505199983716011, -0.40748998522758484, 0.7513599991798401, 0.45625999569892883, -0.5597800016403198, -0.21382999420166016, -0.6349400281906128, 0.1618099957704544, 0.5070599913597107, -0.25475001335144043, 0.5692099928855896, 0.1527000069618225, 0.014781000092625618, 0.3423300087451935, 0.6582000255584717, -0.09254000335931778, 0.150969997048378, -0.44650998711586, 0.019342999905347824, -0.026875000447034836, -0.15076999366283417, 0.14729000627994537, -0.48680999875068665, 0.09150099754333496, 0.08454199880361557, 0.4682300090789795, -0.44352999329566956, 0.3041900098323822, -0.4485900104045868, 0.51555997133255, 0.5319700241088867, 0.16854000091552734, -0.06807299703359604, -0.21452000737190247, 0.3167400062084198, -0.4970000088214874, -0.5396900177001953, 0.125450000166893, 0.11830999702215195, 0.09286600351333618, -0.1215599998831749, -1.1497000455856323, 0.5708400011062622, 0.3026700019836426, -0.2992599904537201, 0.08565600216388702, -0.5900499820709229, -0.09740900248289108, 0.8000500202178955, 0.3870199918746948, -0.18768000602722168, 0.30121999979019165, 0.5868600010871887, 0.11291000247001648, 0.2039799988269806, -0.16124999523162842, -0.1702200025320053, 0.2853800058364868, -0.7242699861526489, 0.312610000371933, -0.4093399941921234, -0.00753450021147728, -0.26985999941825867, 0.6652600169181824, -0.31338000297546387, -0.1817300021648407, -0.6528800129890442, 0.07269400358200073, 0.2479500025510788, -0.39730000495910645, 0.014948000200092793, -0.2746399939060211, -0.4124299883842468, 0.17802000045776367, 0.10357999801635742, 0.3244900107383728, -0.03671099990606308, -0.031387001276016235, -0.24835999310016632, 0.3366299867630005, -0.002970699919387698, -0.2907699942588806, 0.23970000445842743, -0.34007999300956726, 0.5375000238418579, 
-0.061643000692129135, -0.3274100124835968, -0.2680400013923645, 0.22105999290943146, 0.05132700130343437, 0.18925000727176666, -0.29662999510765076, -0.4036400020122528, 0.12571999430656433, 0.37450000643730164, -0.39923998713493347, 0.1645900011062622, 0.674019992351532, 0.4233900010585785, 0.6137400269508362, 0.196260005235672, -0.6170799732208252, -0.15934999287128448, 0.36761999130249023, -0.16901999711990356, -0.11166000366210938, -0.04686199873685837, -0.19588999450206757, 0.21995000541210175, 0.5087400078773499, -0.011893999762833118, -0.038621000945568085, 0.5913800001144409, 0.10446000099182129, -0.012612000107765198, -0.7282500267028809, -0.20074999332427979, -0.1428299993276596, 0.16646000742912292, -0.5036200284957886, -0.31536000967025757, 0.059411000460386276, -0.06692200154066086, -0.3095400035381317, 0.6859800219535828, 0.1783200055360794, -0.14372000098228455, 0.5731599926948547, 0.33910998702049255, 0.4636799991130829, 0.18136000633239746, 0.22290000319480896, 0.5944100022315979, -0.16060000658035278, -0.2675800025463104, 0.6194999814033508, 0.01803700067102909, -0.13447000086307526, -0.07646600157022476, -0.24681000411510468, 0.20898999273777008, -0.1808999925851822, 0.3867500126361847, 0.3902199864387512, 0.3021300137042999, 0.6541000008583069, -0.46832001209259033, -0.0517209991812706, -0.12910999357700348, -0.0068398998118937016, -0.2579300105571747, -0.22234000265598297, 0.5822200179100037, 0.34984999895095825, 0.27858999371528625, 0.46055999398231506, -0.1822900027036667, -0.3108600080013275, 0.3313399851322174, -0.5105900168418884, -0.012795999646186829, -0.8314399719238281, 0.14110000431537628, 0.23273000121116638, 0.05725900083780289, -0.2703000009059906, -0.020945999771356583, 0.8648099899291992, -0.136570006608963, -0.37626999616622925, 0.021150000393390656, -0.0010658999672159553, -0.493259996175766, -0.0783730000257492, -0.3673200011253357, -0.12173999845981598, -0.2492000013589859, -0.273140013217926, 0.07824400067329407, 0.17736999690532684, -0.6940100193023682, 0.32315000891685486, 0.1722699999809265, 0.07948499917984009, -0.33959001302719116, 0.42671999335289, -0.17876000702381134, 0.9240900278091431, -0.36906999349594116, -0.5343599915504456, 0.18775999546051025, -0.3068400025367737, -0.41172999143600464, -0.18964999914169312, 0.3903000056743622, 0.0500200018286705, 0.006236200220882893, 0.367110013961792, 0.12999999523162842, 0.18671999871730804, -0.06464000046253204, 0.5920000076293945, 0.14650000631809235, -0.136570006608963, -0.5024200081825256, -0.5238800048828125, -0.32038000226020813, -0.31279999017715454, -0.3572799861431122, 0.04394200071692467, 0.1914599984884262, -0.4566099941730499, -0.17427000403404236, -0.11518000066280365, 0.4546000063419342, -0.10493999719619751, -0.07854799926280975, 0.991100013256073, 0.25859999656677246, -0.009918199852108955, -0.3179500102996826, 0.6255499720573425, 0.006936000194400549, 0.06283699721097946, -0.09808900207281113, 0.48958998918533325, -0.03616200014948845, -0.3287999927997589, -0.20322999358177185, 0.33671998977661133, -0.41905999183654785, -0.48342999815940857, -0.33577999472618103, -0.18427999317646027, 0.7384999990463257, 0.44089001417160034, -0.4633300006389618, 0.4697999954223633, -0.11963000148534775, 0.03388499841094017, -0.753790020942688, -0.08949100226163864, 0.14844000339508057, -1.8029999732971191, 0.07979899644851685, -0.08736500144004822, 0.026892000809311867, -0.4890199899673462, -0.20044000446796417, -0.06944800168275833, -0.448309987783432, -0.40529999136924744, 0.042635999619960785, 
0.18982000648975372, 0.2551400065422058, -0.1647700071334839, -0.03965799883008003, 0.7552599906921387, -0.11817000061273575, -0.20559999346733093, 0.04431099817156792, 0.5842099785804749, 0.12741999328136444, 0.36162999272346497, 0.24651999771595, 0.7564100027084351, 0.26798999309539795], u'flower': [-0.5662500262260437, 0.20432999730110168, -0.5851500034332275, -0.3309299945831299, -0.11528000235557556, 0.5677400231361389, -0.209539994597435, 0.2922399938106537, -0.27884000539779663, 0.1757200062274933, -0.2682799994945526, 0.38054001331329346, -1.107200026512146, 0.292959988117218, 0.13242000341415405, 0.033291999250650406, -0.2502500116825104, -0.7677500247955322, -0.11358000338077545, -0.42972999811172485, -0.5192199945449829, 0.5524700284004211, -0.11015000194311142, 0.21106000244617462, 0.05629400163888931, 0.09303300082683563, -0.44808998703956604, -0.52183997631073, -0.4383600056171417, 0.4944800138473511, 0.1385599970817566, 0.1992799937725067, -0.02317800000309944, 0.14127999544143677, -0.6055300235748291, 0.5517799854278564, 0.5594099760055542, -0.8568099737167358, -0.943310022354126, -0.06471899896860123, -0.12499000132083893, -0.012474999763071537, 0.054218001663684845, 0.19483999907970428, -0.1574700027704239, -0.57014000415802, 0.4213300049304962, 0.39691999554634094, 0.07406599819660187, 0.15136000514030457, 0.15251000225543976, 0.11525999754667282, -0.12004999816417694, 0.2478100061416626, -0.2757999897003174, -0.06957600265741348, -0.4957599937915802, 0.08993899822235107, 0.36699000000953674, -0.4724400043487549, 0.437610000371933, -0.547760009765625, 0.4238399863243103, 0.01512099988758564, 0.251800000667572, 0.11751999706029892, 0.13941000401973724, -0.007396799977868795, -0.185479998588562, 0.18088999390602112, -0.41113001108169556, 0.1873299926519394, 0.04382999986410141, -0.6751599907875061, -0.36983001232147217, 0.5001199841499329, 0.37018999457359314, -0.9349799752235413, 0.730239987373352, 0.024934999644756317, 0.003587299957871437, 0.5699499845504761, -0.5312600135803223, 0.03549500182271004, 0.37011998891830444, 0.4763199985027313, 0.45197999477386475, -0.12177000194787979, 0.11737000197172165, 0.3409300148487091, 0.9555500149726868, -0.6446899771690369, -0.41471999883651733, -0.5304200053215027, 0.1923999935388565, 0.6453700065612793, 1.0368000268936157, -0.39090999960899353, 0.18783000111579895, -0.27827000617980957, 0.5083900094032288, -0.38374000787734985, -0.12257000058889389, -0.4944300055503845, -0.12424000352621078, 0.38025999069213867, 0.5948100090026855, -0.039329998195171356, 0.14562000334262848, 0.5093100070953369, 0.40623000264167786, 0.18896999955177307, 0.6166700124740601, -0.060210999101400375, 0.4706000089645386, 0.084927998483181, -0.5475299954414368, 0.8767499923706055, 0.20649999380111694, -0.6756899952888489, 0.01486899983137846, -0.42688998579978943, 0.014279000461101532, -0.26798000931739807, 0.27063998579978943, -0.14508000016212463, 0.0315839983522892, 0.7317699790000916, 0.11903999745845795, 0.5232099890708923, 0.5434799790382385, 0.3349300026893616, 0.4322499930858612, -0.024568000808358192, -0.5770000219345093, -0.006186699960380793, 0.011706000193953514, -0.5876299738883972, -0.3093099892139435, -0.1787700057029724, 0.42809998989105225, 0.16580000519752502, -0.11643999814987183, -1.0343999862670898, -0.5071600079536438, 0.3584200143814087, -0.09505400061607361, -0.13955999910831451, 0.23122000694274902, -0.09618300199508667, -0.1984899938106537, 0.35545000433921814, 0.03009999915957451, -0.03452900052070618, 0.05889200046658516, 
0.1316000074148178, -0.6226800084114075, -0.5986599922180176, -0.3522399961948395, 0.26427000761032104, 0.05253500118851662, -0.32287999987602234, -0.053585998713970184, -0.11563999950885773, 0.05083899945020676, -0.009559599682688713, 0.03974900022149086, 0.17640000581741333, -0.3162600100040436, -0.3109000027179718, -0.6182000041007996, -0.015124999918043613, 0.01839200034737587, 0.12991000711917877, 0.2237900048494339, 0.2504099905490875, 0.5292099714279175, 0.7833200097084045, -0.31422001123428345, -0.2101999968290329, -0.5978699922561646, -0.7671599984169006, 0.18095000088214874, -0.01952500082552433, -0.1754000037908554, 0.29308000206947327, 0.7831299901008606, 0.3650699853897095, 0.0893929973244667, -0.49013999104499817, 0.21626000106334686, 0.24076999723911285, -0.01613299921154976, -0.22803999483585358, -0.09313199669122696, 0.3470099866390228, -0.07351800054311752, -0.24706000089645386, -0.1275400072336197, 0.11836999654769897, 0.8362900018692017, -0.13506999611854553, -0.017093999311327934, -0.4079799950122833, 0.519540011882782, -0.25209999084472656, -0.03884899988770485, 0.4420900046825409, -0.09170400351285934, -0.02496499940752983, 0.13481000065803528, -0.3832300007343292, 0.5194100141525269, -0.38631999492645264, 0.28165000677108765, 0.10380999743938446, -0.002802100032567978, 0.08509500324726105, 0.04745800048112869, -0.06684000045061111, 0.8299000263214111, -0.48078998923301697, -0.588890016078949, 0.25540000200271606, -0.5768700242042542, -0.05639899894595146, -0.0921109989285469, 0.30024999380111694, 0.4849799871444702, 0.2869099974632263, 0.5654199719429016, -0.18237000703811646, -0.03887699916958809, 0.517300009727478, -0.08642400056123734, 0.3870599865913391, -0.31314000487327576, -0.5476999878883362, -0.5207800269126892, -0.2874400019645691, 0.4049299955368042, -0.46720999479293823, -0.864109992980957, 0.29629001021385193, -0.8546599745750427, -0.010600999929010868, 0.3621099889278412, 0.10311999917030334, 0.09269800037145615, -0.1714400053024292, 0.5483499765396118, 0.3253200054168701, 0.24955999851226807, -0.7130200266838074, 1.0091999769210815, -0.22051000595092773, 0.5484200119972229, 0.08101800084114075, -0.08393599838018417, -0.1587499976158142, -0.09750600159168243, -0.29708999395370483, 0.2603900134563446, -0.13381999731063843, -0.17330999672412872, -0.38405999541282654, -0.24905000627040863, 0.3493500053882599, 0.1588899940252304, 0.42142000794410706, -0.15251000225543976, 0.569379985332489, -0.025880999863147736, -0.4279100000858307, -0.09410600364208221, 0.1973000019788742, -0.9737300276756287, -0.9771199822425842, -0.779009997844696, -0.1684899926185608, -0.3402999937534332, -0.44843998551368713, -0.13641999661922455, -0.2752099931240082, -0.026765000075101852, 0.3047100007534027, 0.20598000288009644, 0.2559800148010254, 0.41978999972343445, 0.003238100092858076, -0.0017529999604448676, -0.4491899907588959, 0.3981899917125702, 0.23364999890327454, 0.14860999584197998, 0.17371000349521637, 0.6088299751281738, -0.1115799993276596, 0.6574599742889404, 0.2155500054359436], u'bear': [0.08617699891328812, 0.05475499853491783, -0.44714000821113586, -0.022551000118255615, 0.3118300139904022, 0.03281800076365471, -0.1386999934911728, 0.965719997882843, 0.4887999892234802, -0.7509899735450745, -0.09366100281476974, -0.13359999656677246, -0.2696700096130371, 0.5577800273895264, 0.3718400001525879, -0.06423500180244446, -0.1288599967956543, -0.5601500272750854, -0.0642469972372055, -0.0416099987924099, 0.25532999634742737, -0.21694999933242798, 0.559440016746521, 
0.4828700125217438, -0.19713999330997467, 0.07153700292110443, 0.036490000784397125, -0.33761000633239746, 0.16171999275684357, -0.01856599934399128, -0.004392500035464764, 0.05688200145959854, 0.0011433999752625823, -0.6351799964904785, -0.6394500136375427, -0.37049001455307007, -0.05787099897861481, 0.2994599938392639, -0.21265000104904175, -0.1446399986743927, -0.6208400130271912, -0.5991899967193604, 0.14505000412464142, -0.056853998452425, -0.6521599888801575, 0.23423999547958374, -0.16001999378204346, -0.13722999393939972, 0.4225499927997589, 0.029472999274730682, 0.40331000089645386, -0.43748000264167786, 0.01583399996161461, -0.20972999930381775, 0.2067900002002716, 0.7092999815940857, 0.19593000411987305, 0.5242400169372559, 0.2905600070953369, 0.0074848998337984085, 0.1034500002861023, -0.16943000257015228, 0.257860004901886, -0.2662599980831146, -0.19976000487804413, -0.11185000091791153, -0.5672399997711182, -0.4232900142669678, -0.3765000104904175, -0.2390100061893463, -0.29482999444007874, 0.3744400143623352, -0.5230900049209595, -0.15592999756336212, -0.11412999778985977, 0.01948400028049946, 0.2671099901199341, 0.057638999074697495, 0.3110100030899048, -0.3780199885368347, 0.3113600015640259, 0.07225000113248825, -0.020555000752210617, -0.4339100122451782, 0.7539399862289429, 0.012868000194430351, 0.05024399980902672, 0.24589000642299652, -0.15259000658988953, 0.015660999342799187, -0.10038000345230103, -0.3959600031375885, -0.3743799924850464, 0.18950000405311584, -0.4152899980545044, 0.3433000147342682, 0.0005279899924062192, -0.32638001441955566, -0.3639200031757355, 0.29912999272346497, 0.3400300145149231, 0.390859991312027, -0.04351300001144409, 0.11121000349521637, -0.11695999652147293, 0.42890000343322754, 0.13189999759197235, -0.016068000346422195, 0.6978899836540222, 0.51214998960495, -0.43073999881744385, 0.4399400055408478, -0.4082399904727936, 0.04582099989056587, 0.33180999755859375, 0.15765999257564545, -0.24086999893188477, 0.24838000535964966, 0.2809799909591675, -0.0416020005941391, -0.3338199853897095, -0.4464600086212158, 0.2276500016450882, 0.42092999815940857, 0.021709999069571495, -0.07294800132513046, -0.4520699977874756, 0.1491899937391281, -0.04959699884057045, -0.8039399981498718, -0.10510999709367752, 0.13964000344276428, 0.17542000114917755, 0.40327998995780945, -0.18581999838352203, 0.17392000555992126, -0.16662000119686127, 0.4273900091648102, 0.18717999756336212, -0.3443399965763092, 0.42816999554634094, -0.1311900019645691, 0.14406999945640564, 0.3389500081539154, 0.12115000188350677, -0.09330800175666809, 0.20791999995708466, 0.317440003156662, 0.15489999949932098, 0.3908100128173828, -0.2504799962043762, 0.9339100122451782, -0.18456000089645386, -0.606410026550293, 0.6893200278282166, 0.18998000025749207, 0.30239999294281006, -0.42274999618530273, -0.5629500150680542, 0.8706300258636475, 0.6314100027084351, 0.3581399917602539, 0.45691999793052673, -0.19333000481128693, -0.5554999709129333, 0.33449000120162964, 0.7002500295639038, 0.5532799959182739, -0.22769999504089355, -0.42976999282836914, -0.02907400019466877, -0.21066999435424805, -0.04041299968957901, 0.19192999601364136, 0.13761000335216522, -0.4858199954032898, 0.3165700137615204, -0.15511000156402588, -0.09555500000715256, -0.18359999358654022, 0.46821001172065735, -0.27362000942230225, -0.17750999331474304, -0.1513800024986267, -0.05668700113892555, -0.055984001606702805, -0.04369800165295601, -0.36201000213623047, 0.18331000208854675, 0.04332699999213219, 0.3891899883747101, 
-0.46320000290870667, -0.28213000297546387, -0.12514999508857727, -0.538860023021698, -0.4121600091457367, 0.45824000239372253, -0.09152700006961823, 0.1168299987912178, 0.2362000048160553, 1.3583999872207642, -0.4162999987602234, 0.4088299870491028, -0.3451699912548065, 0.0504009984433651, 0.25273001194000244, -0.4099999964237213, 0.6997299790382385, -0.029600000008940697, -0.04119500145316124, 0.11202000081539154, -0.49028000235557556, 0.010991999879479408, -0.4819900095462799, 0.4384300112724304, -0.5123100280761719, 0.3997899889945984, 0.6008300185203552, -0.40575000643730164, -0.04518299922347069, -0.24097999930381775, 0.4136100113391876, -0.14577999711036682, -0.20145000517368317, -0.08418600261211395, -0.23309999704360962, 0.3099699914455414, 0.06642299890518188, 0.3023099899291992, -0.1204100027680397, -0.5137500166893005, -0.21070000529289246, -0.4063799977302551, -0.23156000673770905, -0.40672001242637634, 0.41839998960494995, -0.3291099965572357, -0.8387799859046936, 0.10276000201702118, 0.29787999391555786, -0.24292999505996704, 0.6188899874687195, 0.5402600169181824, -0.12751999497413635, -1.1992000341415405, -0.2975099980831146, -0.213919997215271, -0.5308399796485901, -0.33414000272750854, -0.26833999156951904, -0.05181200057268143, 0.3443700075149536, -0.4065900146961212, 0.041439998894929886, 0.6415899991989136, 0.2142000049352646, -0.36100998520851135, -0.03551299870014191, -0.07894200086593628, -0.048973001539707184, 0.010626000352203846, 0.09264300018548965, 0.04282600060105324, 0.03621099889278412, -0.4253000020980835, 0.2589699923992157, 0.42135000228881836, -0.009285500273108482, 0.04488300159573555, 0.3364799916744232, -0.4607599973678589, 0.24865999817848206, -0.09607300162315369, 0.26530998945236206, -0.007200700230896473, -0.6796000003814697, -1.3896000385284424, -0.27316001057624817, 0.03496899828314781, -0.20169000327587128, -0.13490000367164612, 0.2436400055885315, -0.0804940015077591, -0.2445800006389618, -0.5914300084114075, 0.13549000024795532, 0.054294999688863754, 0.058038000017404556, 0.7219700217247009, -0.6249300241470337, -0.23878000676631927, -0.14982999861240387, 0.29794999957084656, -0.542900025844574, -0.24959999322891235, -0.019161000847816467, 0.2290000021457672, 0.530210018157959, 0.3533799946308136, -0.07489900290966034], u'coast': [0.18476000428199768, -0.054829999804496765, -0.15554000437259674, 0.36807000637054443, -0.29811999201774597, 0.17494000494480133, -0.25137999653816223, 0.5759599804878235, 0.5010899901390076, -0.8404200077056885, -0.5627400279045105, -0.15995000302791595, -0.016423000022768974, 0.080035001039505, 0.3162499964237213, 0.3257099986076355, 0.06914100050926208, -0.29295000433921814, 0.042440999299287796, 0.11362999677658081, 0.11885999888181686, 0.42904001474380493, -0.24097999930381775, 0.008985199965536594, -0.3980900049209595, -0.010440000332891941, 0.05829999968409538, 0.3160499930381775, -0.5098199844360352, 0.5718899965286255, 0.25383999943733215, 0.08062399923801422, -0.4825400114059448, 0.17856000363826752, -0.42559000849723816, -0.17031000554561615, 0.11525999754667282, 0.22018000483512878, 0.23568999767303467, -0.1494700014591217, -0.5316299796104431, -0.22055000066757202, 0.12425000220537186, 0.002504300093278289, -0.17228999733924866, -0.1413400024175644, 0.9818999767303467, 0.1315000057220459, 0.04089999943971634, -0.08827599883079529, -0.11316999793052673, -0.003083599964156747, 0.3110400140285492, -0.5289499759674072, -0.5083900094032288, 0.4027099907398224, -0.1421400010585785, 0.22384999692440033, 
-0.02384999953210354, -0.12529000639915466, -0.11513999849557877, 0.17488999664783478, 0.9042099714279175, -0.14842000603675842, 0.4966700077056885, 0.19479000568389893, -0.23375000059604645, 0.4857200086116791, 0.22551999986171722, -0.2933900058269501, 0.042385000735521317, -0.1869799941778183, -0.06704399734735489, -0.048367999494075775, -0.7627599835395813, 0.2479500025510788, 0.4133799970149994, -0.06189600005745888, -0.2040099948644638, 0.1316400021314621, -0.35989001393318176, 0.07717099785804749, -0.7646499872207642, 0.6609899997711182, 0.08921899646520615, -0.18388999998569489, -0.0015122999902814627, -0.19269999861717224, 0.2580699920654297, -0.260809987783432, -0.038541000336408615, 0.21127000451087952, 0.1434199959039688, -0.5691199898719788, -0.38161998987197876, 0.22025999426841736, 0.6022899746894836, 0.05518599972128868, 0.4117699861526489, 0.22617000341415405, 0.10053999722003937, 0.7587299942970276, 0.2819899916648865, 0.08801399916410446, 0.5708600282669067, -0.08093699812889099, 0.03926299884915352, -0.08093000203371048, 0.36579999327659607, 0.6579999923706055, -0.014445999637246132, -0.8527799844741821, -0.012292000465095043, -0.51774001121521, 0.4076499938964844, -0.15424999594688416, 0.30974000692367554, 0.09816800057888031, 0.29715999960899353, 0.3491399884223938, -0.23058000206947327, -0.5168200135231018, -0.8098400235176086, -0.21139000356197357, 0.3870899975299835, 0.3490400016307831, -0.32572999596595764, -0.2159699946641922, 0.45370998978614807, 0.15267999470233917, -0.4209200143814087, 0.40007999539375305, -0.11350999772548676, -0.5106099843978882, 0.5612199902534485, 0.34483999013900757, -0.10215000063180923, 0.20127999782562256, 0.20934000611305237, -0.3083600103855133, -0.1324400007724762, 0.09866800159215927, -0.36353999376296997, 0.5736799836158752, -0.6320199966430664, 0.09138999879360199, 0.4755699932575226, -0.27070000767707825, -0.3163299858570099, -0.30546000599861145, 1.0716999769210815, -0.35747000575065613, 0.8612599968910217, -0.036834001541137695, 0.809469997882843, -0.165910005569458, 0.004195299930870533, -0.268779993057251, 0.25429001450538635, 0.49386000633239746, 0.1433500051498413, -0.5352500081062317, 0.04150399938225746, -0.17204999923706055, -0.11562000215053558, -0.5773299932479858, 0.6537500023841858, -0.26767998933792114, 0.20674000680446625, 0.5782600045204163, 0.07100299745798111, -0.0531810000538826, -0.6875600218772888, 0.15049999952316284, 0.4337500035762787, 0.3257400095462799, -0.1465200036764145, 0.08299700170755386, -0.10615000128746033, -0.3589699864387512, -0.024723999202251434, -0.04822700098156929, 0.1179099977016449, -0.049793001264333725, 0.6533799767494202, -0.5912200212478638, -0.17812000215053558, 0.20091000199317932, -0.4951399862766266, -0.24344000220298767, -0.09558100253343582, 0.5836099982261658, 0.2781200110912323, -0.01460300013422966, 0.9128699898719788, -0.12604999542236328, 0.48563000559806824, -0.540340006351471, -0.17241999506950378, 0.08725299686193466, 0.9235600233078003, -0.22779999673366547, -0.009069600142538548, -0.1782499998807907, 0.3964200019836426, 0.22867999970912933, 0.10708999633789062, -0.0293550007045269, 0.6486300230026245, -0.19140000641345978, 0.033006999641656876, 0.28652000427246094, -0.2622399926185608, 0.6008800268173218, 0.20568999648094177, -0.14778999984264374, 0.002737900009378791, -0.42048001289367676, 0.14399999380111694, -0.29875999689102173, 1.1146999597549438, -0.4876599907875061, 0.5372999906539917, -0.2665899991989136, -0.02495799958705902, -0.09629400074481964, 
0.529449999332428, -0.9615899920463562, -0.15806999802589417, 0.03431500121951103, 0.5185400247573853, 0.07953900098800659, -0.4985400140285492, 0.29767999053001404, 0.5569700002670288, -0.15275000035762787, -0.3267900049686432, 0.10113000124692917, 0.545009970664978, -0.2502000033855438, -0.6246600151062012, 0.24150000512599945, 0.32589998841285706, 0.19944000244140625, 0.16704000532627106, 0.01718899980187416, -0.30118998885154724, 0.12082000076770782, -0.42882001399993896, 0.14386999607086182, -0.06876199692487717, 0.7274399995803833, 0.21607999503612518, 0.6665999889373779, 0.04462200030684471, -0.2924399971961975, 0.12195000052452087, 0.395579993724823, -0.1437000036239624, 0.07192400097846985, 0.10955999791622162, -0.026350000873208046, -0.5658599734306335, 0.12941999733448029, -0.3005099892616272, -0.2526499927043915, -0.32743000984191895, -0.14485999941825867, 0.010331000201404095, -0.3880999982357025, 0.6939899921417236, -0.4418100118637085, -0.03872000053524971, -0.026512999087572098, 0.19482000172138214, 0.33456000685691833, -1.6979000568389893, 0.2643899917602539, 0.2859799861907959, -0.09531699866056442, -0.02472499944269657, 0.0828000009059906, -0.18671000003814697, 0.18203000724315643, -0.2981100082397461, -0.24921000003814697, -0.4860300123691559, -0.19699999690055847, 0.1914999932050705, -0.21690000593662262, -0.3404799997806549, 0.08874499797821045, 0.0532350018620491, -0.6068599820137024, 0.10875999927520752, 1.0820000171661377, 0.18987999856472015, -0.22105999290943146, 0.3443799912929535, -0.08415800333023071], u'vegetable': [0.21729999780654907, 0.0984639972448349, 0.3559400141239166, -0.01597600057721138, 0.10531000047922134, 0.07965700328350067, -0.07343000173568726, 0.07141300290822983, 0.19237999618053436, -0.37070998549461365, 0.08191099762916565, -0.13711999356746674, -0.3542799949645996, 0.5930600166320801, -0.24045999348163605, -0.027993999421596527, -0.43950000405311584, 0.15509000420570374, 0.013212000019848347, 0.12084999680519104, -0.6415600180625916, -0.04674199968576431, 0.3537999987602234, 0.40053001046180725, -0.011703000403940678, 0.26151999831199646, -0.3287400007247925, -0.10379999876022339, -0.47468000650405884, -0.11693999916315079, -0.4426000118255615, 0.6647300124168396, -0.20848000049591064, 0.023663999512791634, -0.15112000703811646, 0.5331400036811829, 0.4062100052833557, -0.39337998628616333, -0.5112900137901306, 0.1758500039577484, 0.48186999559402466, 0.20068000257015228, 0.2957899868488312, -0.604420006275177, 0.030236000195145607, -0.4623500108718872, 0.3656800091266632, 0.6788600087165833, -0.10965000092983246, 0.3525199890136719, 0.015644000843167305, 0.31365999579429626, 0.05926600098609924, -0.0556580014526844, -0.18001000583171844, -0.1976500004529953, -0.06275100260972977, -0.09548400342464447, 0.3606399893760681, -0.32554998993873596, -0.27445998787879944, -0.001310999970883131, 0.26409000158309937, 0.09819500148296356, -0.02389799989759922, 0.37130001187324524, -0.5283399820327759, -0.015675000846385956, -0.6756500005722046, 0.4046199917793274, 0.3767699897289276, 0.002157999901100993, 0.5101000070571899, -0.6043499708175659, -0.9886199831962585, 0.6396600008010864, 0.7393100261688232, -0.6098999977111816, 0.03654899820685387, -0.05273599922657013, 0.5414199829101562, 0.3220599889755249, -0.18312999606132507, 0.4940899908542633, 0.557919979095459, -0.014762000180780888, -0.30956000089645386, 0.08833400160074234, -0.014015999622642994, 0.004393099807202816, 0.3858200013637543, -0.6424800157546997, 0.11067000031471252, 
-0.8815400004386902, -0.18184000253677368, 0.5906800031661987, 0.3073199987411499, 0.34582000970840454, -0.208979994058609, 0.23800000548362732, -0.4095500111579895, 0.14857999980449677, 0.08945299685001373, -0.4607999920845032, -0.3876200020313263, 0.10400000214576721, 0.015938999131321907, -0.03952199965715408, 0.04806400090456009, 0.30316001176834106, -0.08085700124502182, 0.29747000336647034, 0.07598400115966797, -0.27452000975608826, 0.01611899957060814, 0.21320000290870667, -0.6390799880027771, 0.5472999811172485, 0.43553999066352844, 0.2109600007534027, -0.527649998664856, 0.26794999837875366, 0.8482699990272522, -0.10096000134944916, -0.2593599855899811, 0.3747900128364563, -0.027939999476075172, 0.3977400064468384, -0.05407299846410751, 0.5905799865722656, 0.29513999819755554, 0.9963200092315674, -0.05400199815630913, 1.1536999940872192, -0.4365699887275696, 0.0776590034365654, 0.11202000081539154, 0.18001000583171844, -0.0037555000744760036, 0.5334399938583374, 0.4668099880218506, 0.3799700140953064, 0.5722399950027466, -0.27967000007629395, -0.5732799768447876, 0.9338099956512451, 0.17646999657154083, -0.05966399982571602, -0.08203999698162079, -0.4496299922466278, -1.169700026512146, 0.35339999198913574, 0.12720000743865967, -0.27340999245643616, -0.36469998955726624, -0.3818399906158447, -0.7540000081062317, 0.31275999546051025, 0.1370300054550171, -0.05214599892497063, 0.41648998856544495, 0.08259499818086624, -0.09524799883365631, -0.18216000497341156, 0.3884499967098236, -0.29844000935554504, 0.07259999960660934, -0.030942000448703766, 0.005720099899917841, -0.5772600173950195, 0.1343899965286255, -0.3497300148010254, -0.43685001134872437, -0.2874799966812134, -0.34466999769210815, 0.47516000270843506, 0.8272799849510193, -0.4424099922180176, 0.4517900049686432, -0.7221999764442444, -0.328110009431839, 0.01982099935412407, -0.5112800002098083, -0.5726900100708008, 0.06115199998021126, 0.30129000544548035, 1.1593999862670898, 0.5637999773025513, 0.15433000028133392, -0.21719999611377716, -0.5037800073623657, 1.0830999612808228, -0.05734400078654289, -0.44442999362945557, -0.12693999707698822, 0.18623000383377075, -0.15383000671863556, -0.09798700362443924, -0.6098799705505371, 0.09770599752664566, -0.20691999793052673, -0.5501899719238281, 0.48590999841690063, 0.45115000009536743, 0.8721200227737427, 0.18870000541210175, 0.5269299745559692, -0.5496500134468079, 0.11745999753475189, 0.3884199857711792, -0.3377400040626526, -0.3686000108718872, -0.4266600012779236, -0.07110200077295303, -0.13864000141620636, 0.5024799704551697, 0.4150199890136719, 0.10392999649047852, 0.5456399917602539, 0.3037300109863281, 0.849560022354126, -0.24546000361442566, -0.9132999777793884, -0.44944998621940613, -0.09386000037193298, -0.14817999303340912, -0.15437999367713928, 0.3035399913787842, -0.007307900115847588, 0.28804001212120056, 0.5002300143241882, 0.5284199714660645, 0.14741000533103943, -0.1705400049686432, 1.0405000448226929, 0.8885899782180786, 0.13644999265670776, -0.32012999057769775, -0.9609500169754028, -0.5871099829673767, -0.14764000475406647, -0.2602899968624115, -0.7509599924087524, -0.01769999973475933, -1.024999976158142, -0.42761000990867615, 0.6018900275230408, 0.1872200071811676, -0.17507000267505646, -0.6437900066375732, 0.7432500123977661, -0.10275000333786011, 0.48848000168800354, 0.21227000653743744, 0.3015100061893463, -0.011025999672710896, -0.3559899926185608, 0.4997900128364563, -0.5618000030517578, 0.0763700008392334, -0.3087100088596344, -0.029935000464320183, 
0.10777000337839127, -0.40779998898506165, 0.2976999878883362, -0.2607699930667877, -0.5365300178527832, 0.25609999895095825, 0.09694100171327591, -0.31428998708724976, -0.375110000371933, 0.5647600293159485, -0.05584700033068657, -0.2076600044965744, -0.3319999873638153, -0.048461999744176865, -1.0973999500274658, 0.03052699938416481, -0.8150399923324585, -0.01190400030463934, -0.10969000309705734, -0.5972999930381775, -0.21650999784469604, 0.23281000554561615, -0.22889000177383423, 0.5835300087928772, 0.9133700132369995, 0.16638000309467316, 0.2490600049495697, 0.373199999332428, -0.004070600029081106, -0.3506999909877777, 0.30507999658584595, -0.11868000030517578, 0.5072900056838989, -0.269459992647171, 0.24111999571323395, -0.8656399846076965, -0.02927899919450283, 0.35844001173973083], u'bean': [-0.10005000233650208, -0.2768000066280365, 0.7316799759864807, -0.15919999778270721, -0.4291499853134155, 0.009280200116336346, -0.09498400241136551, -0.23959000408649445, 0.03586899861693382, 0.27948999404907227, 0.18990999460220337, -0.02137400023639202, 0.38043999671936035, -0.31185999512672424, -0.6953999996185303, -0.42631998658180237, 0.010560999624431133, 0.20181000232696533, -0.4693799912929535, 0.2053299993276596, 0.16997000575065613, -0.34485000371932983, 0.07469700276851654, 0.14187000691890717, -0.16347000002861023, 0.31134000420570374, -0.1372700035572052, -0.04805200174450874, -0.24426999688148499, -0.10182999819517136, -0.16557000577449799, 0.00493970001116395, -0.3081200122833252, -0.0021522000897675753, -0.1956299990415573, 0.5063300132751465, 0.581089973449707, -0.39180999994277954, -0.46560001373291016, 0.16436000168323517, -0.1844799965620041, -0.021702999249100685, 0.561710000038147, 0.16362999379634857, 0.2215300053358078, -0.419189989566803, 0.3184800148010254, -0.09936100244522095, -0.6194199919700623, 0.6981199979782104, -0.05109899863600731, -0.08862199634313583, 0.4920099973678589, 0.0416099987924099, -0.3488599956035614, -0.29622000455856323, -0.044227998703718185, -0.09109000116586685, -0.5442399978637695, -0.775629997253418, -0.1861400008201599, 0.06032500043511391, -0.10231000185012817, 0.261029988527298, -0.4128200113773346, 0.2918800115585327, -0.48717001080513, -0.3464300036430359, -0.22910000383853912, -0.7088500261306763, -0.09190600365400314, 0.25165000557899475, 0.1693899929523468, -0.18459999561309814, -0.3606700003147125, -0.3406499922275543, 0.5945199728012085, -0.26278001070022583, -0.04757099971175194, -0.4606800079345703, -0.0838090032339096, 0.4432399868965149, 0.21074000000953674, 0.13380999863147736, 0.030650999397039413, 0.15386000275611877, 0.29221999645233154, -0.25025999546051025, 0.32427000999450684, -0.41214001178741455, -0.1960500031709671, -0.3941200077533722, -0.1409599930047989, -0.6968799829483032, -0.16798999905586243, 0.18208999931812286, -0.3104400038719177, 0.42405998706817627, -0.540340006351471, -0.19035999476909637, 0.34742000699043274, -0.21543000638484955, 0.24642999470233917, -0.6025800108909607, -0.5486299991607666, 0.1282999962568283, -1.030900001525879, -0.1537500023841858, -0.015731999650597572, 0.2691600024700165, 0.3274799883365631, 0.29409000277519226, 0.04467099905014038, -0.39695999026298523, -0.13884000480175018, -0.03950199857354164, -0.4570100009441376, 0.32506000995635986, 0.11772999912500381, -0.3903999924659729, -0.021398000419139862, -0.38387998938560486, 0.08085799962282181, -0.06576000154018402, 0.028836000710725784, -0.27204999327659607, 0.43303999304771423, 0.892490029335022, 0.40939000248908997, 
0.03725200146436691, 0.0001485800021328032, 0.756600022315979, -0.48600998520851135, 0.1878499984741211, 0.11683999747037888, -0.5243399739265442, -0.2915399968624115, 0.7492799758911133, -0.22935999929904938, 0.430400013923645, 0.1687999963760376, 0.17800000309944153, 0.2584500014781952, -0.34224000573158264, -0.03501399978995323, 0.8488500118255615, -0.28143998980522156, -0.28235000371932983, -0.27184998989105225, -0.5338299870491028, -0.6155400276184082, 0.5588700175285339, 0.09440299868583679, 0.35760998725891113, 0.15197999775409698, -0.21467000246047974, 0.00109479995444417, -0.49526000022888184, -0.07193800061941147, 0.3154900074005127, 0.8080599904060364, -0.6856300234794617, 0.3038899898529053, 0.22371000051498413, -0.1662999987602234, -0.13979999721050262, -0.28883999586105347, 0.3519200086593628, -0.3463999927043915, -0.740119993686676, -0.05000799894332886, -0.5201200246810913, -0.1409599930047989, 0.21160000562667847, -0.15198999643325806, -0.14113999903202057, 0.3094800114631653, 0.1701499968767166, 0.2895900011062622, -0.4618600010871887, 0.033663999289274216, 1.003999948501587, 0.6055999994277954, -0.4102100133895874, -0.14162999391555786, -0.24729999899864197, 0.18413999676704407, 0.20961999893188477, -0.17338000237941742, -0.6462200284004211, 0.1712000072002411, 0.9049500226974487, -0.5607200264930725, -0.08720500022172928, -0.15805000066757202, 0.4823000133037567, -0.23194000124931335, 0.0625780001282692, -0.2797999978065491, -0.5670599937438965, 0.3131299912929535, -0.6820999979972839, 0.5687299966812134, 1.0245000123977661, -0.2699199914932251, -0.22112999856472015, 0.40667998790740967, 0.07110399752855301, -0.4284999966621399, -0.3109300136566162, -0.30893999338150024, -0.05122099816799164, -0.13098999857902527, -0.33618998527526855, -0.01637200079858303, 0.4449700117111206, 0.5041000247001648, 0.07619299739599228, -0.44141000509262085, -0.3547700047492981, -0.23019999265670776, -0.2731899917125702, -0.4393399953842163, -0.3922800123691559, -0.013887000270187855, 0.03947199881076813, -0.5381900072097778, 0.05164700001478195, -0.023163000121712685, -0.15748000144958496, 0.006848699878901243, -0.010599000379443169, -0.07139299809932709, -0.030122999101877213, 0.34950000047683716, 0.5228899717330933, -0.30079999566078186, 0.34060999751091003, -0.26629000902175903, -0.30636000633239746, -0.1564600020647049, -0.08171100169420242, -0.5854700207710266, -0.1349399983882904, -1.0379999876022339, 0.016919000074267387, 0.2967599928379059, -0.4312100112438202, -0.11574000120162964, -0.8266100287437439, 0.5648000240325928, 0.11615999788045883, -0.2861799895763397, -0.0693420022726059, 0.5545700192451477, 0.49511998891830444, 0.11437000334262848, 0.36232998967170715, 0.15105000138282776, -0.36757001280784607, -0.04566099867224693, 0.3592100143432617, 0.2578999996185303, -0.3930499851703644, -0.48458999395370483, -0.3573800027370453, -0.16694000363349915, -0.016440000385046005, 0.06214800104498863, 0.3093400001525879, -0.1654299944639206, -0.19133000075817108, -0.08608099818229675, 0.5221499800682068, -0.34845998883247375, 0.36221998929977417, -0.6352099776268005, 0.1414099931716919, -0.822409987449646, -0.26221001148223877, -0.36963000893592834, -0.40959998965263367, -0.028025999665260315, 0.006211500149220228, -0.26003000140190125, -0.03539099916815758, 0.4238300025463104, -0.07315699756145477, 0.012671000324189663, 0.15577000379562378, 0.4121600091457367, -0.12991000711917877, 0.21660999953746796, -0.14891000092029572, 0.03576099872589111, 0.07369499653577805, 
0.08132799714803696, -0.8545299768447876, -0.2473900020122528, 0.09174799919128418], u'tulip': [-0.22193999588489532, -0.001726199989207089, -0.13503000140190125, -0.21547000110149384, 0.5516700148582458, 0.7713099718093872, -0.6813200116157532, -0.4697900116443634, -0.13026000559329987, 0.48357999324798584, -0.0935010015964508, 0.40487000346183777, -0.05278800055384636, 0.3020800054073334, 0.01358999963849783, 0.3172999918460846, 0.5663300156593323, 0.09827399998903275, -0.4528299868106842, -0.3490400016307831, 0.7783499956130981, 0.057833001017570496, 0.43773001432418823, -0.1525000035762787, 0.2576099932193756, -0.37790998816490173, -0.743340015411377, -0.2664699852466583, -0.33956000208854675, 0.7574599981307983, -0.13753999769687653, -0.3665199875831604, -0.6111699938774109, 0.346780002117157, 0.019137000665068626, 0.27465999126434326, -0.03574100136756897, -0.10899999737739563, 0.009040099568665028, 0.1409599930047989, -0.04606200009584427, 0.17854000627994537, -0.24172000586986542, 0.28887999057769775, -0.09272100031375885, -0.4828299880027771, -0.18557000160217285, -0.29886001348495483, -0.17242999374866486, 0.3434399962425232, 0.6723700165748596, -0.13962000608444214, -0.3424600064754486, -0.6299099922180176, -0.023677999153733253, -0.46320000290870667, -0.4350399971008301, 0.31136998534202576, 0.33610999584198, 0.28176000714302063, -0.389490008354187, -0.39902999997138977, -0.3979400098323822, 0.06917200237512589, 0.163100004196167, 0.3350900113582611, 0.03370700031518936, 0.772629976272583, -0.2900699973106384, -0.48938998579978943, -0.09667299687862396, 0.3465999960899353, 0.1517699956893921, 0.11000999808311462, -0.09611599892377853, 0.8544099926948547, 0.041134998202323914, -0.8745899796485901, -0.2768299877643585, -0.29565998911857605, -0.11614000052213669, -0.012985000386834145, 0.3814600110054016, 0.20479999482631683, 0.770550012588501, -0.43279001116752625, 0.46880000829696655, 0.5416799783706665, -0.16328999400138855, -0.5559899806976318, -0.12775999307632446, -0.5423499941825867, 0.8783000111579895, 0.4005599915981293, -0.4639599919319153, 0.7191200256347656, 0.6656799912452698, -0.23513999581336975, -0.05099000036716461, 0.8212800025939941, 0.2841799855232239, 0.39441999793052673, 0.15996000170707703, 0.04677499830722809, -0.5425000190734863, 0.30858999490737915, -0.10869999974966049, 0.1271899938583374, -0.4235999882221222, 0.5498300194740295, 0.6161100268363953, 0.48162999749183655, 0.02704400010406971, -0.1337900012731552, -0.07040199637413025, -0.29580000042915344, -0.03126699849963188, 0.33788999915122986, 0.16274000704288483, -0.385670006275177, -0.42849001288414, -0.5303400158882141, 0.4447000026702881, -0.49459999799728394, 0.5807399749755859, 0.330020010471344, -0.09945899993181229, 0.6947199702262878, -0.1188800036907196, -0.26232001185417175, 0.5384799838066101, 0.27632999420166016, 0.13560999929904938, 0.33199000358581543, 0.6054499745368958, -0.5400500297546387, 0.5225300192832947, -0.49678999185562134, -0.22227999567985535, -0.5784800052642822, 0.608739972114563, 1.016800045967102, -0.05918800085783005, -0.05518599972128868, 0.8960300087928772, 0.7001399993896484, -0.39204999804496765, -0.5321699976921082, -0.06215500086545944, 0.5748900175094604, -0.24217000603675842, 0.35381001234054565, -0.09098300337791443, 0.39313000440597534, -0.07773599773645401, 0.8279100060462952, 0.11956000328063965, -0.16143999993801117, -0.7084100246429443, -0.47874999046325684, -0.45058000087738037, -0.29308998584747314, -0.0008121100254356861, -0.08204799890518188, 
-0.09846799820661545, 0.5348600149154663, 0.16140000522136688, 0.045882001519203186, -0.43580999970436096, -0.17252999544143677, -0.03452200070023537, 0.09543900191783905, -0.07382600009441376, -0.12670999765396118, -0.11235000193119049, -0.5855200290679932, 0.22092999517917633, -0.11846999824047089, -0.8504599928855896, -0.14433999359607697, -0.7155100107192993, -0.37623000144958496, 0.7533599734306335, -0.2612699866294861, -0.2716499865055084, 0.39342001080513, 0.3228999972343445, 0.7473599910736084, 0.4523800015449524, -0.21220000088214874, 0.46248000860214233, -0.41040998697280884, -0.05240600183606148, -0.13696999847888947, -0.2718699872493744, 0.651390016078949, -0.6310700178146362, -0.2123900055885315, -0.04429600015282631, 0.24732999503612518, 0.17423999309539795, -0.2678700089454651, 0.14283999800682068, -0.014255999587476254, 0.38148999214172363, 0.10002999752759933, 0.6578800082206726, 0.9212700128555298, -0.2771899998188019, 0.1296200007200241, -0.035576000809669495, -0.6768699884414673, 0.9666000008583069, 0.14837999641895294, -0.38589000701904297, 0.9517800211906433, 0.37450000643730164, -0.5083299875259399, 0.02542800083756447, -0.18170000612735748, 0.2880600094795227, -0.4593999981880188, -1.1676000356674194, 0.10221000015735626, -0.4914200007915497, 0.3089199960231781, -0.42603999376296997, 0.31317999958992004, 0.43377000093460083, 0.05626299977302551, -0.025554999709129333, -0.34119999408721924, -0.011497999541461468, 0.34095999598503113, 0.09568899869918823, -0.7262099981307983, -0.1517699956893921, -0.6730700135231018, 0.1973399966955185, -0.7438499927520752, 0.1300400048494339, 0.0636959969997406, -0.23935000598430634, 0.6009200215339661, -0.2418999969959259, -0.2522999942302704, -0.4119499921798706, 0.9056500196456909, 0.8371300101280212, 0.1713699996471405, 0.381630003452301, 0.5275899767875671, -0.5289099812507629, -0.7609999775886536, 0.18341000378131866, -0.33779001235961914, 0.15379999577999115, 0.6125900149345398, 0.03673800081014633, -0.19311000406742096, -0.10130999982357025, 0.275519996881485, -0.17926999926567078, 0.22439000010490417, -0.5999299883842468, -0.02894500084221363, 0.3487299978733063, -0.18046000599861145, 0.47762998938560486, 0.47870001196861267, -0.4181100130081177, -0.05390800163149834, -0.5903900265693665, -0.5620999932289124, -0.08147499710321426, -0.051704999059438705, -0.004298099782317877, 0.21421000361442566, -0.7651900053024292, 0.05893599987030029, -0.8407099843025208, -0.37411999702453613, 0.5436999797821045, -0.7862100005149841, -0.22610999643802643, 0.15509000420570374, 0.4215399920940399, 0.207519993185997, -0.15331000089645386, -0.3806400001049042, 0.7032700181007385, -0.5277600288391113, 0.03634500131011009, -0.16660000383853912, 0.0934310033917427, -0.5026500225067139, -0.06847400218248367, 0.22421999275684357, 0.25883999466896057, -0.08711999654769897], u'fan': [-0.08864299952983856, 0.3103500008583069, 0.08443400263786316, -0.3838599920272827, 0.6256600022315979, -0.08180700242519379, 0.29559001326560974, -0.021588999778032303, -0.194240003824234, -0.36289000511169434, 0.05955500155687332, 0.3366200029850006, 0.3107999861240387, -0.46612000465393066, -0.09326600283384323, 0.19957999885082245, -0.41370999813079834, 0.19351999461650848, 0.034922998398542404, 0.19438999891281128, -0.1164499968290329, 0.1888899952173233, -0.35736000537872314, -0.2287999987602234, -0.19266000390052795, 0.025350000709295273, -0.0907140001654625, -0.13854999840259552, 0.5748100280761719, -0.18479999899864197, -0.2491299957036972, 
-0.15740999579429626, 0.0318400003015995, 0.07876700162887573, -1.1979000568389893, 0.07485300302505493, -0.7514899969100952, 0.035964999347925186, -0.0639059990644455, 0.5560299754142761, 0.381879985332489, -0.5989000201225281, 0.19739000499248505, 0.1507900059223175, 0.5664299726486206, 0.22854000329971313, 0.3759799897670746, 0.2990899980068207, 0.04547800123691559, -0.029145000502467155, -0.5536500215530396, -0.1382399946451187, -0.4784500002861023, 0.4080199897289276, -0.30783000588417053, 0.5845500230789185, -0.005273300223052502, 0.07105100154876709, 0.4487600028514862, -0.14149999618530273, 0.16951000690460205, -0.0530409999191761, 0.48107001185417175, 0.35666000843048096, -0.5259299874305725, -0.0504009984433651, -0.419840008020401, 0.30344998836517334, 0.46362000703811646, 0.46147000789642334, 0.40803998708724976, -0.5179100036621094, 0.2653200030326843, 0.32569000124931335, -0.12466999888420105, 0.4444800019264221, 0.20295000076293945, -0.4939500093460083, -0.06258399784564972, 0.04559699818491936, 0.17015999555587769, 0.4050399959087372, 0.07950600236654282, -0.8143399953842163, -0.04967999830842018, 0.18684999644756317, 0.21191999316215515, -0.11613000184297562, 0.02909800037741661, -0.21376000344753265, 0.2865000069141388, -0.02017500065267086, -0.23298999667167664, -0.5423399806022644, -0.23691000044345856, 0.38210999965667725, 0.5759199857711792, 0.005381300114095211, 0.17338000237941742, -0.6270099878311157, -0.04910000041127205, 0.07326400279998779, 0.1796099990606308, -0.49013999104499817, 0.06007000058889389, 1.105299949645996, 0.2055400013923645, 0.16569000482559204, 0.3388499915599823, 0.5460900068283081, -0.03290199860930443, -0.13665999472141266, 0.0009358699899166822, -0.4631600081920624, 0.22627000510692596, -0.13446000218391418, -0.6013399958610535, -0.13824999332427979, 0.0705140009522438, -0.6253299713134766, 0.05152200162410736, 0.0711819976568222, -0.023756999522447586, 0.25701001286506653, 0.04486599937081337, -0.28913000226020813, -0.31784000992774963, -0.11270999908447266, -0.026746999472379684, -0.5768499970436096, -0.455130010843277, 0.07979399710893631, -0.6107500195503235, -0.30546998977661133, 0.46875, -0.17821000516414642, -0.13484999537467957, 0.33111000061035156, 0.17520000040531158, 0.6368200182914734, -0.1559399962425232, -0.5509200096130371, 0.30133000016212463, -0.4688799977302551, 0.18856999278068542, 0.26478999853134155, -0.3832100033760071, -0.5910500288009644, 0.056474000215530396, -0.06899899989366531, 0.32078999280929565, 0.40625, -0.40588998794555664, -0.24567000567913055, 0.5453900098800659, -0.44633999466896057, 0.42302998900413513, 0.4810599982738495, -0.21735000610351562, -0.12529000639915466, -0.1017099991440773, 0.02198299951851368, -0.2693699896335602, -0.07818800210952759, 0.6690700054168701, -0.055332001298666, -0.27090001106262207, 0.06890299916267395, -0.5004199743270874, -0.3156599998474121, 0.1859399974346161, 0.198170006275177, -1.2998000383377075, -0.308789998292923, 0.3948799967765808, -0.07475899904966354, 0.1979999989271164, 0.2359900027513504, 0.6109799742698669, 0.18727999925613403, -0.047449998557567596, -0.306769996881485, -0.09702800214290619, 0.021323999390006065, 0.15059000253677368, -0.35374000668525696, -0.16514000296592712, 0.21222999691963196, 0.11957000195980072, 0.35308000445365906, -0.05137399956583977, 0.11193999648094177, -0.22888000309467316, -0.012624000199139118, -0.05252499878406525, -0.5806000232696533, 0.034609999507665634, -0.0872339978814125, 0.13067999482154846, -0.21111999452114105, 
0.5719000101089478, 0.14757999777793884, 0.6701499819755554, 0.048236001282930374, 0.41903001070022583, -0.324970006942749, 0.32846999168395996, -0.2949100136756897, 0.06269799917936325, 0.5647100210189819, 0.018153000622987747, -0.34981998801231384, 0.5180000066757202, -0.040268998593091965, -0.05223200097680092, -0.10798999667167664, -0.19853000342845917, 0.17295999825000763, 0.015011999756097794, 0.2952300012111664, 0.025836000218987465, 0.13941000401973724, 0.044652000069618225, -0.11776000261306763, -0.10650999844074249, -0.185589998960495, -0.39395999908447266, -0.09781099855899811, -0.0009770100004971027, -0.34139999747276306, 0.21660999953746796, -0.4921700060367584, -0.1039000004529953, -0.5539000034332275, -0.010467000305652618, -0.5615699887275696, -0.11270000040531158, 0.4892899990081787, -0.5055599808692932, 0.3973099887371063, 0.47473999857902527, -0.3461399972438812, -0.45423001050949097, 0.23179000616073608, 0.1929599940776825, -0.012965000234544277, -0.31123998761177063, 0.06170399859547615, -0.11819999665021896, 0.011079000309109688, 0.31571999192237854, -0.21984000504016876, -0.09268099814653397, 0.2428700029850006, 0.6993700265884399, -0.29221999645233154, 0.1578499972820282, -1.2616000175476074, -0.18669000267982483, -1.1562999486923218, 0.008851200342178345, -0.023771999403834343, 0.16771000623703003, -0.39493998885154724, -0.6481999754905701, -0.21578000485897064, -0.493120014667511, 0.7049000263214111, 0.2575500011444092, -0.05648000165820122, 0.39427000284194946, -0.1490200012922287, -0.02531599998474121, -1.0728000402450562, -0.17089000344276428, -0.3450799882411957, -1.7000000476837158, -0.26047998666763306, 0.019063999876379967, 0.021164000034332275, -0.3025299906730652, 0.5586299896240234, -0.2753300070762634, -0.05570799857378006, -0.6289200186729431, 0.29980000853538513, 0.0018363000126555562, -0.2357099950313568, -0.04599199816584587, -0.1391099989414215, -0.017302000895142555, -0.06029000133275986, -0.5372700095176697, 0.024020999670028687, 0.6326199769973755, -0.16919000446796417, -0.34828001260757446, 0.24985000491142273, -0.051245998591184616, 0.09231500327587128], u'snake': [-0.15769000351428986, 0.43340998888015747, 0.030177999287843704, -0.013131000101566315, 0.12199000269174576, 0.2632099986076355, 0.1228799968957901, 0.5092399716377258, 0.013032999821007252, -0.21673999726772308, -0.24501000344753265, -0.21813000738620758, -0.28321999311447144, -0.09278500080108643, -0.254940003156662, -0.07129500061273575, -0.03096500039100647, 0.37957999110221863, -0.27202001214027405, 0.8872299790382385, -0.2249000072479248, 0.2695600092411041, 0.4129599928855896, 0.3750999867916107, -0.07028300315141678, -0.03931700065732002, -0.5843300223350525, -0.6086199879646301, -0.040265001356601715, 0.324429988861084, 0.2555699944496155, 0.16794000566005707, -0.2778800129890442, -0.008194699883460999, 0.328029990196228, 0.5092499852180481, -0.08032900094985962, -0.08487299829721451, -0.13673000037670135, 0.5002400279045105, -0.7290499806404114, 0.05210400000214577, -0.08894100040197372, -0.19699999690055847, -0.05819699913263321, -0.21874000132083893, 0.4378199875354767, 0.01959400065243244, 0.12913000583648682, -0.14439000189304352, 0.2046699970960617, 0.016551999375224113, 0.4283500015735626, 0.39188000559806824, 0.05693399906158447, 0.2953599989414215, -0.552079975605011, 0.2433300018310547, 0.5216299891471863, 0.5041700005531311, -0.03989800065755844, 0.14079000055789948, 0.7815899848937988, 0.5889800190925598, 0.14541000127792358, -0.8147900104522705, 
-0.32545000314712524, -0.06763199716806412, 0.5415899753570557, 0.28088998794555664, -0.06885000318288803, 0.6858199834823608, -0.08923099935054779, -0.2775700092315674, -0.20618000626564026, 0.36972999572753906, -0.14114999771118164, 0.010374000295996666, 0.17324000597000122, -0.2867499887943268, 0.8557699918746948, -0.5902799963951111, 0.09528899937868118, 0.14634999632835388, -0.017311999574303627, 0.3955399990081787, 0.06261199712753296, -0.033969998359680176, -0.190870001912117, -0.5575000047683716, -0.42594000697135925, 0.05035899952054024, 0.7269200086593628, -0.07382100075483322, -0.11535000056028366, 0.019395999610424042, 0.5784400105476379, 0.5133699774742126, 0.2901099920272827, -0.2730099856853485, -0.1012599989771843, 0.49136000871658325, 0.28751999139785767, -0.42214998602867126, 0.6375200152397156, -0.38593000173568726, 0.3177500069141388, 0.210999995470047, -0.04396799951791763, 0.6096199750900269, -0.0768980011343956, 0.11826000362634659, -0.714169979095459, 0.04132600128650665, 0.5378299951553345, -0.18389999866485596, 0.1183599978685379, 0.0640069991350174, -0.32401999831199646, -0.23869000375270844, -0.5829300284385681, -0.5385199785232544, -0.340719997882843, 0.4941500127315521, -0.5009300112724304, 0.12509000301361084, -0.2420700043439865, 0.6259599924087524, 0.4750500023365021, -0.16673000156879425, -0.09643500298261642, -0.12125000357627869, 0.17794999480247498, 0.5954700112342834, -0.26780998706817627, -0.08165399730205536, 0.3066299855709076, -0.5771899819374084, -0.17497999966144562, 0.19517000019550323, 0.35468000173568726, 0.5023400187492371, 0.11604999750852585, -1.3003000020980835, -0.5744699835777283, 0.08057200163602829, 0.39906999468803406, 0.07862900197505951, 0.0602709986269474, -0.07322899997234344, -0.33041998744010925, 0.4211199879646301, -0.45855000615119934, -0.0498960018157959, 0.09461399912834167, 0.26438000798225403, 0.495279997587204, -0.8656399846076965, -0.22164000570774078, 0.4512600004673004, -0.0882520005106926, -0.14903999865055084, 0.08970200270414352, 0.22540999948978424, 0.07802899926900864, 0.01567699946463108, 0.0011727999662980437, -0.19833999872207642, -0.518310010433197, -0.6139199733734131, -0.420960009098053, -0.329259991645813, 0.5616300106048584, 0.5903400182723999, 0.1254200041294098, -0.023499000817537308, -0.4348999857902527, -0.10286000370979309, 0.753000020980835, -0.44707998633384705, 0.17764000594615936, -0.007888800464570522, -0.19870999455451965, 0.4667699933052063, 0.1974100023508072, -0.7704799771308899, 0.7487099766731262, 0.003007899969816208, 0.22231000661849976, -0.944159984588623, -0.30390000343322754, 0.6541100144386292, -0.1931000053882599, 0.07450199872255325, 0.09241999685764313, 0.5187100172042847, -0.08989699929952621, -0.7590199708938599, -0.43814000487327576, 0.10560999810695648, 1.465999960899353, 0.8012300133705139, -0.27360999584198, 0.010635999962687492, -0.20993000268936157, 0.741320013999939, -0.5125799775123596, 0.0034294999204576015, 0.1620599925518036, 0.555840015411377, -0.4333600103855133, -0.1879200041294098, 0.5677099823951721, -0.30423998832702637, 0.04270099848508835, 0.2031400054693222, 0.2283399999141693, -0.4305900037288666, 0.49094000458717346, -0.30698999762535095, -0.11625999957323074, 0.32945001125335693, 0.3909299969673157, -0.20708000659942627, 0.28859999775886536, 0.11559999734163284, -0.4353399872779846, -0.4085099995136261, 0.23997999727725983, -0.2804499864578247, -0.4970700144767761, -0.27632999420166016, -0.07166600227355957, -0.015434999950230122, 0.23704999685287476, 
0.12223000079393387, -0.33351001143455505, -0.3566499948501587, -0.3246999979019165, -0.4221400022506714, -0.23668000102043152, 0.36827999353408813, -0.47126999497413635, 0.031950999051332474, -0.6995999813079834, 0.33768999576568604, -0.10987000167369843, 0.3808799982070923, -0.341729998588562, 0.10671000182628632, -0.5283499956130981, -0.05839100107550621, -0.3968299925327301, 0.2020300030708313, 0.9500899910926819, 0.438400000333786, 0.10756999999284744, 0.5631999969482422, 0.2836199998855591, -0.09446399658918381, -0.46717000007629395, -0.624459981918335, 0.37457001209259033, -0.13722999393939972, -0.3451400101184845, 0.1863500028848648, 0.4189800024032593, -0.3441599905490875, -0.30452001094818115, 0.5331599712371826, -0.23048000037670135, -0.0844459980726242, 0.14092999696731567, 0.4407599866390228, 0.17058999836444855, -0.5088800191879272, -0.34158000349998474, -0.008167600259184837, -0.42500999569892883, 0.07152500003576279, -0.08985400199890137, 0.12421000003814697, 0.21734000742435455, 0.7699699997901917, -0.7335699796676636, 0.538919985294342, 0.13539999723434448, 0.1620199978351593, 0.6104000210762024, -0.36812999844551086, 0.4112200140953064, -0.21942000091075897, -0.15126000344753265, 0.6456699967384338, 0.0295219998806715, 0.0777989998459816, 0.2222999930381775, 0.1879200041294098, 0.24488000571727753, 0.13046999275684357], u'desk': [-0.7372599840164185, 0.4615199863910675, -0.5117200016975403, -0.5803700089454651, -0.10294999927282333, 0.2940100133419037, -0.34422001242637634, -0.06714200228452682, -0.45350000262260437, -1.1765999794006348, 0.05533500015735626, 0.8921300172805786, 0.5284900069236755, 0.39188000559806824, 0.14841000735759735, -0.15376000106334686, 0.028914999216794968, 0.24661000072956085, -0.2142000049352646, -0.07844699919223785, -0.09387200325727463, 0.18019999563694, -0.0007610099855810404, -0.11687000095844269, -0.7489399909973145, 0.20905999839305878, 0.4510599970817566, 0.0580499991774559, 1.0347000360488892, -0.3384999930858612, 0.00996679998934269, -0.1962299942970276, -0.05117600038647652, 0.18002000451087952, -0.8792499899864197, -0.15146000683307648, -0.45974001288414, -0.5840100049972534, -0.21755999326705933, 0.14563000202178955, -0.641480028629303, -0.5329800248146057, -0.5954300165176392, 0.13036000728607178, 0.34630000591278076, -0.20622000098228455, 0.16035999357700348, -0.35238000750541687, -0.11818999797105789, -0.09480399638414383, 0.34426000714302063, 0.11912000179290771, -0.09757299721240997, -0.2815000116825104, -0.14869999885559082, -0.08823800086975098, -0.11749999970197678, -0.040275998413562775, -0.3668299913406372, -0.26357999444007874, 1.054900050163269, 0.2165600061416626, -0.1400900036096573, 0.36542001366615295, -0.10907000303268433, -0.23113000392913818, -0.009483999572694302, 0.6450200080871582, 0.06804600358009338, -0.6388999819755554, -0.03178500011563301, 0.010525999590754509, -0.6494899988174438, 0.06312999874353409, -0.3068400025367737, 0.38694000244140625, -0.22328999638557434, 0.3450999855995178, 0.5926499962806702, -0.7514500021934509, 0.34532999992370605, 0.6376799941062927, -0.39157000184059143, 0.48392999172210693, 0.06879500299692154, -0.1976500004529953, -0.4975300133228302, -0.08575800061225891, -0.37105000019073486, -0.10072000324726105, -0.012934000231325626, -0.9448300004005432, -0.04775400087237358, -0.09099099785089493, -0.20652000606060028, 0.032047998160123825, -0.3066900074481964, -0.5813699960708618, 0.6902400255203247, -0.40470001101493835, 0.5223299860954285, 0.4591600000858307, 
-0.12627999484539032, -0.10999000072479248, 0.052018001675605774, -0.6498000025749207, 0.488319993019104, -0.0218339990824461, 0.5017600059509277, 0.1950799971818924, -0.3796899914741516, 0.3587000072002411, -0.12619000673294067, 0.2021699994802475, -0.7698500156402588, 0.04941299930214882, -0.4515100121498108, -0.44143998622894287, -0.30651000142097473, -0.36399000883102417, -0.23597000539302826, -0.002356099896132946, 0.14589999616146088, -0.05527999997138977, -0.2873600125312805, 0.35760000348091125, -0.40174999833106995, -1.4437999725341797, 0.2280299961566925, -0.032182998955249786, 0.6327300071716309, -0.2360599935054779, 0.1941000074148178, -0.4680199921131134, -0.023701999336481094, -0.05613499879837036, 0.07128900289535522, 0.4863399863243103, 0.2964800000190735, 0.049501001834869385, 0.10209000110626221, -0.04131700098514557, 0.3237900137901306, -0.16606999933719635, -0.09574300050735474, 0.08501499891281128, -0.4670099914073944, -0.04479200020432472, 0.33250001072883606, -0.8689900040626526, 0.4597199857234955, 0.6040800213813782, 1.1288000345230103, -0.8062899708747864, -0.5110399723052979, 0.3420799970626831, 0.014820000156760216, -0.21121999621391296, 0.4327299892902374, 0.0946659967303276, 0.18039000034332275, 0.1752600073814392, 0.4377500116825104, 0.6775699853897095, 0.6818199753761292, 0.3465900123119354, 0.20330999791622162, 0.09527900069952011, 0.1106799989938736, -0.040369000285863876, -0.7806699872016907, -0.47878000140190125, -0.5988900065422058, -0.08317700028419495, -0.35767000913619995, -0.4008300006389618, -0.07762599736452103, -0.10832999646663666, -0.3859499990940094, -0.893779993057251, -0.21774999797344208, -0.7900199890136719, 0.4937700033187866, 0.29791998863220215, 0.24856999516487122, -0.523419976234436, 1.0591000318527222, 0.35756999254226685, -0.33754000067710876, -0.2576200067996979, 0.42904001474380493, 0.40904000401496887, 0.07143600285053253, -0.3640899956226349, 0.22460000216960907, -0.3613100051879883, -0.2550100088119507, 0.020541999489068985, -0.48374998569488525, 0.09558799862861633, 0.8285199999809265, -0.35311999917030334, 0.12706999480724335, -0.11444000154733658, 0.7629600167274475, -0.077674999833107, -0.11896000057458878, -0.8481600284576416, -0.41422000527381897, -0.6296700239181519, -0.28393998742103577, -0.5783900022506714, 0.14538000524044037, -0.20128999650478363, 0.18170000612735748, -0.06585200130939484, -0.5333899855613708, -0.3158699870109558, 0.03213300183415413, 0.4113999903202057, -0.3053399920463562, 0.18352000415325165, -0.2593199908733368, 0.9710400104522705, 0.12437999993562698, 0.037376999855041504, 0.19830000400543213, -0.07700099796056747, -0.16267000138759613, -0.3997400104999542, 0.2531599998474121, 0.01089600007981062, -0.2644999921321869, -0.09471199661493301, 0.09311900287866592, 0.14131000638008118, -0.20295999944210052, 0.15098999440670013, 0.07073400169610977, -0.08685000240802765, -0.21842999756336212, 0.07689999788999557, -0.47321999073028564, 0.5206800103187561, 0.12511000037193298, -0.21521000564098358, -0.2505800127983093, 0.0041207000613212585, -0.2041500061750412, 0.413100004196167, 0.4249800145626068, -0.10485000163316727, 0.03199300169944763, -0.14439000189304352, 0.18825000524520874, -0.19493000209331512, -0.3836100101470947, 0.04325399920344353, 0.16870999336242676, 0.1014299988746643, -0.4458799958229065, 0.043907999992370605, 0.7081300020217896, 0.4113300144672394, 0.5978699922561646, 0.10012000054121017, 0.5499899983406067, 0.7413700222969055, -0.012017999775707722, 0.4767000079154968, 
-0.6293100118637085, -0.35530000925064087, 0.15884000062942505, -0.0028788000345230103, -0.14318999648094177, 0.30066001415252686, -0.6637799739837646, 0.9495999813079834, 0.10676000267267227, -0.24442000687122345, -0.5661699771881104, -0.17447000741958618, -0.11723999679088593, -0.5748500227928162, 0.3978100121021271, 0.11048000305891037, 0.1704999953508377, 0.3213199973106384, -0.7103099822998047, -0.5579500198364258, 0.39965999126434326, -0.003907599952071905, -0.19227999448776245, -0.009744799695909023, 0.10770999640226364, 0.1929900050163269, 0.28979000449180603, -0.1737699955701828, -0.6251199841499329, 0.6921200156211853], u'silk': [-0.1468300074338913, -0.37428998947143555, 0.13628999888896942, -0.28158000111579895, -0.10481999814510345, -0.5838299989700317, 0.023715000599622726, -0.4239700138568878, 0.14125999808311462, -0.65065997838974, -0.01218700036406517, -0.8608300089836121, -0.26541000604629517, 0.22586999833583832, 0.11266999691724777, -0.4014799892902374, -0.4954099953174591, 0.2945699989795685, -0.6165300011634827, 0.06436199694871902, -0.4790099859237671, -0.6855900287628174, -0.14632999897003174, 0.4204699993133545, -0.47589001059532166, -0.4716799855232239, -0.22689999639987946, -0.8791300058364868, 0.014316000044345856, 0.7702900171279907, 0.2510699927806854, -0.1427299976348877, -0.5308600068092346, -0.030786000192165375, -0.5089700222015381, 0.8341599702835083, -0.03745799884200096, -0.36302000284194946, 0.3224399983882904, -0.011745999567210674, -0.5540199875831604, -0.4317600131034851, -0.08170299977064133, -0.0008476100047118962, 0.41016000509262085, -0.21573999524116516, 0.4892500042915344, 0.07711099833250046, -0.3155499994754791, 0.0015799000393599272, 0.17882999777793884, -0.015675999224185944, 0.2737799882888794, -0.5409600138664246, -0.038672998547554016, -0.5223900079727173, -0.252020001411438, -0.5151299834251404, -0.19818000495433807, -0.30608999729156494, -0.1724500060081482, -0.47909998893737793, -0.05354100093245506, -0.38804998993873596, 0.9517999887466431, 0.6185100078582764, -0.14702999591827393, 0.19437000155448914, 0.05846500024199486, -0.10182999819517136, 0.4376800060272217, -0.03512100130319595, -0.09270299971103668, -0.3880099952220917, 0.2934199869632721, 0.9110100269317627, -0.2930299937725067, -0.24727000296115875, 0.149509996175766, -0.36636000871658325, -0.7692199945449829, -0.2440599948167801, -0.3851099908351898, -0.3563700020313263, 0.4674699902534485, 0.37665000557899475, 0.33059000968933105, 0.006176399998366833, 0.09207099676132202, 0.24724000692367554, 0.1683499962091446, -0.18678000569343567, -0.1390099972486496, -0.08335000276565552, 0.052848998457193375, 0.6152899861335754, 0.5221899747848511, 0.4476900100708008, 0.11971999704837799, 0.500760018825531, 0.42866000533103943, 0.7955899834632874, 0.35135000944137573, -0.18955999612808228, -0.3277300000190735, -0.2307399958372116, 0.20956000685691833, -0.0404760017991066, 0.07077000290155411, 0.07632700353860855, -0.4665899872779846, 0.22853000462055206, -0.028026999905705452, 0.20819999277591705, 0.41863998770713806, 0.35499000549316406, 0.3427099883556366, 1.2833000421524048, 0.3146499991416931, -0.8327199816703796, -0.32416000962257385, 0.15273000299930573, 0.31498000025749207, -0.41165998578071594, 0.050227999687194824, -0.08861199766397476, -0.05290599912405014, 0.5444499850273132, -0.3261699974536896, -0.13615000247955322, -0.013718999922275543, 0.23306000232696533, -0.540910005569458, -0.3404099941253662, -1.065500020980835, -0.35835000872612, -0.3916400074958801, 
-0.059689998626708984, -0.3144800066947937, -0.26385000348091125, -0.28367000818252563, 0.1164499968290329, 0.05382600054144859, -1.1721999645233154, -0.03487199917435646, -0.01634399965405464, 0.12132000178098679, -1.0753999948501587, -0.2786799967288971, 0.03777699917554855, 0.0004035900055896491, 0.04582099989056587, -0.030697999522089958, -0.8912799954414368, -0.17885999381542206, 0.37024998664855957, -0.3202599883079529, -0.18636000156402588, 0.2801699936389923, -0.2573400139808655, -0.028074000030755997, -0.3877899944782257, -0.3950600028038025, 0.14778999984264374, -0.036052998155355453, -0.42778998613357544, -0.3019700050354004, 0.13096000254154205, 0.18982000648975372, 0.28971999883651733, -0.5571100115776062, -0.7432600259780884, 0.11350999772548676, 0.21852999925613403, -0.22018000483512878, -0.40264999866485596, -0.4255799949169159, 0.5394099950790405, -0.7435200214385986, -0.7100499868392944, -0.2612299919128418, 0.3435699939727783, 0.03472699970006943, 0.37279999256134033, 0.7057999968528748, -0.3085800111293793, 0.12071999907493591, 0.4496900141239166, 0.414139986038208, -0.2993899881839752, 0.740149974822998, 0.6721400022506714, -0.2478799968957901, -0.10877999663352966, 0.03766300156712532, -0.22376999258995056, -0.2676199972629547, -0.4879699945449829, -0.29023998975753784, -0.389849990606308, 0.6633999943733215, -0.03832799941301346, 0.4392000138759613, 0.6101199984550476, 0.8256199955940247, -0.009805399924516678, 0.17723999917507172, 0.5815500020980835, -0.7833099961280823, -0.16354000568389893, 0.07369200140237808, -0.6641499996185303, 0.4745100140571594, -0.09685499966144562, 0.5087900161743164, -0.2388100028038025, 0.39746999740600586, -1.1002999544143677, -0.09082599729299545, -0.45458999276161194, 0.026770999655127525, -0.47971999645233154, -0.37244999408721924, -0.4261600077152252, -0.12852999567985535, -0.0386819988489151, -0.08196800202131271, 0.3540300130844116, -0.2598299980163574, -0.3544600009918213, 0.39680999517440796, -0.4910300076007843, -0.09347700327634811, 0.6276500225067139, 0.9261400103569031, -0.21152999997138977, 0.5726100206375122, -0.518809974193573, -0.6063500046730042, 0.02230899967253208, -0.07722599804401398, -0.2657400071620941, -0.40026000142097473, 0.906440019607544, -0.5757899880409241, 0.9242500066757202, -0.28812000155448914, -0.0858680009841919, 0.4864700138568878, -0.5689299702644348, -0.17726999521255493, 0.18657000362873077, 0.0675249993801117, -0.288239985704422, 0.8870199918746948, -0.11800999939441681, -0.04260599985718727, 0.5263599753379822, 0.403329998254776, -0.06495100259780884, 0.39618998765945435, -0.6810399889945984, 0.5148800015449524, 0.02668599970638752, 0.03379900008440018, 0.32203999161720276, -0.2722199857234955, -0.30414000153541565, -0.30827999114990234, 0.2444700002670288, -0.061524998396635056, 0.18871000409126282, 0.2731199860572815, 0.26883000135421753, -0.73471999168396, 0.03762799873948097, -0.4517099857330322, -0.8523799777030945, -0.9511899948120117, 0.21323999762535095, 0.037108998745679855, 0.03709400072693825, 0.36539000272750854, -0.32607001066207886, 0.18422000110149384, 0.17246000468730927, 0.39741000533103943, 0.3137899935245514, -0.20777000486850739, -0.12626999616622925, 0.3578200042247772, 0.5112599730491638, -0.16493000090122223, 0.8916199803352356, -0.7146499752998352, 0.3109399974346161, 0.1407099962234497, 0.16854000091552734, 0.22238999605178833, -0.01732799969613552], u'bread': [0.42445001006126404, 0.44214001297950745, 0.3783299922943115, -0.06940300017595291, -0.07333800196647644, 
-0.08113499730825424, -0.05778000131249428, 0.16374999284744263, -0.09904500097036362, -0.8168900012969971, -0.43911999464035034, -0.4971199929714203, -0.5049499869346619, 0.7461000084877014, 0.32350000739097595, -0.6949499845504761, -0.19990000128746033, 0.08571799844503403, 0.10033000260591507, 0.05936200171709061, -0.043522000312805176, 0.05022500082850456, -0.22728000581264496, -0.04965899884700775, 0.5257899761199951, -0.13038000464439392, 0.11082000285387039, -0.005702599883079529, -0.40167000889778137, -0.303600013256073, -0.3550400137901306, 0.5646499991416931, -0.42691001296043396, 0.09530500322580338, -0.8980699777603149, 0.17837999761104584, -0.02964700013399124, -0.12109000235795975, -0.13524000346660614, 0.40595999360084534, 0.1559700071811676, -0.8432400226593018, 0.517270028591156, -0.16569000482559204, -0.11146999895572662, 0.07732199877500534, 0.26822999119758606, 0.05721500143408775, -0.14571000635623932, 0.2316800057888031, 0.9179400205612183, 0.181099995970726, 0.446260005235672, 0.8429099917411804, -0.31894001364707947, -0.2061000019311905, -0.30366000533103943, 0.0045449999161064625, 0.03083300031721592, 0.17034000158309937, 0.3111099898815155, 0.12178000062704086, 0.10801000148057938, 0.07989300042390823, -0.6786999702453613, 0.08478700369596481, -0.5422400236129761, 0.15838000178337097, -0.3133600056171417, 0.1287900060415268, 0.6167700290679932, -0.053286999464035034, -0.38343000411987305, -0.23622000217437744, 0.004449700005352497, 0.1517300009727478, 0.5178499817848206, -0.11339999735355377, -0.6259499788284302, 0.03509499877691269, 0.12546999752521515, 0.5569400191307068, 0.051600001752376556, 0.08403799682855606, -0.08485099673271179, -0.44192999601364136, -0.46434998512268066, 0.36741000413894653, -0.18653999269008636, 0.3582800030708313, 0.22892999649047852, -0.7979699969291687, 0.10891000181436539, 0.061560001224279404, -0.24383999407291412, -0.2563199996948242, 0.6607499718666077, 0.5228999853134155, -0.3616800010204315, 0.6464400291442871, 0.19633999466896057, 0.1244800016283989, -0.036928001791238785, -0.9086700081825256, -0.2809000015258789, -0.10382000356912613, 0.07149799913167953, 0.3880699872970581, -0.8308299779891968, 0.12251999974250793, 0.43178999423980713, 0.6597999930381775, 0.04508600011467934, -0.6537700295448303, -0.39691001176834106, -0.08796899765729904, -0.47148001194000244, 0.3505899906158447, 0.7041000127792358, -0.09598200023174286, -0.27671000361442566, -0.29434001445770264, 0.49031001329421997, 0.35662001371383667, 0.027701999992132187, -0.28641998767852783, 0.3156599998474121, 0.5243800282478333, -0.2631700038909912, 0.8205900192260742, 0.07901199907064438, 0.47512999176979065, 0.33364999294281006, 0.801639974117279, 0.026969999074935913, -0.3341499865055084, -0.11802999675273895, 0.7964100241661072, -0.16586999595165253, 0.35163000226020813, 0.7006099820137024, 0.3594000041484833, -0.5951700210571289, -0.12043000012636185, -0.24310000240802765, 0.49751999974250793, -0.5605400204658508, -0.13812999427318573, 0.37623998522758484, -0.36406001448631287, -0.6373000144958496, 0.4753299951553345, 0.6435999870300293, -0.18634000420570374, -0.574209988117218, 0.16739000380039215, -0.017222000285983086, -0.5515400171279907, -0.09162700176239014, 0.0163589995354414, 0.0061147999949753284, 0.032249998301267624, -0.3087199926376343, 0.2383899986743927, -0.6533899903297424, 0.3114599883556366, -0.24672000110149384, -0.2788200080394745, 0.2989400029182434, -0.5313199758529663, -0.3832400143146515, 0.08935900032520294, -0.5534800291061401, 
0.21006999909877777, -0.6627399921417236, 0.32095998525619507, -0.2917500138282776, -0.14104999601840973, 0.28332000970840454, -0.36122000217437744, 0.36750999093055725, 0.33959999680519104, -0.17524999380111694, -0.6083300113677979, 0.10298000276088715, 0.07603800296783447, 0.7550399899482727, 0.21258999407291412, 0.09431999921798706, 0.16234000027179718, 0.09964899718761444, 1.3594000339508057, -0.13559000194072723, -0.31185999512672424, -0.1212100014090538, -0.07656099647283554, -0.676639974117279, -0.8716300129890442, 0.0691860020160675, 0.23583999276161194, 0.3517799973487854, -0.5865300297737122, 1.0506000518798828, 0.7157800197601318, -0.05127900093793869, -0.7217299938201904, 0.5503100156784058, -0.18321000039577484, -0.32433000206947327, -0.19721999764442444, -0.21130000054836273, 0.08322100341320038, -0.20574000477790833, -0.14087000489234924, -0.20221999287605286, 0.11095000058412552, 0.49118998646736145, -0.5243499875068665, -0.022424999624490738, 0.03187299892306328, 0.514519989490509, 0.6174100041389465, -0.1301400065422058, -0.2434300035238266, -0.3547700047492981, -0.6711500287055969, 0.24244999885559082, 0.20685000717639923, 0.5862500071525574, 0.006843899842351675, 0.15636999905109406, 0.04175800085067749, -0.38749000430107117, 0.11131999641656876, 0.8875399827957153, 0.7852299809455872, -0.07043399661779404, 0.11204999685287476, -0.9423800110816956, -0.1041800007224083, -0.47953000664711, -0.04193099960684776, -0.20347000658512115, -0.47242000699043274, -0.7055799961090088, -0.18668000400066376, -0.14410999417304993, 0.41065001487731934, 0.11432000249624252, -0.9206100106239319, 0.7363799810409546, -0.0318400003015995, 0.1583700031042099, 0.26361000537872314, 0.45361000299453735, 0.8141499757766724, -0.5170800089836121, -0.2691799998283386, 0.5244100093841553, 0.5450999736785889, 0.08345700055360794, -0.45576998591423035, -0.1775600016117096, -0.7336000204086304, 0.6922900080680847, -0.39893999695777893, -0.4186300039291382, 0.057050999253988266, 0.2504900097846985, 0.1310800015926361, -0.737820029258728, 0.4501799941062927, 0.11530999839305878, 0.7658399939537048, 0.348690003156662, 0.01532800029963255, -1.1598000526428223, -0.22352999448776245, -0.9381399750709534, -0.5110999941825867, 0.07284300029277802, -0.3021399974822998, -0.16631999611854553, -0.31373998522758484, 0.05968799814581871, 0.6507300138473511, 0.9289299845695496, -0.6086400151252747, 0.2864300012588501, 0.5460299849510193, -0.1412300020456314, -0.34933000802993774, 0.27733999490737915, 0.3058899939060211, -0.004934099968522787, -0.17020000517368317, -0.4302600026130676, 0.023264000192284584, -0.26447999477386475, 0.4634000062942505], u'aluminum': [0.17599999904632568, -0.17916999757289886, -0.22026999294757843, -1.1598000526428223, -0.48719000816345215, -0.1257600039243698, -0.16440999507904053, 0.24255000054836273, -0.2106200009584427, -0.8602100014686584, -0.4559600055217743, -0.2480199933052063, -0.21770000457763672, -0.210999995470047, -0.3569999933242798, -0.4842599928379059, -0.19833000004291534, 0.20295000076293945, 0.09363599866628647, -0.7631800174713135, -0.1165900006890297, 0.010730000212788582, 0.10158000141382217, 0.705590009689331, 0.2343599945306778, -0.6247400045394897, 0.12303999811410904, 0.24070000648498535, -0.28356000781059265, -0.307779997587204, 0.012664999812841415, 0.38324999809265137, 0.3492099940776825, 0.24178999662399292, -0.20106999576091766, 0.6245599985122681, -0.3198400139808655, 0.011253000237047672, 0.15466000139713287, 0.7809699773788452, -0.9522500038146973, 
-0.18554000556468964, -0.26054999232292175, 0.12685999274253845, -0.1306300014257431, 0.06165999919176102, -0.4226999878883362, -0.34053000807762146, -0.04170700162649155, 0.4246000051498413, 0.07195200026035309, 0.2978599965572357, 0.07720199972391129, 0.02979999966919422, 0.5041499733924866, 0.2829500138759613, 0.5229300260543823, 0.2262199968099594, 0.849590003490448, 0.11650999635457993, 0.14608000218868256, 0.8113200068473816, 0.15674999356269836, -0.7514299750328064, 0.33184999227523804, 0.19913999736309052, -0.6223499774932861, 0.3961000144481659, -0.11960999667644501, 0.24818000197410583, -0.52360999584198, 0.08620499819517136, 0.2535800039768219, 0.23738999664783478, -0.2366899996995926, 0.18300999701023102, -0.18546999990940094, 0.0789019986987114, 0.043088000267744064, 0.20535999536514282, -0.12883000075817108, -0.3097200095653534, -0.22509999573230743, -0.34848999977111816, 0.16690999269485474, -0.048902999609708786, -0.040348999202251434, 0.064751997590065, -0.40968000888824463, 0.34488001465797424, 0.7453200221061707, 0.29899999499320984, 0.021176999434828758, 0.18389999866485596, -0.3481200039386749, -0.041058000177145004, -0.8974000215530396, -0.10379000008106232, -0.41065001487731934, -0.19429999589920044, -0.262690007686615, 0.3971399962902069, 0.2192700058221817, -0.6629700064659119, 0.9742500185966492, -0.18725000321865082, 0.2345699965953827, 0.12898999452590942, -0.7651799917221069, -0.03939099982380867, -0.016984999179840088, -0.37564998865127563, -0.35635998845100403, -0.7926499843597412, -0.0014945999719202518, -0.2800399959087372, -0.06327799707651138, 0.896809995174408, 0.3985399901866913, 0.5217800140380859, -0.533050000667572, -0.788100004196167, 0.45353999733924866, 0.2545500099658966, -0.18849000334739685, 0.11699999868869781, 0.1257299929857254, -0.1133200004696846, -0.020716000348329544, -0.0818220004439354, -0.2994000017642975, 0.7854099869728088, -0.3595300018787384, 0.2766000032424927, 0.0955910012125969, 0.6751800179481506, -1.1154999732971191, -0.08138900250196457, -0.195810005068779, -0.210999995470047, 0.7485399842262268, 0.6715400218963623, 0.5473200082778931, -0.7291899919509888, -0.1060200035572052, 0.09027499705553055, 0.18556000292301178, -0.7651699781417847, -0.049166999757289886, -0.7793099880218506, 0.6392199993133545, -0.5362399816513062, -0.19830000400543213, -0.9185500144958496, 0.3136900067329407, -0.3972199857234955, -0.2533099949359894, -0.354779988527298, -0.19825999438762665, 0.21185000240802765, 0.2993899881839752, 0.07434500008821487, -0.303710013628006, 0.307669997215271, 0.8894699811935425, 0.29905998706817627, 0.0627409964799881, 0.4778299927711487, 0.5847799777984619, -0.38130998611450195, -0.7740100026130676, 0.3546299934387207, -0.29151999950408936, 0.03466999903321266, 0.25475001335144043, -0.6431499719619751, -0.26249000430107117, -0.07220400124788284, 0.17542999982833862, -0.7444000244140625, 0.23960000276565552, 0.031383998692035675, 0.6073799729347229, 0.08769799768924713, 0.3034000098705292, -0.38172000646591187, 1.067199945449829, 0.4825499951839447, 0.43400999903678894, 0.2855899930000305, 0.16461999714374542, 0.5303400158882141, 0.004796300083398819, 0.2828100025653839, 0.22306999564170837, 0.015935000032186508, 0.2304600030183792, 0.5132399797439575, 0.039361998438835144, 0.08383200317621231, -0.33623000979423523, 0.1919499933719635, 0.15731999278068542, 0.14791999757289886, -0.0034574000164866447, 0.9742699861526489, 0.12710000574588776, -0.21036000549793243, -1.156499981880188, -0.23643000423908234, 
0.5140100121498108, 0.5233200192451477, 0.1632000058889389, -0.39739999175071716, 0.3680799901485443, 0.4029200077056885, 0.21404999494552612, -0.3002600073814392, -0.03833499923348427, 0.1658799946308136, 0.3483699858188629, -0.036035001277923584, 0.6698799729347229, -0.7353299856185913, -0.0072591002099215984, 0.6537399888038635, 0.21122999489307404, 0.4236299991607666, -0.13958999514579773, -0.3765999972820282, -0.132750004529953, -0.14148999750614166, 0.06996600329875946, 0.19325000047683716, 0.3356100022792816, -0.03619299829006195, 0.47056999802589417, -0.37950998544692993, -0.47255000472068787, -0.3635599911212921, -0.04494599997997284, -0.15904000401496887, -0.47811999917030334, 0.17981000244617462, -0.5099700093269348, -0.578029990196228, 0.4670400023460388, -0.7562299966812134, -0.5599799752235413, 0.18540999293327332, 0.11266999691724777, 0.3117400109767914, 0.30632999539375305, -0.00705880019813776, 0.6329699754714966, 0.2193399965763092, -0.4521700143814087, -0.4639100134372711, -0.4603100121021271, -0.3538300096988678, -0.2660500109195709, -0.13044999539852142, -0.05231799930334091, -0.0576849989593029, 0.4932900071144104, 0.6589099764823914, 0.16798999905586243, -0.18418000638484955, 0.8499900102615356, 0.3012999892234802, 0.7892500162124634, -0.0687670037150383, 0.3700000047683716, -0.1483200043439865, -0.4945099949836731, -0.2732599973678589, -0.35962000489234924, 0.00682140002027154, -1.0233999490737915, 0.15985000133514404, 0.263839989900589, -0.9428899884223938, -0.7825300097465515, 0.4916900098323822, 0.44863998889923096, 0.3027699887752533, 0.5540800094604492, 0.039778001606464386, 0.08178900182247162, 0.04424599930644035, -0.5154299736022949, 0.057735998183488846, 0.1279900074005127, 0.23477999866008759, 0.40959998965263367, 0.5707899928092957, -0.008538800291717052, -0.4654200077056885, -0.19035999476909637, 0.7021300196647644], u'cable': [-0.05146399885416031, -0.35207998752593994, 0.20100000500679016, 0.14993999898433685, -0.08122500032186508, 0.7513899803161621, -0.46720999479293823, 0.028845999389886856, -0.07481399923563004, -1.191100001335144, 0.12366999685764313, 0.16857999563217163, 0.10486999899148941, -0.2913399934768677, 0.2505300045013428, 0.25154998898506165, -0.3727000057697296, -0.2721000015735626, 0.5524600148200989, -0.37446001172065735, 0.02131900005042553, -0.2718000113964081, 0.2156900018453598, 0.7822200059890747, -0.03781900182366371, -0.24950000643730164, 0.045726001262664795, 0.33566001057624817, 0.31345999240875244, -0.0029400999192148447, -0.459989994764328, -0.17204999923706055, -0.051093000918626785, 0.25630998611450195, -0.9099400043487549, 0.4119400084018707, -0.20878000557422638, -0.24544000625610352, 0.4009999930858612, 0.2533999979496002, -0.08786500245332718, 0.21265999972820282, -0.08917699754238129, 0.5202500224113464, -0.09254699945449829, 0.4588800072669983, -0.0036422000266611576, -0.27059999108314514, 0.36021000146865845, -0.2442300021648407, -0.3057900071144104, 0.06045600026845932, -0.6486499905586243, -0.4655199944972992, 0.6275200247764587, 0.44023001194000244, -0.1211400032043457, 0.1263899952173233, -0.25600001215934753, -0.11930999904870987, 0.022043999284505844, 0.18380999565124512, 0.2960300147533417, 0.18282000720500946, 0.3924899995326996, 0.7052599787712097, -0.07112199813127518, 0.7923399806022644, 0.005462599918246269, 0.6417499780654907, -0.2282399982213974, 0.659030020236969, -0.43773001432418823, 0.5220900177955627, 0.2149599939584732, 0.7552499771118164, -0.5569999814033508, -0.18152999877929688, 
-0.39458000659942627, 0.0272659994661808, -0.3030099868774414, -0.9894199967384338, 0.038644999265670776, -0.430510014295578, -0.34018000960350037, 0.6683499813079834, -0.08220600336790085, -0.3305700123310089, 0.15417000651359558, -0.24175000190734863, 0.015948999673128128, 0.4592599868774414, 0.08839999884366989, 0.4808099865913391, -0.11145000159740448, -0.20419999957084656, -0.4004400074481964, 0.4353100061416626, 0.5103700160980225, -0.7566800117492676, 0.03016900084912777, 6.3184997998178e-05, -0.3362500071525574, -0.3957599997520447, 0.9000999927520752, 0.4720900058746338, 0.166360005736351, 0.04321400076150894, -0.3369300067424774, 0.1467200070619583, 0.17870000004768372, -0.1816300004720688, -0.07033900171518326, -0.16576999425888062, 0.438289999961853, -0.05182800069451332, -0.3539299964904785, -0.07133299857378006, -0.5192000269889832, 0.0915059968829155, 0.60971999168396, -0.7322700023651123, 1.0537999868392944, -0.7340499758720398, 0.03989800065755844, -0.3202599883079529, -0.33059999346733093, 0.0625389963388443, -0.04299499839544296, -0.09900099784135818, -0.3373900055885315, -0.06803999841213226, 0.1408199965953827, 0.27074000239372253, 0.3261899948120117, 0.0223229993134737, 0.43053001165390015, 0.449290007352829, -0.08937200158834457, -0.1661899983882904, -0.7648599743843079, 0.20227999985218048, 0.782480001449585, -0.11686000227928162, -0.5615800023078918, 0.21127000451087952, -0.02514900080859661, -0.6136699914932251, -0.06180800125002861, -0.3741300106048584, 0.746429979801178, 0.373089998960495, -0.08556900173425674, -0.7012199759483337, 0.4394800066947937, -0.03111100010573864, 0.6742200255393982, 0.4632300138473511, -0.47404998540878296, -0.3353300094604492, 0.4498000144958496, -0.0786999985575676, -0.29857999086380005, 0.07008299976587296, 0.46904999017715454, 0.733489990234375, -0.11879999935626984, -0.3355199992656708, -0.011001000180840492, 0.9023299813270569, -0.3112100064754486, 0.4345499873161316, -1.307800054550171, 1.0051000118255615, 0.09266400337219238, 0.5561299920082092, -0.37185001373291016, 0.7740200161933899, 0.2670600116252899, 0.2784999907016754, 0.33851999044418335, -0.11977999657392502, 0.061278000473976135, -0.4486199915409088, 0.10836999863386154, 0.1432500034570694, 0.25975000858306885, 0.4418500065803528, -0.1614599972963333, -0.5469899773597717, 0.2760300040245056, 0.08781100064516068, 0.06585200130939484, 0.46869000792503357, 0.23431000113487244, -0.21890999376773834, -0.8044400215148926, 0.043793000280857086, -0.4413500130176544, -0.35708001255989075, 0.7927299737930298, 0.16496999561786652, 0.32861998677253723, -0.29554998874664307, 0.10380999743938446, 0.5320500135421753, -0.17837999761104584, 0.20695999264717102, 0.44501999020576477, -0.09987600147724152, -0.27167001366615295, 0.04826100170612335, 0.913320004940033, -0.06344600021839142, -0.12987999618053436, 0.20115000009536743, -0.3052099943161011, -0.5563099980354309, 0.18716000020503998, 0.29322001338005066, 0.40042999386787415, 0.09414300322532654, 0.664870023727417, -0.43619000911712646, 0.6904299855232239, 0.14833000302314758, 0.1311199963092804, -1.2005000114440918, 0.5590299963951111, -0.18788999319076538, -0.3432300090789795, 0.2449900060892105, 0.08178800344467163, 0.22637000679969788, 0.33173999190330505, -0.46654000878334045, 0.8101300001144409, -0.3457599878311157, -0.41440001130104065, -0.23288999497890472, 0.19394999742507935, -0.23594999313354492, 0.6739400029182434, -0.5682399868965149, -0.4743199944496155, -0.04985100030899048, -0.16715000569820404, 
-0.30695000290870667, 0.2936600148677826, -0.05527399852871895, -0.05916599929332733, 0.9057300090789795, -0.01783199980854988, 0.06046399846673012, 0.09474699944257736, 0.39458000659942627, -0.30066999793052673, -0.019545000046491623, 0.2834300100803375, 0.023520000278949738, 0.24264000356197357, -0.1669899970293045, -0.383109986782074, 0.5486999750137329, 0.2655099928379059, -0.3570599853992462, 0.5766699910163879, -0.026406999677419662, -0.024838000535964966, 0.13042999804019928, 0.1416199952363968, 0.04464700073003769, -0.16788999736309052, -0.133200004696846, -0.1578100025653839, 0.36866000294685364, -1.261199951171875, 0.036107998341321945, -0.1989700049161911, -0.22246000170707703, -0.21985000371932983, -0.30709999799728394, 0.013147000223398209, -0.1992100030183792, -0.36664000153541565, -0.3792099952697754, 0.04090899974107742, -0.37439998984336853, -0.27324000000953674, 0.5319700241088867, 0.07521100342273712, 0.40119999647140503, 0.18668000400066376, -0.32888999581336975, -0.6421700119972229, 0.22258999943733215, 0.15981000661849976, 0.020532000809907913, 0.36649999022483826, -0.22945000231266022], u'gemstone': [-0.18780000507831573, 0.8053500056266785, -0.20044000446796417, 0.5100799798965454, 0.022324999794363976, 0.10385999828577042, 0.7959499955177307, 0.025279000401496887, 0.0006029100040905178, -0.26899999380111694, -0.4075999855995178, 0.3266499936580658, 0.14544999599456787, 0.11693000048398972, 0.00922510027885437, 0.29563000798225403, -0.7214699983596802, 0.6591299772262573, 0.059974998235702515, -0.395550012588501, 0.24188999831676483, -0.22401000559329987, -0.45552998781204224, 0.13305999338626862, 0.14409999549388885, -0.48993000388145447, -0.6706200242042542, 0.298909991979599, 0.3515700101852417, 0.0153590003028512, 0.3896099925041199, 0.221110001206398, -0.16095000505447388, 0.5771499872207642, 0.26322999596595764, -0.2967900037765503, -0.5179399847984314, 0.23038999736309052, 0.5985999703407288, -0.38141998648643494, 0.06193799898028374, 0.990369975566864, 0.8338599801063538, -0.18015000224113464, -0.0757879987359047, -0.07968299835920334, -0.36777999997138977, 0.03830200061202049, -0.8865799903869629, -0.38651999831199646, -0.23983000218868256, -0.24258999526500702, -0.15226000547409058, 0.47258999943733215, 0.2859100103378296, -0.6857799887657166, -0.5169199705123901, 0.20969000458717346, -0.07196599990129471, 0.13136999309062958, -0.028568999841809273, 0.027679000049829483, -0.5332800149917603, -0.044318001717329025, 0.3905400037765503, 0.8212299942970276, -0.515209972858429, -0.21682000160217285, 0.3542400002479553, 0.056703999638557434, 0.3902899920940399, -0.053846001625061035, 0.13950000703334808, -0.2475699931383133, 0.47720998525619507, 0.3922100067138672, 0.14710000157356262, -0.43296998739242554, -0.1300099939107895, 0.08753100037574768, 0.6301100254058838, -0.2806200087070465, 0.24172000586986542, -0.11524999886751175, 1.0262000560760498, 0.4057199954986572, -0.1834000051021576, -0.5364500284194946, 0.4544200003147125, -0.3717699944972992, -0.5598199963569641, 0.4402199983596802, 0.2226399928331375, -0.3030099868774414, -0.4297400116920471, 0.06868100166320801, -0.11492999643087387, 0.3006199896335602, 0.18055999279022217, -0.135670006275177, 0.45313000679016113, 0.32809001207351685, 0.5915399789810181, 0.37564000487327576, -0.25251999497413635, -0.4645799994468689, 0.6043300032615662, 0.6634100079536438, 0.4778900146484375, 0.1719599962234497, -0.04328100010752678, 0.4922899901866913, -0.01979999989271164, -0.20693999528884888, 0.26677998900413513, 
0.34994998574256897, 0.5174700021743774, -0.046785999089479446, 0.8976899981498718, 0.525439977645874, 0.40860000252723694, 0.4418500065803528, -0.28685998916625977, -0.22259999811649323, -0.08746100217103958, -0.1177700012922287, 0.09365999698638916, 0.7052000164985657, -0.45855000615119934, -0.02686299942433834, -0.3264400064945221, -0.2754800021648407, 0.04268399998545647, -0.5194900035858154, -0.10241000354290009, 0.39261001348495483, -0.4999299943447113, -0.1985500007867813, -0.13342000544071198, 0.266620010137558, 0.8465499877929688, 0.866599977016449, -0.2510800063610077, -0.19866999983787537, 1.2151000499725342, -0.3547999858856201, 0.025442000478506088, -0.7002599835395813, -0.11806000024080276, 0.31356000900268555, -0.8852900266647339, -0.054816000163555145, -0.7615699768066406, -0.25894999504089355, -0.7420399785041809, -0.07095500081777573, -0.7183700203895569, -0.5336300134658813, -0.20031000673770905, -0.12244000285863876, -0.6274899840354919, -0.4426099956035614, -0.4119499921798706, 0.4924899935722351, 0.3367300033569336, -0.9420499801635742, 0.19102999567985535, -0.27834001183509827, -0.6106100082397461, -0.4909000098705292, -0.41214999556541443, -0.14431999623775482, 0.3705500066280365, 0.7900000214576721, -0.7471200227737427, -0.03272299841046333, 0.4719499945640564, 0.16050000488758087, -0.17207999527454376, 0.17657999694347382, 0.42524999380111694, -0.07422900199890137, -0.1342500001192093, -0.44071999192237854, 0.04029799997806549, 0.11475999653339386, 0.1402300000190735, 0.645039975643158, 0.12454000115394592, -0.3558399975299835, -0.25957000255584717, -0.2290000021457672, -0.4287799894809723, 0.1622599959373474, 0.05758000165224075, 0.5437300205230713, -0.5887200236320496, -0.42849001288414, -0.6284300088882446, -0.03161599859595299, -0.3170199990272522, 0.2975600063800812, -0.2540700137615204, -0.5811499953269958, 0.38339999318122864, 0.6189900040626526, 0.3068599998950958, 0.2669000029563904, 0.294189989566803, 0.039489999413490295, 0.5809500217437744, 0.326090008020401, 0.29513999819755554, -0.19251999258995056, -0.195360004901886, -0.06774099916219711, 0.4944700002670288, -0.02295600064098835, -0.47867000102996826, 0.29688000679016113, -0.21525000035762787, -0.6385400295257568, -0.4099699854850769, -0.19280999898910522, 0.030027000233530998, 0.4763300120830536, 0.15976999700069427, 0.47512999176979065, -0.1797800064086914, -0.4706000089645386, 0.04486300051212311, -0.41995999217033386, -0.17238999903202057, 0.22397999465465546, 0.4524199962615967, -0.12081000208854675, 0.15178999304771423, -0.21710999310016632, 0.10723000019788742, -0.9957600235939026, 0.034818001091480255, 0.06515199691057205, -0.449429988861084, -0.6453400254249573, -0.3641600012779236, 0.7488399744033813, -0.5238900184631348, -0.2854900062084198, -0.09911300241947174, -0.06484100222587585, -0.5173699855804443, -0.12791000306606293, -0.092910997569561, -0.13683000206947327, 0.3066299855709076, 0.24720999598503113, 0.21523000299930573, -0.03353099897503853, 0.03069400042295456, 0.2695100009441376, 0.08676300197839737, -0.5222499966621399, 0.5436300039291382, -0.007751599885523319, 0.4653399884700775, 0.39812999963760376, -0.12498000264167786, 0.052726998925209045, -0.20417000353336334, -0.3931199908256531, -0.08870399743318558, -0.2672399878501892, 0.5480700135231018, -0.06661400198936462, -0.15945999324321747, 0.4079299867153168, 0.42879998683929443, 0.04992299899458885, -0.4629499912261963, 0.6256499886512756, -0.10894999653100967, -0.19742000102996826, -0.39034000039100647, 
-0.018959999084472656, 0.7026399970054626, -0.17615999281406403, -0.39726001024246216, 0.008772799745202065, 0.6149500012397766, -0.29111000895500183, 0.4207800030708313, 0.022019000723958015, -0.4659700095653534, 0.4154199957847595, -0.3496299982070923, 0.0817129984498024, 0.9029899835586548, -0.9203100204467773, 0.7313299775123596, 0.41534000635147095], u'bracelet': [-0.27364999055862427, -0.17628000676631927, -0.2822200059890747, -0.12601999938488007, -0.37702998518943787, -0.04797700047492981, -0.006893699988722801, -0.9677500128746033, 0.06383399665355682, -0.6755899786949158, 0.6672599911689758, 0.019509000703692436, 0.03403500095009804, -0.21258999407291412, -0.10106000304222107, -0.28404000401496887, -0.8128700256347656, 0.8264200091362, -0.641290009021759, -0.5070499777793884, 0.5334399938583374, -0.5412799715995789, -0.9398900270462036, -0.5779399871826172, -0.3076399862766266, -0.4890100061893463, -0.16690999269485474, 0.6061699986457825, -0.5570499897003174, -0.21825000643730164, 0.23388999700546265, 0.20900000631809235, -0.13792000710964203, 0.19588999450206757, -0.02885100059211254, 0.2856999933719635, -0.476500004529953, -0.05107500031590462, 0.03136200085282326, 0.1920199990272522, -0.4187900125980377, -0.7345899939537048, 0.5500699877738953, -0.04032700136303902, -0.2591400146484375, -0.1776300072669983, -0.051646001636981964, -1.0448999404907227, -0.07394599914550781, 0.6579399704933167, 0.3767400085926056, -0.014937000349164009, 0.5982999801635742, 0.13492000102996826, 0.02825699932873249, -0.8443700075149536, -0.9389299750328064, 0.10739000141620636, -0.10589999705553055, -0.12723000347614288, -0.13639000058174133, -0.07197900116443634, -0.23327000439167023, 0.3498699963092804, 0.6295599937438965, -0.15333999693393707, -0.45423001050949097, 0.21348999440670013, 0.28734999895095825, 0.24658000469207764, 0.061636000871658325, 0.0177800003439188, 0.042785000056028366, -0.2064400017261505, 0.3104900121688843, -0.4087600111961365, 0.8813199996948242, -0.586929976940155, -0.25536999106407166, -0.5845999717712402, 0.32311999797821045, 0.18810999393463135, 0.3161599934101105, 0.5306199789047241, -0.2351900041103363, -0.8393300175666809, -0.34029000997543335, 0.052354998886585236, 0.11528000235557556, 0.5014899969100952, -0.5580199956893921, 0.2768700122833252, 0.4003700017929077, 0.003302199998870492, 0.2418999969959259, 0.24060000479221344, -0.06254000216722488, -0.03411199897527695, 0.23823000490665436, -0.17607000470161438, 0.3039200007915497, 0.7606800198554993, -0.45691001415252686, 0.23642000555992126, 0.3541400134563446, -0.14680999517440796, 0.386790007352829, 0.2898699939250946, -0.4225600063800812, -0.9192100167274475, -0.044599998742341995, 0.6410099864006042, -0.39601999521255493, -0.34536001086235046, 0.3479900062084198, 0.028008999302983284, -0.0012504999758675694, 0.12419000267982483, 0.48096999526023865, 0.26510998606681824, -0.20826999843120575, 0.24849000573158264, 0.16438999772071838, -0.16380999982357025, 0.6984999775886536, 0.13981999456882477, -0.14817999303340912, -0.4187699854373932, -0.12886999547481537, 0.45427000522613525, 0.0381539985537529, -0.12115000188350677, -0.5959799885749817, -0.4442499876022339, -0.46035000681877136, -0.1218700036406517, 0.48500001430511475, -0.43160000443458557, -0.3063400089740753, -0.14080999791622162, 0.18855999410152435, 0.8740900158882141, 0.3910599946975708, 0.05705200135707855, 0.7956799864768982, -0.9171299934387207, -0.48072999715805054, 0.030045999214053154, 0.5679799914360046, 0.407150000333786, 
0.007624499965459108, -0.3689799904823303, -0.19995999336242676, -0.09489999711513519, 0.2322700023651123, -0.04479200020432472, -0.056171998381614685, -0.5151399970054626, -0.30629000067710876, -0.17824000120162964, 0.5342000126838684, 0.032836999744176865, -0.19599999487400055, -0.06939700245857239, 0.5964499711990356, 0.31213998794555664, 0.5042799711227417, 0.21823999285697937, 0.016845999285578728, 0.149849995970726, -0.13161000609397888, 0.5379400253295898, 0.7817800045013428, 0.618619978427887, 0.44971999526023865, -0.7339400053024292, 0.04880300164222717, 0.16345000267028809, 0.05442899838089943, -0.41694000363349915, -0.1491599977016449, 0.48559001088142395, 0.424780011177063, -0.07589399814605713, -0.008934799581766129, -0.056115999817848206, 0.4068000018596649, 0.869379997253418, 0.14564000070095062, -0.5151399970054626, -0.4637399911880493, -0.19801999628543854, 0.16095000505447388, 0.6570299863815308, -0.03602699935436249, 0.28435999155044556, -0.09485500305891037, -0.21616999804973602, 0.39618998765945435, 0.024806000292301178, 0.7188299894332886, 0.2583000063896179, 0.35065001249313354, 0.5620999932289124, 0.31126999855041504, 0.1839900016784668, -0.25005999207496643, 0.01890300028026104, -0.7099699974060059, 0.4298799932003021, 0.5239999890327454, -0.2087000012397766, 0.5127699971199036, -0.3438900113105774, 0.25398001074790955, -0.46654000878334045, -0.4474300146102905, -0.21368999779224396, 0.1324000060558319, 0.8504400253295898, -0.19699999690055847, -0.4757699966430664, 0.5834599733352661, 0.7369499802589417, -0.03707199916243553, 0.4867900013923645, -0.5120000243186951, -0.16134999692440033, -0.3115699887275696, -0.06469999998807907, 0.6742299795150757, -0.4602999985218048, 0.09456299990415573, 0.16325999796390533, -0.06274200230836868, 0.16058999300003052, 0.6482899785041809, -0.34922999143600464, 0.36590999364852905, 0.2546899914741516, 0.3122600018978119, -0.1731799989938736, -0.4524900019168854, 0.6551100015640259, 0.436489999294281, 0.29580000042915344, 0.0005146400071680546, -0.418830007314682, 0.05124500021338463, -0.5802199840545654, -0.46408000588417053, -0.5262100100517273, -0.20140999555587769, -0.15910999476909637, -0.13680000603199005, 0.36134999990463257, 0.5358099937438965, -0.28470999002456665, -0.02320699952542782, -0.16189999878406525, -0.17030000686645508, -0.041161999106407166, 0.26409998536109924, 0.08777400106191635, -0.15814000368118286, 0.5673199892044067, 0.265859991312027, 0.12669000029563904, -0.14154000580310822, 0.017796000465750694, -0.005132900085300207, -0.7705100178718567, 0.4283300042152405, -0.07652000337839127, -0.6628100275993347, -0.5036600232124329, -0.4126499891281128, -0.29631999135017395, -0.4979900121688843, 0.4138599932193756, 0.0762609988451004, 0.518750011920929, -0.57819002866745, -0.06376200169324875, -0.40713998675346375, -0.024903999641537666, -0.34999001026153564, -0.43191999197006226, -0.46152999997138977, -0.5639100074768066, -0.29190000891685486, 0.7630599737167358, -0.4855000078678131, 0.3847000002861023, -0.050390999764204025, -1.2166999578475952, 1.0404000282287598, 0.6852700114250183, 0.36281999945640564, 0.028946999460458755], u'candy': [-0.2712700068950653, 0.10567999631166458, -0.4511899948120117, -0.21895000338554382, -0.44176000356674194, 0.13997000455856323, 0.37814998626708984, -0.18151000142097473, -0.1598999947309494, -0.43584999442100525, 0.18950000405311584, -0.8794199824333191, 0.20747999846935272, 0.25800999999046326, -0.18856999278068542, 0.17462000250816345, -0.047850001603364944, 
0.27612000703811646, 0.10825999826192856, 0.32276999950408936, 0.5853300094604492, 0.1839500069618225, 0.05963600054383278, 0.414029985666275, -0.3967300057411194, -0.11305999755859375, -0.9660900235176086, -0.3007200062274933, -0.01568000018596649, -0.5967000126838684, -0.5478000044822693, 0.3081899881362915, -0.19231000542640686, 0.03352800011634827, -0.6618499755859375, 0.4592199921607971, -0.5140100121498108, -0.05220099911093712, -0.47220999002456665, 0.3303399980068207, -0.5598199963569641, -0.361519992351532, 0.14202000200748444, 1.0683000087738037, -0.3396500051021576, -0.35412999987602234, 0.4560199975967407, -0.7116199731826782, 0.14611999690532684, 0.2709699869155884, -0.008646699599921703, 0.17639000713825226, 0.027915000915527344, 0.3834800124168396, -0.6520699858665466, -0.2289000004529953, -0.4655900001525879, 0.051343001425266266, 0.3680799901485443, -0.05256599932909012, -0.04292500019073486, 0.0951400026679039, -0.27831000089645386, -0.08982300013303757, 0.5285400152206421, -0.250900000333786, -0.23795999586582184, -0.19089999794960022, -0.22804999351501465, 0.011768000200390816, -0.05910699814558029, 0.01338099967688322, -0.3485499918460846, -0.02216400019824505, 0.273250013589859, -0.26385998725891113, 0.10777000337839127, -0.16388000547885895, 0.3899799883365631, -0.12752999365329742, 0.6543999910354614, 0.1178399994969368, -0.2664099931716919, 0.05913800001144409, 0.36869001388549805, -0.5678200125694275, 0.13222000002861023, -0.4440099895000458, -0.0026889999862760305, 0.09115500003099442, -0.17732000350952148, 0.14215999841690063, -0.0340769998729229, 0.08935800194740295, -0.33908000588417053, 0.3816699981689453, 0.054607000201940536, -0.2186100035905838, -0.24501000344753265, -0.3643200099468231, 0.25211000442504883, 0.5177800059318542, -0.42610999941825867, -0.5710399746894836, 0.04250499978661537, -0.2646700143814087, -0.2231999933719635, 0.21525000035762787, -0.18925000727176666, 0.24997000396251678, 0.2335599958896637, 0.3848100006580353, 0.08289100229740143, -0.15147000551223755, 0.7358800172805786, -0.30779001116752625, -0.6512100100517273, 0.5105699896812439, 0.2388399988412857, -0.07330100238323212, -0.25968000292778015, -0.07529900223016739, 0.3352000117301941, 0.026405999436974525, -0.33621999621391296, -0.08651600033044815, 0.42504000663757324, 0.1171099990606308, 0.16568000614643097, 0.3096500039100647, -0.12987999618053436, 0.04003399983048439, -0.12836000323295593, 0.3402799963951111, -0.12244000285863876, 0.3197999894618988, 0.263619989156723, 0.7835599780082703, -0.1707800030708313, -0.1691100001335144, 0.26100000739097595, -0.029575999826192856, -0.27149999141693115, -0.11861000210046768, 0.15594999492168427, 0.3815099895000458, -0.4576300084590912, -0.04229599982500076, 0.32196998596191406, -0.4646199941635132, -0.6173999905586243, 0.17237000167369843, 0.21908000111579895, -0.03570700064301491, -0.5439299941062927, -0.3732300102710724, -0.646589994430542, -0.32714998722076416, -0.16500000655651093, 0.29082998633384705, 0.39621999859809875, -0.32102999091148376, 0.146479994058609, -0.31411001086235046, 0.1396300047636032, 0.3664399981498718, 0.2771899998188019, 0.21946999430656433, 0.47189998626708984, -0.22931000590324402, -0.23678000271320343, 0.06243100017309189, 0.03408300131559372, 0.5382400155067444, 0.26151999831199646, -0.158610001206398, 0.14361999928951263, -0.45083001255989075, -0.2779400050640106, -0.6116099953651428, -0.2453099936246872, 0.4394499957561493, -0.01825599931180477, 0.19986000657081604, 0.38172000646591187, 
-0.22221000492572784, 0.8997600078582764, -0.4653100073337555, 0.6444100141525269, -0.3047100007534027, 0.23577000200748444, 0.3693099915981293, -0.3744199872016907, -0.4802800118923187, -0.5652599930763245, 0.4419400095939636, -0.536620020866394, -0.005699600093066692, 0.08961299806833267, 0.2438099980354309, 0.8295300006866455, 0.11388999968767166, 0.9595500230789185, 0.24437999725341797, -0.42212000489234924, -0.5829899907112122, 0.19268999993801117, 0.5504000186920166, -0.11558999866247177, -0.21041999757289886, -0.20991000533103943, -0.5311800241470337, -0.30028998851776123, 0.2740600109100342, -0.24492000043392181, 0.38324999809265137, 0.543720006942749, -0.06838200241327286, 0.49017998576164246, -0.127470001578331, 0.08273299783468246, 0.19415000081062317, -0.57669997215271, 0.40292999148368835, -0.6495599746704102, 0.21570999920368195, 0.4196600019931793, 0.48535001277923584, 0.5220800042152405, -0.3100399971008301, 0.2827700078487396, -0.16354000568389893, -0.3217099905014038, -0.005062299780547619, 0.2720000147819519, 0.19543999433517456, 0.07369700074195862, -0.018691999837756157, -0.7116199731826782, -0.49799999594688416, -0.2390100061893463, -0.2677899897098541, -0.43400999903678894, -0.36945998668670654, -0.6635900139808655, -0.21819999814033508, -0.19644999504089355, 0.07073900103569031, 0.18957999348640442, 0.32183998823165894, 0.20506000518798828, -0.20507000386714935, -0.20645000040531158, 0.1736299991607666, 0.8125900030136108, 0.11441999673843384, -0.07720500230789185, -0.38778001070022583, 0.2223300039768219, -0.10916999727487564, 0.1100199967622757, -0.9075700044631958, 0.29072999954223633, -0.17694999277591705, 0.5400400161743164, -0.2893899977207184, 0.02980799973011017, -0.2323099970817566, 0.455159991979599, 0.938759982585907, -0.07577499747276306, -0.03305999934673309, -0.12300000339746475, 0.12515999376773834, -0.023930000141263008, -0.22142000496387482, -0.8155099749565125, -0.6269299983978271, -1.2692999839782715, 0.03082899935543537, 0.14956000447273254, 0.689050018787384, -0.36469998955726624, -0.3416000008583069, 0.0654740035533905, 0.7263500094413757, 0.45914000272750854, -0.3308199942111969, 0.21521000564098358, 0.26736000180244446, 0.18601000308990479, -0.1286199986934662, 0.0552930012345314, 0.43634000420570374, -0.4061799943447113, -0.41383999586105347, 0.04857200011610985, 0.21683000028133392, -0.3036800026893616, -0.2867799997329712], u'lightning': [0.11146000027656555, 0.12588000297546387, 0.15341000258922577, -0.4240500032901764, -0.17441999912261963, 0.07833100110292435, 0.27695000171661377, 0.16085000336170197, -0.359250009059906, -0.46340999007225037, 0.6337900161743164, 0.39684998989105225, -0.06038599833846092, -0.38804998993873596, 0.05378099903464317, 1.2375999689102173, -0.046817000955343246, 0.12072999775409698, -0.16721999645233154, 0.8514299988746643, -0.2523300051689148, -0.32311001420021057, -0.1337299942970276, -0.47760000824928284, 0.6457899808883667, 0.15198999643325806, 0.29137998819351196, -0.34751999378204346, 0.19035999476909637, -0.10639999806880951, 0.6656299829483032, 0.11839000135660172, -0.5293899774551392, 0.07421000301837921, -1.2192000150680542, -0.22386999428272247, -1.169700026512146, -0.014485999941825867, 0.5018500089645386, 0.06929299980401993, 0.34150999784469604, 0.4702799916267395, -0.6920999884605408, -0.016575999557971954, -0.49605000019073486, -0.5873799920082092, -0.12453000247478485, -0.5585799813270569, -0.7335000038146973, -0.43237999081611633, 0.20167000591754913, 0.3363899886608124, -0.15206000208854675, 
0.11886999756097794, -0.33122000098228455, 0.03894300013780594, 0.05194900184869766, -0.1935500055551529, 0.994949996471405, 0.3053300082683563, -0.7037000060081482, 0.2305700033903122, -0.23051999509334564, -0.09398800134658813, -0.4099000096321106, -0.11451999843120575, 0.2612299919128418, 0.5791900157928467, -0.17896999418735504, 0.48087000846862793, 0.4170199930667877, 0.40918999910354614, 0.031178999692201614, 0.37229999899864197, -0.49215999245643616, -0.4555099904537201, -0.9924799799919128, -0.11022000014781952, 0.13558000326156616, -0.04499800130724907, -0.17687000334262848, -0.2507700026035309, -0.2840999960899353, -0.3217499852180481, -0.7855799794197083, -0.20714999735355377, -0.2300799936056137, 0.42493999004364014, -0.07356200367212296, -0.5911399722099304, 0.8143699765205383, 0.3484399914741516, 0.1163100004196167, -0.4097500145435333, 0.12939000129699707, -0.037452999502420425, -0.015758000314235687, -0.1269499957561493, 0.05047700181603432, -0.23820999264717102, -0.16797000169754028, 0.2468699961900711, 0.2528800070285797, 0.23124000430107117, 0.578790009021759, -0.46219998598098755, 0.4009400010108948, -0.047370001673698425, -0.5144000053405762, 0.08011700212955475, 0.0049911001697182655, -0.15068000555038452, -0.03886500000953674, 0.6095200181007385, 0.16958999633789062, -0.24924999475479126, 0.011517999693751335, -0.1674399971961975, 0.29910001158714294, -0.2399200052022934, -0.10468000173568726, -0.5138000249862671, -0.1412000060081482, 0.2436700016260147, -0.01764100044965744, -0.22923000156879425, 0.002992900088429451, 0.7187700271606445, 0.21153999865055084, 0.3814300000667572, 0.5801600217819214, 0.4986799955368042, -0.2120400071144104, 0.010354000143706799, 0.2697399854660034, -0.0336729995906353, 0.14287999272346497, -0.09644199907779694, 0.10524000227451324, -0.2811700105667114, 0.05214700102806091, 0.41578999161720276, -0.6911799907684326, 0.40393999218940735, 0.08596699684858322, 0.2793700098991394, 0.37290000915527344, 0.1553100049495697, -0.09401100128889084, -0.5860300064086914, 0.18749000132083893, -0.5836899876594543, 0.1775899976491928, 0.23976999521255493, -0.31040000915527344, -0.7390900254249573, 0.7756500244140625, -0.1143999993801117, 0.2746399939060211, -0.1680299937725067, 0.4920699894428253, -0.7187899947166443, 0.2109300047159195, 0.33629998564720154, 0.21901999413967133, 0.7154200077056885, 0.36379000544548035, 0.3693700134754181, -0.8571400046348572, -0.3561500012874603, 0.2198600023984909, -0.20860999822616577, -0.25731000304222107, 0.3865799903869629, -0.28975000977516174, -0.2162500023841858, -0.29563000798225403, 0.23717999458312988, 0.25297001004219055, 0.08723600208759308, -0.325080007314682, -0.2945399880409241, 0.35903000831604004, 0.014298000372946262, 0.03805699944496155, 0.09113100171089172, 0.39798998832702637, 0.3042300045490265, 0.19551999866962433, -0.5437300205230713, -0.37617000937461853, 0.3679400086402893, -0.25005000829696655, 0.28707000613212585, 0.7705100178718567, -0.21573999524116516, 0.07077399641275406, -0.5977500081062317, 0.292169988155365, 0.2043199986219406, 0.6434000134468079, 0.2622799873352051, -0.1306300014257431, -0.47742998600006104, 0.2018200010061264, 0.13500000536441803, -0.1943800002336502, -0.0066766999661922455, 0.05121999979019165, -0.024108000099658966, 0.32128000259399414, 0.09797099977731705, 0.16534000635147095, -0.3580799996852875, 0.0032611999195069075, -0.7239500284194946, 0.3982599973678589, 0.07629799842834473, 0.2871200144290924, -0.4272199869155884, 0.011233000084757805, 
-0.21907000243663788, 0.021724000573158264, -0.5832099914550781, -0.41725999116897583, -0.2898699939250946, 0.12841999530792236, 0.04485499858856201, 0.46560001373291016, -0.2617500126361847, 0.08452799916267395, 0.2965500056743622, 0.18987999856472015, -0.39169999957084656, -0.2919600009918213, -0.25185999274253845, -0.07912500202655792, -0.21699999272823334, -0.06152400001883507, 0.6246899962425232, 0.19877000153064728, -0.14573000371456146, -0.06499599665403366, -0.3375299870967865, 0.1486400067806244, -0.009059700183570385, 0.024337999522686005, 0.48412999510765076, -0.2782000005245209, 0.037790000438690186, -0.4313200116157532, 0.20338000357151031, -0.4259699881076813, -0.5651699900627136, 0.6243600249290466, 0.35607999563217163, 0.6241199970245361, -0.39131999015808105, -0.154339998960495, 0.02642199955880642, -0.5617799758911133, 0.16171999275684357, -0.14854000508785248, 0.28909000754356384, -0.2752799987792969, -0.05968499928712845, -0.13019999861717224, 0.08570100367069244, 0.05810900032520294, 0.2350199967622757, 0.04196399822831154, -0.6075900197029114, -0.05362499877810478, 0.21122999489307404, 0.3843100070953369, 0.7241399884223938, -1.0458999872207642, -0.21077999472618103, -0.3039799928665161, -0.0307219997048378, -0.5409500002861023, 0.7787500023841858, 0.366100013256073, 0.012509999796748161, 0.40713998675346375, 0.10286000370979309, -0.2942599952220917, -0.15232999622821808, -0.1020599976181984, -0.15414999425411224, 0.37424999475479126, 0.6883299946784973, -0.6214100122451782, -0.04521999880671501, 0.5323799848556519, 0.3894999921321869, 0.0010210999753326178, -0.37988001108169556, 0.3694100081920624, 0.2527399957180023], u'bag': [-0.08716700226068497, 0.23816999793052673, 0.1476999968290329, 0.11441999673843384, 0.3106600046157837, -0.2472500056028366, -0.16221000254154205, 0.1846199929714203, -0.06842699646949768, -1.0479999780654907, -0.238319993019104, -0.1743299961090088, -0.1501999944448471, 0.04142799973487854, -0.20486000180244446, 0.12535999715328217, -0.4661499857902527, 0.3384400010108948, -0.19524000585079193, 0.0013126999838277698, 0.36928999423980713, -0.636680006980896, -0.4994100034236908, 0.023308999836444855, -0.5803899765014648, -0.08631599694490433, -0.42041000723838806, 0.14440999925136566, 0.7267299890518188, -0.3476699888706207, -0.055667001754045486, -0.11565999686717987, 0.05093400180339813, 0.13926999270915985, -0.674780011177063, 0.7160500288009644, -0.5047900080680847, 0.4103100001811981, -0.980400025844574, 0.7102199792861938, -0.5832899808883667, -0.6017199754714966, 0.2511399984359741, 0.18577000498771667, 0.08054500073194504, 0.1468300074338913, 0.6465700268745422, -0.5127599835395813, -0.17569999396800995, 0.47404998540878296, 0.329120010137558, -0.3444800078868866, 0.0751900002360344, -0.07334399968385696, -0.049368999898433685, -0.04591900110244751, -0.14913000166416168, -0.5056999921798706, 0.3073500096797943, -0.2423200011253357, -0.007406599819660187, 0.10638000071048737, 0.0689300000667572, 0.4766699969768524, -0.11929000169038773, -0.3729499876499176, -0.5460799932479858, -0.4648500084877014, -0.34325000643730164, -0.10604000091552734, -0.07445300370454788, -0.41857001185417175, 0.021896999329328537, 0.07124800235033035, -0.18840999901294708, -0.03625800088047981, 0.08364800363779068, -0.5370399951934814, 0.286980003118515, -0.19628000259399414, -0.021577000617980957, 0.30862000584602356, 0.35095998644828796, 0.2174299955368042, -0.2586100101470947, -0.32332998514175415, -0.3237699866294861, -0.05172299966216087, 
-0.8105999827384949, -0.4495899975299835, 0.2753399908542633, -0.1963600069284439, 0.03679399937391281, -0.14298999309539795, 0.24472999572753906, -0.009774800390005112, -0.4097000062465668, 0.4298099875450134, 0.10670000314712524, -0.6617199778556824, -0.18402999639511108, 0.5734599828720093, -0.2961600124835968, -0.8413900136947632, -0.3398300111293793, -0.8353800177574158, 0.44457000494003296, 0.2858400046825409, -0.4588199853897095, -0.20305000245571136, -0.2971700131893158, 0.4233199954032898, -0.06123200058937073, 0.016468999907374382, 0.034832000732421875, 0.3804300129413605, 0.17538000643253326, 0.14916999638080597, 0.21717000007629395, -0.9813200235366821, 0.08072800189256668, -0.12070000171661377, 0.6788600087165833, 0.21741999685764313, -0.07941100001335144, 0.20531000196933746, 0.19668999314308167, -0.1268099993467331, 0.26541998982429504, 0.40689000487327576, 0.2401600033044815, 0.01769299991428852, 0.09059099853038788, 0.28352999687194824, -0.01999399997293949, -0.2498600035905838, 0.099542997777462, -0.031665001064538956, -0.3968200087547302, 0.30546000599861145, 0.05073099955916405, 0.2676199972629547, 0.04258500039577484, -0.5930799841880798, -0.38078001141548157, -0.049710001796483994, 0.08612599968910217, 0.164000004529953, -0.07929600030183792, -0.005903699900954962, -0.3168199956417084, 0.653980016708374, 0.05225500091910362, -0.44543999433517456, 0.18626999855041504, -0.07700400054454803, -0.15702000260353088, -0.25422000885009766, -0.02699200063943863, 0.4769200086593628, 0.48240000009536743, -0.5333600044250488, -0.07697799801826477, 0.415039986371994, 0.5536100268363953, -0.3506999909877777, 0.024065999314188957, 0.7519999742507935, 0.16339999437332153, -0.259799987077713, 0.1589999943971634, -0.07967200130224228, 0.09503799676895142, -0.04704400151968002, -0.03762499988079071, -0.5536100268363953, -0.028459999710321426, 0.26767998933792114, 0.0328500010073185, -0.7551699876785278, 0.7422599792480469, 0.16412000358104706, 0.11959999799728394, -0.010565999895334244, -0.16921000182628632, 0.17622999846935272, 0.8486899733543396, 0.34773001074790955, 0.21755999326705933, -0.6002100110054016, 0.43147000670433044, 0.4915899932384491, -0.10750000178813934, -0.20080000162124634, -0.8006899952888489, 0.20162999629974365, -0.3459300100803375, 0.38317999243736267, -0.1285099983215332, 0.38837000727653503, 0.4035399854183197, 0.0945729985833168, 0.8880000114440918, 0.047391001135110855, 0.04288699850440025, -0.6189299821853638, 0.119159996509552, 0.2092200070619583, -0.6278499960899353, -0.2210800051689148, -0.2716299891471863, -0.07879400253295898, -0.006659199949353933, 0.683709979057312, 0.08685000240802765, -0.9472200274467468, 0.09973599761724472, -0.37244999408721924, -0.4436500072479248, 0.2576799988746643, 0.27601000666618347, 0.4874500036239624, 0.16116000711917877, 0.26416999101638794, 0.39504000544548035, 0.3355399966239929, -0.38993000984191895, -0.21518999338150024, 0.5844399929046631, -0.36399000883102417, 0.08227600157260895, -0.11985000222921371, -0.29837000370025635, 0.25863999128341675, 0.5980200171470642, 0.4758799970149994, -0.09140399843454361, 0.22592000663280487, -0.5331900119781494, 0.19721999764442444, 0.12060000002384186, -0.009128199890255928, -0.2220900058746338, 0.09917999804019928, -0.4020799994468689, -0.40560001134872437, 0.3756699860095978, -0.5575299859046936, -0.20550000667572021, -0.2199299931526184, -0.17927999794483185, 0.16726000607013702, 0.35929998755455017, -0.5149700045585632, 0.4627299904823303, 0.41898998618125916, 
0.16881999373435974, -0.22336000204086304, 0.003317799884825945, -0.258760005235672, 0.2272700071334839, -0.34178000688552856, 0.01863200031220913, 0.3862000107765198, -0.023222999647259712, 0.1501300036907196, 0.06524299830198288, 0.15684999525547028, -0.40525999665260315, 0.3463900089263916, 0.04640699923038483, 0.2367199957370758, 0.04885299876332283, 0.47095000743865967, -0.47889000177383423, -0.5506399869918823, -1.3133000135421753, 0.2515299916267395, -0.638159990310669, 0.27685999870300293, -0.4094099998474121, 0.21584999561309814, 0.34266000986099243, -0.32460999488830566, -0.48903998732566833, 0.3241499960422516, 0.44784000515937805, -0.43599000573158264, -0.5429499745368958, -0.09770099818706512, 0.08726900070905685, -0.04427900165319443, -0.13234999775886536, 0.11473000049591064, 0.029608000069856644, -0.368120014667511, 0.33768001198768616, -0.1555500030517578, -0.12961000204086304, 0.06895499676465988], u'sand': [-0.3140600025653839, -0.22137999534606934, -0.9797800183296204, 0.148499995470047, 0.2248300015926361, -0.2967100143432617, 0.3673099875450134, 0.3198600113391876, 0.5907899737358093, -0.6918699741363525, 0.12042000144720078, -0.5933700203895569, -0.19915999472141266, 0.14534999430179596, -0.45118001103401184, -0.5862399935722351, -0.2951200008392334, -0.06768099963665009, 0.3026599884033203, 0.4748600125312805, -0.16082000732421875, 0.08896999806165695, -0.18775999546051025, 0.3174999952316284, -0.8321300148963928, -0.04096199944615364, -0.18378999829292297, 0.4128299951553345, -0.2715800106525421, 0.6717399954795837, 0.5925599932670593, 0.6989799737930298, -0.5456399917602539, 0.18591000139713287, 0.03706600144505501, 0.28115999698638916, -0.2246599942445755, 0.43542999029159546, 0.21595999598503113, 0.665340006351471, -0.2791599929332733, 0.8006399869918823, 0.46219998598098755, -0.3126299977302551, 0.9716699719429016, -0.02214200049638748, 0.7474300265312195, 0.21974000334739685, 0.02550699934363365, 0.07299300283193588, -0.12355999648571014, 0.46491000056266785, -0.4521600008010864, -0.09266199916601181, 0.12514999508857727, 0.489439994096756, -0.1157900020480156, -0.19043000042438507, 0.6335700154304504, 0.37000998854637146, 0.08189799636602402, 0.3835099935531616, 0.8413900136947632, -0.15389999747276306, 0.11553999781608582, -0.38909000158309937, -0.1726599931716919, -0.09141399711370468, -0.5362100005149841, -0.2031099945306778, 0.014063999988138676, 0.3509199917316437, -0.5825899839401245, 0.11529000103473663, -0.4737200140953064, 0.38471001386642456, 0.2031099945306778, -0.03297099843621254, 0.5841500163078308, -0.185029998421669, -0.057948999106884, -0.22081999480724335, -0.21684999763965607, 0.05637900158762932, -0.342739999294281, 0.37415000796318054, 0.19541999697685242, -0.041172999888658524, 0.2795499861240387, -0.36219000816345215, 0.05913100019097328, 0.08673600107431412, 0.12122999876737595, -0.02703399956226349, -0.09205300360918045, -0.009580099955201149, 0.15039999783039093, 0.03610499948263168, 0.20965999364852905, -0.020299000665545464, 0.05116000026464462, 0.6039900183677673, -0.18577000498771667, -0.4963099956512451, -0.5071399807929993, 0.09376399964094162, 0.32249000668525696, -0.144679993391037, 0.15544000267982483, -0.8123300075531006, -0.08892499655485153, -0.5644400119781494, -0.08691500127315521, -0.16761000454425812, -0.23346999287605286, 0.37077000737190247, -0.04030599817633629, 0.4702700078487396, -0.2533099949359894, 0.12713000178337097, 0.16572999954223633, -0.11166000366210938, 0.030602000653743744, 0.1919099986553192, 
-0.17256000638008118, -0.05873199924826622, 0.0575140006840229, 0.44652000069618225, 0.12436000257730484, -0.4328700006008148, -0.1865600049495697, 0.7466300129890442, -0.029113000258803368, 0.39921998977661133, 0.02606399916112423, -0.2818300127983093, -0.10067000240087509, -0.08521600067615509, 0.30803999304771423, 0.305620014667511, 0.14539000391960144, 0.03853600099682808, -0.2734600007534027, -0.46557000279426575, -0.30858999490737915, -0.6644600033760071, 0.4230499863624573, 0.27981001138687134, -0.33803999423980713, 0.023533999919891357, 0.04069900140166283, 0.3662700057029724, -0.8810499906539917, -0.4159199893474579, 0.303600013256073, -0.16436000168323517, 0.09034900367259979, -0.3481200039386749, 0.9740099906921387, 0.6203100085258484, -0.12752999365329742, -0.6111000180244446, 0.053599998354911804, 0.681689977645874, 0.40755999088287354, -0.4740599989891052, 0.5660099983215332, 0.3921999931335449, 0.07912500202655792, 0.004698299802839756, 0.1160999983549118, 0.30594000220298767, 0.40435999631881714, 0.07709699869155884, 0.16311000287532806, -0.016870999708771706, -0.12419000267982483, 0.35776999592781067, -0.35638999938964844, -0.5653700232505798, 0.06326200067996979, -0.0996680036187172, 0.0745289996266365, 0.49314001202583313, -0.5114499926567078, -0.38565000891685486, 0.6060299873352051, -0.480459988117218, 0.6333600282669067, 0.19898000359535217, 0.5691800117492676, 0.5358999967575073, 0.24990999698638916, -0.09191100299358368, 0.6381199955940247, -0.1267700046300888, 0.39504000544548035, -0.39983999729156494, 0.12568999826908112, -0.38135001063346863, 1.1110999584197998, 0.37244001030921936, -0.02284500002861023, -0.1788800060749054, 0.3671700060367584, 0.3658199906349182, 0.5503399968147278, -0.681850016117096, -0.29392001032829285, 0.34185001254081726, -0.16335000097751617, 0.05252499878406525, 0.5426999926567078, -0.15227000415325165, 0.1833299994468689, 0.20358000695705414, 0.4751499891281128, -0.227960005402565, 0.004645800217986107, -0.6875100135803223, 0.7394999861717224, 0.048204999417066574, 0.35958999395370483, -0.23789000511169434, 0.16422000527381897, -0.3739599883556366, 0.18424999713897705, -0.3450700044631958, 0.18379999697208405, -0.4717099964618683, 0.1742199957370758, 0.06387700140476227, 0.10760000348091125, 0.18132999539375305, 0.2473199963569641, 0.20171000063419342, -0.5786399841308594, -0.21660999953746796, -0.06350299715995789, -0.6794899702072144, -0.5448799729347229, 0.3574399948120117, -0.5281199812889099, -0.37490999698638916, -1.2674000263214111, 0.30316001176834106, -0.0502999983727932, -0.17858000099658966, -0.29886001348495483, -0.784529983997345, -0.34564000368118286, -0.36201000213623047, 0.4776400029659271, -0.25898998975753784, 0.7557600140571594, -0.3415200114250183, -0.16266000270843506, -0.22499999403953552, -0.21952000260353088, 0.09250500053167343, -0.20566000044345856, -0.18153999745845795, -0.5005099773406982, -0.33364999294281006, 0.3940199911594391, -0.5600000023841858, 0.09220200031995773, -0.31885001063346863, -0.34389999508857727, -0.23899999260902405, -0.3129200041294098, -0.2751300036907196, -0.706820011138916, 0.5628700256347656, -0.006284600123763084, -0.31968000531196594, -1.0943000316619873, 0.08192799985408783, -0.6544600129127502, -0.16468000411987305, -0.8141400218009949, 0.23000000417232513, -0.6362199783325195, -0.11986000090837479, 0.479449987411499, 0.27785998582839966, 0.3490299880504608, -0.14264999330043793, 0.11527000367641449, -0.2388100028038025, -0.315530002117157, 0.31773000955581665, 
-0.25174999237060547, 0.7394999861717224, 0.16051000356674194, -0.19619999825954437, 0.8220900297164917, 0.3503299951553345, 0.25308001041412354, -0.5489199757575989], u'steps': [0.15883000195026398, -0.0876379981637001, 0.09270299971103668, -0.4241600036621094, -0.21265999972820282, -0.25971001386642456, -0.31738001108169556, -0.4050999879837036, 0.20552000403404236, -1.5115000009536743, -0.6522899866104126, 0.3138599991798401, -0.015577999874949455, -0.05442100018262863, -0.012713000178337097, 0.1636199951171875, -1.0582000017166138, -0.18842999637126923, -0.19870999455451965, -0.29455000162124634, -0.25804999470710754, -0.05260099843144417, 0.24792000651359558, 0.13422000408172607, -0.49751999974250793, 0.46693000197410583, -0.10796999931335449, -0.18081000447273254, -0.3216499984264374, 0.3686699867248535, -0.09230300039052963, 0.09832099825143814, 0.28852999210357666, -0.21543000638484955, -0.6790300011634827, 0.4548799991607666, 0.1076899990439415, -0.0223619993776083, -0.3540099859237671, -0.39201000332832336, -0.419840008020401, 0.36507999897003174, -0.23465000092983246, 0.11959999799728394, -0.553600013256073, -0.10958000272512436, 0.19096000492572784, 0.2950499951839447, -0.32267001271247864, -0.13970999419689178, -0.4331200122833252, -0.24003000557422638, 0.348470002412796, 0.1983499974012375, -0.14869999885559082, 0.7071999907493591, -0.1340699940919876, 0.15020999312400818, -0.3582099974155426, 0.44056999683380127, 0.5914599895477295, 0.3669300079345703, 0.24562999606132507, 0.3586699962615967, -0.10153999924659729, 0.16335000097751617, 0.13016000390052795, 0.16404999792575836, 0.16957999765872955, 0.017393000423908234, -0.005433899816125631, 0.0939600020647049, -0.04711100086569786, -0.41376999020576477, 0.8103600144386292, -0.015088999643921852, -0.268669992685318, 0.18292999267578125, 0.35097000002861023, -0.6283900141716003, -0.3849700093269348, 0.012236000038683414, 0.6336399912834167, 0.13638000190258026, 0.34942999482154846, -0.14746999740600586, -0.22530999779701233, 0.2668299973011017, -0.1902099996805191, -0.004433699883520603, 0.23236000537872314, 0.1734900027513504, -0.18463000655174255, 0.6227399706840515, -0.08800199627876282, -0.4624199867248535, -0.29649001359939575, -0.4023599922657013, 0.07100000232458115, -0.1553100049495697, -0.2829299867153168, 0.2448599934577942, -0.22960999608039856, -0.04380600154399872, -0.09300599992275238, -0.27818000316619873, -0.12217999994754791, -0.04897499829530716, 0.3071100115776062, -0.3811599910259247, -0.07570400089025497, -0.511430025100708, -0.6361100077629089, -0.013728000223636627, -0.5145800113677979, -0.19146999716758728, 0.5348899960517883, -0.4406999945640564, -0.4454199969768524, -0.41165998578071594, 0.0016261000419035554, 0.21137000620365143, -0.13444000482559204, -0.23930999636650085, -0.18106000125408173, -0.032287999987602234, 0.16629000008106232, 0.17428000271320343, -0.09110300242900848, -0.4654400050640106, 0.0887639969587326, 0.26739999651908875, 0.0008634600089862943, 0.4447900056838989, 0.19083000719547272, -0.24740999937057495, -0.01044899970293045, -0.10867000371217728, -0.1167600005865097, 0.03203999996185303, -0.05140399932861328, -0.41310998797416687, -0.09877099841833115, 0.18195000290870667, -0.5881100296974182, -0.0628189966082573, -0.26104000210762024, 0.2284799963235855, -0.11063999682664871, -0.09803800284862518, -0.4121699929237366, 0.22201000154018402, -0.04069399833679199, -0.023240000009536743, 0.061462998390197754, 0.15139000117778778, -0.006359400227665901, -0.13223999738693237, 
-0.7705900073051453, 0.1819400042295456, 0.46129998564720154, 0.23732000589370728, -0.1691499948501587, -0.038888998329639435, 0.11324000358581543, 0.018727999180555344, 0.20833000540733337, 0.27592000365257263, 0.05190499871969223, 0.40174999833106995, 0.0835300013422966, 0.10406000167131424, 0.6588299870491028, -0.4705600142478943, 0.21288999915122986, -0.17938999831676483, 0.34793999791145325, 0.11748000234365463, -0.005347100086510181, 0.025286000221967697, -0.12918999791145325, -0.532289981842041, 0.16263000667095184, -0.29802998900413513, -0.5353400111198425, 0.5308399796485901, -0.49842000007629395, -0.09080199897289276, 0.09679900109767914, 0.22458000481128693, 0.3072099983692169, 0.014202999882400036, 0.5249999761581421, -0.2983599901199341, 0.22056999802589417, 0.10075999796390533, -0.040043000131845474, -0.22994999587535858, 0.2908099889755249, -0.5747600197792053, 0.8543699979782104, 0.1805099993944168, -0.051899999380111694, 0.5638399720191956, 0.1838199943304062, 0.2028300017118454, -0.06865400075912476, -0.44380998611450195, -0.018323000520467758, 0.05788600072264671, 0.005382299888879061, 0.1304199993610382, -0.5315600037574768, 0.382779985666275, 0.1983499974012375, 0.18716000020503998, 0.49505001306533813, -0.6367899775505066, -0.08117999881505966, -0.16193999350070953, 0.28964999318122864, 0.3896600008010864, -0.24677999317646027, -0.02797500044107437, 0.07965300232172012, -0.19415000081062317, 0.03279000148177147, 0.6483200192451477, 0.13468000292778015, -0.22922000288963318, -0.2711699903011322, 0.1683499962091446, 0.02918899990618229, 0.21393999457359314, -0.272379994392395, 0.2894099950790405, 0.11969000101089478, -0.35607999563217163, 0.30296000838279724, -0.10100000351667404, -0.07376900315284729, -0.09662699699401855, -0.1853100061416626, 0.4560999870300293, -0.1655000001192093, 0.05059799924492836, 0.5618699789047241, 0.09545200318098068, -0.40397000312805176, 0.25637999176979065, 0.09924600273370743, -0.599049985408783, -0.023951999843120575, -0.05302799865603447, 0.4742499887943268, -0.17291000485420227, 0.07521799951791763, 0.2040500044822693, -0.0216279998421669, 0.557699978351593, 0.09115199744701385, 0.19828000664710999, 0.19035999476909637, -0.1677200049161911, 0.20003999769687653, 0.03245700150728226, -0.3128400146961212, 0.7114199995994568, 0.11828000098466873, -0.19892999529838562, 0.03208399936556816, -0.15118999779224396, -0.33862999081611633, 0.21041999757289886, 0.19970999658107758, -0.05726899951696396, -2.059499979019165, -0.09432800114154816, 1.034000039100647, -0.4780699908733368, 0.6254799962043762, -0.37101998925209045, 0.010324000380933285, -0.14327000081539154, -0.21740999817848206, -0.38339999318122864, -0.20582999289035797, 0.35580000281333923, -0.1340000033378601, -0.28185001015663147, 0.0941699966788292, -0.08494599908590317, 0.2356400042772293, 0.14842000603675842, -0.16323000192642212, 0.560670018196106, -0.46869999170303345, 0.07327400147914886, -0.08175099641084671, -0.11455000191926956], u'knife': [0.030812999233603477, 0.11738999933004379, 0.12256000190973282, -0.26554998755455017, 0.5069599747657776, 0.3734700083732605, -0.22257000207901, -0.20558999478816986, 0.10349000245332718, -0.8253499865531921, -0.3727700114250183, -0.05632299929857254, 0.3125, 0.5572699904441833, -0.3341200053691864, 0.042656999081373215, -0.5379199981689453, 0.4139299988746643, -0.20861999690532684, -0.18934999406337738, -0.10508999973535538, 0.039027001708745956, 0.19407999515533447, 0.01701500080525875, 0.37349000573158264, -0.23340000212192535, 
-0.09732100367546082, -0.7677299976348877, -0.14576999843120575, -0.45357999205589294, -0.05191199854016304, 0.8790799975395203, 0.32161998748779297, -0.17885999381542206, -0.21616999804973602, 0.05481300130486488, -0.21583999693393707, 0.2147500067949295, -0.6340000033378601, 0.20406000316143036, 0.7276999950408936, 0.14087000489234924, 0.2696000039577484, -0.8338000178337097, 0.15497000515460968, 0.6092299818992615, -0.24412000179290771, -0.2695100009441376, 0.2658799886703491, 0.5577700138092041, -0.26736998558044434, -0.26820001006126404, 0.6201000213623047, 0.11427000164985657, -0.4243299961090088, -0.629010021686554, -0.8113999962806702, -0.10407000035047531, 0.5546200275421143, 0.17541000247001648, 0.11727000027894974, 0.6423599720001221, -0.2276500016450882, 0.7026200294494629, -0.3978999853134155, -0.7211700081825256, -0.6951900124549866, -0.3364199995994568, 0.13402999937534332, -0.35604000091552734, 0.1060900017619133, 0.14013999700546265, -0.3269500136375427, 0.3714900016784668, 0.19643999636173248, 0.07169699668884277, -0.3003999888896942, -0.04162300005555153, -0.6243699789047241, -0.47718000411987305, 0.07504899799823761, 0.39827999472618103, 0.6649100184440613, -0.17440000176429749, 0.23631000518798828, -0.30052998661994934, -0.891319990158081, -0.22609999775886536, -0.6556699872016907, 0.013395999558269978, 0.09440100193023682, 0.30452001094818115, 0.009967000223696232, -0.3986299932003021, 0.04255099967122078, -0.46233999729156494, 0.2156900018453598, 0.3021000027656555, 0.6552600264549255, -0.11043000221252441, -0.32725998759269714, 0.4620499908924103, -0.175929993391037, -0.26673001050949097, 0.14573000371456146, -0.4440299868583679, 0.5926700234413147, 0.021059999242424965, -0.09191299974918365, 0.5620700120925903, -0.3388499915599823, 0.9434499740600586, 0.04715399816632271, -0.5935099720954895, -0.019881000742316246, -0.03565799817442894, -0.7500799894332886, 0.3149999976158142, -0.12417999655008316, -0.802079975605011, -0.5786799788475037, 0.4801099896430969, -0.3921000063419342, -0.6148300170898438, -0.5909900069236755, 0.2735300064086914, -0.23810000717639923, -0.41561999917030334, -0.09578400105237961, -0.4732399880886078, -0.23590999841690063, 0.5347300171852112, 0.7275699973106384, 0.1859699934720993, -0.5586599707603455, -0.1750199943780899, 0.31470999121665955, 0.3326900005340576, 0.8009700179100037, 0.038481999188661575, 0.6307799816131592, 0.21127000451087952, -0.23544000089168549, -0.4889500141143799, -0.024085000157356262, 0.8489400148391724, 0.37996000051498413, -0.009405200369656086, -0.12005999684333801, -0.16207000613212585, -0.08607800304889679, 0.9889299869537354, 0.4653800129890442, -0.3565399944782257, 0.3457399904727936, 0.07895100116729736, 0.2701599895954132, 0.0753839984536171, -0.16345000267028809, 0.4801200032234192, 0.4002000093460083, -0.06597200036048889, 0.3017500042915344, 0.04814299941062927, 0.28433001041412354, 0.02779800072312355, 0.4071800112724304, 0.11672999709844589, -0.31259000301361084, -0.40382999181747437, -0.3763900101184845, -0.14509999752044678, 0.10301999747753143, -0.2232300043106079, -0.7715399861335754, -0.026583999395370483, -0.30855000019073486, -0.20343999564647675, 0.7212499976158142, -0.37571001052856445, 0.28501999378204346, 0.04388900101184845, -0.08225700259208679, 0.10288000106811523, 0.06017100065946579, -0.43435001373291016, 0.4084700047969818, 1.1055999994277954, 0.22408999502658844, 0.4030100107192993, 0.06908000260591507, 0.43709999322891235, -0.024907000362873077, 0.07074800133705139, 
0.2604199945926666, 0.5473899841308594, -0.7299699783325195, -0.42465999722480774, -0.09819900244474411, 0.34634000062942505, 0.3889099955558777, -0.24070000648498535, 0.45778998732566833, 0.6728600263595581, -0.28703999519348145, -0.20303000509738922, 0.1792300045490265, -0.05056999996304512, -0.581279993057251, 0.14159999787807465, 0.36956000328063965, -0.29596999287605286, -0.26958000659942627, 0.39897000789642334, 0.48568999767303467, -0.27663999795913696, 0.399370014667511, 0.11704999953508377, -0.7202000021934509, 0.031369999051094055, -0.7922999858856201, 0.31773999333381653, 0.33348000049591064, 0.06300199776887894, -0.9889000058174133, -0.5260099768638611, -0.5505599975585938, -0.5978999733924866, 0.059393998235464096, -0.5446299910545349, 0.7208799719810486, -0.14922000467777252, -0.5834599733352661, -0.547980010509491, 0.5172799825668335, 0.49994000792503357, -0.5111200213432312, 0.33037999272346497, -0.17681999504566193, 0.0316540002822876, 0.8673700094223022, -0.11810000240802765, -0.12665000557899475, -0.45333001017570496, -0.2688100039958954, -0.13364000618457794, 0.505810022354126, -0.1090100035071373, -0.05684899911284447, -0.4364300072193146, 0.0201990008354187, 0.004362999927252531, 0.45010998845100403, -0.14545999467372894, 0.45311999320983887, 0.4213300049304962, 0.12240999937057495, 0.12801000475883484, -0.03070100024342537, 0.015270000323653221, 0.3750300109386444, -0.4503900110721588, 0.1287499964237213, 0.37481001019477844, 0.006719899829477072, 0.6723600029945374, -0.4807400107383728, 0.7951899766921997, -0.9288899898529053, 0.5197299718856812, -0.02338399924337864, 0.004427900072187185, 0.19860999286174774, 0.4424099922180176, 0.3998900055885315, -0.3787499964237213, -1.426300048828125, 0.01693199947476387, -1.0957000255584717, 0.1262499988079071, 0.12782999873161316, 0.5864599943161011, -0.035920001566410065, 0.04347199946641922, -0.08051799982786179, 0.2965799868106842, -0.036584001034498215, -0.19008000195026398, -0.008589199744164944, 0.27129000425338745, 0.5537099838256836, -0.47505998611450195, 0.05384000018239021, 0.34703999757766724, -0.010308999568223953, 0.3165299892425537, -0.10096000134944916, 0.5074399709701538, 0.046778999269008636, 0.4003700017929077], u'clay': [-0.11208000034093857, 0.6687800288200378, -0.40283000469207764, -0.31185001134872437, 0.19189999997615814, -0.09656400233507156, -0.07435200363397598, -0.0738620012998581, -0.052418000996112823, -0.05637599900364876, 0.31988000869750977, 0.03547300025820732, -0.25527000427246094, 0.31975001096725464, -0.9006900191307068, -0.4411799907684326, -0.6830400228500366, 0.4436500072479248, 0.30667999386787415, 0.33090001344680786, -0.06155399978160858, -0.30292001366615295, 0.1629199981689453, 0.012275000102818012, -0.641260027885437, -0.6624699831008911, -0.2928699851036072, 0.48965999484062195, 0.12050999701023102, 1.0551999807357788, 0.07695599645376205, 0.7262700200080872, -0.7373999953269958, -0.12511999905109406, -0.4002299904823303, 0.19922000169754028, 0.04165399819612503, 0.020351000130176544, 0.029301999136805534, 0.033197999000549316, -0.048650000244379044, 0.04433400183916092, 0.5359100103378296, -0.010614999569952488, 0.9057000279426575, 0.28624001145362854, 0.2004300057888031, 0.7737100124359131, -0.007521599996834993, 0.14257000386714935, 0.20818999409675598, 0.6045299768447876, -0.3244999945163727, 0.2861799895763397, 0.6800900101661682, 0.6281999945640564, -0.2736299932003021, -0.3883500099182129, 0.7093200087547302, -0.1447100043296814, -0.09490799903869629, -0.5657399892807007, 
0.32679998874664307, -0.11309000104665756, 0.18682999908924103, -0.6308599710464478, -0.5798400044441223, -0.1348399966955185, -0.3971700072288513, -0.563759982585907, 0.04866499826312065, -0.07224000245332718, -0.1471399962902069, 0.14505000412464142, -0.3956199884414673, -0.6394299864768982, 0.10326000303030014, 0.09154599905014038, -0.09855099767446518, -0.24952000379562378, 0.24133999645709991, -0.6085000038146973, -0.2377299964427948, -0.18324999511241913, 0.128370001912117, 0.2167699933052063, 0.8171200156211853, 0.3942599892616272, -0.16032999753952026, 0.1775600016117096, 0.03982599824666977, 0.259660005569458, -0.14214999973773956, -0.13011999428272247, 0.19518999755382538, 0.00652820011600852, -0.2825399935245514, 0.7265499830245972, 0.02528199926018715, -0.6133900284767151, 0.1114799976348877, 0.07554800063371658, -0.1400900036096573, -0.48704999685287476, 0.15828000009059906, 0.6477299928665161, -0.3684299886226654, -0.443230003118515, -0.274509996175766, -0.19133000075817108, 0.26399001479148865, 0.0859379991889, 0.1304599940776825, -0.36212998628616333, -0.9912400245666504, -0.178739994764328, -0.5744400024414062, 0.4431700110435486, 0.3959299921989441, -0.08637700229883194, -0.2900499999523163, 0.36649999022483826, -0.06011199951171875, -0.15838000178337097, 0.03855700045824051, 0.5385100245475769, 0.5131999850273132, -0.1812800019979477, 0.01533500012010336, 0.5263699889183044, -0.507070004940033, 0.8219599723815918, -0.1273999959230423, 0.22878000140190125, -0.32253000140190125, -0.1411599963903427, -0.08752299845218658, 0.38464000821113586, 0.12103000283241272, 0.3943899869918823, 0.6250699758529663, -0.2768400013446808, 0.08188100159168243, -0.08908600360155106, 0.2370699942111969, 0.12538999319076538, -0.7303599715232849, 0.6660400032997131, -0.3615100085735321, -0.28602999448776245, 0.10964000225067139, -0.2564600110054016, -0.7974600195884705, 0.06755100190639496, 0.47894999384880066, 0.12685999274253845, -0.6957499980926514, -0.17438000440597534, 0.2670300006866455, -0.09397900104522705, -0.41190001368522644, -0.25824999809265137, 0.0620650015771389, 0.6647700071334839, 0.4275200068950653, -0.03372799977660179, 0.4436900019645691, -0.075654998421669, -0.036942001432180405, -0.03510599955916405, 0.11085999757051468, 0.5747399926185608, 0.47991999983787537, 0.12671999633312225, -0.28933000564575195, 0.5222399830818176, -0.026652999222278595, -0.5372700095176697, -0.014010000042617321, -0.5968899726867676, 0.4077799916267395, 0.5501800179481506, 0.7299200296401978, -0.0939669981598854, -0.7684800028800964, -0.4064199924468994, -0.6251999735832214, 0.10199999809265137, 0.264710009098053, -0.301470011472702, 0.4244700074195862, 0.23388999700546265, -0.14695000648498535, 0.0704130008816719, 0.7194700241088867, 0.12859000265598297, 0.16824999451637268, 0.1407800018787384, 0.1482599973678589, -0.774590015411377, 1.4707000255584717, 0.009727099910378456, -0.30226999521255493, 0.031132999807596207, 0.5634099841117859, -0.5072000026702881, 0.10665000230073929, -0.00727220019325614, -0.7922999858856201, 0.13019999861717224, -0.3765999972820282, 0.7307900190353394, 0.4002099931240082, -0.6640300154685974, 0.23638999462127686, 0.20986999571323395, 0.07957500219345093, 0.16775000095367432, -0.16031000018119812, -1.2152999639511108, 0.059774000197649, -0.3184399902820587, -0.23723000288009644, 0.18565000593662262, -0.17714999616146088, 0.12411999702453613, 0.07231000065803528, 0.288349986076355, -0.1823599934577942, -0.13267000019550323, 0.023029999807476997, 0.09139200299978256, 
0.4916599988937378, -0.14395000040531158, -0.1599300056695938, 0.23739999532699585, -0.39535999298095703, 0.685670018196106, 0.3532699942588806, 0.10118000209331512, -0.6079300045967102, 0.07167399674654007, -0.48987001180648804, -0.7636399865150452, -0.2854599952697754, 0.41732001304626465, -1.1122000217437744, -0.24764999747276306, -0.12064000219106674, 0.4493800103664398, 0.8939599990844727, -0.20276999473571777, -0.5140399932861328, -0.8954200148582458, 0.3558500111103058, -0.06787300109863281, 0.20467999577522278, 0.10357999801635742, 0.09644900262355804, -0.035020001232624054, -0.07549700140953064, 0.12693999707698822, -0.17663000524044037, -0.015496999956667423, 0.35666000843048096, 0.16176000237464905, 0.6505799889564514, 0.3805699944496155, 0.17655999958515167, 0.2199299931526184, -0.426829993724823, -0.24305999279022217, 0.07052399963140488, 0.2993699908256531, -0.06233600154519081, 0.09097900241613388, -0.7948600053787231, 0.5686200261116028, -0.8816199898719788, 0.5117400288581848, -0.10305000096559525, -0.2078399956226349, 0.01805800013244152, 0.3552199900150299, -0.04114999994635582, 0.0480399988591671, 0.3607400059700012, -0.12871000170707703, 0.29903000593185425, 0.4463199973106384, -0.15481999516487122, 0.4003100097179413, 0.3339399993419647, 0.7696599960327148, 0.3086000084877014, 0.1566700041294098, 0.3792099952697754, -0.13991999626159668, -0.29826000332832336, 0.5024700164794922], u'tower': [0.08844199776649475, -0.6464499831199646, -0.6453199982643127, -0.722760021686554, 0.18242000043392181, 0.91593998670578, 0.40852001309394836, 0.3038800060749054, -0.5125200152397156, -0.8531699776649475, 0.473690003156662, -0.4255799949169159, 1.337499976158142, 0.07779999822378159, 0.5524799823760986, 0.1243399977684021, -0.20398999750614166, 0.0003159299958497286, -0.39743998646736145, -0.4786899983882904, -0.351859986782074, -0.18738999962806702, 0.20496000349521637, 0.5571600198745728, 0.13763000071048737, 0.09175299853086472, -0.4067099988460541, 0.3119199872016907, -0.8923199772834778, 0.29350998997688293, 0.8664799928665161, 0.3420200049877167, -0.6423799991607666, 0.1979299932718277, 0.09714499861001968, 0.3304699957370758, 0.1009100005030632, -0.5315499901771545, 0.5561699867248535, 0.046817000955343246, -0.23946000635623932, 0.012226000428199768, -0.6647499799728394, 0.85930997133255, -0.11118000000715256, 0.035801999270915985, 0.019173000007867813, 0.08555000275373459, -0.08261699974536896, -0.38392001390457153, -0.34926000237464905, 0.13854999840259552, 0.18774999678134918, 0.02798300050199032, -0.03057200089097023, 0.11079999804496765, 0.2557699978351593, 0.5675600171089172, 0.3509800136089325, 0.13603000342845917, 0.15828000009059906, -0.03481699898838997, 0.6020799875259399, 0.30250999331474304, 0.3757399916648865, 0.024204999208450317, 0.5043500065803528, 0.4709799885749817, 0.6664000153541565, -0.43876999616622925, 0.15473000705242157, 0.1061599999666214, -0.2425999939441681, -0.09508399665355682, -0.2884199917316437, 0.8536800146102905, -0.1858299970626831, -0.3697800040245056, -0.22111999988555908, 0.023408999666571617, -0.2600899934768677, 0.4584999978542328, -0.33362001180648804, 0.5888599753379822, 0.21205000579357147, -0.19859999418258667, 0.3820500075817108, 0.47001001238822937, 0.15861999988555908, 0.0601780004799366, 0.9631100296974182, -0.4208100140094757, 0.3932499885559082, 0.7574700117111206, -0.3915199935436249, -0.2619200050830841, -0.3973200023174286, 0.11315999925136566, 0.6294900178909302, -0.4143899977207184, -0.5810099840164185, 
0.3167099952697754, 0.11705999821424484, 0.02798599936068058, 0.18474000692367554, -0.4647899866104126, 0.03570299968123436, -0.01673799939453602, -0.13673999905586243, -0.04869699850678444, -0.4311800003051758, -0.23632000386714935, -0.20962999761104584, 0.11155000329017639, -0.0957380011677742, -0.25951001048088074, -0.8104400038719177, 0.365090012550354, -0.4386900067329407, -0.1075500026345253, 0.4053899943828583, -0.7540299892425537, 0.11428999900817871, -0.02718600071966648, -0.736840009689331, -0.7784199714660645, 0.2157299965620041, -0.0244159996509552, -0.43501999974250793, -0.4595800042152405, -0.10469000041484833, 0.7533699870109558, -0.047529999166727066, -0.4868600070476532, 0.22472000122070312, -0.20744000375270844, 0.24348999559879303, -0.38842999935150146, -0.5660200119018555, -0.36726999282836914, -0.43174999952316284, 0.11121000349521637, 0.10119999945163727, 0.7142000198364258, -0.29594001173973083, -0.3112800121307373, 0.2007800042629242, 0.1407099962234497, -0.08977500349283218, -0.5440999865531921, 0.17889000475406647, 0.24202999472618103, 0.21074999868869781, -0.396699994802475, 0.4738999903202057, 0.5051699876785278, -0.016520999372005463, 0.20381000638008118, -0.34060999751091003, 0.6606500148773193, 0.36368998885154724, -0.21708999574184418, 0.5725700259208679, -0.20449000597000122, -0.07725799828767776, 0.5527300238609314, 0.009973700158298016, -0.40702998638153076, 0.4844299852848053, 0.41019999980926514, -0.4998300075531006, -0.2179899960756302, 0.1513500064611435, -0.2759400010108948, 0.23568999767303467, 0.3649500012397766, -0.26669999957084656, -0.10633999854326248, 0.02662700042128563, -0.9807000160217285, -0.21905000507831573, 0.2800300121307373, 0.3210600018501282, -0.25481998920440674, -0.16283999383449554, -0.23523999750614166, -0.08395899832248688, 0.22996999323368073, -0.13785000145435333, 0.4673599898815155, 0.9836999773979187, 0.20948000252246857, -0.09576500207185745, -0.02334900014102459, -0.015758000314235687, -0.42037999629974365, -0.690779983997345, -0.4459800124168396, 0.19720999896526337, 0.007765800226479769, 0.9777299761772156, -0.18686999380588531, 0.006923899985849857, -0.12385000288486481, -0.2769100069999695, 0.1995999962091446, -0.03804999962449074, -0.35300999879837036, 0.49483001232147217, 0.11302000284194946, 0.7088800072669983, 0.516539990901947, -0.4744099974632263, -0.8736799955368042, -0.27410000562667847, -0.10080000013113022, -0.15237000584602356, -0.2786799967288971, 0.11174999922513962, -0.14722000062465668, -0.08376999944448471, -0.30309000611305237, 0.5986499786376953, 0.28821998834609985, 0.08493500202894211, -0.0029442000668495893, -0.30101001262664795, 0.022338999435305595, -0.15643000602722168, -0.5361999869346619, -0.0827300027012825, -0.038839999586343765, -0.6647999882698059, 0.294730007648468, 0.1434199959039688, -0.20991000533103943, 0.053328998386859894, 0.2582699954509735, 0.01896899938583374, -0.21689000725746155, -0.06409700214862823, -0.2135400027036667, 0.09598600119352341, -0.3113900125026703, -0.05347700044512749, 0.4627000093460083, 0.3053300082683563, -0.010877000167965889, 0.2967199981212616, -0.2303600013256073, 0.32962000370025635, -0.08162099868059158, -0.34373000264167786, 0.22463999688625336, -0.012392999604344368, 0.33197999000549316, -0.09198900312185287, -0.35394999384880066, -0.05057799816131592, -0.3258900046348572, -0.29969000816345215, 0.45662999153137207, 0.3862600028514862, -0.2820099890232086, -0.5008900165557861, -0.577750027179718, 0.4084399938583374, -0.019780000671744347, 
0.32785001397132874, -0.34046000242233276, 0.08846300095319748, -0.0512159988284111, 0.6039800047874451, -0.04891199991106987, -0.3813199996948242, 0.15877999365329742, -1.5678000450134277, -0.08188299834728241, -0.3595600128173828, -0.1839900016784668, -0.18559999763965607, -0.3108699917793274, 0.09681499749422073, -1.2333999872207642, -0.022698000073432922, 0.02806299924850464, -0.2929899990558624, 0.5268200039863586, -0.3405799865722656, -0.0964839980006218, 0.5452899932861328, 0.38975000381469727, -0.49254000186920166, -0.5067800283432007, 0.6678500175476074, -0.01371499989181757, 0.3922100067138672, 0.060697998851537704, 0.5687000155448914, 0.009721499867737293], u'river': [0.045180998742580414, -0.5207200050354004, 0.3230400085449219, -0.6194900274276733, 0.030194999650120735, 0.36434000730514526, 0.49713999032974243, -0.09396900236606598, 0.2641899883747101, -1.0081000328063965, -0.6944100260734558, -0.4202499985694885, -0.5163900256156921, 0.04094399884343147, 0.48739001154899597, -0.08733899891376495, -0.34158000349998474, 0.1765899956226349, 0.9357600212097168, 0.8018500208854675, -0.6808900237083435, -0.06749200075864792, 0.3398500084877014, 0.05770200118422508, -0.37549999356269836, -0.5855799913406372, -0.17294999957084656, -0.6442099809646606, -0.42188000679016113, 0.4946199953556061, 1.1949000358581543, 0.40206998586654663, 0.26782000064849854, 0.6140099763870239, 0.6488500237464905, 0.2252500057220459, -0.27296000719070435, -0.20417000353336334, -0.08838000148534775, -0.1754399985074997, -0.895550012588501, 0.13083000481128693, 0.10717999935150146, 1.0121999979019165, 0.15881000459194183, 0.5350099802017212, 0.6352599859237671, 0.4994199872016907, 0.4065699875354767, -0.1137399971485138, 0.23615999519824982, 0.21438999474048615, 0.37382999062538147, -0.24774999916553497, -0.08225400000810623, 0.46057000756263733, 0.1935099959373474, -0.3178899884223938, 0.1306000053882599, 0.527239978313446, 0.05459300056099892, -0.267769992351532, 0.6119099855422974, -0.42372000217437744, 0.014569000340998173, -0.49717000126838684, -0.3415699899196625, 0.513159990310669, -0.4587399959564209, 0.2319899946451187, 0.4923200011253357, -0.08224800229072571, -0.08189400285482407, -0.3194600045681, -0.48107999563217163, -0.36792999505996704, 0.07378800213336945, 0.5062599778175354, -0.07232800126075745, -0.6378499865531921, -0.23090000450611115, -0.39798998832702637, -0.13124999403953552, -0.5937899947166443, 0.2797200083732605, -0.09862399846315384, -0.5949000120162964, -0.15528999269008636, 0.2121499925851822, -0.19054000079631805, 0.46081000566482544, 0.520799994468689, 0.9060500264167786, 0.32089999318122864, -0.06684699654579163, 0.05268700048327446, 0.616320013999939, -0.23680000007152557, 0.35097000002861023, -0.019245000556111336, -0.0726109966635704, 0.3259499967098236, 0.21556000411510468, -0.22434000670909882, 0.418830007314682, 0.4970499873161316, 0.7046399712562561, 0.31084999442100525, 0.003041400108486414, 0.07824599742889404, -0.3048799932003021, -1.0360000133514404, -0.5333399772644043, -0.17357000708580017, -0.6498200297355652, -0.14395000040531158, 0.329039990901947, 0.0372450016438961, 0.165910005569458, -0.32868000864982605, -0.220770001411438, -0.5665500164031982, -0.44047999382019043, -0.015084000304341316, -0.14847999811172485, -0.2239599972963333, 0.14966000616550446, 0.2659200131893158, -0.3380500078201294, -0.1361899971961975, -0.3566400110721588, 0.3741599917411804, 0.24166999757289886, -0.4426499903202057, -0.03126800060272217, -0.3034999966621399, 0.8968499898910522, 
-0.6000400185585022, -0.25571998953819275, 0.00032635999377816916, -0.12955999374389648, 0.39746999740600586, 0.033351998776197433, -0.3824000060558319, -0.7615500092506409, 0.0821790024638176, 0.9501199722290039, 0.47933998703956604, -0.4725300073623657, 0.08672799915075302, 0.7779800295829773, 0.2204499989748001, -0.4512600004673004, -0.45761001110076904, 1.5621000528335571, -0.06119000166654587, 0.3078399896621704, -0.42026999592781067, -0.24729999899864197, 0.4458500146865845, -0.14659999310970306, -0.2901799976825714, 0.581570029258728, -0.019120000302791595, -0.15714000165462494, -0.68163001537323, -0.04583200067281723, -0.5149700045585632, -1.2603000402450562, 0.15277999639511108, -0.5029500126838684, 0.5078099966049194, 0.015499000437557697, 0.06104699894785881, 0.16584999859333038, -0.45792001485824585, -0.5555400252342224, -0.8961899876594543, -0.5406500101089478, -0.2545900046825409, 0.13152000308036804, 0.6849700212478638, -0.39294999837875366, 0.20077000558376312, 0.25084999203681946, -0.7086499929428101, -0.10621999949216843, -0.12155000120401382, 0.4955900013446808, 0.2440200001001358, -0.04454699903726578, 1.4377000331878662, -0.19256000220775604, -0.2760399878025055, -0.022755000740289688, 0.5080999732017517, 0.49312999844551086, -0.9308800101280212, -0.14330999553203583, 0.7291300296783447, 1.6569000482559204, 0.3070099949836731, -0.6828299760818481, 0.10267999768257141, -0.39434000849723816, -0.16575999557971954, -0.3265399932861328, 0.717170000076294, -0.24232999980449677, 0.512220025062561, -0.3300800025463104, -0.19304999709129333, -0.10730999708175659, -0.19177000224590302, 0.5935099720954895, 0.051697999238967896, -0.2772800028324127, -0.004841200076043606, 0.10931000113487244, -0.439520001411438, 0.5745999813079834, -0.46386998891830444, 0.3660399913787842, -0.6293799877166748, 0.2795200049877167, 0.2614699900150299, -0.057978998869657516, 0.14678999781608582, 0.3129900097846985, 0.12464000284671783, -0.547819972038269, 0.3898400068283081, -0.6383299827575684, 0.07745900005102158, 0.7353000044822693, 0.13739000260829926, -0.5471900105476379, 0.055500999093055725, 0.4616200029850006, 0.11304999887943268, -0.40735000371932983, 0.7774199843406677, 0.21190999448299408, -0.18851999938488007, -0.6175199747085571, 0.0749329999089241, 0.46823999285697937, 0.614549994468689, -0.25110000371932983, -0.3348099887371063, 0.21493999660015106, 0.04192899912595749, 0.3560500144958496, 0.1621599942445755, 0.4277600049972534, -0.2175700068473816, -0.11337999999523163, 0.5008800029754639, 0.03180000185966492, 0.1472499966621399, -0.016445999965071678, 0.06293000280857086, -0.2939000129699707, -0.4066599905490875, -0.012122999876737595, 0.32444000244140625, 0.18939000368118286, -0.7382799983024597, -0.32747000455856323, 0.367220014333725, 0.1628900021314621, -1.0214999914169312, 0.06230499967932701, 0.6369699835777283, 0.4889200031757355, 0.3184399902820587, -1.2032999992370605, -0.5450500249862671, 0.23568999767303467, 0.5707799792289734, -0.2687999904155731, 0.6227499842643738, 0.593559980392456, -0.909600019454956, -0.8214600086212158, 0.29256001114845276, 0.4128200113773346, -0.3164600133895874, 0.631630003452301, 0.33722999691963196, -0.4163999855518341, -0.23096999526023865, 0.05191100016236305, 0.363290011882782, 0.03641999885439873, 0.07034800201654434, 0.2481900006532669, 0.5988600254058838, 0.32697999477386475, 0.6746699810028076], u'clothes': [-0.17869000136852264, -0.24796999990940094, -0.3010199964046478, -0.12793000042438507, -0.15004999935626984, 0.03686000034213066, 
0.3825399875640869, 0.3225499987602234, 0.22105999290943146, -1.6690000295639038, -0.0006307900184765458, -0.326449990272522, 0.2598100006580353, 0.4945400059223175, 0.04696999862790108, -0.7570499777793884, 0.5304099917411804, -0.28327998518943787, 0.04687900096178055, -0.03711000084877014, 0.18803000450134277, -0.042319998145103455, 0.3472500145435333, -0.22623999416828156, 0.07767599821090698, -0.3522000014781952, 0.037443000823259354, 0.033576998859643936, 0.6958299875259399, 0.20543000102043152, 0.09134799987077713, 0.21845999360084534, -0.5120199918746948, 0.30292999744415283, -0.715939998626709, 0.5594199895858765, -0.16317999362945557, -0.27538999915122986, 0.010878999717533588, -0.4127100110054016, 0.08822900056838989, -1.159000039100647, 0.0748870000243187, -0.5860000252723694, 0.08146999776363373, 0.319240003824234, 0.594760000705719, -0.23608000576496124, 0.271479994058609, -0.11901000142097473, 0.03248799964785576, -0.2661600112915039, 0.35896000266075134, -0.3023500144481659, -0.3472599983215332, -0.2610599994659424, -0.5379800200462341, -0.7081699967384338, -0.047835998237133026, -0.25652000308036804, -0.05034799873828888, -0.4605099856853485, 0.22563999891281128, 0.1659799963235855, -0.2567700147628784, -0.43751999735832214, -0.4583899974822998, -0.08053900301456451, -0.2574099898338318, 0.20104999840259552, -0.0462459996342659, -0.26941999793052673, -0.37310999631881714, -0.2092600017786026, 0.34880998730659485, -0.21764999628067017, -0.018316000699996948, -0.17449000477790833, 0.019363999366760254, -0.3586600124835968, -0.09759599715471268, -0.04201199859380722, -0.2874700129032135, 0.2943199872970581, -0.09750699996948242, -0.009144400246441364, -0.19257000088691711, -0.11111000180244446, -0.15895000100135803, 0.33660998940467834, -0.09854800254106522, -0.0806180015206337, -0.09684000164270401, -0.07018700242042542, -0.11023999750614166, -0.12744000554084778, 0.1877399981021881, -0.5706899762153625, 0.45151999592781067, -0.12849000096321106, 0.06167599931359291, 0.14541000127792358, -0.4838300049304962, 0.1866299957036972, -0.20489999651908875, -0.4485599994659424, 0.24368000030517578, 0.0840580016374588, -0.2240699976682663, -0.17655999958515167, -0.29794999957084656, 0.49494999647140503, -0.17770999670028687, -0.09090600162744522, 0.09131599962711334, 0.5036600232124329, 0.2351599931716919, 0.5323200225830078, 0.38354000449180603, -0.8105000257492065, 0.06807100027799606, 0.2757999897003174, 0.6527000069618225, 0.6302000284194946, -0.3060399889945984, 0.39381998777389526, -0.19012999534606934, -0.0068720001727342606, 0.3942599892616272, 0.18344999849796295, -0.002738500013947487, -0.2594600021839142, 0.1971299946308136, 0.13259999454021454, -0.43650999665260315, 0.35868000984191895, 0.3059999942779541, -0.12358999997377396, -0.10778000205755234, 0.27175000309944153, -0.06260599941015244, -0.06289000064134598, 0.6302899718284607, -0.18118999898433685, -0.3339099884033203, 0.44729000329971313, 0.18639999628067017, 0.6368700265884399, 0.12202999740839005, -0.32653000950813293, -0.1934799998998642, 0.3658199906349182, -0.30164000391960144, -1.0664000511169434, 0.07546699792146683, -0.17969000339508057, -0.37049999833106995, -0.32892999053001404, 0.5911499857902527, 0.28042998909950256, 0.44203999638557434, -0.75586998462677, -0.34261998534202576, -0.0682620033621788, 0.045538000762462616, 0.04513600096106529, 0.40988999605178833, 0.6748999953269958, 0.4180600047111511, 0.0630050003528595, 0.06182200089097023, 0.3457599878311157, -0.5240300297737122, 0.14339999854564667, 
-0.46625998616218567, 0.1525299996137619, 0.19485999643802643, 0.11102999746799469, -0.13046999275684357, -0.2373100072145462, 0.07136400043964386, -0.3403399884700775, 0.24094000458717346, 0.12530000507831573, 0.34073999524116516, 0.5724800229072571, 0.6237499713897705, 0.40105998516082764, 0.22247999906539917, -0.42559999227523804, 0.149849995970726, -0.09210000187158585, -0.6617000102996826, 0.09449200332164764, -0.11705999821424484, 0.18546000123023987, -1.0740000009536743, -0.025388000532984734, -0.504800021648407, 0.2693899869918823, 0.2661300003528595, -0.3162499964237213, 0.621150016784668, 0.8365899920463562, 0.7097899913787842, -0.43772000074386597, 0.35864999890327454, 0.6612300276756287, -0.8861200213432312, -0.6374899744987488, -0.3001500070095062, 0.36243999004364014, -0.24623000621795654, 0.646399974822998, 0.37248000502586365, 0.020393000915646553, 0.9487599730491638, -0.6665400266647339, 0.1041100025177002, -0.22513000667095184, 0.3715600073337555, 0.03544899821281433, 0.07052099704742432, 0.1589300036430359, 0.23734000325202942, 0.20446999371051788, -0.20819999277591705, 0.07638999819755554, 0.21528999507427216, 0.10535000264644623, 0.5420699715614319, -0.3616800010204315, -0.3714199960231781, 0.07103200256824493, 0.6478400230407715, 0.08809500187635422, 0.1377599984407425, -0.13579000532627106, -0.776669979095459, 0.36796998977661133, 0.42232999205589294, -0.030086999759078026, -0.2118300050497055, 0.7991899847984314, -0.02989800088107586, 0.2724199891090393, 0.14500999450683594, -0.12777000665664673, -0.04631099849939346, 0.12020000070333481, 0.38686999678611755, 0.11259999871253967, 0.18233999609947205, -0.3519099950790405, 0.7046300172805786, 0.22045999765396118, -0.25867998600006104, 0.22022999823093414, -0.14282000064849854, -0.3405199944972992, -0.03556099906563759, -0.738860011100769, -0.3005000054836273, -0.1639299988746643, -0.17944000661373138, -0.14629000425338745, -0.2125300019979477, 0.25047001242637634, -0.162540003657341, 0.21811999380588531, 0.12387000024318695, -0.1143300011754036, -0.1208299994468689, -0.4331299960613251, -0.22171999514102936, 0.1919499933719635, -1.4453999996185303, 0.01727299951016903, -0.731440007686615, -0.14733999967575073, 0.36212000250816345, 0.13551999628543854, 0.6881099939346313, 0.05244100093841553, -0.2674799859523773, 0.923039972782135, 0.3885999917984009, 0.4530099928379059, -0.03413299843668938, -0.2528400123119354, -0.031571999192237854, 0.099310003221035, -0.07377000153064728, 0.7652199864387512, -0.1387999951839447, -0.6989700198173523, 0.3477399945259094, 0.49619001150131226, 0.4569399952888489, 0.3591800034046173], u'copper': [-0.36403000354766846, 0.07048899680376053, -0.21379999816417694, -0.3989599943161011, 0.2398100048303604, 0.037842001765966415, -0.10080000013113022, 0.1193000003695488, 0.1491599977016449, -1.1770000457763672, -1.263100028038025, -0.22429999709129333, -0.29236000776290894, -0.15929999947547913, -0.18455000221729279, -0.45511001348495483, -0.5346199870109558, -0.12258999794721603, -0.2295600026845932, -0.6386200189590454, -0.6267799735069275, -0.1433899998664856, 0.39184001088142395, 0.3112199902534485, 0.22265000641345978, -0.8851500153541565, -0.005137300118803978, 0.28876999020576477, -0.2268500030040741, 0.35705000162124634, 0.43349000811576843, 0.5987799763679504, -0.3267900049686432, 0.5318899750709534, 0.06676500290632248, 0.46761998534202576, -0.30094000697135925, 0.02613300085067749, 0.2951500117778778, 0.34275999665260315, -0.846780002117157, -0.010366000235080719, 
-0.17177000641822815, 0.12511000037193298, 0.03937400132417679, -0.2659800052642822, -0.26330000162124634, -0.2153400033712387, 0.006231499835848808, 0.14541999995708466, 0.35721999406814575, 0.5382400155067444, -0.05401400104165077, 0.5134599804878235, 0.7949399948120117, 0.08734799921512604, 0.08423999696969986, 0.10841000080108643, 0.3611299991607666, -0.2640399932861328, -0.15595999360084534, 0.4291900098323822, 0.49814000725746155, 0.26006001234054565, 0.5573499798774719, 0.17295999825000763, -0.33048000931739807, 0.28033000230789185, -0.18087999522686005, -0.08531899750232697, 0.07758600264787674, -0.19718000292778015, 0.07324399799108505, 0.5259799957275391, -0.7355999946594238, 0.049059998244047165, 0.20157000422477722, -0.19399000704288483, 0.0025202000979334116, -0.2953000068664551, -0.10633999854326248, -0.7253699898719788, -0.006385699845850468, -0.034926000982522964, 0.5615699887275696, 0.20134000480175018, -0.47058001160621643, -0.3740200102329254, -0.2087000012397766, -0.2172199934720993, 0.7144299745559692, -0.06872300058603287, -0.13333000242710114, -0.020711999386548996, -0.20823000371456146, 0.37856000661849976, -0.7364299893379211, 0.026917999610304832, 0.350739985704422, 0.4817099869251251, -0.1369599997997284, -0.1985500007867813, 0.21943999826908112, -0.16651999950408936, 0.6726899743080139, 0.23803000152111053, 0.2649100124835968, 0.5184400081634521, -0.7559000253677368, 0.22045999765396118, -0.3380599915981293, -0.5629500150680542, -0.27472999691963196, -0.7874699831008911, 0.4717000126838684, 0.012815999798476696, 0.3729200065135956, 0.5311899781227112, 0.45375001430511475, 0.606909990310669, -0.3468700051307678, -0.6684799790382385, -0.11069999635219574, -0.02751999907195568, -0.05407999828457832, -0.05538399890065193, 0.20059999823570251, 0.7958199977874756, -0.23656000196933746, -0.31725001335144043, -0.24171000719070435, 0.9965100288391113, -0.4148600101470947, 0.03309899941086769, 0.22244000434875488, 0.7388100028038025, -0.8443700075149536, 0.27039000391960144, 0.14093999564647675, 0.01713700033724308, -0.13634000718593597, 0.15285000205039978, 0.19074000418186188, -0.5971900224685669, 0.14970000088214874, -0.13122999668121338, 0.052271999418735504, -0.20868000388145447, 0.15494999289512634, -1.079300045967102, 0.5598000288009644, 0.32892000675201416, -0.320360004901886, -0.36333999037742615, 0.3866899907588959, -0.12270999699831009, -0.49616000056266785, -0.30948999524116516, -0.5662299990653992, -0.6761400103569031, 0.152319997549057, -0.12043999880552292, 0.20892000198364258, 0.27046999335289, 0.329259991645813, 0.30223000049591064, 0.22896000742912292, -0.22498999536037445, 0.31929001212120056, -0.16787999868392944, -0.9203699827194214, 0.7938699722290039, -0.11413999646902084, 0.08675800263881683, 0.5837299823760986, -0.30417001247406006, 0.1373099982738495, 0.05599899962544441, -0.045244000852108, 0.19583000242710114, 0.10814999788999557, 0.4247100055217743, 0.08388199657201767, -0.3097499907016754, 0.06558900326490402, -0.17159000039100647, 1.0889999866485596, 0.7397199869155884, 0.04033299908041954, -0.47679001092910767, 0.44273999333381653, 0.8685700297355652, 0.31080999970436096, 0.6389600038528442, -0.40625, -0.46678999066352844, -0.27250999212265015, -0.36880001425743103, -0.24860000610351562, 0.3322199881076813, 0.24515999853610992, 0.5231299996376038, -0.3043299913406372, 0.2707900106906891, 0.3686000108718872, 0.8670799732208252, 0.4289200007915497, -0.3238599896430969, -0.43957000970840454, -0.2547000050544739, 0.338809996843338, 
0.1851699948310852, 0.33636000752449036, -0.16293999552726746, -0.049396999180316925, 0.761680006980896, 0.3090499937534332, -0.4079599976539612, -0.27366000413894653, 0.33678001165390015, -0.3573099970817566, -0.57573002576828, 0.1286800056695938, -0.7166900038719177, -0.2653200030326843, 0.38760998845100403, -0.39925000071525574, 0.09199099987745285, -0.2780900001525879, -0.3197399973869324, -0.7752500176429749, 0.6332899928092957, -0.16479000449180603, 0.3346500098705292, -0.047954998910427094, -0.22848999500274658, 0.7045300006866455, -0.7090100049972534, -0.29967001080513, -0.5168899893760681, -0.31700000166893005, 0.36305001378059387, -0.3231000006198883, 0.04182799905538559, -0.7771199941635132, -0.3124299943447113, -0.2333800047636032, -0.7452999949455261, 0.024234000593423843, 0.22909000515937805, -0.23837999999523163, 0.25892001390457153, -0.1897599995136261, -0.5654199719429016, 0.8763499855995178, 0.24275000393390656, -0.02606399916112423, 0.010691000148653984, -0.48458001017570496, -0.2713199853897095, -0.6277700066566467, 0.25248000025749207, 0.357589989900589, 0.48118001222610474, 0.18614999949932098, 0.43904998898506165, 0.4334999918937683, 0.20582999289035797, 0.08998599648475647, 0.13553999364376068, -0.08193500339984894, -0.6127499938011169, 0.5769299864768982, 0.4695500135421753, -0.6256499886512756, -0.27632999420166016, -0.48197001218795776, -0.3424200117588043, -0.9282100200653076, 0.07445099949836731, 0.03844200074672699, -0.1880899965763092, -0.7506099939346313, 0.420879989862442, 0.0308810006827116, -0.37915000319480896, -0.13503000140190125, -0.456059992313385, -0.33390000462532043, -0.00884309969842434, -0.7464900016784668, 0.4442000091075897, -0.40156999230384827, 0.5730199813842773, 0.5270100235939026, 0.7443400025367737, 0.08438099920749664, -0.8551300168037415, 0.10563000291585922, 0.06958799809217453], u'creek': [-0.9266700148582458, 0.08375100046396255, 0.029627999290823936, 0.199630007147789, -0.05751900002360344, 0.07459200173616409, -0.08342699706554413, 0.17159999907016754, 0.694350004196167, 0.02685300074517727, -0.7113900184631348, 0.3648900091648102, 0.5851500034332275, 0.37369000911712646, 0.3809199929237366, 0.006835499778389931, -0.14664000272750854, 0.1468999981880188, 1.0706000328063965, 0.7317600250244141, -0.17497999966144562, 0.03602899983525276, 0.4925599992275238, -0.037801001220941544, -0.42239999771118164, -0.042667001485824585, -0.3001500070095062, -0.41277000308036804, -0.5977200269699097, 0.4117000102996826, 1.021399974822998, 0.4178600013256073, 0.3230299949645996, 0.3805699944496155, 0.24081000685691833, 0.1836100071668625, -0.017041999846696854, 0.3800300061702728, -0.3422200083732605, -0.3075999915599823, -0.6651399731636047, 0.46445000171661377, 0.2932800054550171, 0.5777300000190735, 0.2776600122451782, 0.3870899975299835, 0.36653000116348267, 0.23972000181674957, 0.5089700222015381, 0.08202599734067917, 0.04336100071668625, -0.15272000432014465, 0.023141000419855118, 0.2558700144290924, 0.09656699746847153, -0.2655400037765503, -0.5883200168609619, 0.1657000035047531, 0.571150004863739, 0.6820899844169617, 0.16051000356674194, -0.44192999601364136, 0.44402000308036804, -0.18327000737190247, 0.17942999303340912, -0.7559300065040588, -0.19975000619888306, 0.04188000038266182, -0.30935999751091003, -0.28137001395225525, 0.26124000549316406, -0.20211000740528107, -0.8364400267601013, 0.49772998690605164, -0.7702800035476685, -0.49915000796318054, 0.328110009431839, 0.2047100067138672, -0.14016999304294586, -0.35229000449180603, 
-0.12483999878168106, -0.30118998885154724, 0.30706000328063965, -0.6517900228500366, 0.5860300064086914, -0.3822599947452545, -0.12609000504016876, 0.24626000225543976, 0.16824999451637268, -0.10513000190258026, 0.27309998869895935, 0.1933099925518036, 0.6980100274085999, 0.3771499991416931, -0.3303399980068207, 0.3375900089740753, 0.5341100096702576, -0.18820999562740326, -0.2083600014448166, 0.17067000269889832, -0.379830002784729, -0.3909499943256378, -0.3168500065803528, -0.10374999791383743, -0.02289400063455105, 0.40654999017715454, 0.6741399765014648, -0.25913000106811523, 0.01623399928212166, 0.12925000488758087, -0.4496699869632721, -0.8458700180053711, 0.24414999783039093, -0.11285000294446945, -0.353549987077713, -0.2662999927997589, -0.09400899708271027, 0.40490999817848206, -0.18045000731945038, 0.15333999693393707, -0.28290998935699463, 0.054882001131772995, -0.33322998881340027, 0.06170700117945671, -0.1736299991607666, 0.22303999960422516, 0.22694000601768494, 0.07227999716997147, -0.1094600036740303, -0.08017800003290176, -0.016373999416828156, 0.16263000667095184, 0.3249000012874603, -0.08958200365304947, 0.19598999619483948, 0.07358500361442566, 0.4465300142765045, 0.10307999700307846, 0.0452830009162426, -0.6822599768638611, 0.6848400235176086, 0.06135300174355507, -0.043372999876737595, -0.5710099935531616, 0.0334089994430542, -0.45840999484062195, 0.5986700057983398, 0.43101000785827637, -0.07049799710512161, 0.34498998522758484, 0.7813599705696106, 0.8924800157546997, -0.5401399731636047, -0.23122000694274902, 1.0920000076293945, 0.1677899956703186, 0.4568699896335602, -0.43669000267982483, -0.687690019607544, 0.08023399859666824, 0.6057800054550171, -0.7579799890518188, 0.5598199963569641, 0.07699599862098694, -0.035691998898983, 0.128370001912117, 0.4653800129890442, -0.11016000062227249, -0.6126999855041504, -0.053286999464035034, -0.3246299922466278, 0.40375998616218567, 0.6875699758529663, 0.609279990196228, -0.22957000136375427, 0.3353999853134155, -0.4439300000667572, -0.24804000556468964, 0.5254700183868408, -0.032336000353097916, 0.027811000123620033, 1.1497999429702759, 0.49382999539375305, -0.13872000575065613, 0.07478500157594681, -0.3806700110435486, -0.2638700008392334, -0.099932000041008, 0.5676699876785278, -0.01081900019198656, -0.004930099938064814, 1.4763000011444092, -0.46869999170303345, -0.2249400019645691, -0.11201000213623047, 0.2147199958562851, 0.3326599895954132, -1.388200044631958, 0.20753000676631927, 0.718280017375946, 1.5110000371932983, -0.18258999288082123, 0.20172999799251556, -0.4325900077819824, 0.030246000736951828, 0.4078899919986725, -0.325190007686615, 0.16446000337600708, -0.149959996342659, 0.3540300130844116, -0.08088699728250504, -0.06885600090026855, -0.10074000060558319, -0.40553000569343567, 0.1567399948835373, 0.16737000644207, 0.0654359981417656, 0.21494999527931213, 0.17116999626159668, -0.5433400273323059, 0.060784000903367996, -0.3022499978542328, 0.27195999026298523, -0.3019700050354004, 0.48087000846862793, -0.031654998660087585, -0.06790599972009659, 0.1036200001835823, 0.013519000262022018, 0.28422001004219055, -0.8393800258636475, -0.17580999433994293, -0.8161900043487549, -0.20284000039100647, 0.3065299987792969, 0.6274799704551697, 0.33939000964164734, -0.8287299871444702, 0.027726000174880028, -0.602370023727417, -0.333050012588501, 0.5429400205612183, -0.35936999320983887, -0.5770900249481201, -1.2203999757766724, -0.09741999953985214, 0.370279997587204, 0.2709900140762329, 0.38201001286506653, 
-0.7572799921035767, -0.22703999280929565, 0.4004899859428406, -0.3731299936771393, -0.30118000507354736, 0.5814800262451172, -0.700439989566803, 0.05105999857187271, 0.32615000009536743, 0.2912200093269348, 0.4559899866580963, -0.3451800048351288, 0.2790299952030182, -0.30399999022483826, -0.31363001465797424, -0.5919700264930725, 0.06913100183010101, 0.8052899837493896, -0.4144800007343292, -0.017568999901413918, 0.41071000695228577, -0.23201000690460205, -0.4195399880409241, 0.15129999816417694, 0.4348900020122528, -0.07431100308895111, 0.06973499804735184, -0.596809983253479, 0.8160300254821777, -0.16011999547481537, 0.34654998779296875, -0.34332001209259033, -0.029836999252438545, 0.5625100135803223, -0.38659998774528503, -0.6692399978637695, -0.11918999999761581, 0.31918999552726746, -0.5443500280380249, 0.21610000729560852, -0.4002400040626526, -0.5089399814605713, -0.2837499976158142, -0.0947989970445633, 0.49393001198768616, -0.40547001361846924, -0.024071000516414642, -0.09421999752521515, 0.6491000056266785, 0.09839700162410736, 0.6203799843788147], u'fence': [0.34518998861312866, -0.02495099976658821, -0.4591499865055084, -0.018334999680519104, -0.2356400042772293, -0.11479999870061874, 0.1720300018787384, -0.2775900065898895, -0.3025200068950653, -0.6962000131607056, -0.11714000254869461, 0.5007200241088867, 0.23672999441623688, -0.49079999327659607, -0.07819899916648865, 0.452210009098053, -0.7340400218963623, 0.11896000057458878, 0.18849000334739685, 0.6123200058937073, 0.24808000028133392, -0.22301000356674194, 0.13496999442577362, -0.36013999581336975, 0.11477000266313553, -0.4270299971103668, 0.08646199852228165, 0.5855500102043152, -0.11084000021219254, 0.2776699960231781, 0.27807000279426575, 0.26635000109672546, -0.1645900011062622, 0.3201799988746643, 0.07006700336933136, -0.13547000288963318, 0.10944999754428864, -0.3959999978542328, 0.036044999957084656, 0.06819400191307068, 0.10715000331401825, 0.11495000123977661, -0.2404399961233139, -0.4740000069141388, -0.363319993019104, 0.5615400075912476, -0.16705000400543213, -0.21397000551223755, 0.16498999297618866, 0.1296900063753128, -0.7336099743843079, -0.0005034999921917915, -0.32844001054763794, -0.14785000681877136, 0.008492800407111645, -0.013457000255584717, -0.2870100140571594, -0.7168700098991394, -0.37380000948905945, 0.1190200001001358, 0.5257599949836731, 0.18390999734401703, 0.2811200022697449, -0.08358900249004364, 0.4538800120353699, -0.3018999993801117, 0.21341000497341156, 0.3162800073623657, 0.508109986782074, -0.1902800053358078, -0.20826999843120575, 0.16575999557971954, -0.03958800062537193, 0.3531300127506256, -0.48194000124931335, -0.10445000231266022, -0.08695100247859955, 0.257099986076355, 0.14747999608516693, -0.27316999435424805, 0.429720014333725, -0.07298000156879425, -0.05423299968242645, -0.10937999933958054, 0.10785000026226044, 0.15222999453544617, -0.23062999546527863, -0.2322700023651123, 0.29243001341819763, -0.10977999866008759, 0.36552000045776367, 0.11490000039339066, 0.19686000049114227, 0.2593100070953369, -0.3215300142765045, -0.3295600116252899, -0.3436099886894226, -0.34536999464035034, -0.7465800046920776, -0.48173999786376953, -0.29241999983787537, 0.4200100004673004, 0.225490003824234, -0.3995400071144104, 0.24827000498771667, 0.10818000137805939, 0.39785000681877136, 0.25883999466896057, 0.12498000264167786, -0.2064799964427948, -0.7051500082015991, -0.33202001452445984, -0.20806999504566193, -1.0319000482559204, 0.37485000491142273, 0.4357900023460388, 
0.13402000069618225, 0.07274399697780609, 0.16035999357700348, -0.35738998651504517, 0.3622100055217743, -0.2697100043296814, 0.08978799730539322, -0.3922399878501892, -0.2496200054883957, -0.27459999918937683, 0.31885001063346863, -0.06185400113463402, -0.26482000946998596, -0.23529000580310822, 0.2849999964237213, 0.7184399962425232, 0.11327999830245972, 0.8257499933242798, -0.19607999920845032, 0.16670000553131104, 0.2869200110435486, 0.38144999742507935, -0.43957000970840454, -0.6616299748420715, -0.3481999933719635, 0.07333599776029587, -0.19468000531196594, -0.6495000123977661, -0.8322799801826477, 0.17205999791622162, 0.2805800139904022, -0.2587999999523163, -0.13492999970912933, -0.08676400035619736, -0.4589399993419647, 0.185479998588562, 0.009118299931287766, -0.25488999485969543, -0.09997700154781342, -0.4025599956512451, -0.07979200035333633, 0.10791999846696854, 0.07288999855518341, 0.5490300059318542, 0.2703999876976013, 0.3800399899482727, 0.26728999614715576, -0.7456499934196472, 0.4607599973678589, 0.15182000398635864, -0.0884379968047142, 0.5080100297927856, 0.07694700360298157, -0.14339999854564667, -0.057374998927116394, -0.2685000002384186, 0.9142799973487854, -0.6523600220680237, -0.009935200214385986, -0.18217000365257263, -0.3469400107860565, 0.6860700249671936, 0.14768999814987183, -0.9436900019645691, 0.3362399935722351, -0.28534001111984253, 0.0403360016644001, 0.49028998613357544, -0.04170500114560127, 0.047874998301267624, 0.21448999643325806, 0.10823000222444534, 0.5079600214958191, 0.7926899790763855, -0.18458999693393707, -0.01711300015449524, 0.02300499938428402, -0.5745599865913391, -0.3811900019645691, -0.07134599983692169, -0.012749999761581421, -0.17781999707221985, 0.38141000270843506, -0.05749399960041046, 0.8695700168609619, 0.11856000125408173, -0.14429999887943268, -0.05313999950885773, 0.1125200018286705, 0.16866999864578247, 0.8716899752616882, -0.553629994392395, 0.30757999420166016, 0.5811499953269958, 0.2768799960613251, 0.2980799973011017, -0.10216999799013138, -0.37296000123023987, -0.3732599914073944, 0.23813000321388245, -0.4183799922466278, 0.3279300034046173, 0.5569300055503845, 0.563730001449585, 0.9991599917411804, -0.23758000135421753, 0.4967299997806549, -0.24740000069141388, 0.29721999168395996, -0.022724000737071037, -0.13018999993801117, -0.27261000871658325, -0.15172000229358673, -0.5199400186538696, -0.07863900065422058, -0.15094999969005585, -0.36333000659942627, 0.4931100010871887, 0.12342000007629395, -0.18167999386787415, -0.22832000255584717, -0.3619999885559082, 0.3715499937534332, 0.02287600003182888, 0.5174700021743774, 0.02789199911057949, -0.25303998589515686, -0.026523999869823456, -0.4309299886226654, -0.34439000487327576, 0.2789900004863739, -0.02971000038087368, 0.5281299948692322, -0.3089199960231781, -0.4291999936103821, -0.19277000427246094, 0.37310001254081726, -0.43046998977661133, 0.9323599934577942, 0.1269800066947937, 0.18105000257492065, -0.6622300148010254, 0.17048999667167664, 0.22604000568389893, 0.5872600078582764, -0.09323800355195999, 0.349480003118515, -0.31630000472068787, -0.5496900081634521, 0.24342000484466553, 0.0012354999780654907, -0.38420000672340393, 0.07673099637031555, 0.06598500162363052, 0.17858000099658966, -0.11860000342130661, -0.12710000574588776, -0.6823300123214722, -0.49498000741004944, -0.006443500053137541, -1.7972999811172485, 0.3347100019454956, -0.42146000266075134, 0.2932099997997284, 0.5449900031089783, -0.5997499823570251, -0.23127000033855438, 0.2383899986743927, 
-0.09004899859428406, -0.027363000437617302, 0.8503599762916565, 0.08011899888515472, -0.09209900349378586, 0.18775999546051025, 0.7207599878311157, -0.3140900135040283, -0.48138999938964844, -0.007485100068151951, 0.12063000351190567, 0.11877000331878662, 0.1092899963259697, -0.12117999792098999, 0.4206100106239319, 0.178849995136261], u'house': [-0.37070000171661377, -0.08120899647474289, -0.446260005235672, 0.09739500284194946, 0.19829000532627106, -0.04123200103640556, 0.21188999712467194, 0.223690003156662, -0.5991500020027161, -1.3555999994277954, -0.0037263999693095684, -0.5505899786949158, 0.021564999595284462, 0.0106819998472929, 0.04604699835181236, 0.4708299934864044, -0.19259999692440033, 0.09360300004482269, 0.1981000006198883, 0.18291999399662018, 0.2398499995470047, 0.448170006275177, 0.25240999460220337, 0.31303998827934265, -0.31769001483917236, 0.03759400174021721, -0.08782199770212173, -0.06956899911165237, -0.019032999873161316, 0.2518100142478943, 0.5274999737739563, 0.10401000082492828, -0.5695599913597107, 0.6812000274658203, -0.6893600225448608, 0.8408100008964539, 0.041774000972509384, -0.4463599920272827, -0.3075900077819824, -0.2814500033855438, 0.6351699829101562, 0.5041199922561646, -0.33981001377105713, 0.6919100284576416, -0.15073999762535095, 0.16806000471115112, -0.3420200049877167, -0.4484499990940094, 0.05982999876141548, 0.12643000483512878, -0.23142999410629272, -0.09223199635744095, -0.090829998254776, 0.2164199948310852, 0.6258000135421753, -0.3259899914264679, -0.5210400223731995, 0.34727001190185547, -0.0849670022726059, -0.19764000177383423, 0.44523000717163086, -0.4472300112247467, 0.4372600018978119, 0.2606300115585327, 0.654259979724884, -1.4531999826431274, 0.32054999470710754, -0.37514999508857727, -0.228970006108284, -0.700689971446991, -0.17773999273777008, -0.017621999606490135, -0.24696999788284302, -0.2395700067281723, -0.49052000045776367, 0.12087000161409378, -0.24289999902248383, 0.2923400104045868, -0.33522000908851624, -0.010882000438869, 0.27623000741004944, 0.5426200032234192, 0.5223600268363953, 0.056655000895261765, 0.523140013217926, 0.014538000337779522, -0.36858999729156494, 0.8135700225830078, -0.17749999463558197, -0.3927899897098541, -0.039007000625133514, -0.6505299806594849, -0.0841900035738945, 0.48217999935150146, 0.13495999574661255, -0.3684000074863434, -0.3318899869918823, 0.1666100025177002, 0.24202999472618103, -0.47508999705314636, -0.1902099996805191, 0.2973000109195709, -0.1351500004529953, 0.06829000264406204, 0.13021999597549438, -0.25433000922203064, -0.2024099975824356, -0.3799099922180176, 0.045892998576164246, 0.21821999549865723, -0.18371999263763428, -0.14904999732971191, -0.4565599858760834, 0.32638001441955566, -0.19266000390052795, 0.6675099730491638, -0.6721900105476379, -0.12582999467849731, -0.16200000047683716, -0.7183300256729126, 0.053339000791311264, -0.00148760003503412, -0.40470001101493835, -0.33921000361442566, 0.1545799970626831, -0.42247000336647034, -0.35561999678611755, -0.10341999679803848, 0.5255299806594849, -0.1607999950647354, -0.11343999952077866, 0.2539600133895874, -0.1341399997472763, -0.07925699651241302, 0.08544900268316269, 0.39976000785827637, 0.068122997879982, 0.2733300030231476, 0.006091199815273285, -0.3232400119304657, 0.004777499940246344, 0.09818000346422195, -0.2848699986934662, 0.6802899837493896, -0.55690997838974, -0.4576199948787689, 0.14469000697135925, -0.2055400013923645, -0.35850998759269714, -0.1738000065088272, 0.47363999485969543, 0.6671299934387207, 
-0.10373999923467636, -0.4124099910259247, -0.147039994597435, 0.3753199875354767, 0.06656000018119812, -0.016395000740885735, 0.1866299957036972, -0.4503200054168701, 0.5366399884223938, 0.034285999834537506, -0.3219200074672699, -0.34615999460220337, -0.2381799966096878, 0.44071000814437866, -0.478659987449646, -0.2984600067138672, 0.39070001244544983, 0.24291999638080597, 0.08303199708461761, -0.23303000628948212, -0.2309899926185608, 0.329800009727478, -0.07858700305223465, 0.4724999964237213, -0.1312599927186966, -0.41328999400138855, 0.4061700105667114, 0.0582519993185997, -0.20309999585151672, -0.470550000667572, 0.19533999264240265, 0.34303000569343567, 0.03748299926519394, -0.10955999791622162, 0.0378119982779026, 0.326449990272522, 0.15158000588417053, 0.4365699887275696, 0.18512000143527985, -0.30562999844551086, -0.3383199870586395, -0.3874399960041046, -0.028474999591708183, 0.5895900130271912, 0.3865399956703186, 0.2093600034713745, -0.39261001348495483, -0.38703998923301697, 0.7989199757575989, 0.03316599875688553, 0.08845599740743637, 0.04359599947929382, 0.07775100320577621, -0.4963499903678894, 0.15892000496387482, -0.20937000215053558, -0.31349000334739685, -0.17062999308109283, 0.07221899926662445, 0.007902700453996658, -0.012118999846279621, 0.06904800236225128, 0.016228999942541122, 0.44339001178741455, 0.0655049979686737, -0.41655999422073364, 0.4684799909591675, -0.06512100249528885, 0.7751299738883972, -0.667900025844574, -0.23964999616146088, -0.07467500120401382, -0.22267000377178192, 0.044819001108407974, -0.08320999890565872, 0.11062999814748764, -0.12897999584674835, -0.49514999985694885, -0.4446699917316437, -0.34624001383781433, 0.20625999569892883, 0.40264999866485596, 0.679390013217926, -0.3855400085449219, 0.24812999367713928, 0.38054999709129333, 0.09204600006341934, 0.15208999812602997, 0.1744299978017807, 0.47780999541282654, 0.386819988489151, 0.48688000440597534, -0.43606001138687134, -0.10044000297784805, 0.15226000547409058, 0.21901999413967133, -0.129380002617836, 0.06255000084638596, -0.06353899836540222, -0.2815999984741211, -0.05455699935555458, -0.28297001123428345, 0.7129999995231628, -0.09504199773073196, -0.13725000619888306, -0.01696299947798252, 0.766290009021759, 0.2501299977302551, 0.035075001418590546, 0.21472999453544617, -0.17035000026226044, -0.16234999895095825, -0.3463200032711029, 0.10513000190258026, 0.5209500193595886, 0.22473999857902527, 0.5482500195503235, 0.056366998702287674, -0.12455999851226807, -0.35433000326156616, -0.10479000210762024, -0.29269999265670776, -0.18943999707698822, -0.01806900091469288, -2.368499994277954, 0.2603299915790558, 0.5124800205230713, 0.14435000717639923, -0.4023300111293793, -0.019897999241948128, 0.08500000089406967, -0.022857999429106712, 0.05130600184202194, 0.5351300239562988, 0.08492299914360046, 0.7966700196266174, 0.08674400299787521, -0.35760998725891113, -0.39221999049186707, -0.12030000239610672, 0.3737899959087372, 0.10174000263214111, -0.07487300038337708, -0.009938499890267849, -0.2870199978351593, 0.030515000224113464, -0.3316799998283386, 1.0232000350952148], u'fish': [0.5639299750328064, 0.25832000374794006, 0.01178400032222271, 0.0413610003888607, 0.15147000551223755, 0.7130600214004517, 0.01647000014781952, 0.3509500026702881, 0.08946800231933594, -0.7242100238800049, 0.12383999675512314, -0.49437999725341797, -0.5038599967956543, 0.43674999475479126, 0.05016700178384781, -0.5058799982070923, 0.007323700003325939, -0.02775299921631813, -0.6970999836921692, 
0.7293199896812439, -0.46779999136924744, 0.5557000041007996, 0.2325800061225891, 0.49132999777793884, -0.01938300020992756, 0.29284998774528503, -0.09325099736452103, -0.21550999581813812, -0.4063900113105774, 0.018716000020503998, -0.3604399859905243, 0.38124001026153564, -0.6636599898338318, -0.405239999294281, -0.3203999996185303, 0.2872200012207031, 0.5389699935913086, 0.01594799943268299, -0.2526000142097473, 0.17089000344276428, -0.22936999797821045, -0.10899999737739563, 0.4120500087738037, 0.5186799764633179, -0.418830007314682, 0.04595499858260155, 0.3628599941730499, -0.2630600035190582, 0.38183000683784485, 0.645550012588501, -0.11914999783039093, 0.23025000095367432, 0.6056100130081177, -0.4194999933242798, 0.06064099818468094, 0.6559200286865234, -0.24174000322818756, -0.04874899983406067, -0.1855199933052063, 0.15932999551296234, 0.21863000094890594, -0.31951001286506653, 0.9200800061225891, 0.04532599821686745, -0.05282700061798096, -0.6127300262451172, -0.8080599904060364, 0.032634999603033066, -0.10971999913454056, -0.002690100111067295, 0.4292599856853485, -0.04387100040912628, -0.09476800262928009, 0.23666000366210938, -0.45396000146865845, 0.2475000023841858, 0.5605900287628174, 0.7588599920272827, 0.03172500059008598, -0.2826499938964844, 0.3488200008869171, -0.0743580013513565, -0.43178001046180725, -0.31376001238822937, 0.2914400100708008, -0.3039900064468384, 0.13036000728607178, -0.02047700062394142, -0.4568899869918823, -0.4538399875164032, 0.15390999615192413, -0.1434199959039688, -0.0998070016503334, -0.16604000329971313, 0.33500000834465027, 0.4276899993419647, -0.19367000460624695, 0.06341800093650818, -0.10699000209569931, -0.2017199993133545, 0.12522999942302704, -0.21083000302314758, 0.5932199954986572, -0.7355800271034241, 0.197160005569458, 0.28216999769210815, 0.44826000928878784, 0.09268400073051453, 0.021158000454306602, 0.06668400019407272, 0.0900140032172203, 0.2198999971151352, -0.8599200248718262, -0.04577299952507019, 0.4328800141811371, -0.21527999639511108, -0.061218999326229095, 0.27171000838279724, -0.1775200068950653, 0.24192999303340912, -0.6338800191879272, -0.5251799821853638, -0.18950000405311584, 0.8159099817276001, -0.07454799860715866, 0.29976001381874084, 0.05737299844622612, 0.11857999861240387, 0.6703100204467773, 0.47484999895095825, -0.1309799998998642, 0.29945001006126404, 0.637220025062561, 0.09256300330162048, -0.16558000445365906, -0.20398999750614166, 0.7954400181770325, -0.20747999846935272, 0.01952200010418892, 0.8102800250053406, 0.6085500121116638, 0.0575530007481575, 0.4657900035381317, -0.8011500239372253, -0.38545000553131104, 0.3406499922275543, -0.02910199947655201, 0.16936999559402466, -0.11785999685525894, -0.3242200016975403, -0.2650899887084961, 0.23062999546527863, -0.1059499979019165, -0.15207000076770782, 0.17455999553203583, -0.25071999430656433, -0.2852199971675873, -0.2451300024986267, -0.11739999800920486, -0.23502999544143677, 0.3866400122642517, 0.19755999743938446, 0.32975998520851135, -0.11102999746799469, -0.002702699974179268, -0.21337999403476715, 0.3627200126647949, -0.20397000014781952, -0.41370001435279846, 0.16041000187397003, 0.0028880001045763493, -0.4208900034427643, 0.061051998287439346, 0.25992000102996826, -0.006703900173306465, 0.11914999783039093, 0.071492999792099, 0.06828700006008148, 0.4170700013637543, -0.5688499808311462, 0.4694400131702423, 0.18016000092029572, -0.23380999267101288, -0.5474299788475037, 0.21886000037193298, -0.568340003490448, 0.7118800282478333, 
0.652679979801178, 0.6289399862289429, -0.14961999654769897, -0.054875001311302185, 0.8858199715614319, -0.4133700132369995, -0.18317000567913055, 0.49514999985694885, 0.4578700065612793, 0.013300999999046326, -0.6900299787521362, -0.3920300006866455, -0.34435999393463135, 0.9336699843406677, -0.08368899673223495, -0.26747000217437744, -0.30324000120162964, -0.38249000906944275, 0.7268999814987183, -0.060054000467061996, -0.3172900080680847, -0.06277500092983246, 0.17093999683856964, -0.0044479998759925365, -0.4891200065612793, 0.31272000074386597, -0.026562999933958054, 0.3335300087928772, 0.1052900031208992, 0.4193899929523468, 0.2240000069141388, 0.40509000420570374, -0.35280001163482666, 0.35705000162124634, -0.3838300108909607, -0.5392600297927856, -0.40195000171661377, 0.27781999111175537, 0.04694199934601784, -0.05998000130057335, -0.10933999717235565, 0.20747999846935272, 0.4494599997997284, 0.2678000032901764, 0.4910599887371063, 0.04539699852466583, -0.031589001417160034, 0.44442999362945557, 0.3296700119972229, -0.2503400146961212, 0.36469998955726624, -0.17036999762058258, -0.01307000033557415, -0.7895500063896179, 0.49720001220703125, -0.3121899962425232, -0.19155000150203705, -1.222000002861023, -0.0343950018286705, 0.785539984703064, -0.20058999955654144, -0.5918800234794617, -0.983519971370697, 0.1793999969959259, -0.4991599917411804, 0.03608199954032898, 0.27171000838279724, 0.8347899913787842, 0.5765699744224548, 0.21422000229358673, 0.8360199928283691, -0.4412499964237213, 0.17734000086784363, -0.2473199963569641, -0.03283800184726715, -0.5930600166320801, -0.39212000370025635, 0.5033800005912781, -0.23079000413417816, -0.10513000190258026, 0.3240000009536743, -0.3024199903011322, 0.24603000283241272, -0.14650000631809235, 0.35920000076293945, 0.5705900192260742, 0.49094000458717346, 0.257099986076355, 0.07566999644041061, -1.766800045967102, 0.1332699954509735, -0.79653000831604, -0.2462799996137619, -0.1972299963235855, 0.7951200008392334, 0.0775739997625351, 0.13919000327587128, -1.0390000343322754, -0.3458400070667267, -0.23251000046730042, -0.16495999693870544, 0.7552400231361389, 0.16896000504493713, -0.3830699920654297, 0.40898001194000244, 0.13253000378608704, -0.20523999631404877, -0.4056699872016907, -0.6198499798774719, 0.2983799874782562, -0.448060005903244, 0.20656000077724457, -0.38853999972343445], u'salmon': [0.5381799936294556, -0.4968799948692322, 0.06293699890375137, 0.5186600089073181, 0.04802900180220604, 0.8987699747085571, -0.651229977607727, 0.1014999970793724, -0.08582600206136703, -0.12411999702453613, -0.18250000476837158, -0.5287600159645081, -0.8230100274085999, 0.5202400088310242, -0.6508100032806396, -0.13535000383853912, -0.18807999789714813, 0.5265700221061707, -0.724590003490448, 0.8880900144577026, -0.8465399742126465, 0.01408699993044138, 0.13022999465465546, 0.12796999514102936, 0.06734900176525116, -0.4344500005245209, -0.19933000206947327, -0.30882999300956726, -0.5739499926567078, -0.7363499999046326, 0.5878599882125854, -0.11247000098228455, -0.09629999846220016, -0.953029990196228, -0.04456999897956848, 0.43869999051094055, 0.18176999688148499, 0.4895800054073334, -0.16101999580860138, 0.07997500151395798, -0.3261300027370453, 0.2484399974346161, 0.2030699998140335, 0.45471999049186707, -0.20983999967575073, 0.12263999879360199, -0.18176999688148499, 0.06629999727010727, 0.4401400089263916, 0.2690199911594391, -0.41839998960494995, -0.41628000140190125, 0.7650700211524963, -0.5841799974441528, 0.244719997048378, 
0.6446999907493591, -0.1230200007557869, -0.33959001302719116, -0.2902899980545044, 0.752560019493103, 0.6245599985122681, -0.6749299764633179, 0.547819972038269, -0.28209999203681946, -0.4702099859714508, -0.3737800121307373, -1.0687999725341797, -0.18086999654769897, 0.1656699925661087, -0.03888799995183945, 0.8833299875259399, 0.27362000942230225, -0.25644999742507935, 0.5426099896430969, -0.534030020236969, 0.17656999826431274, 1.0242999792099, 0.6362000107765198, -0.23874999582767487, -0.29969000816345215, 0.14790000021457672, -0.14982999861240387, -0.5436999797821045, 0.006927299778908491, 0.2715800106525421, -0.0949689969420433, -0.34125998616218567, 0.3375000059604645, -0.24130000174045563, -0.8680800199508667, 0.47461000084877014, -0.01181500032544136, -0.8387600183486938, 0.11023999750614166, 0.1423500031232834, 0.5087699890136719, 0.39636000990867615, 0.025286000221967697, -0.03034299984574318, 0.6473600268363953, 0.20082999765872955, -0.4534200131893158, 0.8931000232696533, -0.9989799857139587, 0.004229600075632334, 0.4532800018787384, 0.20161999762058258, 0.587440013885498, -0.4649699926376343, 0.13759000599384308, -0.17839999496936798, -0.40112999081611633, -0.7224299907684326, -0.21863999962806702, 0.004026900045573711, -0.5290799736976624, -0.16029000282287598, 0.23836000263690948, 0.19663000106811523, -0.13309000432491302, -0.4846400022506714, -0.6205400228500366, -0.4418500065803528, 0.7200000286102295, -0.04707000032067299, -0.4378100037574768, 0.38444000482559204, 0.07661399990320206, 0.504859983921051, 0.27465999126434326, -0.2434300035238266, 0.6497200131416321, -0.04584300145506859, -0.21379999816417694, 0.3798699975013733, -0.5356400012969971, 0.2910600006580353, -0.09937799721956253, 0.0189449992030859, -0.11902999877929688, 0.5246400237083435, -0.16015000641345978, 0.12699000537395477, -0.7047299742698669, -0.24650999903678894, -0.155689999461174, 0.5434600114822388, 0.1019200012087822, 0.03423300012946129, -0.5579400062561035, -0.046199001371860504, -0.10763999819755554, -0.008882800117135048, -0.6093299984931946, -0.1789100021123886, -0.600130021572113, 0.15520000457763672, -0.5127599835395813, -0.5861600041389465, 0.10593999922275543, 0.22682000696659088, 0.4248400032520294, -0.023639999330043793, -0.10183999687433243, 0.12925000488758087, 0.11806000024080276, 0.06255800276994705, -0.2230300009250641, -0.20882000029087067, 0.5039700269699097, -0.7458500266075134, -0.5893800258636475, -0.3499299883842468, -0.18203000724315643, 0.36131998896598816, 0.05702599883079529, 0.1808300018310547, -0.30281999707221985, 0.13662000000476837, -0.34977999329566956, 0.3094100058078766, 0.4023300111293793, 0.1961199939250946, -0.7551299929618835, -0.11082000285387039, -0.5416399836540222, 0.4175199866294861, -0.22186000645160675, -0.05444300174713135, -0.09753499925136566, -0.6371600031852722, 1.0616999864578247, -0.6339700222015381, -0.15476000308990479, 0.7021300196647644, 0.47258999943733215, -0.3492000102996826, -0.5807899832725525, -0.265859991312027, 0.1918099969625473, 0.8179500102996826, -0.3146600127220154, -0.08087699860334396, -0.04121899977326393, -0.08067700266838074, 1.2178000211715698, 0.02085999958217144, 0.13944000005722046, 0.13890999555587769, 0.23298999667167664, 0.46889999508857727, -0.5668100118637085, 0.2705399990081787, 0.41589000821113586, -0.11925999820232391, 0.36548998951911926, 0.5374799966812134, -0.08124999701976776, -0.06354500353336334, -0.10863000154495239, 0.09667900204658508, -0.19801999628543854, -0.6133400201797485, -0.8194000124931335, 
0.5563399791717529, 0.3181999921798706, 0.28964000940322876, 0.06093800067901611, 0.13760000467300415, 0.37178000807762146, -0.7610899806022644, 0.5280299782752991, 0.10570000112056732, -0.057013001292943954, 0.63441002368927, 0.08507800102233887, 0.07900899648666382, 0.7176399827003479, -0.13230000436306, -0.3285599946975708, -0.5917500257492065, 0.5164499878883362, 0.3436099886894226, 0.040327999740839005, -0.8047699928283691, 0.26537999510765076, 0.4305900037288666, -0.24108000099658966, -0.55035001039505, -0.6232799887657166, -0.22968000173568726, 0.07288999855518341, 0.4087800085544586, -0.06361299753189087, 1.0430999994277954, 0.17930999398231506, -0.05863200128078461, 0.7628200054168701, 0.12165000289678574, 0.3675900101661682, 0.16756999492645264, -0.3739500045776367, -0.40463000535964966, -0.1472799926996231, -0.1619500070810318, 0.033121999353170395, 0.014773000031709671, 0.25051000714302063, 0.2558499872684479, 0.22121000289916992, -0.12306000292301178, -0.04055299982428551, 0.03027700074017048, 0.05056999996304512, 0.04780999943614006, 0.12887999415397644, -0.6411399841308594, -0.04090600088238716, -0.2640700042247772, -0.23720000684261322, -0.2753799855709076, 0.42239999771118164, -0.12105999886989594, 0.6558399796485901, -1.0341999530792236, -0.31330999732017517, 0.049591001123189926, -0.1060900017619133, 0.6335800290107727, 0.23235000669956207, 0.06297799944877625, 0.07159499824047089, 0.3990199863910675, -0.23118999600410461, -0.516759991645813, -0.47214001417160034, 0.4208199977874756, -0.3530600070953369, 0.23722000420093536, 0.43678998947143555], u'library': [-0.6812199950218201, -0.35798001289367676, -0.40630000829696655, -0.20000000298023224, 0.7470399737358093, 0.22709999978542328, -0.029903000220656395, 0.02438499964773655, 0.26241999864578247, -0.9046300053596497, 0.2627499997615814, 0.04962800070643425, 0.43549999594688416, -0.1367799937725067, 0.19377000629901886, -0.04509799927473068, 0.01217699982225895, -0.1611199975013733, 0.16720999777317047, 0.07944999635219574, 0.23475000262260437, -0.6096199750900269, 0.051913000643253326, 0.28459998965263367, 0.41383999586105347, 0.4774700105190277, 0.0035383999347686768, 0.08730600029230118, -0.06851799786090851, 0.2184399962425232, 0.5075100064277649, 0.16121000051498413, -0.29471999406814575, 1.222599983215332, -0.5267699956893921, -0.11073999851942062, 0.08851999789476395, -0.1823599934577942, -0.3300800025463104, -0.5438299775123596, 0.039698999375104904, -0.42127999663352966, -0.2530199885368347, 0.9986199736595154, 0.3464300036430359, -0.028195999562740326, 0.8453599810600281, 0.12466000020503998, 0.04432699829339981, -0.5993199944496155, 0.11841999739408493, -0.021544000133872032, -0.16091999411582947, -0.01845799945294857, 0.41756001114845276, -0.34332001209259033, 0.1515900045633316, 0.17768999934196472, -0.34891998767852783, 0.16966000199317932, -0.16986000537872314, 0.1915999948978424, 0.5526599884033203, 0.4237000048160553, 0.3734999895095825, -0.3014799952507019, 0.028537999838590622, 0.06972000002861023, -0.30862000584602356, -0.4840199947357178, -0.5144699811935425, -0.3497700095176697, 0.3500500023365021, 0.40171998739242554, -0.2416200041770935, -0.25481998920440674, -0.3074899911880493, 0.13830000162124634, 0.5108500123023987, -0.3064900040626526, -0.34360000491142273, -0.25995999574661255, 0.11883000284433365, -0.7975299954414368, 0.14449000358581543, 0.03482300043106079, -0.34217000007629395, 0.3953999876976013, 0.11994999647140503, -0.3366200029850006, 0.05649000033736229, -0.23091000318527222, 
-0.20438000559806824, 0.476529985666275, 0.4717499911785126, 0.19395999610424042, 0.40880000591278076, 0.15428000688552856, -0.37011000514030457, -0.6371399760246277, 0.17649999260902405, -0.39952999353408813, 0.0004642799904104322, -0.03320299834012985, -0.05790799856185913, -0.370169997215271, 0.11917000263929367, 0.206619992852211, 0.18943999707698822, 0.2591400146484375, -0.5484300255775452, 0.13955999910831451, -0.05307599902153015, 0.09866400063037872, -0.8980799913406372, -0.06103700026869774, -0.2635500133037567, 0.14067000150680542, 0.2517800033092499, 0.044179998338222504, 0.025203000754117966, 0.889680027961731, -0.07183899730443954, -0.07375100255012512, 0.1404999941587448, -0.12964999675750732, -0.23968000710010529, -0.21063999831676483, 0.44086000323295593, 0.4304800033569336, -0.0685259997844696, -0.14072999358177185, 0.13989000022411346, -0.5454599857330322, 0.2602899968624115, 0.09465000033378601, -0.10552000254392624, 0.3487200140953064, -0.5598599910736084, 0.2157299965620041, -0.10429999977350235, -0.3565100133419037, -0.22976000607013702, 0.3032299876213074, 0.9207000136375427, -0.4894599914550781, -0.32256999611854553, -0.6109899878501892, -0.3729499876499176, -0.08690100163221359, 0.5157899856567383, 0.0003313100023660809, 0.26225998997688293, -0.18086999654769897, -0.37650999426841736, -0.05314100161194801, -0.11698000133037567, 0.16741999983787537, 0.07506900280714035, -0.3709399998188019, 0.34341999888420105, 0.5041099786758423, 0.30226001143455505, 0.1450899988412857, -0.07381000369787216, -0.21881000697612762, 0.03805200010538101, 0.23757000267505646, 0.20103999972343445, 0.5742200016975403, 0.034956999123096466, -0.687309980392456, -0.48475000262260437, 0.38479000329971313, 0.3863300085067749, 0.37428000569343567, -0.121799997985363, -0.2297700047492981, 0.12417999655008316, -0.9252399802207947, -0.09761299937963486, -0.01600000075995922, 0.21870000660419464, -0.5591999888420105, 0.15838000178337097, 0.01991800032556057, -0.12957000732421875, -0.13700999319553375, -0.20486000180244446, 0.4486500024795532, 0.06441599875688553, 0.5622599720954895, -0.22144000232219696, -0.8769000172615051, -0.35554999113082886, 0.1530500054359436, -0.18977999687194824, 0.24435000121593475, -0.5183500051498413, 0.10972999781370163, -0.016397999599575996, -0.2346699982881546, -0.14760999381542206, -0.18650999665260315, 0.21012000739574432, -0.8385400176048279, -0.7548400163650513, -0.4271399974822998, 0.09684299677610397, -0.37907999753952026, 0.15399999916553497, -0.00026077000075019896, -0.3015899956226349, 0.07277899980545044, 0.08723600208759308, 0.39131999015808105, -1.0255999565124512, -0.21863000094890594, 0.04416099935770035, -0.2554500102996826, -0.693880021572113, -0.09519200026988983, -0.9916800260543823, -0.1519400030374527, 0.21042999625205994, -0.314300000667572, -0.5400699973106384, 0.12345000356435776, -0.05558599904179573, -0.06242400035262108, -0.38888001441955566, -0.5507699847221375, -0.4194900095462799, 0.16346000134944916, 0.3404200077056885, 0.29460999369621277, 0.44866999983787537, 0.24368000030517578, -0.45100998878479004, -0.2641099989414215, 0.04971100017428398, 0.2288299947977066, 0.2020300030708313, -0.37849000096321106, -0.5625399947166443, 0.24903999269008636, -0.632319986820221, 0.12872999906539917, -0.246629998087883, -0.34466999769210815, 0.47578001022338867, -0.3571600019931793, -0.22472000122070312, -0.5600200295448303, 0.199180006980896, -0.33083999156951904, 0.07404900342226028, -0.18490999937057495, 0.7434899806976318, -0.20409999787807465, 
-0.2000499963760376, 0.7167999744415283, 0.12377999722957611, -0.1566299945116043, -0.30726000666618347, -0.0703049972653389, 0.2862899899482727, 0.22408999502658844, 0.072502002120018, 0.08740700036287308, -0.20239999890327454, -0.1523600071668625, 0.5208100080490112, -0.5297999978065491, -0.30445000529289246, 0.006233700085431337, -1.7771999835968018, 0.11753000319004059, 0.6676499843597412, 0.09624499827623367, -0.47348999977111816, -0.34314000606536865, 0.4630100131034851, -0.8426200151443481, 0.032506998628377914, 0.5720099806785583, -0.45559000968933105, 0.5372599959373474, 0.06559000164270401, -0.294950008392334, 0.40849998593330383, -0.06111900135874748, -0.6752300262451172, -0.05209200084209442, -0.041854001581668854, 0.05113700032234192, 0.8585600256919861, -0.40233999490737915, -0.31178000569343567, -0.00611159997060895], u'computer': [-0.2762799859046936, 0.13999000191688538, 0.09851899743080139, -0.6401900053024292, 0.0319879986345768, 0.10066000372171402, -0.18672999739646912, -0.371289998292923, 0.5974000096321106, -2.0404999256134033, 0.22368000447750092, -0.02631399966776371, 0.7240800261497498, -0.438289999961853, 0.48886001110076904, -0.003548600012436509, -0.10006000101566315, -0.305869996547699, -0.1562100052833557, -0.06813599914312363, 0.21104000508785248, 0.2928699851036072, -0.08886100351810455, -0.20462000370025635, -0.5760200023651123, 0.34525999426841736, 0.4138999879360199, 0.17916999757289886, 0.2514300048351288, -0.2267799973487854, -0.10102999955415726, 0.14575999975204468, 0.2012699991464615, 0.3181000053882599, -0.7890700101852417, -0.22193999588489532, -0.2483299970626831, -0.015103000216186047, -0.2004999965429306, -0.026441000401973724, 0.18550999462604523, 0.33781999349594116, -0.33542999625205994, 0.8611699938774109, -0.04708300158381462, -0.17009000480175018, 0.30437999963760376, 0.09411899745464325, 0.3243499994277954, -0.811710000038147, 0.8896600008010864, -0.39149001240730286, 0.1682800054550171, 0.14316000044345856, 0.0036339000798761845, -0.06455700099468231, 0.04577700048685074, -0.3224799931049347, 0.04894300177693367, 0.1681700050830841, 0.06834399700164795, 0.5422700047492981, 0.1249300017952919, 0.6974200010299683, -0.03719399869441986, 0.33079999685287476, -0.42193999886512756, 0.33970001339912415, 0.2764599919319153, -0.016002999618649483, -0.21827000379562378, 0.4453499913215637, 0.3537899851799011, -0.022089000791311264, 0.21375000476837158, 0.432669997215271, -0.3289699852466583, 0.0961650013923645, 0.31264999508857727, -0.30527999997138977, 0.2612600028514862, -0.6536399722099304, -0.7801399827003479, -0.2315399944782257, 0.12112999707460403, 0.3489600121974945, -0.5544400215148926, 0.46619001030921936, -0.16519999504089355, 0.11610999703407288, -0.766759991645813, 0.6950200200080872, -0.1569799929857254, -0.12489999830722809, 0.5650500059127808, 0.6449900269508362, -0.5740299820899963, -0.033548999577760696, 0.3289799988269806, -1.402500033378601, -0.3114300072193146, 0.6454899907112122, -0.06153399869799614, -0.6929500102996826, 0.0006089400267228484, -0.5654399991035461, 0.1918099969625473, -0.19208000600337982, -0.6267300248146057, -0.009747300297021866, -0.5504000186920166, -0.5612800121307373, -0.19603000581264496, 0.2925400137901306, 0.09857600182294846, -0.05939500033855438, 0.003361599985510111, 0.1951500028371811, -0.6070299744606018, 0.34261998534202576, 0.09521099925041199, -0.07941100001335144, 0.14305000007152557, -0.5656899809837341, -0.06588699668645859, 0.15166999399662018, -0.1350499987602234, 0.19571000337600708, 
0.22811999917030334, 0.035346001386642456, -0.22508999705314636, 0.1890999972820282, -0.3734799921512604, 0.12504999339580536, 0.4624899923801422, -0.32218998670578003, 0.9064300060272217, 0.11595000326633453, 0.11627999693155289, 0.22960999608039856, 0.24009999632835388, -0.06160899996757507, 0.3932499885559082, -0.06506600230932236, 0.42256999015808105, 0.5687999725341797, 0.49803999066352844, -0.6130800247192383, 0.41468000411987305, -0.13447999954223633, 0.6043000221252441, -0.06546200066804886, -0.08537600189447403, 0.1911499947309494, 0.39925000071525574, 0.37494999170303345, -0.18491999804973602, 0.061751000583171844, -0.387470006942749, -0.3033500015735626, -0.38210999965667725, 0.28220999240875244, -0.10286000370979309, -0.5866000056266785, 0.8292199969291687, 0.25130999088287354, 0.24772000312805176, 0.8748199939727783, -0.31358999013900757, 0.8162099719047546, -0.9008100032806396, -0.7793300151824951, -1.0089999437332153, 0.3647199869155884, -0.11562000215053558, -0.24841000139713287, 0.0945269986987114, -0.4226599931716919, 0.060391999781131744, -0.15365000069141388, -0.06960400193929672, 0.00512919994071126, 0.3957200050354004, -0.15692000091075897, 0.35708001255989075, -0.3516499996185303, 0.3529599905014038, -0.5221999883651733, 0.5139999985694885, -0.17764000594615936, -0.1027199998497963, -0.39640000462532043, 0.30417999625205994, 0.0736590027809143, -0.11685000360012054, 0.14298999309539795, -0.36809998750686646, 0.276419997215271, -0.46682998538017273, -0.3263300061225891, 0.5110700130462646, 0.023945000022649765, 0.11722999811172485, 0.21761000156402588, -0.17388999462127686, -0.6119300127029419, -0.5944899916648865, 0.47749000787734985, -0.5900800228118896, -0.3609200119972229, -0.0995739996433258, -0.043097998946905136, -0.15106000006198883, -0.14336000382900238, -0.03113500028848648, 0.17887000739574432, -0.6422100067138672, 0.17241999506950378, 0.3391599953174591, 0.8718100190162659, -0.7723000049591064, 0.5319499969482422, -0.5276299715042114, 0.17509999871253967, 0.31042999029159546, -0.1517699956893921, -0.227060005068779, 0.10802999883890152, 0.4491899907588959, 0.07001599669456482, 0.20850999653339386, 0.2151699960231781, -0.6171200275421143, -0.09996999800205231, 0.005501999985426664, 0.07678599655628204, 0.280460000038147, 0.4233100116252899, -0.5892500281333923, 0.07055400311946869, 0.3992300033569336, 0.0902009978890419, 0.17138999700546265, -0.17282000184059143, -0.5367500185966492, -0.46439000964164734, -0.578499972820282, -0.6831099987030029, 0.059383001178503036, 0.124269999563694, -0.145579993724823, 0.5768700242042542, -0.5749899744987488, -0.05164499953389168, 0.3840999901294708, 0.13046999275684357, 0.33785998821258545, 0.332040011882782, 0.40119001269340515, 0.26388999819755554, -0.36952999234199524, -0.2979699969291687, -0.6681600213050842, -0.11883000284433365, 0.5013300180435181, 0.2060299962759018, -0.32558000087738037, -0.12241999804973602, 0.506659984588623, 0.16353000700473785, -0.10672000050544739, 0.22363999485969543, 0.2391500025987625, -0.5550900101661682, -0.4843200147151947, -0.012164999730885029, -1.7992000579833984, 0.3231000006198883, -0.26309001445770264, -0.32537999749183655, -0.5827000141143799, 0.15098999440670013, 0.33838000893592834, 0.12007000297307968, 0.41394999623298645, -0.15553000569343567, -0.19301000237464905, 0.05886000022292137, -0.5242000222206116, -0.3716999888420105, 0.5620499849319458, -0.6580100059509277, -0.49796000123023987, 0.2434699982404709, 0.12872999906539917, 0.336650013923645, -0.07260899990797043, 
-0.15685999393463135, -0.14187000691890717, -0.2648800015449524], u'palm': [-0.775950014591217, -0.31459999084472656, -0.08367999643087387, -0.18377000093460083, 0.2189600020647049, -0.19154000282287598, 0.17688000202178955, -0.40005001425743103, 0.7567300200462341, -0.3224000036716461, 0.0785129964351654, 0.09193599969148636, -0.03327300027012825, -0.22267000377178192, 0.27542001008987427, -0.17395000159740448, 0.09647999703884125, 0.06374000012874603, -0.11868999898433685, 0.33557000756263733, -0.2007399946451187, -0.23179000616073608, -0.13104000687599182, -0.1444700062274933, -0.2717899978160858, 0.49584001302719116, -0.22123000025749207, 0.6019999980926514, -0.17566999793052673, 0.7386900186538696, 0.3137800097465515, 0.9486799836158752, -0.6679800152778625, 0.7016500234603882, -1.0951999425888062, -0.23294000327587128, -0.09099700301885605, -0.34856998920440674, 0.2013300061225891, -0.20720000565052032, -0.5072900056838989, -0.2088800072669983, -0.42594999074935913, -0.2025199979543686, 0.6405500173568726, -1.0369000434875488, 0.6904500126838684, -0.08968500047922134, 1.0120999813079834, 0.22705000638961792, 0.14883999526500702, 0.061181001365184784, 0.11225999891757965, -0.2606300115585327, 0.040890999138355255, 0.16800999641418457, -0.7778099775314331, 0.19431999325752258, 0.6254500150680542, -0.71288001537323, 0.06409800052642822, 0.08820600062608719, 0.23565000295639038, 0.747730016708374, 0.036336999386548996, 0.07035599648952484, 0.07424599677324295, 0.528659999370575, -0.1626800000667572, -0.7003499865531921, -0.16898000240325928, -0.11599999666213989, 0.06902500241994858, 0.7322400212287903, -0.4152899980545044, 0.2054399996995926, 0.27915000915527344, 0.0670280009508133, -0.18193000555038452, 0.23934000730514526, -0.22255000472068787, 0.3876200020313263, -0.5497400164604187, 0.6407999992370605, 0.21783000230789185, -0.07008799910545349, -0.49671998620033264, 0.5160899758338928, 0.2885800004005432, -0.10357999801635742, -0.549560010433197, -0.7030199766159058, -0.028636999428272247, -0.3553299903869629, 0.5355700254440308, 0.21407000720500946, 0.19694000482559204, -0.6928799748420715, -0.09579099714756012, 0.007963400334119797, -0.1539900004863739, 0.4581199884414673, 0.2302899956703186, -0.03397500142455101, -0.26568999886512756, -0.14643999934196472, 0.02561499923467636, -0.1673000007867813, -0.24815000593662262, -0.47023001313209534, -0.16943000257015228, 0.19797000288963318, 0.2326200008392334, 0.18061000108718872, 0.10792999714612961, -0.4070200026035309, 0.0035999000538140535, 0.4155699908733368, -1.0342999696731567, -0.070421002805233, -0.3090899884700775, -0.426580011844635, 0.012701000086963177, -0.17566999793052673, -0.2604300081729889, 0.268310010433197, 0.3998500108718872, -0.3186500072479248, 0.04936100170016289, -0.44244998693466187, 0.7867599725723267, 0.4450500011444092, -0.5024799704551697, 1.0514999628067017, -0.2362000048160553, -0.46884000301361084, 0.14098000526428223, -0.2520500123500824, 0.6904900074005127, -0.017514999955892563, 0.4810599982738495, 0.3878999948501587, -0.08125600218772888, 0.39223000407218933, -0.20487000048160553, 0.6054999828338623, 0.0858360007405281, -0.2735700011253357, -0.26798999309539795, 0.2175299972295761, 1.100600004196167, 0.3875400125980377, 0.33849000930786133, -0.0746069997549057, -0.21854999661445618, -0.12483999878168106, 0.007656500209122896, -0.02785399928689003, -0.0691170021891594, 0.6370499730110168, 0.28913000226020813, -0.1456100046634674, 0.05744500085711479, 0.21096999943256378, -0.14080999791622162, 
0.023406000807881355, 0.28773999214172363, 0.050032999366521835, -0.5184400081634521, 0.37975001335144043, -0.6410599946975708, -0.43873998522758484, -0.8748599886894226, 0.4437499940395355, 0.023687999695539474, -0.4341199994087219, 0.4934599995613098, 0.5383599996566772, -0.2823199927806854, -0.3142400085926056, 0.20388999581336975, 0.4454599916934967, 0.36344999074935913, 0.31130000948905945, 0.455949991941452, -0.4871399998664856, 0.3115200102329254, -0.35864999890327454, 0.21142999827861786, 0.036699000746011734, -0.2262900024652481, 1.264799952507019, -0.28679001331329346, 0.10206999629735947, -0.2618800103664398, 0.23747000098228455, 1.0155999660491943, 0.24438999593257904, -0.23555999994277954, -0.35857999324798584, 0.6015499830245972, -0.5440300107002258, -0.7541099786758423, -0.3112500011920929, 0.3020400106906891, -0.44374001026153564, 0.9757500290870667, -0.0678509995341301, 0.11004000157117844, -0.4259899854660034, -0.07255099713802338, 0.49182000756263733, 0.012757999822497368, -0.31746000051498413, 0.18246999382972717, 0.28415998816490173, 0.009879199787974358, 0.1261799931526184, 0.19323000311851501, -0.032072000205516815, 0.3677999973297119, -0.5252400040626526, 0.074413001537323, 0.647629976272583, -0.061500001698732376, 0.19054999947547913, -0.7880899906158447, -0.18000000715255737, -0.007250899914652109, -0.08842500299215317, 0.4481000006198883, 0.1419299989938736, 0.12586000561714172, 0.37084999680519104, 0.39013001322746277, 0.6409299969673157, 0.028728000819683075, -0.5300099849700928, -0.2569099962711334, -0.6962100267410278, -0.20812000334262848, -0.23868000507354736, -0.3286899924278259, 0.014693999662995338, -0.623740017414093, -0.2224999964237213, -0.22165000438690186, -0.531279981136322, -0.8339400291442871, -0.08353199809789658, 0.2943499982357025, 0.167820006608963, 0.07513400167226791, -0.4526900053024292, 0.007948500104248524, -0.1656000018119812, 0.6752899885177612, 0.212009996175766, 0.2171899974346161, -0.3057900071144104, 0.1290300041437149, 0.07416199892759323, -0.294050008058548, -1.0095000267028809, -0.4069899916648865, -0.09545599669218063, -0.13181999325752258, -0.18347999453544617, 0.12678000330924988, 0.06956499814987183, -0.6561099886894226, -0.5662000179290771, 0.13267000019550323, -0.030479000881314278, -0.6136999726295471, -0.5374000072479248, -1.2319999933242798, 0.4515700042247772, -0.8678299784660339, -0.3568100035190582, -0.37946000695228577, -0.46875, -1.0175000429153442, 0.22891999781131744, -0.6226400136947632, -0.4507899880409241, 0.44968000054359436, -0.7331200242042542, -0.022708000615239143, -0.06072600185871124, 0.04675000160932541, -0.010276000015437603, 0.41510000824928284, -0.008517700247466564, -0.33932000398635864, 0.16740000247955322, -0.5970500111579895, -0.03264100104570389, 0.33223000168800354, -0.7417399883270264], u'roof': [0.12231999635696411, -0.4127500057220459, -0.6300699710845947, -0.7417299747467041, -0.1795099973678589, 0.10960999876260757, -0.13210999965667725, 0.17680999636650085, -0.6793299913406372, -0.9522799849510193, -0.2356400042772293, 0.46022000908851624, 0.40143001079559326, 0.21593999862670898, -0.38113999366760254, 0.4383600056171417, -0.12193000316619873, 0.270330011844635, 0.167480006814003, -0.07858899980783463, 0.035937000066041946, 0.11649999767541885, -0.070933997631073, 0.2175299972295761, -0.07109200209379196, -0.1888899952173233, -0.051534999161958694, 0.37299999594688416, -0.8166099786758423, 0.8055700063705444, 0.39809998869895935, 0.6316199898719788, -0.4783099889755249, 
0.3075700104236603, 0.025629999116063118, 0.14076000452041626, 0.20821000635623932, -0.714680016040802, 0.6875699758529663, 0.430510014295578, 0.23423999547958374, -0.019021999090909958, -0.3446800112724304, 0.19029000401496887, -0.1406099945306778, 0.5740900039672852, 0.42603999376296997, 0.3978399932384491, -0.5725499987602234, -0.5784199833869934, -0.6316999793052673, 0.3431600034236908, 0.01458400022238493, -0.48304998874664307, 0.16703000664710999, 0.41874000430107117, 0.3263700008392334, 0.317220002412796, 0.2091600000858307, 0.10507000237703323, -0.011634999886155128, 0.004651300143450499, 0.2805800139904022, 0.368910014629364, -0.27333998680114746, -0.4275200068950653, 0.1125200018286705, -0.0313120000064373, -0.16568000614643097, -0.4713900089263916, -0.475739985704422, -0.48952001333236694, 0.03479500114917755, 0.1880200058221817, -0.021219000220298767, -0.31817999482154846, -0.262719988822937, -0.3295600116252899, -0.2047400027513504, -0.5084699988365173, -0.008566300384700298, 0.26899999380111694, 0.09073100239038467, -0.026069000363349915, -0.2653200030326843, 0.4877699911594391, 0.23374000191688538, -0.07013200223445892, -0.14174999296665192, -0.15692000091075897, 0.8549900054931641, -0.09261500090360641, 0.2968200147151947, 0.40786001086235046, -0.2988100051879883, -0.4037899971008301, -0.2515600025653839, -0.1926400065422058, 0.039103999733924866, -0.43140000104904175, -0.7221400141716003, 0.6439999938011169, 0.39906999468803406, -0.20892000198364258, 0.6139600276947021, 0.321370005607605, -0.05509199947118759, -0.11405999958515167, -0.4834800064563751, 0.19724999368190765, -0.22099000215530396, -0.042642999440431595, 0.045542001724243164, -0.22902999818325043, -0.12530000507831573, 0.21209999918937683, -0.9394500255584717, 0.11234000325202942, -0.36682000756263733, -0.16298000514507294, 0.44391000270843506, -0.7934600114822388, 0.5387300252914429, 0.8042200207710266, -0.07570300251245499, -0.6395400166511536, 0.024210000410676003, 0.4285700023174286, 0.1417199969291687, 0.23472000658512115, 0.16007000207901, 1.0997999906539917, 0.11208000034093857, 0.39173999428749084, 0.89656001329422, -0.15071000158786774, -0.6248000264167786, 0.45879998803138733, -0.329039990901947, 0.13565999269485474, -0.02653300017118454, 0.4241600036621094, 0.020930999889969826, -0.09466099739074707, -0.5343300104141235, -0.04435800015926361, 0.5111799836158752, 0.11454000324010849, -0.027347000315785408, -0.40011999011039734, -0.19080999493598938, -0.08975800126791, 0.001053099986165762, -1.1067999601364136, 0.3188300132751465, 0.47773000597953796, 0.12144000083208084, 0.1857299953699112, -0.06771499663591385, 0.24548999965190887, 0.07485499978065491, -0.4612100124359131, -0.3677600026130676, 0.3131200075149536, 0.5026900172233582, 0.7251899838447571, -0.2261199951171875, 0.23393000662326813, 0.699720025062561, -0.45370998978614807, -0.32016000151634216, 0.32907000184059143, 0.47025999426841736, -0.5902100205421448, 0.027999000623822212, 0.3203499913215637, -0.21152999997138977, 0.07552699744701385, 0.2329300045967102, -0.9743300080299377, 0.35850998759269714, -0.5948299765586853, 0.166360005736351, -0.047322001308202744, -0.18328000605106354, 0.04675700142979622, 0.6582099795341492, 0.186599999666214, 0.27195999026298523, 0.24382999539375305, 0.5817999839782715, 0.3125799894332886, -0.3478200137615204, 0.15842999517917633, -0.427839994430542, 0.2736800014972687, -0.06623099744319916, 0.45318999886512756, 0.3184199929237366, 0.08279000222682953, 0.6981300115585327, 0.03355100005865097, 
-0.3113099932670593, -0.085316002368927, 0.05783899873495102, 0.11547999829053879, 0.11990000307559967, -0.5176100134849548, 0.04599599912762642, 0.13615000247955322, 0.40630999207496643, 0.1365399956703186, -0.09087800234556198, -0.35273998975753784, 0.6416500210762024, 0.12437999993562698, -0.0037533000577241182, -0.31810998916625977, 0.31259000301361084, 0.11173000186681747, 0.720300018787384, -0.20035000145435333, 0.6347200274467468, -0.4181399941444397, 0.135110005736351, -0.1468600034713745, -0.36904001235961914, -0.10577999800443649, 0.10148999840021133, -0.12841999530792236, 0.2668200135231018, -0.23883000016212463, -0.033695999532938004, -0.01674499921500683, 0.3252300024032593, -0.06547500193119049, -0.11649999767541885, -0.0032190000638365746, -0.36937999725341797, 0.17089000344276428, 0.6243199706077576, -0.2822200059890747, -0.16404999792575836, -0.2696399986743927, -0.32050999999046326, -0.4429599940776825, -0.03105200082063675, -0.5316100120544434, -0.32916000485420227, 0.5087599754333496, 0.010339999571442604, 0.21073000133037567, -0.19438999891281128, -0.7523900270462036, 0.5571399927139282, 0.41843000054359436, -0.10379000008106232, -0.37450000643730164, -0.015704000368714333, 0.016527000814676285, -0.57573002576828, -0.014599000103771687, 0.08227399736642838, -0.0545319989323616, -0.04583900049328804, -0.32194000482559204, 0.11524999886751175, -0.27303001284599304, 0.3343999981880188, -0.7879499793052673, -0.12660999596118927, -0.5397999882698059, 0.09739000350236893, -0.4290800094604492, -0.8177800178527832, -0.38222000002861023, -1.7383999824523926, -0.008944300003349781, -0.7174199819564819, 0.09707000106573105, -0.2130099982023239, -0.2368299961090088, -0.01927799917757511, -0.2884199917316437, -0.4212999939918518, 0.6562399864196777, 0.133310005068779, 0.09513899683952332, -0.0975549966096878, -0.49775001406669617, -0.23778000473976135, 0.1736000031232834, -0.3646399974822998, 0.442220002412796, 0.15665000677108765, 0.20353999733924866, -0.0018312999745830894, -0.059021998196840286, 0.6371899843215942, 0.3606399893760681], u'sea': [0.2991900146007538, -0.11731000244617462, -0.00899249967187643, -0.3705900013446808, -0.06722000241279602, 0.1516299992799759, -0.06110500171780586, 0.29587000608444214, 0.3651599884033203, -1.5087000131607056, 0.46160000562667847, -0.15761999785900116, 0.015131000429391861, 0.3137899935245514, 0.490339994430542, 0.23761999607086182, 0.27667000889778137, 0.448199987411499, -0.6463299989700317, 0.6601200103759766, -0.6513100266456604, 0.36983999609947205, -0.41850000619888306, -0.05362199991941452, -0.009783700108528137, -0.12771999835968018, 0.470550000667572, 0.6526399850845337, -0.37119001150131226, 0.480459988117218, 0.3928599953651428, -0.061889998614788055, -0.8892499804496765, -0.5509499907493591, 0.35034000873565674, -0.32708999514579773, 0.2997500002384186, -0.056088000535964966, -0.035725999623537064, 0.46678000688552856, -0.2754400074481964, -0.01793999969959259, 0.41067999601364136, 0.16943000257015228, -0.3851099908351898, 0.29284000396728516, 0.518589973449707, 0.5630900263786316, 0.24122999608516693, 0.09960699826478958, -0.2042199969291687, 0.11269000172615051, -0.49399998784065247, -0.8751500248908997, -0.3132700026035309, 0.4303700029850006, -0.11784999817609787, 0.4660800099372864, 0.13484999537467957, -0.2950800061225891, 0.07126399874687195, 0.3164699971675873, 1.1064000129699707, -0.35238999128341675, 0.11417999863624573, -0.30375999212265015, -0.5699599981307983, 0.7082399725914001, -0.15449999272823334, 
0.16617999970912933, 0.19869999587535858, -0.17903000116348267, -0.1968899965286255, -0.2789100110530853, -0.766290009021759, 0.23562000691890717, 0.6958299875259399, -0.16906000673770905, 0.5733100175857544, 0.10814999788999557, -0.19524000585079193, -0.29041001200675964, -0.5941100120544434, 0.23573000729084015, 0.20467999577522278, 0.4740299880504608, -0.23048000037670135, -0.15125000476837158, -0.14287999272346497, -0.563480019569397, -0.10655999928712845, 0.4021199941635132, -0.08299700170755386, -0.22390000522136688, -0.3012799918651581, 0.6713799834251404, 0.5238999724388123, 0.37303999066352844, 0.14026999473571777, 0.16143999993801117, 0.43261998891830444, 0.5707700252532959, 0.425929993391037, 0.13321000337600708, -0.4432399868965149, 0.10221999883651733, 0.40268999338150024, 0.2715499997138977, 0.14914999902248383, 0.024383999407291412, 0.07134799659252167, -0.2731899917125702, -0.05303899943828583, -0.20827999711036682, 0.19783000648021698, -0.30608999729156494, 0.10706000030040741, 0.21874000132083893, 0.059144001454114914, 0.12594999372959137, -0.3573000133037567, -0.8370199799537659, -0.6871399879455566, 0.005110799800604582, 0.37950000166893005, 0.6943699717521667, 0.09544900059700012, 0.03564999997615814, 0.5260699987411499, -0.4436799883842468, -0.2946299910545349, 0.23533999919891357, 0.2520599961280823, 0.5491300225257874, 0.5469599962234497, 0.32997000217437744, 0.17746999859809875, -0.010207000188529491, -0.4081999957561493, -0.056547001004219055, 0.24876999855041504, 0.14090000092983246, -0.39412999153137207, -0.31325000524520874, -0.8692600131034851, -0.011931000277400017, 0.546209990978241, 0.511650025844574, -0.27309998869895935, -0.0036448000464588404, 0.07326500117778778, 0.09993100166320801, 0.16575999557971954, -0.37744998931884766, 0.7121099829673767, 0.6035000085830688, -0.13560999929904938, -0.4070200026035309, 0.16904999315738678, 0.10412999987602234, 0.08219199627637863, -0.15620000660419464, 0.4061200022697449, 0.291920006275177, 0.010975000448524952, -0.3542799949645996, 0.13127000629901886, 0.43439000844955444, -0.07363799959421158, 0.7523199915885925, -0.07870099693536758, -0.14765000343322754, 0.09442000091075897, 0.07187700271606445, -0.19167999923229218, 0.052296001464128494, 0.04760900139808655, 0.24071000516414642, -0.013849999755620956, -0.3634699881076813, -0.1253499984741211, 0.13027000427246094, 0.2674500048160553, 0.23916000127792358, 0.5708000063896179, -0.768530011177063, 0.13985000550746918, 0.3532100021839142, -0.20980000495910645, -0.16614000499248505, 0.12789000570774078, 0.41624999046325684, -0.03220200166106224, -1.0963000059127808, 0.2688100039958954, 0.5083900094032288, -0.2983100116252899, -1.1095000505447388, 0.25488999485969543, -0.2237900048494339, 1.267699956893921, 0.16649000346660614, -0.19984999299049377, -0.28110000491142273, 0.4884200096130371, 0.36777999997138977, 0.18152999877929688, -0.6755899786949158, 0.30105000734329224, 0.1632699966430664, 0.05102099850773811, -0.25863000750541687, 0.03452400118112564, -0.38958001136779785, -0.035516999661922455, -0.13760000467300415, 1.0162999629974365, -0.1066799983382225, -0.32596999406814575, -0.2824600040912628, 1.2192000150680542, 0.4191800057888031, 0.1242000013589859, 0.038982998579740524, -0.21337999403476715, -0.3449600040912628, -0.2699800133705139, -0.7308499813079834, -0.18140000104904175, -0.5179499983787537, 0.520110011100769, -0.22322000563144684, -0.22628000378608704, -0.07067199796438217, 0.8319100141525269, 0.19022999703884125, -0.24928000569343567, 
-0.18850000202655792, 0.021688999608159065, 0.3121199905872345, -0.7085199952125549, 0.23818999528884888, -0.12502999603748322, 0.16991999745368958, -0.758650004863739, 0.016327999532222748, 0.11298999935388565, 0.0034248000010848045, -0.03263400122523308, -0.16234999895095825, -0.5893700122833252, 0.5483800172805786, 0.4027400016784668, 0.23457999527454376, -0.02359599992632866, 0.1442900002002716, 0.4071199893951416, 0.13214999437332153, 0.21811999380588531, 0.5044800043106079, 0.06839899718761444, -0.5064899921417236, -0.01229500025510788, -0.5626099705696106, -0.24289999902248383, 0.19043999910354614, 0.07106100022792816, -0.7846400141716003, -0.3156000077724457, -0.3718299865722656, 0.31880998611450195, -0.2653599977493286, 0.14228999614715576, 0.33750998973846436, -0.08476399630308151, 0.5440899729728699, -1.8004000186920166, -0.020357999950647354, 0.21841000020503998, -0.27943000197410583, -0.4689899981021881, 0.6694700121879578, 0.1972299963235855, -0.46858999133110046, -0.15442000329494476, -0.6257799863815308, -0.2059900015592575, -0.08188900351524353, 0.36353999376296997, 0.22763000428676605, -0.9174200296401978, 0.6023399829864502, -0.0785290002822876, -0.21348999440670013, -0.36337000131607056, 0.6354699730873108, 0.25262999534606934, -0.2201399952173233, -0.11699999868869781, -0.19186000525951385], u'mirror': [-0.11970999836921692, -0.17784999310970306, 0.2176399976015091, -0.29218998551368713, 0.10636000335216522, 0.29308998584747314, -0.06045899912714958, -0.018866000697016716, 0.10152000188827515, -1.3595999479293823, 0.12535999715328217, 0.42642998695373535, 0.6161100268363953, -0.15353000164031982, 0.3160400092601776, 0.2696000039577484, 0.2719700038433075, 0.1520099937915802, -0.388590008020401, -0.48541000485420227, -0.07830800116062164, 0.6456999778747559, 0.21899999678134918, 0.6084100008010864, 0.42612001299858093, -0.2278899997472763, -0.0609779991209507, -0.09128700196743011, 0.4919799864292145, -0.014178999699652195, -0.15276999771595, 0.25731000304222107, -0.046904999762773514, 0.2815200090408325, -0.9347299933433533, 0.5311899781227112, -0.38168999552726746, -0.10678999871015549, -0.24514000117778778, 0.7162600159645081, -0.04313499853014946, 0.05887399986386299, -0.04393000155687332, -0.011203999631106853, -0.01894400082528591, 0.2874799966812134, -0.3307099938392639, -0.10625000298023224, -0.12101999670267105, -0.46869999170303345, -0.10042999684810638, 0.09999299794435501, 0.2976999878883362, -0.034995000809431076, 0.5749099850654602, 0.24387000501155853, 0.3080900013446808, 0.24595999717712402, 0.16344000399112701, 0.022098999470472336, -0.0061217001639306545, 0.20291000604629517, 0.25788000226020813, 0.5505300164222717, 0.43731001019477844, -0.13752000033855438, -0.20455999672412872, -0.11116000264883041, 0.21608999371528625, 0.030806999653577805, 0.16489000618457794, -0.4256500005722046, -0.02490999922156334, -0.06452299654483795, -0.03781300038099289, -0.031929999589920044, -0.17127999663352966, -0.3329800069332123, -0.15711000561714172, 0.0024111999664455652, -0.7387199997901917, 0.6450099945068359, -0.07308799773454666, -0.7452999949455261, 0.03266200050711632, 0.39221999049186707, -0.13431000709533691, 0.3710100054740906, -0.20344999432563782, 0.49413999915122986, 0.20746999979019165, 0.09418299794197083, -0.07815799862146378, 0.3849700093269348, -0.24196000397205353, 0.027532000094652176, -0.33722999691963196, -0.23997999727725983, 0.7018899917602539, -1.2038999795913696, 0.2797600030899048, 0.5508400201797485, -0.04078799858689308, 
-0.07926400005817413, 0.6586300134658813, 0.6244999766349792, 0.13471999764442444, 0.06862799823284149, 0.13220000267028809, 0.2884800136089325, -0.02814899943768978, -0.047995999455451965, 0.5134199857711792, -0.5525799989700317, -0.12555000185966492, -0.010224999859929085, -0.18126000463962555, 0.36065998673439026, -0.26969999074935913, -0.16208000481128693, 0.356469988822937, -0.26513999700546265, 0.1978600025177002, 0.19023999571800232, -0.07991400361061096, -0.5787799954414368, 0.14103999733924866, 0.14805999398231506, -0.506630003452301, -0.18756000697612762, 0.05982000008225441, -0.5251700282096863, 0.16968999803066254, 0.722100019454956, -0.00310550001449883, 0.43575000762939453, -0.49546000361442566, -0.37790000438690186, 0.09735400229692459, -0.4232400059700012, 0.2536799907684326, 0.1075500026345253, -0.7040200233459473, 0.0077149998396635056, 0.1693599969148636, 0.15746000409126282, 0.11845000088214874, 0.2646700143814087, 0.5791199803352356, -0.17475999891757965, 0.3806000053882599, -0.10018999874591827, -0.035634998232126236, -0.3655500113964081, -0.19280999898910522, -0.23215000331401825, -0.21143999695777893, -0.13176000118255615, 0.42803001403808594, 0.08159299939870834, 0.7097799777984619, -0.40630000829696655, 0.20547999441623688, -0.11924999952316284, 0.19920000433921814, 0.8547000288963318, 0.3738499879837036, -0.18196000158786774, 0.21435000002384186, 0.21198000013828278, -0.26034998893737793, -0.17236000299453735, 0.43612000346183777, 0.016753999516367912, -0.05230199918150902, -0.29151999950408936, -0.12362000346183777, 0.4868299961090088, 0.08014599978923798, -0.26003000140190125, 0.23670999705791473, -0.19845999777317047, -0.14957000315189362, 0.49285998940467834, 0.41960999369621277, -0.34751999378204346, 0.6831300258636475, -0.11028999835252762, -0.0403049997985363, 0.06521999835968018, -0.09002300351858139, 0.05179800093173981, 0.09877300262451172, 0.36127999424934387, -0.021183999255299568, 0.5860400199890137, 0.5109400153160095, -0.4011499881744385, -0.5110099911689758, -0.5504699945449829, 0.8011299967765808, -0.22940999269485474, -0.4809800088405609, 0.1495400071144104, 0.24282999336719513, -0.17007000744342804, -0.5137500166893005, -0.3238700032234192, -0.7590500116348267, -0.19255000352859497, 0.413239985704422, 0.8098800182342529, 0.03013399988412857, -0.05393899977207184, 0.3627600073814392, -0.00935280043631792, 0.23523999750614166, -0.568310022354126, 0.581309974193573, 0.5101000070571899, -0.09840899705886841, 0.7578700184822083, -0.5192300081253052, 0.4095500111579895, -0.14736999571323395, -0.032586000859737396, -0.1874600052833557, 0.1898300051689148, 0.16482999920845032, -0.4099999964237213, 0.30959001183509827, -0.03551800176501274, -0.2417600005865097, -0.7482399940490723, -0.11298999935388565, 0.0230260007083416, -0.19338999688625336, 0.133760005235672, -0.2289000004529953, -0.4424700140953064, 0.22896000742912292, -0.28314000368118286, -0.32328999042510986, 0.22283999621868134, -0.27761000394821167, -0.3314099907875061, -0.20268000662326813, 0.023507999256253242, 0.052274998277425766, 0.6073499917984009, -0.1799599975347519, -0.44887998700141907, 0.1958799958229065, -0.002768999896943569, -0.15602000057697296, 0.23886999487876892, -0.0012301000533625484, -0.18357999622821808, -0.20356999337673187, -0.7068700194358826, 0.2985599935054779, 0.024629000574350357, 0.5943099856376648, -0.23826999962329865, -0.19391000270843506, -0.31233999133110046, 0.23573000729084015, 0.5902699828147888, -0.05535700172185898, 0.27605000138282776, 
0.31832998991012573, 0.3182699978351593, 0.5549299716949463, 0.539929986000061, -0.3454799950122833, 0.028540000319480896, -0.6972500085830688, 0.18998999893665314, 0.03994600102305412, -0.10413999855518341, 0.16629000008106232, 0.0478690005838871, -0.3021399974822998, -0.16898000240325928, 0.2991800010204315, -0.20860999822616577, -0.011056999675929546, -0.20720000565052032, 0.08986800163984299, 0.14139999449253082, 0.05796699970960617, 0.36500000953674316, -0.45677998661994934, 0.12152999639511108, 0.5478900074958801, 0.32986998558044434, 0.2265399992465973, 0.5720400214195251, -0.19199000298976898, 0.3895399868488312], u'candle': [-0.07668200135231018, -0.4842599928379059, -0.1455399990081787, -0.3152100145816803, -0.57955002784729, 0.14869999885559082, -0.553380012512207, -0.03180500119924545, -0.3228999972343445, 0.2850300073623657, -0.4923200011253357, 0.3931100070476532, 0.1736000031232834, -0.5565800070762634, -0.14196999371051788, -0.004003399983048439, -0.3459100127220154, -0.025615999475121498, -0.6811000108718872, -0.40261000394821167, 0.20241999626159668, 0.03062400035560131, 0.14350000023841858, 0.6521300077438354, 0.704479992389679, -0.2045300006866455, -0.3576200008392334, -0.44780999422073364, -0.09607400000095367, 0.41923999786376953, 0.210999995470047, 0.39337998628616333, -0.29381999373435974, 0.35523998737335205, -0.2967599928379059, 0.4269300103187561, -0.3355900049209595, -0.03167299926280975, 0.8666800260543823, -0.0389150008559227, 0.2521899938583374, 0.08900000154972076, 0.013415999710559845, 0.27208998799324036, -0.09778600186109543, -0.6355100274085999, 0.21229000389575958, -0.5107799768447876, 0.42263999581336975, 0.15984000265598297, 0.17782999575138092, 0.3167099952697754, -0.22001999616622925, -0.1851000040769577, -0.5895900130271912, 0.41620999574661255, -0.16557000577449799, -0.0693420022726059, 0.7465699911117554, 0.5395699739456177, 0.31650999188423157, 0.18848000466823578, -0.16999000310897827, 0.8103500008583069, 0.08360700309276581, -0.3157300055027008, 0.13919000327587128, 0.2545199990272522, -0.11839000135660172, 0.21477000415325165, 0.24879999458789825, -0.31812000274658203, 0.3238300085067749, 0.009194900281727314, 0.06631799787282944, 0.5143700242042542, 0.013780999928712845, -0.6297699809074402, -0.2610599994659424, -0.2910600006580353, -0.023972999304533005, 0.18253999948501587, -0.8212800025939941, 0.014402000233530998, 0.5950800180435181, 0.005069099832326174, -0.21743999421596527, 0.12781000137329102, -0.005985999945551157, 0.19354000687599182, 0.5131099820137024, -0.48622000217437744, -0.3228999972343445, -0.3444199860095978, 0.4017300009727478, 0.05168899893760681, 0.6782199740409851, 0.1636500060558319, -0.2918199896812439, -0.23214000463485718, 0.7417600154876709, 0.07350599765777588, 0.18433000147342682, 0.08044599741697311, 0.36531001329421997, -0.03249200060963631, 0.4645000100135803, 0.11085999757051468, -0.3680399954319, -0.26649999618530273, -0.20941999554634094, -0.07260199636220932, 0.1844799965620041, 0.30156001448631287, -0.44811999797821045, -0.5591800212860107, -0.5250399708747864, 0.2458599954843521, 0.4223000109195709, -0.6130499839782715, -0.32381999492645264, -0.11531999707221985, -0.03617800027132034, -0.014054999686777592, 0.18480999767780304, -0.619949996471405, 0.5867699980735779, -0.24156999588012695, -0.15613999962806702, -0.2546499967575073, 0.8978899717330933, 0.3329800069332123, 0.5216000080108643, 0.2327599972486496, 0.5948399901390076, 0.1970899999141693, -0.5131300091743469, -0.08337800204753876, 
0.13711999356746674, -0.38453999161720276, 0.09527099877595901, -0.4677099883556366, -0.27136000990867615, -0.21187999844551086, -0.04604199901223183, 0.3357299864292145, -0.4536899924278259, 0.25488999485969543, -0.37768998742103577, -0.6456400156021118, -0.4495700001716614, 0.31251001358032227, 0.463809996843338, 0.147489994764328, -0.2916199862957001, 0.0009523199987597764, 0.05530799925327301, -0.3911600112915039, -0.46821001172065735, -0.2083600014448166, 0.2924799919128418, -0.3458099961280823, 0.11587999761104584, 0.13966000080108643, -0.08449199795722961, 0.634909987449646, -0.37310999631881714, -0.13853000104427338, 0.013663000427186489, -0.4478699862957001, -0.17118999361991882, 0.33351001143455505, 0.4304800033569336, -0.05955199897289276, -0.13247999548912048, -0.5168799757957458, -0.05322299897670746, 0.5647799968719482, 0.12348999828100204, -0.18425999581813812, 0.17369000613689423, 0.08464299887418747, 0.6146199703216553, -0.3571000099182129, 0.5393400192260742, -0.0654980018734932, 0.9539600014686584, 0.24192999303340912, 0.07283700257539749, -0.40880998969078064, 0.49191999435424805, -0.5601599812507629, -0.20633000135421753, 0.027775999158620834, -0.011900999583303928, -0.056196000427007675, -0.695330023765564, 0.056196000427007675, -0.46744000911712646, 0.1226700022816658, -0.019706999883055687, 0.6553400158882141, 0.681119978427887, -0.216729998588562, 0.4849799871444702, -0.06887499988079071, -0.13447000086307526, 0.2807300090789795, -0.2026599943637848, -0.004895600024610758, 0.0749799981713295, 0.13494999706745148, 0.16335000097751617, -0.6478000283241272, 0.46031999588012695, 0.304639995098114, 0.5028899908065796, -0.24687999486923218, 0.2568100094795227, 0.5776200294494629, 0.05226700007915497, -0.007865600287914276, -0.7519999742507935, 0.39902999997138977, -0.6618000268936157, -0.16946999728679657, -0.07511799782514572, 0.773140013217926, 0.36146000027656555, -0.03381200134754181, 0.4689199924468994, -0.4838399887084961, -0.020694000646471977, -0.15723000466823578, 0.48144999146461487, -0.3154500126838684, -0.4011000096797943, 0.25473999977111816, -0.1360200047492981, -0.4862000048160553, -0.08895300328731537, -0.4091799855232239, -0.8446400165557861, -0.6003999710083008, 0.049862999469041824, -0.15974999964237213, 0.20201000571250916, 0.11759000271558762, -0.1908400058746338, -0.15271000564098358, 0.17709000408649445, -0.4514800012111664, 0.2943499982357025, -0.08178000152111053, 0.2757300138473511, -0.10773999989032745, -0.32221001386642456, -0.6878799796104431, 0.024522999301552773, -0.6001600027084351, -0.37182000279426575, 0.21118000149726868, 0.5622000098228455, 0.26252999901771545, -0.3202100098133087, 0.10340999811887741, -0.3472999930381775, 0.22273999452590942, 0.5695199966430664, 0.12043999880552292, 0.3520300090312958, 0.10933999717235565, 0.605139970779419, 0.6152300238609314, -0.2351900041103363, -0.0790570005774498, -0.7979099750518799, -0.17506000399589539, -0.6397500038146973, -0.36107000708580017, -0.31839999556541443, -0.09679099917411804, -0.23577000200748444, -0.3986299932003021, 0.1617799997329712, 0.8054599761962891, -0.058389000594615936, -0.4486500024795532, -0.2947399914264679, 0.2590700089931488, 0.10409999638795853, 0.2336599975824356, 0.26078000664711, -0.13744999468326569, -0.020545000210404396, -0.31419000029563904, 0.8759199976921082, -0.1035500019788742, -0.1349100023508072, 0.7726200222969055], u'bay': [0.06016699969768524, -0.2179899960756302, -0.0459199994802475, -0.7308700084686279, 0.19224999845027924, -0.2993899881839752, 
-0.22123999893665314, 0.3984200060367584, -0.06162099912762642, -0.4421299993991852, 0.08299800008535385, 0.3107599914073944, 0.42204999923706055, 0.41328001022338867, 0.5591899752616882, 0.7189800143241882, -0.25968000292778015, 0.041179001331329346, 0.0361189991235733, 0.7811099886894226, -1.01010000705719, -0.807420015335083, -0.5646600127220154, -0.3921400010585785, -0.06810300052165985, -0.20092999935150146, 0.1385200023651123, 0.1833599954843521, -0.35058000683784485, 0.3301500082015991, 0.6371999979019165, 0.3432300090789795, -0.1551699936389923, 0.23851999640464783, -0.4697299897670746, 0.12088999897241592, -0.6423699855804443, -0.0067889997735619545, 0.007006899919360876, -0.8324699997901917, -0.6716099977493286, 0.7553799748420715, -0.3578000068664551, 0.976170003414154, -0.43042999505996704, 0.3506599962711334, 0.4445599913597107, 0.10600999742746353, -0.500469982624054, 0.40257999300956726, -0.3362100124359131, 0.5112800002098083, -0.25815001130104065, -0.12145999819040298, -0.05585699900984764, -0.45458000898361206, 0.5420600175857544, 0.31207001209259033, -0.5815200209617615, -0.12963999807834625, -0.1429000049829483, -0.021952999755740166, 0.1945900022983551, 0.06128599867224693, -0.05939599871635437, -1.1384999752044678, -0.10385999828577042, -0.07941199839115143, -0.05867600068449974, 0.4299300014972687, -0.1731799989938736, 0.08967199921607971, -0.44971999526023865, 0.04131700098514557, -0.8230199813842773, -0.5634199976921082, 0.3140600025653839, 0.1599300056695938, 0.23541000485420227, -0.23454000055789948, -0.6978999972343445, 0.23631000518798828, -0.03991499915719032, -0.029758000746369362, -0.1297300010919571, -0.20214000344276428, -0.018042000010609627, -0.42166000604629517, 0.3219600021839142, -0.6947000026702881, 0.2958199977874756, 0.22109000384807587, -0.05173899978399277, -0.09493099898099899, -0.39792999625205994, 0.35286998748779297, 0.2952300012111664, -0.456959992647171, 0.12966999411582947, -0.22600999474525452, -0.0771780014038086, -0.19032999873161316, 0.008196599781513214, 0.04529000073671341, 0.4569000005722046, 0.22925999760627747, -0.03200000151991844, -0.0980760008096695, -0.21291999518871307, 0.6635000109672546, -0.46435999870300293, -0.965969979763031, 0.009312000125646591, -0.027403000742197037, 0.127470001578331, -0.28534001111984253, 0.1593099981546402, 0.594219982624054, -0.4555000066757202, 0.4705600142478943, 0.10672000050544739, -0.6009799838066101, -0.07959400117397308, 0.08963800221681595, 0.3086400032043457, -0.5237399935722351, -0.3072899878025055, 0.39362001419067383, 0.0860230028629303, 0.3594299852848053, -0.006057499907910824, -0.18209999799728394, -0.3735800087451935, -0.36890000104904175, 0.3986800014972687, -0.06277400255203247, 0.6019999980926514, -0.40727001428604126, -0.16259999573230743, 0.08140400052070618, 0.6454200148582458, 0.003813300048932433, -0.2671400010585785, 0.4834800064563751, -0.3435800075531006, 0.4067099988460541, 0.25084999203681946, 0.054611001163721085, -0.4748300015926361, -0.12828999757766724, 0.8705400228500366, -0.6614699959754944, 0.3753199875354767, -0.10894999653100967, 0.2730199992656708, -0.6679999828338623, 0.08790100365877151, 0.029260000213980675, -0.2255299985408783, 0.42361000180244446, 0.5079699754714966, -0.26822999119758606, -0.12240999937057495, 0.4670799970626831, 0.2838599979877472, 0.0722000002861023, 0.5643399953842163, 0.3635900020599365, -0.5446299910545349, 0.16816000640392303, 0.4503600001335144, -0.30948999524116516, -0.40801000595092773, 0.08218800276517868, -0.6448400020599365, 
-0.2143000066280365, -0.6219199895858765, 0.49709001183509827, 0.0817900002002716, -0.06065399944782257, -0.12466999888420105, 0.4440099895000458, 0.3284200131893158, -0.12723000347614288, 0.7468000054359436, -0.3794899880886078, 0.4313499927520752, -0.11146000027656555, 0.6220499873161316, 0.6075199842453003, -0.18705999851226807, 1.1698999404907227, 0.2503100037574768, 0.03959900140762329, 0.5364900231361389, 0.13771000504493713, 0.558210015296936, -0.3703800141811371, 0.061260998249053955, -0.07236599922180176, 0.9876400232315063, 0.06622400134801865, -0.1308099925518036, -0.5860199928283691, -0.2017199993133545, 0.5201399922370911, 0.347460001707077, -0.08941300213336945, 0.3989199995994568, -0.1402300000190735, 0.08487900346517563, 0.03936300054192543, 0.13710999488830566, 0.3165999948978424, -0.19607999920845032, 0.016481999307870865, -0.3006899952888489, -0.4045400023460388, 0.007672599982470274, -0.642300009727478, 1.3315999507904053, -0.13989999890327454, 0.23968000710010529, 0.4806100130081177, 0.29864999651908875, -0.4408000111579895, 0.16091999411582947, -0.561460018157959, 0.31828999519348145, -0.19599999487400055, 0.2976900041103363, -0.38749998807907104, -0.3514299988746643, 0.2226099967956543, 0.015525000169873238, 0.3883500099182129, -0.20760999619960785, -0.3784100115299225, -0.5611799955368042, -0.29409998655319214, -0.4967299997806549, -0.11970999836921692, 0.9900500178337097, 0.36434000730514526, -0.4964900016784668, 0.0122060002759099, -0.15986000001430511, 0.8209400177001953, -0.5923100113868713, -0.4734799861907959, -0.1581300050020218, 0.616919994354248, 0.004963899962604046, 0.11768999695777893, 0.46790000796318054, 0.18140999972820282, 0.7960900068283081, -0.2870100140571594, -0.5921199917793274, 0.26365000009536743, -0.4244000017642975, -0.13334999978542328, -0.3171199858188629, 0.15139000117778778, -0.5071899890899658, -0.0994039997458458, 0.4974899888038635, -0.4655100107192993, 0.06041799858212471, -0.6138100028038025, -0.10023999959230423, -0.7870200276374817, 0.2088399976491928, 1.0699000358581543, -0.05134899914264679, -0.041450001299381256, -1.3986999988555908, 0.6289799809455872, 0.06752700358629227, 0.025384999811649323, -0.7588000297546387, 0.6349800229072571, -0.4118799865245819, -0.16707000136375427, -0.7528799772262573, 0.04942600056529045, -0.669189989566803, -0.38199999928474426, 0.7437800168991089, 0.016071999445557594, -0.15880000591278076, 0.14580999314785004, -0.5503799915313721, -0.6039400100708008, 0.230430006980896, 0.3794499933719635, 0.2344599962234497, 0.20985999703407288, -0.2729699909687042, 0.35113999247550964], u'chicken': [0.1857299953699112, 0.297870010137558, 0.5051900148391724, -0.27270999550819397, -0.3319999873638153, -0.7595499753952026, 0.16703000664710999, 0.23003999888896942, -0.21854999661445618, -0.17308999598026276, -0.28578001260757446, -0.4327000081539154, -0.6769000291824341, 0.6187400221824646, -0.24939000606536865, 0.30724000930786133, -0.10956999659538269, 0.5290700197219849, -0.13819999992847443, 0.03929800167679787, 0.09621699899435043, 0.2957000136375427, 0.4612500071525574, 0.021043000742793083, -0.31453999876976013, 0.1015700027346611, 0.11069999635219574, -0.3160800039768219, 0.2455500066280365, -0.37130001187324524, -1.2529000043869019, 0.03726299852132797, -0.36219000816345215, -0.03615099936723709, -0.48113998770713806, 0.7025600075721741, -0.09581100195646286, 0.15636999905109406, -0.1708800047636032, 0.22213000059127808, 0.2179100066423416, -0.4102799892425537, -0.37459999322891235, 0.09531500190496445, 
0.11181999742984772, -0.3767699897289276, 0.5823000073432922, -0.1592400074005127, -0.1470700055360794, 0.5231299996376038, 0.26565998792648315, 0.07027199864387512, 0.22669999301433563, 0.09232600033283234, 0.1022299975156784, -0.018386000767350197, -0.04949900135397911, 0.005669999867677689, -0.14399999380111694, -0.30425000190734863, 0.21730999648571014, 0.032698001712560654, 0.24824999272823334, -0.3166700005531311, -0.3508700132369995, -0.3151499927043915, -0.01259199995547533, 0.025707000866532326, -0.08719400316476822, 0.5138999819755554, 0.49636998772621155, 0.5018799901008606, 0.45756998658180237, -0.2847299873828888, -0.45069000124931335, 0.11356999725103378, 0.895389974117279, 0.5763999819755554, 0.10357999801635742, -0.21513999998569489, 0.0010735000250861049, 0.42254000902175903, -0.08077900111675262, -0.181659996509552, 0.1457899957895279, -0.3189300000667572, -0.6599299907684326, 0.35144999623298645, -0.36711999773979187, -0.8466600179672241, 0.2761799991130829, -0.29684001207351685, -0.04216200113296509, 0.30250999331474304, -0.05523199960589409, -0.1989700049161911, -0.3081299960613251, 0.8073499798774719, -0.26262998580932617, 0.18628999590873718, -0.04262800142168999, -0.18352000415325165, 0.35016998648643494, -0.8415300250053406, -0.35253000259399414, -0.14361999928951263, 0.30807000398635864, 0.08849900215864182, -0.331169992685318, 0.5106800198554993, 0.3609200119972229, -0.03331499919295311, -0.7140200138092041, -0.07273399829864502, 0.13796000182628632, -0.66211998462677, -0.38346999883651733, 0.32236000895500183, 0.18488000333309174, -0.13273000717163086, 0.0629189983010292, 0.03178799897432327, 0.7077000141143799, -0.10100000351667404, -0.5551400184631348, 0.31273001432418823, -0.4293999969959259, 0.2781600058078766, -0.12116999924182892, 0.48096999526023865, 0.06317699700593948, 0.6760200262069702, -0.15480999648571014, 0.6930699944496155, 0.07733500003814697, -0.7076500058174133, -0.03657099977135658, -0.44304999709129333, -0.3924599885940552, 0.7982100248336792, 0.33744001388549805, 0.3371500074863434, -0.19704000651836395, -0.39570000767707825, -0.8966900110244751, 0.16811999678611755, -0.12935000658035278, 0.07649599760770798, -0.15501999855041504, -0.4110499918460846, -0.7488399744033813, 0.5437300205230713, 0.0343330018222332, 0.3377799987792969, -0.21813000738620758, -0.39607998728752136, -0.3887999951839447, -0.3910500109195709, -0.378930002450943, -0.4477500021457672, 0.5652199983596802, 0.5204499959945679, -0.3487499952316284, 0.17044000327587128, 0.308789998292923, -0.10163000226020813, 0.0520239993929863, -0.27689000964164734, -0.2171500027179718, -0.5583800077438354, 0.06545300036668777, -0.14778000116348267, -0.2936300039291382, 0.2188899964094162, -0.3069100081920624, 0.20344999432563782, -0.24541999399662018, -0.17137999832630157, 0.7340800166130066, -0.7634599804878235, 0.09624599665403366, -0.010023999959230423, 0.05215099826455116, -0.7559199929237366, -0.4585700035095215, -0.0014952999772503972, 0.5138099789619446, -0.03778799995779991, -0.00044748999061994255, -0.39594000577926636, -0.2882100045681, 0.9596199989318848, -0.6406599879264832, 0.05510300025343895, 0.07247699797153473, 0.09679999947547913, -0.017772000283002853, -0.5692800283432007, -0.34092000126838684, 0.21367999911308289, 0.8825600147247314, 0.10243000090122223, 0.12024000287055969, 0.5106599926948547, 0.01418600045144558, 0.815310001373291, 0.6623200178146362, -0.47609999775886536, 0.17948000133037567, -0.04558499902486801, -0.12848000228405, -0.6008399724960327, 
0.22690999507904053, 0.3739599883556366, 0.3899399936199188, -0.03691500052809715, 0.650629997253418, -0.903659999370575, -0.1726199984550476, 0.5072199702262878, 0.46465998888015747, -0.2529299855232239, -0.3673200011253357, -0.717490017414093, 0.3084299862384796, -0.14390000700950623, 0.1265999972820282, -0.07255899906158447, -0.1506900042295456, 0.4753299951553345, 0.05038199946284294, 0.06401599943637848, -0.5073099732398987, 0.05009400099515915, 0.4137499928474426, 0.30331000685691833, 0.5964099764823914, -0.22583000361919403, -0.810230016708374, 0.301800012588501, -0.4647600054740906, -0.633080005645752, -0.34477999806404114, -0.4332900047302246, -1.0742000341415405, -0.08053699880838394, 0.15737000107765198, 0.14404000341892242, -0.36204999685287476, -1.0844999551773071, 0.3736099898815155, 0.1754699945449829, -0.0158270001411438, 0.47861000895500183, 0.30219998955726624, 0.33987998962402344, -0.0882129967212677, 0.5010700225830078, -0.26579999923706055, 0.43893998861312866, 0.3571699857711792, 0.07941599935293198, -0.4593900144100189, -0.4265100061893463, 0.22836999595165253, -0.1876700073480606, -0.4923799932003021, -0.09727499634027481, 0.029711000621318817, -0.528689980506897, -0.5270299911499023, 0.08676999807357788, 0.6906399726867676, 0.04197600111365318, 0.09796299785375595, 0.08734799921512604, -1.3765000104904175, 0.053008001297712326, -1.2448999881744385, -0.5357900261878967, -0.02945300005376339, -0.20711000263690948, 0.001144499983638525, 0.09033700078725815, -0.4087899923324585, 0.633430004119873, 0.4770599901676178, -0.08679100126028061, -0.17744000256061554, 0.04450799897313118, -0.22387999296188354, -0.11710000038146973, 0.41150999069213867, 0.19097000360488892, -0.5311300158500671, -1.0162999629974365, 0.33643001317977905, -0.6054499745368958, 0.26447001099586487, 0.28016000986099243], u'ribbon': [-0.009848699904978275, 0.20702999830245972, 0.31692999601364136, -0.5009999871253967, -0.4533900022506714, 0.6714000105857849, -0.7940300107002258, -0.2614699900150299, -0.1432799994945526, -0.45361000299453735, -0.030503999441862106, 0.5908300280570984, -0.672950029373169, 0.2701599895954132, 0.1031000018119812, 0.186939999461174, -0.3270600140094757, -0.0472709983587265, -0.27421998977661133, -0.005926600191742182, -0.2187100052833557, -0.30573999881744385, -0.5171099901199341, -0.027070000767707825, 0.3713900148868561, -0.1926099956035614, -0.20178000628948212, -0.4756700098514557, -0.1665399968624115, 0.1731799989938736, 1.0390000343322754, -0.618939995765686, 0.5534499883651733, 0.5559700131416321, -0.7806500196456909, 1.0073000192642212, 0.11721000075340271, -0.2291799932718277, 0.2102999985218048, 0.1192300021648407, -0.29732999205589294, 0.020695000886917114, 0.09226399660110474, 0.008838700130581856, -0.17673000693321228, -0.2798199951648712, -0.04322599992156029, -0.48524001240730286, 0.47595998644828796, -0.4473100006580353, -0.5034000277519226, 0.23113000392913818, 0.31905999779701233, -0.5041099786758423, -0.5676500201225281, -0.2809799909591675, -0.43814000487327576, 0.13502000272274017, 0.2977699935436249, 0.20202000439167023, 0.43845999240875244, -0.35412999987602234, 0.0994499996304512, -0.15631000697612762, 0.721310019493103, -0.11980000138282776, -0.07136400043964386, 0.6919800043106079, -0.03639199957251549, 0.16993999481201172, -0.05326500162482262, 0.045820001512765884, 0.09472999721765518, -0.1220100000500679, -0.13975000381469727, 0.21504999697208405, 0.26374998688697815, 0.5395399928092957, -0.09528400003910065, -0.4437499940395355, 
0.230430006980896, -0.0895719975233078, 0.2224300056695938, -0.5598300099372864, 0.08292199671268463, -0.4099099934101105, -0.04439200088381767, -0.37852999567985535, -0.24492999911308289, 0.28543001413345337, -0.05346599966287613, 0.03712499886751175, 0.42684999108314514, -0.21577000617980957, -0.08299300074577332, 0.1563899964094162, 0.6797400116920471, 0.2163500040769577, -0.24572999775409698, 0.0571650005877018, 0.5648800134658813, 0.3914499878883362, 0.21778999269008636, -0.4793199896812439, 0.22322000563144684, -0.36563000082969666, 0.04432699829339981, -0.026352999731898308, -0.8022199869155884, -0.16221000254154205, -0.21435000002384186, 0.8636900186538696, 0.26287001371383667, -0.5364699959754944, -0.44905999302864075, -0.10533999651670456, 0.0422700010240078, 0.6452599763870239, 0.06846799701452255, -0.2760699987411499, 0.001781799946911633, 0.28598999977111816, 0.9682300090789795, -0.3683199882507324, 0.5940799713134766, -0.22224999964237213, -0.5803700089454651, -0.10199999809265137, -0.2950100004673004, -0.051173001527786255, -0.02436700090765953, -0.4219299852848053, 0.3390499949455261, -0.01285799965262413, -0.7595400214195251, -0.0472709983587265, -0.1906999945640564, 0.8527799844741821, 0.3813000023365021, -0.06608200073242188, 0.37973999977111816, 0.696340024471283, -0.3059599995613098, -0.930899977684021, 0.3790000081062317, -0.14722000062465668, -0.07463899999856949, -0.8017699718475342, 0.33834001421928406, 0.29864001274108887, 0.3143500089645386, -0.2796500027179718, 0.613290011882782, -0.19363999366760254, 0.2046000063419342, 0.05663599818944931, 0.16872000694274902, -1.2102999687194824, -0.40509000420570374, 0.4484899938106537, -0.05539900064468384, 0.06869199872016907, -0.3687799870967865, 0.23157000541687012, 0.3954299986362457, -0.1910800039768219, -0.2938700020313263, 0.492900013923645, -0.44971001148223877, -0.06988800317049026, -0.15525999665260315, -0.24963000416755676, 0.21568000316619873, 0.320279985666275, -0.20381000638008118, -0.6128100156784058, -0.4799099862575531, 0.011309999972581863, -0.05404699966311455, -0.46011999249458313, -0.4313800036907196, 0.03482399880886078, 0.4035699963569641, -0.12264999747276306, 0.359609991312027, -0.30149000883102417, 0.2081100046634674, 0.5590299963951111, 0.4422299861907959, 0.12205000221729279, 0.4675000011920929, 0.09279000014066696, 0.04067299887537956, -0.030307000502943993, 0.3043000102043152, 0.5864899754524231, -0.361380010843277, -0.16966000199317932, -0.0067750997841358185, 0.1396999955177307, 0.3454599976539612, 0.20126000046730042, 0.562030017375946, 0.44025999307632446, -0.23526999354362488, 0.3213199973106384, -0.10796000063419342, 0.3093000054359436, -0.17550000548362732, 0.26475998759269714, 0.690500020980835, -0.25666001439094543, 0.354449987411499, -0.12942999601364136, 0.10180000215768814, 0.2828100025653839, -0.030299000442028046, -0.8686699867248535, 0.42640000581741333, -0.13196000456809998, 0.44005000591278076, -0.7363899946212769, -0.06913500279188156, 0.13550999760627747, -0.10220000147819519, -0.6767600178718567, -0.15670999884605408, -0.06123699992895126, -0.26627999544143677, -0.52538001537323, 0.3892799913883209, -0.1646299958229065, -0.2815000116825104, 0.14469000697135925, 0.38133999705314636, -0.012435000389814377, 0.04326999932527542, -0.8684499859809875, 0.672249972820282, 0.030024999752640724, 0.2672800123691559, -0.19875000417232513, -0.12476000189781189, -0.5708500146865845, -0.7805500030517578, 0.06394200026988983, 0.178849995136261, -0.08741399645805359, 0.008418800309300423, 
-0.2551499903202057, 0.07440400123596191, 0.32183000445365906, 0.18213999271392822, -0.6424000263214111, 0.539870023727417, -0.28011998534202576, 0.26846998929977417, 0.16432000696659088, 0.032568998634815216, -0.08716999739408493, 0.4547399878501892, -0.26093998551368713, 0.576770007610321, -0.38457998633384705, 0.10990999639034271, -0.014228999614715576, -0.25224000215530396, 0.36820998787879944, -0.20430000126361847, -0.20624999701976776, 0.13787999749183655, -0.4221799969673157, -0.1905599981546402, 0.1737300008535385, -0.7495399713516235, 0.04674199968576431, -0.9477999806404114, -0.3044399917125702, 0.002579400083050132, -0.3366200029850006, 0.07441999763250351, 0.24831999838352203, -0.22755999863147736, -0.41888999938964844, -0.17178000509738922, 0.29407998919487, -0.12163999676704407, -0.4990899860858917, -0.055087000131607056, -0.41165000200271606, 0.3401300013065338, 0.5505300164222717, -0.719219982624054, 0.48166999220848083, -0.5316200256347656, 0.03697900101542473, 0.5092099905014038, 0.46531999111175537, 0.36939001083374023, -0.8857600092887878], u'redwood': [-0.022092999890446663, -0.41510000824928284, -0.4859899878501892, -0.24108999967575073, 0.5154899954795837, 0.007511700037866831, -0.060189999639987946, 0.3458299934864044, 0.4063299894332886, 0.02360999956727028, -0.7820500135421753, -0.02700899913907051, 0.05689699947834015, 0.0426350012421608, 0.23991000652313232, 0.20369000732898712, -0.028674999251961708, -0.5957900285720825, 0.2050900012254715, 0.292930006980896, 0.4417499899864197, 0.19979000091552734, 0.5782300233840942, -0.08459799736738205, -0.18074999749660492, -0.16975000500679016, 0.05812099948525429, -0.14639000594615936, -0.4360800087451935, 0.7268800139427185, -0.17531000077724457, 0.24673999845981598, -0.020076999440789223, -0.06588900089263916, -0.13440999388694763, -0.28600001335144043, 0.1840900033712387, -0.07851999998092651, -0.1438799947500229, -0.26708999276161194, 0.16760000586509705, 0.1402900069952011, 0.14601999521255493, -0.18174000084400177, 0.059661999344825745, -0.017690999433398247, 0.3545199930667877, 0.24657000601291656, 0.3762800097465515, -0.07627499848604202, -0.6124600172042847, -0.015131999738514423, -0.15914000570774078, -0.2927199900150299, -0.10374999791383743, -0.24692000448703766, -0.2043900042772293, -0.1406800001859665, -0.09880000352859497, 0.8027999997138977, -0.04992299899458885, -0.30546000599861145, 0.22719000279903412, 0.24256999790668488, 0.04142199829220772, -0.2214599996805191, -0.6615999937057495, 0.22758999466896057, 0.12870000302791595, -0.40217000246047974, -0.2751699984073639, -0.06149499863386154, -0.1691800057888031, 0.491210013628006, -0.2337000072002411, 0.9613400101661682, 0.043039001524448395, -0.375110000371933, 0.0044432999566197395, 0.038589999079704285, -0.9041500091552734, -0.005800699815154076, 0.009391400031745434, 0.0031512000132352114, 0.044335998594760895, 0.6532099843025208, -0.13167999684810638, 0.8915299773216248, 0.41367998719215393, 0.3906500041484833, 0.24859000742435455, 0.015561000443994999, 0.6982499957084656, 0.6823999881744385, -0.1434900015592575, 0.7448700070381165, 0.3949599862098694, -0.09178800135850906, -0.5370799899101257, 0.2569200098514557, 0.323419988155365, 0.8300099968910217, -0.7907500267028809, -0.014665000140666962, -0.28112998604774475, 0.29175999760627747, -0.09321100264787674, -0.12251000106334686, -0.04154700040817261, -0.2143000066280365, -0.6366400122642517, -0.9606099724769592, 0.23542000353336334, -0.08173900097608566, -0.13264000415802002, -0.488070011138916, 
-0.19946999847888947, 0.2926200032234192, -0.3628999888896942, 0.16007000207901, -0.46320000290870667, -0.09350399672985077, -0.29499998688697815, 0.35916998982429504, 0.00789059977978468, -0.4263699948787689, 0.0641620010137558, -0.09000799804925919, 0.02522199973464012, -0.24194000661373138, -0.10266000032424927, 0.28505998849868774, 0.22121000289916992, 0.09801699966192245, 0.8804399967193604, 0.3042699992656708, -0.16332000494003296, -0.25183001160621643, 0.12005999684333801, -0.3340800106525421, 0.9876899719238281, -0.009852300398051739, 0.0682540014386177, 0.12620000541210175, 0.7884100079536438, -0.33327001333236694, 1.0226999521255493, -0.2605299949645996, -0.49838998913764954, -0.5759900212287903, 0.41071999073028564, 0.3298499882221222, 0.16255000233650208, 0.02041199989616871, -0.5454000234603882, -0.1645900011062622, 0.350629985332489, 0.12002000212669373, -0.5202100276947021, -0.023249000310897827, 0.19829000532627106, 0.22269999980926514, 0.29958999156951904, 0.025002000853419304, 0.07946699857711792, 0.06418699771165848, 0.46351000666618347, 0.8749099969863892, -0.12246999889612198, 0.6118800044059753, -0.9440400004386902, -0.3212299942970276, -0.08634000271558762, 0.2974900007247925, 0.28644001483917236, -0.1115799993276596, 0.6936900019645691, 0.027204999700188637, 0.3553600013256073, -0.8110600113868713, 0.03317900002002716, 0.23973999917507172, 0.73471999168396, -0.12482000142335892, 0.30744001269340515, -0.8278700113296509, -0.1277499943971634, -0.6556400060653687, 0.04334200173616409, -0.15715999901294708, 0.2668200135231018, 0.16493000090122223, 0.5112000107765198, -0.48969000577926636, -0.05007600039243698, 0.486519992351532, 0.4637799859046936, -0.24573999643325806, -0.24856999516487122, 0.8388199806213379, -0.18485000729560852, -0.666920006275177, -0.1489800065755844, -0.2227499932050705, -0.6051300168037415, -0.05121999979019165, 0.18669000267982483, 0.5890600085258484, 0.1984100043773651, -0.46985000371932983, -0.0754299983382225, -0.21875999867916107, -0.22495999932289124, -0.554390013217926, -0.4661799967288971, 0.7549999952316284, -0.5712800025939941, 0.13534000515937805, -0.023951999843120575, 0.1411599963903427, 0.4976699948310852, 0.22694000601768494, -0.22432999312877655, -0.2844099998474121, 0.22909000515937805, 0.3948099911212921, -0.25940001010894775, -0.32708001136779785, -0.0385890007019043, 0.3778800070285797, -0.1431100070476532, 0.2662700116634369, -0.052584998309612274, -0.24435999989509583, 0.6470999717712402, -0.31949999928474426, -0.055091001093387604, 0.0001105900009861216, 0.1840600073337555, -0.4192799925804138, -0.07456699758768082, -0.23187999427318573, -0.041391998529434204, 0.3273000121116638, -0.3037700057029724, 0.5610100030899048, 0.1502700001001358, 0.14794999361038208, 0.13131000101566315, -0.3993400037288666, -0.07918799668550491, 0.410290002822876, 0.25940999388694763, -0.25964000821113586, 0.1405400037765503, -0.1346299946308136, 0.24547000229358673, 0.25944000482559204, 0.7033100128173828, -0.2788099944591522, 0.22147999703884125, -0.1295900046825409, -0.04453499987721443, -0.0006146300002001226, -0.24301999807357788, 0.10508999973535538, 0.19565999507904053, 0.25843000411987305, 0.10514000058174133, -0.19746999442577362, -0.4617699980735779, 0.047871001064777374, 0.1687600016593933, -0.16742999851703644, -0.34391000866889954, -0.3479999899864197, 0.24657000601291656, 0.6515799760818481, -0.05826700106263161, -0.6141300201416016, 0.26392999291419983, -0.1096000000834465, -0.7463399767875671, 0.5325700044631958, 
-0.12154000252485275, -0.2927600145339966, -0.05561700090765953, -0.14799000322818756, -0.3472599983215332, -0.40242999792099, 0.5656300187110901, -0.0794840008020401, -0.16710999608039856, 0.36316999793052673, 0.4144200086593628, 0.15790000557899475, -0.35249000787734985, 0.04843499884009361, -0.7094699740409851, 0.7198799848556519], u'shower': [-0.0248780008405447, -0.09989900141954422, -0.2076999992132187, -0.5239599943161011, -0.3131900131702423, 0.12793999910354614, -0.1576700061559677, 0.12437999993562698, 0.6261600255966187, -0.3679499924182892, -0.5946300029754639, 0.5443699955940247, 0.03122599981725216, -0.19902999699115753, 0.3468799889087677, -0.13346000015735626, 0.4906800091266632, 0.33886000514030457, 0.2501699924468994, -0.24291999638080597, -0.0845239982008934, 0.3203999996185303, -0.11969000101089478, 0.36965999007225037, 0.16851000487804413, -0.8165299892425537, 0.04784499853849411, -0.1552799940109253, 0.3258199989795685, -0.265500009059906, 0.18848000466823578, -0.3799799978733063, -0.2581599950790405, 0.05596499890089035, -0.6679900288581848, 0.3607900142669678, -0.29482999444007874, 0.541130006313324, -0.2747400104999542, -0.15873000025749207, 0.07702399790287018, 0.28185001015663147, 0.008907600305974483, -0.3694800138473511, -0.43825000524520874, 0.40950000286102295, 0.49698999524116516, 0.3469899892807007, 0.17539000511169434, -0.3564800024032593, -0.5878900289535522, -0.2522200047969818, 0.6005499958992004, 0.01075000036507845, -0.25725001096725464, 0.19659000635147095, 0.06299199908971786, 0.392520010471344, 0.6941099762916565, 0.31134000420570374, 0.17775000631809235, -0.18223999440670013, 0.16095000505447388, 0.42396000027656555, -0.13247999548912048, -0.4501799941062927, -0.38253000378608704, -0.08657799661159515, -0.25679001212120056, 0.15699000656604767, 0.062185000628232956, -0.42579999566078186, -0.11330000311136246, -0.05949399992823601, 0.12606999278068542, -0.07948800176382065, -0.3943699896335602, -0.3974500000476837, -0.36469000577926636, -0.6495400071144104, -0.04915900155901909, -0.18400000035762787, 0.12610000371932983, 0.46852999925613403, 0.5608599781990051, -0.08732099831104279, -0.05482599884271622, -0.2984899878501892, -0.11959999799728394, -0.5786200165748596, 0.48677998781204224, 0.2444400042295456, 0.5373299717903137, -0.03973200172185898, -0.5577399730682373, 0.3554700016975403, 0.22155000269412994, 0.14595000445842743, 0.6675500273704529, -0.050415001809597015, -0.3623200058937073, 0.4146600067615509, -0.6065899729728699, -0.15425999462604523, 0.5272700190544128, -0.20735999941825867, 0.22211000323295593, 0.3043699860572815, -0.7260199785232544, -0.061402998864650726, -0.120619997382164, 0.40786999464035034, 0.12454000115394592, 0.00823570042848587, -0.29366999864578247, 0.4565500020980835, -0.07100299745798111, 0.019874999299645424, -0.03212499991059303, -0.6812999844551086, 0.4566600024700165, -0.24163000285625458, -0.019960999488830566, 0.460099995136261, 0.2747899889945984, -0.2481199949979782, 0.040139999240636826, 0.37786000967025757, -0.18154999613761902, 0.10926000028848648, 0.34591999650001526, 0.01998800039291382, 0.4626399874687195, -0.009464999660849571, -0.09788300096988678, -0.15916000306606293, -0.5027499794960022, -0.3215000033378601, -0.088639996945858, -0.361299991607666, -0.49998000264167786, -0.7450199723243713, -0.16033999621868134, -0.09773600101470947, -0.3714599907398224, -0.18174999952316284, 0.07651200145483017, 0.2336599975824356, 0.6479399800300598, -0.35332998633384705, -0.5696499943733215, 
-0.3475799858570099, -0.14698000252246857, 0.2778399884700775, -0.11533000320196152, 0.08893799781799316, 0.2699599862098694, -0.28255000710487366, -0.4260700047016144, 0.31001999974250793, 0.2280000001192093, -0.550000011920929, 0.32447001338005066, 0.19294999539852142, 0.34551000595092773, 0.5882800221443176, 0.6524999737739563, 0.8994399905204773, 0.42761000990867615, 0.043494001030921936, -0.22978000342845917, 0.5869899988174438, 0.3200800120830536, -0.08139699697494507, -0.5559999942779541, -0.11153999716043472, 0.22439000010490417, 0.7022299766540527, 0.3793500065803528, -0.26914000511169434, 0.3525699973106384, -0.4631499946117401, 0.27375999093055725, 0.3246699869632721, -0.2406100034713745, 0.07803600281476974, 0.80308997631073, 0.9190700054168701, -0.39917999505996704, -0.5390400290489197, 0.7329800128936768, 0.029529999941587448, -0.9824699759483337, -0.2198999971151352, -0.3221699893474579, 0.037932999432086945, -0.4598200023174286, 0.3708699941635132, -0.13414999842643738, -0.24334999918937683, -0.04998200014233589, -0.06328800320625305, 0.019435999915003777, 0.1424800008535385, 0.18367999792099, 0.03049900010228157, -0.34404000639915466, -0.14361999928951263, -0.8181899785995483, -0.5065500140190125, -0.6820099949836731, -0.07453600317239761, -0.2663399875164032, -0.06389100104570389, 0.28532999753952026, -0.3074899911880493, -0.24564999341964722, -0.7802900075912476, -0.411080002784729, 0.12167999893426895, 0.15658999979496002, 0.35120999813079834, -0.10350000113248825, -0.3522000014781952, 0.06844499707221985, -0.4828900098800659, -0.7550600171089172, -0.07140800356864929, 0.5588099956512451, 0.18388999998569489, 0.8764500021934509, -0.7861499786376953, -0.2555699944496155, -0.9298200011253357, 0.23075999319553375, 0.04742300137877464, -0.8224200010299683, -0.35923001170158386, 0.16282999515533447, -0.5897200107574463, 0.33998000621795654, 0.028248999267816544, -0.19494999945163727, 0.3447299897670746, 0.028398999944329262, -0.18480999767780304, 0.24961000680923462, -0.1117200031876564, -0.6522200107574463, -0.008006599731743336, -0.13370999693870544, -0.5661500096321106, -0.029712000861763954, -0.3137899935245514, 0.34683001041412354, 0.08430899679660797, 0.08977500349283218, -0.7431600093841553, 0.21695999801158905, 0.7000600099563599, -0.38416001200675964, -0.5574700236320496, 0.06957399845123291, -0.4141499996185303, -0.2785800099372864, -0.737309992313385, -0.15836000442504883, 0.6645200252532959, 0.4093700051307678, 0.12633000314235687, 0.2251800000667572, 0.03995899856090546, -0.6047099828720093, -0.26302000880241394, -0.12039999663829803, 0.04015899822115898, -1.4299999475479126, 0.2793799936771393, -1.2755000591278076, 0.017896000295877457, -0.5390999913215637, 0.27272000908851624, 0.2637999951839447, -0.13167999684810638, 0.05492600053548813, 0.7888000011444092, 0.19315999746322632, 0.4765999913215637, -0.22949999570846558, -0.04249199852347374, -0.37487998604774475, 0.14184999465942383, 0.2649100124835968, -0.08745899796485901, -0.02522600069642067, -0.6392899751663208, 0.3814699947834015, 0.1508300006389618, 0.3535099923610687, 0.21107999980449677], u'wire': [0.021611999720335007, 0.05245399847626686, -0.3948099911212921, -0.4303300082683563, -0.1357100009918213, -0.06013999879360199, 0.39684998989105225, -0.30230000615119934, -0.15392999351024628, -1.260599970817566, -0.03999299928545952, 0.22322000563144684, 0.19800999760627747, -0.25110000371932983, -0.16493000090122223, -0.4054499864578247, -1.2448999881744385, 0.16929000616073608, 0.2951500117778778, 
-0.05433500185608864, 0.3520300090312958, -0.4683400094509125, 0.18839000165462494, 0.5962200164794922, -0.001604399993084371, -0.10463999956846237, -0.005458099767565727, 0.6032099723815918, 0.3870199918746948, 0.15240000188350677, -0.2999500036239624, -0.14646999537944794, 0.482340008020401, 0.4508799910545349, -0.5967699885368347, 0.10617999732494354, 0.25944000482559204, -0.2248300015926361, 0.29276999831199646, 0.7707499861717224, 0.00012815000081900507, -0.8351699709892273, -0.2576799988746643, 0.21149000525474548, -0.3807600140571594, 0.23930999636650085, -0.04959399998188019, 0.16742999851703644, 0.5343800187110901, 0.44242000579833984, 0.6391599774360657, 0.6018400192260742, 0.34101998805999756, -0.04975999891757965, 0.18459999561309814, -0.24818000197410583, -0.6600000262260437, -0.9811699986457825, -0.20206999778747559, -0.12634000182151794, 0.9605100154876709, 0.6040999889373779, 0.4684300124645233, 0.39939001202583313, 0.5146200060844421, 0.5221999883651733, -0.27702999114990234, 0.6761900186538696, 0.8201799988746643, 0.20758000016212463, 0.38495999574661255, 0.4597899913787842, 0.003339800052344799, 0.544979989528656, -0.5116999745368958, 0.6985599994659424, 0.11935999989509583, 0.1429399996995926, 0.38019999861717224, -0.24501000344753265, 0.6214600205421448, -0.24775999784469604, 0.05719299986958504, -0.1274999976158142, -0.6016899943351746, -0.08124300092458725, -0.3106600046157837, 0.1137399971485138, -0.1111999973654747, -0.03322400152683258, 0.05686299875378609, -0.26688000559806824, -0.07225099951028824, 0.36138999462127686, -0.16845999658107758, 0.10649999976158142, -0.2648000121116638, 0.1713400036096573, -0.10656999796628952, 0.1135300025343895, -0.1565999984741211, 0.36552000045776367, -0.1792300045490265, -0.40459999442100525, 0.9823300242424011, -0.301690012216568, 0.4115700125694275, 0.15175999701023102, -0.3581100106239319, 0.11479999870061874, -0.33816999197006226, 0.1392499953508377, -0.3652600049972534, -0.9844599962234497, 0.3004800081253052, 0.4672200083732605, -0.45688000321388245, 0.37957000732421875, -0.14760999381542206, -0.36757001280784607, 0.6692000031471252, -0.021656999364495277, 0.8418800234794617, -0.592710018157959, 0.19077999889850616, -0.22357000410556793, -0.7122700214385986, -0.593209981918335, -0.5712800025939941, 0.0241519995033741, 0.1485999971628189, 0.685920000076294, 0.44130000472068787, 0.7392200231552124, -0.6462200284004211, 0.21332000195980072, 0.30702999234199524, 0.7001199722290039, -0.3082199990749359, -0.7668600082397461, -0.6737200021743774, 0.17301000654697418, -0.2083200067281723, -0.8236500024795532, -0.23157000541687012, 0.1733900010585785, -0.1506499946117401, -1.0080000162124634, 0.11997000128030777, -0.4056999981403351, 0.9284300208091736, 0.30527999997138977, -0.3534500002861023, -0.4323999881744385, 0.21352000534534454, -0.4711500108242035, 0.3779500126838684, 0.1365399956703186, -0.048677001148462296, 0.23021000623703003, -0.24116000533103943, 0.08286699652671814, -0.17038999497890472, -0.25183001160621643, 0.14982999861240387, 0.6833999752998352, 0.0603489987552166, -0.044169001281261444, -0.3857100009918213, 0.05337199941277504, -0.36800000071525574, 0.4078199863433838, -0.5674899816513062, -0.014801000244915485, -0.11683999747037888, -0.16410000622272491, -0.4146000146865845, 0.44001999497413635, -0.012071000412106514, -0.7459499835968018, 0.4831700026988983, -0.18478000164031982, -0.10028000175952911, 0.5861899852752686, -0.4338200092315674, -0.11840999871492386, 0.7421900033950806, 0.12126000225543976, 
0.3616600036621094, 0.2662999927997589, -0.5186600089073181, 0.3624500036239624, -0.11069999635219574, -0.1050800010561943, -0.3080100119113922, -0.4768500030040741, -0.5095800161361694, -0.4652099907398224, -0.16821999847888947, -0.012581000104546547, 0.23033000528812408, 0.36458998918533325, 0.1653199940919876, 0.24772000312805176, 0.24128000438213348, 0.794700026512146, 0.24796999990940094, -1.1758999824523926, -0.16561999917030334, -0.3568800091743469, 0.41363000869750977, 0.5619300007820129, 0.4020000100135803, -0.09793400019407272, -0.14392000436782837, 0.3863300085067749, 0.2658500075340271, -0.3933899998664856, 0.05049299821257591, -0.03240099921822548, 0.7394599914550781, 0.011323999613523483, 0.726610004901886, -0.05703999847173691, 0.27399998903274536, 0.12636999785900116, -0.3090600073337555, -0.3852100074291229, 0.2943199872970581, -0.14114999771118164, -0.2866399884223938, 0.1400900036096573, -0.0939439982175827, -0.05731400102376938, -0.1365399956703186, -0.23427000641822815, 0.03432299941778183, -0.6599799990653992, -0.16694000363349915, 0.27713000774383545, 0.11958999931812286, -0.3006100058555603, -0.2585600018501282, 0.213469997048378, -0.23250000178813934, -0.5044800043106079, 0.03039100021123886, 0.14961999654769897, 0.30243998765945435, -0.8649600148200989, -0.05845300108194351, -0.019686000421643257, 0.10708999633789062, -0.7445799708366394, 1.3562999963760376, -0.010790999978780746, 0.29673001170158386, -0.4191800057888031, 0.3146199882030487, 0.4111100137233734, 0.45859000086784363, -0.12709000706672668, 0.23739999532699585, 0.29350000619888306, 0.12272000312805176, -0.361050009727478, 0.034692998975515366, -0.3689500093460083, -0.11210999637842178, 0.0069367000833153725, -0.13892999291419983, -0.10366000235080719, 0.849120020866394, 0.1263899952173233, -0.39809998869895935, 0.6098399758338928, -1.3558000326156616, 0.4095799922943115, -0.834089994430542, -0.12363000214099884, 0.46070998907089233, -0.1745699942111969, -0.8152999877929688, 0.45579999685287476, 0.09597299993038177, -0.1712999939918518, 0.019442999735474586, 0.07479999959468842, -0.5562400221824646, 0.035937000066041946, 0.6139299869537354, -0.17691999673843384, -0.6892899870872498, 0.409960001707077, 0.36873000860214233, 0.10424000024795532, -0.3489600121974945, 0.26374000310897827, 0.03446599841117859, -0.05295500159263611], u'leaf': [-0.19212999939918518, 0.7793800234794617, 0.07630199939012527, -0.557420015335083, -0.2798500061035156, -0.14020000398159027, -0.2928999960422516, 0.14044000208377838, -0.040626998990774155, -0.3824400007724762, -0.22181999683380127, 0.5963799953460693, -0.7333899736404419, 0.37591999769210815, -0.034487999975681305, 0.3309899866580963, -0.844980001449585, -0.004673699848353863, -0.07080700248479843, -0.3259899914264679, -0.5351399779319763, 0.07971599698066711, 0.21478000283241272, 0.07250600308179855, 0.26280999183654785, -0.5806300044059753, -0.11929000169038773, -0.35666000843048096, 0.35440999269485474, 0.09118500351905823, 0.11935999989509583, 0.4087600111961365, -0.16301999986171722, 0.3161799907684326, -0.6691100001335144, 0.19944000244140625, -0.2874799966812134, -0.050613000988960266, -0.009379999712109566, -0.06776200234889984, -0.8511000275611877, 0.15921999514102936, -0.3678799867630005, -0.303739994764328, 0.3681100010871887, -0.8611400127410889, 0.21807999908924103, 0.30289000272750854, -0.6973099708557129, -0.44839999079704285, -0.06901299953460693, -0.2979399859905243, -0.03999499976634979, -0.03654199838638306, -0.10041999816894531, 
-0.2151000052690506, -0.3263300061225891, 0.323529988527298, 0.0070615001022815704, -0.21814000606536865, -0.1310500055551529, -0.33004000782966614, 0.38960000872612, -0.054037000983953476, -0.12093999981880188, -0.12189999967813492, -0.3631899952888489, 0.37233999371528625, -0.1726900041103363, 0.2797200083732605, -0.33932000398635864, -0.3147200047969818, -0.08184300363063812, 0.7458999752998352, -1.225000023841858, 0.3926900029182434, 0.24627000093460083, -0.42423999309539795, -0.08461199700832367, -0.008894099853932858, -0.4464299976825714, 0.19257000088691711, -0.5254899859428406, -0.3240399956703186, -0.2002599984407425, 0.15028999745845795, -0.3087199926376343, -0.3140000104904175, -0.6164000034332275, -0.006736000068485737, 0.3850399851799011, -0.42386001348495483, 0.23056000471115112, -0.334199994802475, 0.05225500091910362, -0.2492000013589859, 0.2298399955034256, -0.3070400059223175, 0.4330500066280365, -0.09096899628639221, 0.2942200005054474, -0.46452000737190247, -0.34233999252319336, 0.03380399942398071, -0.8305500149726868, 0.06793399900197983, 0.05587000027298927, -0.3074199855327606, -0.02418000064790249, 0.5957900285720825, -0.03846300020813942, 0.38398000597953796, 0.4547399878501892, -0.2642799913883209, -0.31992998719215393, 0.3152799904346466, -0.563510000705719, 0.5330700278282166, -0.41442999243736267, 0.17556999623775482, -0.45732998847961426, -0.8593699932098389, 0.5182499885559082, 0.0976639986038208, -0.5329099893569946, 0.028139999136328697, -0.07698400318622589, 0.6063799858093262, -0.13600000739097595, 0.6333000063896179, 0.2008499950170517, 1.0746999979019165, 0.13225999474525452, 0.09022200107574463, 0.3970299959182739, -0.04890900105237961, -0.23037000000476837, 0.04885600134730339, -0.06612599641084671, -0.11632999777793884, 0.79899001121521, 0.6460000276565552, -0.4943599998950958, -0.4103200137615204, 0.1004600003361702, 0.17518000304698944, 0.08052200078964233, -0.5375800132751465, 0.8523100018501282, -0.06580399721860886, -0.1773499995470047, -0.18474000692367554, -0.08566799759864807, -0.1801300048828125, 0.08231800049543381, -0.2902100086212158, 0.2743000090122223, -0.6952199935913086, -0.22116999328136444, 0.001855500042438507, -0.08706499636173248, -0.5199900269508362, -0.03853899985551834, 0.03623099997639656, 0.5069000124931335, 0.08224199712276459, -0.2621000111103058, 0.6905099749565125, -0.3601300120353699, -0.8814600110054016, -0.332179993391037, -0.48827001452445984, 0.02477400004863739, 0.4898500144481659, 0.2347699999809265, -0.45565998554229736, 0.6538199782371521, 0.7981299757957458, 0.061896998435258865, -0.35499998927116394, -0.35453000664711, -0.14390000700950623, -0.3614700138568878, -0.30292001366615295, 0.4651600122451782, 0.1421699970960617, 0.6559299826622009, 0.09432999789714813, 0.7712900042533875, 0.06218999996781349, -0.18250000476837158, 0.9620800018310547, 0.11771000176668167, 0.4495899975299835, 0.17566999793052673, 0.31957000494003296, 0.7640299797058105, -0.17362000048160553, -0.0039923000149428844, 1.0815000534057617, 0.21198999881744385, -0.035585999488830566, -0.055955998599529266, 0.03258499875664711, 0.20687000453472137, 0.18624000251293182, 0.5508099794387817, -0.00026592001086100936, 0.20347000658512115, -0.12004999816417694, 0.8617900013923645, -0.7152299880981445, 0.45596998929977417, -0.06038999930024147, 0.162540003657341, -0.06796000152826309, 0.272379994392395, 0.11022000014781952, -0.03898699954152107, -0.013275000266730785, 0.2610799968242645, -0.42318999767303467, -0.6974499821662903, 
0.06792700290679932, -0.3170599937438965, 0.4312500059604645, 0.2641099989414215, -0.19794000685214996, 0.12643000483512878, 0.27358001470565796, -0.024153999984264374, -0.554669976234436, 0.3781299889087677, 0.30289000272750854, -0.05415499955415726, 0.21196000277996063, 0.2188200056552887, -0.6573100090026855, -0.7114499807357788, -0.5687299966812134, 0.29829999804496765, -0.5887799859046936, -0.5357900261878967, -0.24244000017642975, -1.333400011062622, -0.10194999724626541, 0.2649799883365631, 0.6249200105667114, -0.6280500292778015, -0.11416000127792358, -0.02203799970448017, 0.02959899976849556, -0.357699990272522, -0.4323999881744385, 0.7591599822044373, 0.5316399931907654, 0.561489999294281, -0.08079499751329422, -0.04645700007677078, -0.315310001373291, -0.4135900139808655, -0.605970025062561, 0.40018999576568604, 0.003300500102341175, -0.01563899964094162, 0.34419000148773193, 0.4146699905395508, 0.47694000601768494, 0.0731630027294159, -0.04755700007081032, -0.013125999830663204, -0.3549399971961975, -0.05705900117754936, 0.46845000982284546, -0.4762299954891205, 0.36445000767707825, -0.5850200057029724, -0.576449990272522, -0.04423699900507927, -0.180649995803833, -0.33417999744415283, -0.09279099851846695, -0.5704699754714966, -0.04216200113296509, -0.5335000157356262, 0.11591000109910965, 0.2337699979543686, -0.29603999853134155, 0.1011900007724762, -0.3671799898147583, 0.06294099986553192, 0.4457400143146515, -0.10228999704122543, 0.3844200074672699, 0.35600998997688293, 0.4993099868297577, 0.10209000110626221, 0.26774001121520996, -0.0037094999570399523, -0.48954999446868896], u'jewelry': [0.1306300014257431, 0.36125999689102173, -0.6030499935150146, 0.22869999706745148, 0.20601999759674072, 0.020987000316381454, 0.5874000191688538, -0.17427000403404236, -0.25540000200271606, -1.1065000295639038, -0.15514999628067017, -0.4785900115966797, -0.3109099864959717, 0.8440899848937988, 0.2244500070810318, -1.045799970626831, 0.09715799987316132, 0.07868599891662598, -0.25964000821113586, -0.20819999277591705, 0.37766000628471375, 0.1257999986410141, 0.009919200092554092, -0.13831999897956848, -0.15365000069141388, -0.5988799929618835, -0.31080999970436096, -0.2257000058889389, 0.20074999332427979, 0.06360699981451035, 0.3013800084590912, 0.9493100047111511, -0.10824000090360641, 0.3172900080680847, -0.523859977722168, 0.23863999545574188, -0.2738499939441681, -0.0536389984190464, 0.288129985332489, -0.3420200049877167, -0.48214998841285706, -0.7323499917984009, 0.23343999683856964, 0.3517400026321411, 0.10903999954462051, -0.23746000230312347, 0.46650999784469604, -0.4820300042629242, -0.024342000484466553, 0.4221299886703491, -0.1278800070285797, -0.4375, 0.4711199998855591, 0.2652300000190735, -0.33289000391960144, -0.6098099946975708, -1.0896999835968018, -0.07479699701070786, -0.10739000141620636, -0.4346100091934204, -0.051190998405218124, 0.22797000408172607, -0.134210005402565, -0.1062299981713295, 0.7431600093841553, 0.20521000027656555, -0.46435999870300293, -0.07148800045251846, 0.33261001110076904, -0.4952299892902374, -0.22519999742507935, -0.5933399796485901, 0.5269299745559692, -0.05869400128722191, 0.30013999342918396, -0.17092999815940857, -0.006229899823665619, -0.5732600092887878, 0.1305299997329712, -0.1936199963092804, -0.30518999695777893, -0.04757000133395195, -0.13913999497890472, 0.17422999441623688, 0.5319700241088867, -0.06374900043010712, -0.42792001366615295, -0.476500004529953, -0.2680499851703644, 0.4160600006580353, 0.11015000194311142, 
0.09224899858236313, -0.13760000467300415, -0.30000999569892883, 0.2739199995994568, 0.3496699929237366, -0.20973999798297882, -0.41005000472068787, 0.5178999900817871, -0.37498000264167786, 0.42802000045776367, 0.3428399860858917, 0.03696399927139282, 0.06679300218820572, -0.016579000279307365, -0.9123799800872803, 0.9020000100135803, -0.25582998991012573, -0.04056699946522713, -0.5602200031280518, -0.0477680005133152, 0.3990800082683563, -0.04764999821782112, 0.03881699964404106, 0.397460013628006, 0.3102799952030182, 0.23246000707149506, 0.27077001333236694, 0.2381500005722046, -0.1482200026512146, 0.5036299824714661, 0.43206000328063965, 0.24900999665260315, -0.01815900020301342, -0.24669000506401062, -0.024431999772787094, 0.24483999609947205, 0.16913999617099762, -0.05115300044417381, -0.09657800197601318, -0.3643200099468231, -0.13502000272274017, 0.01974399946630001, -0.20985999703407288, -0.6910300254821777, 0.5125899910926819, 0.37882000207901, 0.12117999792098999, -0.13072000443935394, 0.25409001111984253, 0.05915600061416626, 0.09160099923610687, 0.8875100016593933, -0.3491300046443939, 0.7531700134277344, -0.22624999284744263, -0.1257600039243698, 0.3181599974632263, -0.15046000480651855, -0.41637998819351196, 0.32868000864982605, 0.028579000383615494, 0.13181999325752258, -0.7728000283241272, -0.5662599802017212, 0.02768700011074543, -0.7124699950218201, -0.10533999651670456, -0.10805000364780426, -0.20562000572681427, 0.5954300165176392, 0.2529999911785126, 0.30160000920295715, 0.21704000234603882, 0.6567000150680542, -0.2414100021123886, 1.1448999643325806, -0.3832800090312958, 0.16322000324726105, 0.36021000146865845, -0.5404800176620483, 0.22223000228405, -0.16355000436306, 0.29381999373435974, -0.08078700304031372, 0.35853999853134155, -0.2739199995994568, 0.14328999817371368, -0.018806999549269676, -0.023365000262856483, -0.23601000010967255, -0.2488500028848648, -0.12635000050067902, 0.043101999908685684, 0.5093799829483032, 0.15498000383377075, 0.9580199718475342, 0.5866600275039673, -0.06215300038456917, -0.44881999492645264, 0.17441999912261963, -0.27215999364852905, -0.3786099851131439, 0.541379988193512, 0.17524999380111694, -0.17654000222682953, -0.5603899955749512, -0.5316500067710876, -0.1426600068807602, 0.0217289999127388, 0.42778000235557556, -0.1500599980354309, 0.1664399951696396, 0.5444599986076355, 0.6568199992179871, 0.260919988155365, -0.01643799990415573, 0.3867500126361847, -0.6648499965667725, -0.3254700005054474, 0.2874700129032135, 0.618910014629364, 0.197270005941391, 0.325980007648468, 0.2025900036096573, -0.0310830008238554, 0.48691999912261963, -0.23122000694274902, -0.27915000915527344, 0.14067000150680542, 0.08503899723291397, -0.7408900260925293, -0.0763079971075058, -0.09667400270700455, -0.4153299927711487, 0.6246200203895569, -0.5590000152587891, 0.3683300018310547, 0.7324600219726562, 0.2487799972295761, 0.3530200123786926, -0.01524799969047308, -0.2555600106716156, 0.42204999923706055, 0.3224799931049347, 0.3008100092411041, 0.629610002040863, -0.3542500138282776, -0.7005400061607361, -0.23274999856948853, 0.11314000189304352, 0.23763999342918396, -0.15445999801158905, 0.6314299702644348, 0.05350799858570099, -0.05710100010037422, -0.1659500002861023, -0.5781800150871277, 0.0641779974102974, 0.38019001483917236, 0.2993600070476532, 0.15047000348567963, 0.05538799986243248, -0.17903999984264374, 0.30987000465393066, -0.021362999454140663, -0.3151499927043915, -0.044190000742673874, 0.1999099999666214, -0.2036599963903427, 
-0.27823999524116516, -0.777970016002655, 0.053463999181985855, 0.07854799926280975, 0.29183000326156616, -0.09916900098323822, -0.04066599905490875, 0.30074000358581543, 0.030030999332666397, 0.2980000078678131, 0.2478799968957901, -0.017465999349951744, 0.17595000565052032, -0.1448100060224533, -0.36493998765945435, -0.0508279986679554, -0.9142500162124634, -0.11789000034332275, -1.3460999727249146, -0.12081000208854675, 0.5376700162887573, 0.2845900058746338, 0.02741299942135811, -0.12256000190973282, -0.2008800059556961, 0.6075800061225891, 0.015484999865293503, 0.1899300068616867, 0.06322299689054489, -0.06603100150823593, 0.28384000062942505, -0.13744999468326569, -0.3203499913215637, 0.8785499930381775, -0.1655000001192093, -0.2723900079727173, 0.6957799792289734, -0.009079400449991226, 0.5099200010299683, 0.00026298000011593103], u'lead': [0.05884300172328949, 0.40626001358032227, -0.4595099985599518, 0.18254999816417694, -0.18449999392032623, -0.42201000452041626, -0.3834899961948395, 0.3056100010871887, 0.9052799940109253, -0.9515500068664551, 0.07426200062036514, 0.011189999990165234, -0.6607099771499634, 0.051079001277685165, 0.2122800052165985, -0.010638000443577766, -0.36777999997138977, 0.03553999960422516, -0.006157999858260155, 0.3153400123119354, -0.47102999687194824, -0.4758400022983551, 0.11174999922513962, -0.0649230033159256, -0.22556999325752258, -0.19111000001430511, 0.38078001141548157, -0.24695999920368195, -0.23588000237941742, -0.3880600035190582, -0.4882600009441376, 0.01669500023126602, 0.20069999992847443, 0.014360000379383564, -1.6449999809265137, -0.21942999958992004, 0.0117790000513196, 0.23160000145435333, 0.25220999121665955, 0.21119999885559082, -0.36149001121520996, -0.3856799900531769, 0.15584999322891235, -0.2127400040626526, -0.30191001296043396, -0.30243998765945435, 0.1966100037097931, -0.02796100080013275, -0.25602999329566956, -0.2854999899864197, -0.2531599998474121, 0.30289000272750854, -0.04812699928879738, 0.18382999300956726, 0.21132999658584595, -0.0027568999212235212, 0.3531099855899811, -0.08211199939250946, 0.10299000144004822, 0.13654999434947968, 0.06295599788427353, 0.5373700261116028, 0.35069000720977783, 0.13211999833583832, 0.5590699911117554, -0.065870001912117, 0.32401999831199646, 0.14076000452041626, 0.15848000347614288, -0.2483299970626831, -0.050390999764204025, -0.026580000296235085, 0.27605000138282776, 0.24664999544620514, 0.24849000573158264, 0.11648999899625778, -0.5921599864959717, 0.22859999537467957, -0.30616000294685364, -0.5024799704551697, -0.13109000027179718, 0.4385800063610077, 0.15320999920368195, 0.5393400192260742, -0.16731999814510345, 0.19540999829769135, 0.10822000354528427, 0.21003000438213348, 0.15423999726772308, -0.3940199911594391, 0.15255999565124512, 0.34014999866485596, 0.018799999728798866, -0.6295199990272522, -0.6229599714279175, -0.0425879992544651, -0.4133400022983551, -0.16401000320911407, -0.32412999868392944, -0.5534899830818176, -0.18785999715328217, -0.05865899845957756, 0.3675900101661682, -0.18709999322891235, 0.2172199934720993, 0.0006722899852320552, -0.24684999883174896, 0.19083000719547272, -0.5116699934005737, -0.3229900002479553, 0.03315199911594391, -0.20367999374866486, -0.06321299821138382, -0.1629199981689453, 0.3175700008869171, 0.6836100220680237, 0.30202001333236694, 0.039709001779556274, 0.3765699863433838, -0.007819700054824352, -0.4314799904823303, 0.21403999626636505, 0.33105000853538513, 0.12932999432086945, -0.21080000698566437, -0.31876999139785767, 
-0.07220300287008286, 0.07077699899673462, -0.1609800010919571, -0.08305999636650085, 0.4858100116252899, 0.2960900068283081, -0.08056899905204773, -0.11048000305891037, 0.6120399832725525, 0.7713900208473206, -0.25036001205444336, -0.22107000648975372, 0.3345000147819519, 0.5723000168800354, -0.45208999514579773, -0.2608700096607208, 0.33562999963760376, 0.20430999994277954, -0.42052000761032104, -0.050703998655080795, 0.20868000388145447, -0.27333998680114746, 0.13278000056743622, -0.20381000638008118, 0.46094998717308044, 0.10307999700307846, 0.27709999680519104, -0.1665399968624115, 0.6467599868774414, 0.4934900104999542, -0.412880003452301, 0.5123199820518494, -0.0905739963054657, 0.0196749996393919, -0.33928999304771423, -0.14539000391960144, 0.21020999550819397, -0.039882998913526535, -0.0879879966378212, 0.054179999977350235, -0.09771499782800674, -0.0031568999402225018, -0.597350001335144, 0.30807000398635864, 0.0778999999165535, -0.012392999604344368, 0.14861999452114105, -0.006022999994456768, 0.0989060029387474, -0.3591499924659729, -0.5066199898719788, 0.2686600089073181, -0.13600000739097595, 0.7709599733352661, 0.06710100173950195, 0.5381900072097778, 0.5603500008583069, 0.10734999924898148, -0.5701299905776978, -0.08855800330638885, -0.8523600101470947, -0.2088800072669983, -0.13221000134944916, 0.33258000016212463, 0.29269999265670776, 0.37209001183509827, -0.08570300042629242, -0.08284900337457657, 0.32951000332832336, -0.21844999492168427, 0.49897998571395874, -0.008831200189888477, 0.42407000064849854, 0.18640999495983124, 1.0520000457763672, 0.4218200147151947, -0.0013776000123471022, -0.41284000873565674, 0.02265400066971779, 0.6196799874305725, 0.010418999940156937, -0.2724199891090393, -0.7446500062942505, 0.34727999567985535, 0.14041000604629517, 0.20009000599384308, 0.33274000883102417, 0.6069899797439575, 0.35589998960494995, 0.19325000047683716, 0.41705000400543213, -0.051628999412059784, 0.10165999829769135, -0.3548699915409088, 0.05741700157523155, -0.10975000262260437, -0.5028799772262573, -0.0044824001379311085, -0.1246500015258789, 0.2747499942779541, -0.24688999354839325, 0.47238001227378845, -0.22267000377178192, 0.1170400008559227, -0.23537999391555786, 0.2984200119972229, 0.3627699911594391, -0.0512939989566803, -0.34560999274253845, 0.3357900083065033, 0.06130199879407883, 0.18357999622821808, 0.14981000125408173, 0.31856000423431396, -0.01792600005865097, 0.14003999531269073, 0.09427999705076218, 0.039361998438835144, -0.9168699979782104, -0.04997200146317482, 0.24792000651359558, 0.4244900047779083, -0.10007999837398529, 0.1926400065422058, -0.23684999346733093, -0.28582999110221863, -0.2364799976348877, -0.265500009059906, 0.7471699714660645, -0.06415999680757523, 0.28110000491142273, -0.3055199980735779, -0.28244999051094055, 0.2665500044822693, -0.09155400097370148, -0.6607999801635742, -0.02215700037777424, 0.2267400026321411, -0.37380000948905945, 0.14670999348163605, 0.4151900112628937, -0.060839999467134476, 0.23746000230312347, -0.7507299780845642, 0.2937299907207489, 0.22684000432491302, -0.19437000155448914, 0.18723000586032867, 0.19701999425888062, -0.2848599851131439, -1.3188999891281128, -0.4140099883079529, 0.23255999386310577, 0.12791000306606293, 0.09021099656820297, 0.3940199911594391, -0.07093299925327301, 0.4253700077533722, 0.24651999771595, 0.04112299904227257, -0.43007001280784607, 0.3092600107192993, 0.13289999961853027, 0.1844799965620041, -0.5990899801254272, -0.17847999930381775, -0.6263200044631958, 0.6103399991989136, 
0.10062000155448914, 0.6920099854469299, -0.262470006942749, -0.7986099720001221, -0.46720001101493835, -0.25231000781059265], u'garage': [0.1714099943637848, -0.024204999208450317, -0.6056900024414062, -0.05585800111293793, 0.15432000160217285, -0.10322000086307526, 0.24619999527931213, 0.1661600023508072, 0.03523299843072891, -0.162540003657341, -0.047262001782655716, -0.020997999235987663, 0.736810028553009, 0.3774600028991699, 0.08424399793148041, 0.14032000303268433, -0.34095001220703125, -0.39838001132011414, -0.17810000479221344, 0.47067001461982727, 0.48820000886917114, 0.707610011100769, 0.054388001561164856, 0.041127000004053116, -0.18807999789714813, -0.21236999332904816, -0.011106000281870365, 0.4941500127315521, -0.1103999987244606, -0.25161999464035034, -0.2625100016593933, 0.11259999871253967, -0.11578000336885452, 0.17632000148296356, 0.029349999502301216, 0.5141100287437439, -0.48100998997688293, -0.32615000009536743, -0.16512000560760498, 0.047143999487161636, -0.0764160007238388, 0.2790899872779846, -0.32447001338005066, 0.13405999541282654, 0.5200300216674805, 0.5521399974822998, 0.5616899728775024, -0.049573998898267746, -0.25189000368118286, -0.4069100022315979, 0.13824999332427979, -0.35089999437332153, -0.15306000411510468, 0.2458599954843521, 0.33228999376296997, 0.03685100004076958, 0.04027299955487251, 0.24772000312805176, 0.000771439983509481, -0.3520199954509735, 0.05708400160074234, 0.24406999349594116, 0.1657799929380417, 0.2578200101852417, -0.126010000705719, -0.342629998922348, 0.33164000511169434, -0.24602000415325165, -0.2376600056886673, -0.2934899926185608, -0.30776000022888184, -0.2028300017118454, -0.09194900095462799, 0.40834999084472656, -0.8328999876976013, -0.057133998721838, -0.45809000730514526, -0.46604999899864197, 0.5087299942970276, -0.44130998849868774, -0.09651000052690506, 0.5616199970245361, 0.4296000003814697, -0.07686000317335129, -0.07923799753189087, -0.2331400066614151, 0.5392600297927856, 0.7534599900245667, -0.14530999958515167, 0.385560005903244, 0.9824900031089783, 0.43682000041007996, 0.4181300103664398, 0.07318700104951859, 0.03104100003838539, -0.3937700092792511, 0.013206999748945236, -0.6934599876403809, 0.4927099943161011, -0.6530100107192993, -0.12918999791145325, 0.19480000436306, 0.04659700021147728, -0.10897000133991241, 0.16000999510288239, -0.2090200036764145, 0.17044000327587128, 0.10763999819755554, -0.13662000000476837, -0.21734000742435455, -0.1492999941110611, -0.32300999760627747, 0.21521000564098358, 0.19824999570846558, 0.054558999836444855, 0.10277999937534332, -0.264490008354187, -0.1457500010728836, -0.5940300226211548, -0.13702000677585602, 0.16128000617027283, -0.1956000030040741, 0.33959001302719116, -0.3929400146007538, -0.2252500057220459, -0.04411400109529495, -0.3234800100326538, -0.41936999559402466, 0.23905999958515167, 0.24868999421596527, 0.8230699896812439, -0.017062000930309296, 0.6257299780845642, -0.19616000354290009, 0.23799000680446625, 0.12500999867916107, 0.13738000392913818, -0.15175999701023102, -0.48906001448631287, -0.14535999298095703, 0.10825999826192856, -0.21379999816417694, 0.30741000175476074, -0.03166399896144867, -0.17560000717639923, 0.07098899781703949, 0.04395899921655655, 0.07978100329637527, 0.09325499832630157, -0.00150090001989156, 0.005167100112885237, 0.6240299940109253, -0.1647000014781952, -0.9689800143241882, 0.3042300045490265, 0.11366000026464462, 0.07059799879789352, 0.42798998951911926, 0.2883799970149994, 0.32809001207351685, 0.09754899889230728, 
[Word-embedding data: a Python dictionary mapping each word to a fixed-length vector of floating-point values, on the order of 300 dimensions per entry. This span carries the tail of a preceding entry followed by the full vectors for u'armor', u'vacuum', u'granite', u'ice', u'mud', u'floor', u'branch', u'cloud', u'iguana', u'chains', and the beginning of the vector for u'nest'.]
-0.33675000071525574, -0.24993999302387238, -0.683899998664856, -0.02543500065803528, -0.21737000346183777, 0.12335000187158585, -0.2959499955177307, 0.20689000189304352, 0.04068699851632118, 0.8250899910926819, -0.0834520012140274, 0.04593899846076965, 0.4104999899864197, -0.10537999868392944, 0.18651999533176422, -0.34606999158859253, 0.5993000268936157, 0.4919700026512146, 0.43891000747680664, 0.07271099835634232, -0.49706000089645386, -0.041708000004291534, -0.20948000252246857, 0.34417998790740967, -0.6905400156974792, 1.0917999744415283, 0.30921000242233276, -0.4926300048828125, 0.6374499797821045, -0.31088998913764954, -0.7717800140380859, 0.31696999073028564, -0.18337999284267426, -0.7800800204277039, -0.15744000673294067, -0.06453099846839905, 0.1714099943637848, -0.212459996342659, -0.43105000257492065, -0.36296001076698303, 0.20577000081539154, 0.20934000611305237, 0.23071999847888947, 0.3206399977207184, 0.04117799922823906, -0.3618299961090088, 0.4102500081062317, 0.4647600054740906, 0.06252200156450272, -0.5038999915122986, 0.3426400125026703, -0.17576000094413757, -0.3955700099468231, 0.4538699984550476, 0.24706000089645386, -0.6591600179672241, 0.09693899750709534, -0.47088000178337097, 0.20804999768733978, 0.0304540004581213, 0.17077000439167023, -0.24529999494552612, -0.46601998805999756, 0.15615999698638916, -0.5669000148773193, 0.4819299876689911, 0.3531799912452698, 0.5375099778175354, -0.12809999287128448, -0.1941699981689453, -0.2227499932050705, -0.5197399854660034, 0.48017001152038574, 1.042199969291687, 0.1911499947309494, 0.23739999532699585, -0.3462199866771698, 0.1341799944639206, -0.1391099989414215, -0.22678999602794647, -0.22968000173568726, -0.001601099967956543, -0.23662999272346497, 0.02943499945104122, 0.4064500033855438, -0.3524099886417389, -0.3230000138282776, 0.008310999721288681, -0.513260006904602, 0.18220999836921692, 0.0603410005569458, -0.1184300035238266, -0.598829984664917, -0.07903899997472763, -0.37882000207901, -0.17836999893188477, 0.7587400078773499, -0.0016246000304818153, -0.19062000513076782, -0.14002999663352966, -0.4812000095844269, 0.7283599972724915, -0.5143399834632874, -0.22599999606609344, 0.23051999509334564, -0.4779199957847595, 0.27241000533103943, 0.13561999797821045, -0.13104000687599182, 0.1630599945783615, -0.38023999333381653, -0.09384600073099136, 0.2779400050640106, 0.5236200094223022, -0.15189999341964722, 0.46724000573158264, -0.11416999995708466, -0.21001000702381134, 0.10232000052928925, -0.2283799946308136, 0.19434000551700592, 0.10486000031232834, -0.24955999851226807, 0.03376200050115585, 0.006175700109452009, 0.2855699956417084, -0.06672800332307816, -0.13595999777317047, 0.25496000051498413, -0.2826499938964844, 0.7601699829101562, -0.48899999260902405, 0.4896799921989441, -0.002595700090751052, -0.7728599905967712, -0.10496000200510025, 0.6682199835777283, 0.018830999732017517, 0.1607300043106079, -0.17112000286579132, 0.5369600057601929, 0.1837099939584732, -0.09180399775505066, -0.9158400297164917, -0.659850001335144, 0.09421399980783463, 0.13874000310897827, -0.45353999733924866, 0.8604199886322021, 0.0591839998960495, 0.7219899892807007, -0.17911000549793243, -0.48087000846862793, -0.34880998730659485, 0.060051001608371735, -0.07364899665117264, -0.04464799910783768, -1.287500023841858, 0.4074000120162964, 0.5521900057792664, 0.4685100018978119, -0.2223999947309494, 0.3322499990463257, -0.5056599974632263, 0.2946999967098236, -0.019985999912023544, -0.12358000129461288, -0.04463199898600578, 
-0.04339899867773056, -0.5175099968910217, 0.43518000841140747, 0.4973300099372864, 0.2813299894332886, -0.08029799908399582, -0.7994700074195862, -0.31057998538017273, 0.3220300078392029, -0.052404001355171204, 0.31301000714302063, 0.17555999755859375, 0.155799999833107, 0.15418000519275665, -0.0469370000064373, 0.5823799967765808, -0.44975998997688293, 0.5697299838066101, -0.4175199866294861, 0.34578999876976013, -0.38515999913215637, 0.04147600010037422, 0.43985000252723694, -0.6727499961853027, -0.41484999656677246, 0.2013700008392334, -1.311900019645691, 0.015514999628067017, 0.39392000436782837, -0.3557099997997284, -0.27059000730514526, -0.7345399856567383, 0.09599000215530396, 0.8202199935913086, 0.0860389992594719, -0.015800999477505684, 0.14688000082969666, 0.1328900009393692, 0.7006300091743469, -0.023699000477790833, 0.5356400012969971, 0.03341199830174446, -0.3743799924850464, -0.31946998834609985, -0.20533999800682068, -0.4508500099182129, 0.15902000665664673, -0.055456001311540604, -0.27414000034332275, -0.8453599810600281, -0.07082299888134003, 0.35712000727653503, -0.12015999853610992, 0.09702000021934509, 0.18254999816417694, -0.08364299684762955, -0.4338800013065338, -0.2071399986743927, -0.6868699789047241, 0.4139400124549866, -0.4482699930667877, -0.6610400080680847, -0.3693099915981293, 0.024312999099493027, -0.14268000423908234, -0.3865100145339966, -1.174399971961975, 0.49616000056266785, 0.4256500005722046, 0.8597099781036377, -0.3688499927520752, -0.3164899945259094, 0.17949999868869781, -0.2540700137615204, 0.19107000529766083, 0.15731999278068542, -0.4711900055408478, -0.20909999310970306, 0.2623099982738495, 0.09077899903059006, -0.23615999519824982, -0.2835400104522705], u'highway': [-0.4509199857711792, -0.5975199937820435, 0.12269999831914902, 0.044821999967098236, -0.18129000067710876, -0.3327000141143799, -0.07025499641895294, 0.10857000201940536, -0.1485999971628189, -0.7808499932289124, -0.999970018863678, 0.08012499660253525, -0.5044400095939636, 0.6087899804115295, 0.6427299976348877, 0.25602999329566956, -0.3952000141143799, -0.265720009803772, 0.17095999419689178, 0.07826200127601624, -0.5529500246047974, 0.49807000160217285, 0.23019999265670776, 0.20634999871253967, -0.6031200289726257, 0.21413999795913696, 0.031560998409986496, -0.11169999837875366, 0.14757999777793884, 0.21793000400066376, 0.7793300151824951, 0.6076400279998779, -0.34213998913764954, 0.39625999331474304, 0.4099400043487549, 0.23972000181674957, -0.2891699969768524, 0.0007338299765251577, -0.019917000085115433, -0.6383500099182129, -0.49834001064300537, -0.15390999615192413, -1.1538000106811523, -0.08235800266265869, 0.486050009727478, 0.002191399922594428, 0.48423001170158386, 0.40591999888420105, -0.2768099904060364, -0.037742000073194504, -0.6980800032615662, -0.11156000196933746, -0.7799500226974487, 0.0008801899966783822, 0.3159500062465668, 0.09310699999332428, -0.05854500085115433, -0.47143998742103577, 0.23383000493049622, -0.19166000187397003, -0.2726300060749054, 0.023087000474333763, 0.11437000334262848, -0.36542001366615295, 0.4406299889087677, 0.46404001116752625, -0.3873499929904938, 0.259550005197525, 0.5043699741363525, 0.15657000243663788, -0.27230000495910645, 0.1335500031709671, 0.19381999969482422, 0.5377900004386902, -0.7514100074768066, 0.27553999423980713, -0.09862300008535385, 0.5265499949455261, 0.18459999561309814, -0.3831300139427185, 0.07162000238895416, -0.2474599927663803, 0.3570300042629242, -0.13997000455856323, -0.13745999336242676, 
-0.5268099904060364, -0.5270900130271912, 0.5553299784660339, 0.9716200232505798, -0.12167999893426895, 0.4301399886608124, 0.41962000727653503, 0.3989799916744232, -0.6904100179672241, -0.1792600005865097, 0.4898500144481659, 0.036400001496076584, -0.2068600058555603, -0.1825300008058548, -0.0536159984767437, 0.27946001291275024, 0.37463998794555664, 0.754360020160675, 0.08789099752902985, -0.07469099760055542, 0.35297998785972595, 0.25196999311447144, 0.46408000588417053, -0.034040000289678574, 0.023375000804662704, -0.31972000002861023, -1.3485000133514404, 0.35653001070022583, -0.18062999844551086, -0.10882999747991562, 0.038839999586343765, 0.297789990901947, 0.11853999644517899, 0.20151999592781067, 0.25797000527381897, 0.20570999383926392, 0.414029985666275, -0.0019537999760359526, -0.858020007610321, -0.4628399908542633, -0.07392100244760513, -0.35220998525619507, -0.9417700171470642, -0.2526400089263916, 0.18886999785900116, 0.44562000036239624, 0.30761000514030457, 0.6077399849891663, 0.16759000718593597, 0.4138199985027313, -0.3916800022125244, -0.1271899938583374, 0.2730099856853485, 0.5665500164031982, -0.5044800043106079, -0.24630999565124512, 0.44374001026153564, -0.1515900045633316, 0.37130001187324524, -0.7531800270080566, -0.007116499822586775, 0.6103600263595581, -0.13416999578475952, -0.14298999309539795, -0.020320000126957893, 0.9328600168228149, 0.3550499975681305, 0.1485700011253357, -0.323419988155365, 1.0928000211715698, 0.06319800019264221, 0.517799973487854, -0.40272000432014465, -0.4822399914264679, -0.1994200050830841, 0.4781399965286255, -0.3876200020313263, -0.27695000171661377, -0.7289000153541565, -0.02491999976336956, -0.26210999488830566, 0.15756000578403473, -0.2584100067615509, -0.30153000354766846, -0.3022199869155884, -0.10789000242948532, -1.1194000244140625, 0.6294800043106079, 0.6627699732780457, -0.0988750010728836, -0.1096000000834465, -0.3579399883747101, -0.6220200061798096, 0.058219000697135925, -0.052570000290870667, -0.2518700063228607, 0.12219999730587006, 0.06391000002622604, 0.37345001101493835, 0.38988998532295227, -0.48673000931739807, 0.36368998885154724, -0.16513000428676605, 0.019509000703692436, -0.28033000230789185, 0.1509999930858612, 0.5382699966430664, -0.4561600089073181, -0.15230000019073486, -0.7528300285339355, 0.0531810000538826, 0.44679999351501465, -0.8669899702072144, 0.9319400191307068, 0.10339000076055527, 0.8925399780273438, -0.489980012178421, 0.5393099784851074, -0.3620299994945526, -0.002924799919128418, -0.22005000710487366, -0.17760999500751495, -0.49136000871658325, 1.1779999732971191, 0.33274999260902405, -0.4966599941253662, -0.2953000068664551, 0.09868700057268143, -0.27889999747276306, -0.15017999708652496, 0.12049999833106995, -0.4614900052547455, 0.4933199882507324, -0.08850699663162231, -0.09845300018787384, 0.5527399778366089, -0.17059999704360962, 0.19478000700473785, -0.18029999732971191, 0.6374300122261047, 0.27838000655174255, -0.06010900065302849, -0.7336400151252747, 0.013508999720215797, -0.22147999703884125, -0.3928700089454651, -0.16527999937534332, -0.20361000299453735, -0.5006899833679199, 0.45530998706817627, -0.01464799977838993, -0.18150000274181366, 0.19043000042438507, 0.11569999903440475, -0.23743000626564026, 0.2624399960041046, -0.2945699989795685, 0.3085399866104126, -0.4240100085735321, -0.52183997631073, 0.006982299964874983, 0.717989981174469, -0.3336400091648102, 0.2644599974155426, -0.16311000287532806, -0.20297999680042267, -0.5442299842834473, 0.4683600068092346, 
-0.2248300015926361, 0.024741999804973602, -0.6288700103759766, -0.47157999873161316, 0.7205100059509277, -0.08332599699497223, 0.0317780002951622, 0.30254998803138733, -0.2622300088405609, 0.20663000643253326, 0.020966000854969025, 0.3898099958896637, -0.01621999964118004, 0.641759991645813, -0.8989099860191345, 0.1356000006198883, -0.19892999529838562, 0.5958700180053711, -0.12365999817848206, 0.05283199995756149, 0.19749000668525696, 0.3980500102043152, 0.506600022315979, -1.3803999423980713, 0.13262000679969788, 0.33847999572753906, 0.09898199886083603, 0.5319399833679199, -0.49830999970436096, 1.038100004196167, -0.6497499942779541, -0.6150699853897095, 0.6145300269126892, 0.3474099934101105, -0.5671200156211853, 0.08579400181770325, -0.052344001829624176, 0.3825699985027313, -0.40296998620033264, -0.06474599987268448, 0.3631399869918823, -0.026074999943375587, 0.5799800157546997, -0.2934400141239166, -0.0728600025177002, 0.047582998871803284, 0.01569399982690811], u'pants': [-0.3399200141429901, -0.3626599907875061, -0.19109000265598297, 0.3065600097179413, -0.061211999505758286, -0.03582699969410896, -0.4848400056362152, -0.3382500112056732, -0.1002499982714653, -0.6689800024032593, -0.14936000108718872, 0.028745999559760094, -0.18091000616550446, 0.4794600009918213, -0.39702001214027405, 0.5224800109863281, 0.1335500031709671, -0.15998999774456024, -0.2439499944448471, -0.08598700165748596, -0.050652001053094864, 0.3833500146865845, 0.3378700017929077, -0.4655199944972992, -0.737309992313385, -0.4335399866104126, 0.8483999967575073, -0.73403000831604, 0.21084000170230865, 0.3668699860572815, 0.019217999652028084, -0.2752699851989746, -0.03343300148844719, -0.16163000464439392, -0.6180599927902222, 0.7047500014305115, -0.3506599962711334, 0.04406199976801872, 0.30660000443458557, 0.5641899704933167, -0.44661998748779297, -0.7345499992370605, -0.42809000611305237, -0.4837400019168854, 0.3272800147533417, 0.2901799976825714, 0.7362099885940552, -0.4998700022697449, -0.4500899910926819, 0.1516599953174591, -0.075033999979496, -0.3907400071620941, 0.16373999416828156, -0.31725001335144043, -0.20202000439167023, 0.11554999649524689, -0.42388999462127686, -0.8143200278282166, 0.08569099754095078, 0.19509999454021454, 0.08534900099039078, -0.3431299924850464, -0.3114300072193146, 0.1356399953365326, -0.17637999355793, -0.37567999958992004, -0.2003999948501587, 0.15223999321460724, -0.16859999299049377, 0.034341000020504, 0.5723000168800354, -0.12560999393463135, 0.13083000481128693, -0.04727799817919731, 0.34376999735832214, 0.04594700038433075, -0.06543199717998505, 0.2503100037574768, -0.12309999763965607, -0.8871999979019165, -0.19074000418186188, 0.4554400146007538, -0.4736199975013733, -0.09618599712848663, -0.19937999546527863, 0.400409996509552, 0.5511199831962585, 0.17461000382900238, -0.2845500111579895, 0.1256600022315979, -0.14077000319957733, 0.25453001260757446, -0.02278600074350834, 0.08260499686002731, 0.09240499883890152, 0.20453999936580658, 0.12995000183582306, 0.6599699854850769, 0.37773001194000244, 0.08169899880886078, 0.30160999298095703, 0.9395300149917603, -0.14208999276161194, 0.4401499927043915, -0.8134099841117859, -0.0394430011510849, 0.3045699894428253, 0.12946000695228577, -0.13172000646591187, -0.25828999280929565, -0.5732700228691101, 0.27893999218940735, -0.08608400076627731, 0.011264000087976456, -0.05420000106096268, 0.3541100025177002, -0.009279600344598293, 0.27059999108314514, 0.3201799988746643, -0.6868500113487244, -0.041099000722169876, 
-0.11073999851942062, 1.2164000272750854, 0.16767999529838562, -0.055897001177072525, 0.04774999991059303, 0.2761099934577942, 0.14294999837875366, 0.40832000970840454, 0.15841999650001526, -0.04703500121831894, -0.298550009727478, -0.6035100221633911, 0.19469000399112701, -0.16869999468326569, 0.06226800009608269, -0.47692999243736267, 0.09698499739170074, -0.00345359998755157, 0.03307399898767471, -0.23718999326229095, -0.4069100022315979, 0.1533699929714203, -0.46097999811172485, -0.16798000037670135, 0.2867699861526489, -0.7132400274276733, 0.09741099923849106, 1.1691999435424805, 0.08755999803543091, 0.0004632700001820922, 0.19585999846458435, 0.14538000524044037, -0.7311599850654602, 0.24199999868869781, -0.46261000633239746, 0.22924000024795532, 0.013167000375688076, 0.4755600094795227, 0.3686999976634979, 0.24817000329494476, -0.7726399898529053, -0.5282899737358093, -0.02626200020313263, 0.24883000552654266, -0.6965000033378601, 0.015282000415027142, 1.3006999492645264, 0.26072999835014343, 0.06730099767446518, 0.08557800203561783, 0.2665799856185913, -0.5307300090789795, 0.6076400279998779, -0.09575200080871582, -0.18985000252723694, 0.11133000254631042, 0.4707300066947937, 0.1131799966096878, -0.2312300056219101, 0.19315999746322632, 0.1494700014591217, 0.22078999876976013, 0.15403999388217926, 0.4726400077342987, -0.39430001378059387, 1.0069999694824219, 0.5279499888420105, 0.4073599874973297, -0.02179500088095665, 0.6087899804115295, 0.13681000471115112, -0.05164400115609169, 0.4097299873828888, -0.30788999795913696, -0.02817700058221817, -0.7759400010108948, -0.2763800024986267, -0.38086000084877014, -0.03829000145196915, 0.7557700276374817, 0.1630299985408783, 0.9860100150108337, 0.46904999017715454, 0.5394200086593628, 0.14198000729084015, 0.35370001196861267, 0.4687199890613556, -0.6852200031280518, -0.4367299973964691, -0.2653700113296509, -0.7123399972915649, -0.36239999532699585, 0.925819993019104, 0.8626599907875061, -0.07756900042295456, 0.3952600061893463, -0.6556100249290466, 0.3015100061893463, -0.9024800062179565, 0.5285000205039978, 0.08949600160121918, 0.3486500084400177, 0.4103100001811981, 0.19544999301433563, -0.30636000633239746, 0.020969999954104424, -0.2678700089454651, -0.5612900257110596, -0.45032998919487, 0.6909300088882446, -0.06911800056695938, -0.2698499858379364, 0.11289999634027481, 0.7720699906349182, -0.37411001324653625, 0.2751699984073639, -0.11638999730348587, -0.5840299725532532, 0.6394100189208984, 0.5990599989891052, -0.09331099689006805, -0.5723599791526794, 0.5740100145339966, -0.16082000732421875, 0.36643001437187195, -0.43292999267578125, -0.5491999983787537, 0.34790000319480896, -0.2795400023460388, -0.5593699812889099, -0.16349999606609344, 0.1420000046491623, -0.404229998588562, 0.4237000048160553, 0.06724599748849869, -0.188960000872612, 0.6046299934387207, -0.3546600043773651, -0.43022000789642334, 0.361160010099411, -0.6012899875640869, 0.08138500154018402, -0.031081000342965126, -0.36348000168800354, -0.08777499943971634, -0.5635600090026855, 0.24898000061511993, -0.930079996585846, 0.2950499951839447, 0.024757999926805496, -0.18133999407291412, -0.18905000388622284, -0.02982199937105179, -0.45364999771118164, -0.4007900059223175, -0.490119993686676, -0.08015900105237961, -0.7790899872779846, 0.38659998774528503, 0.40720000863075256, 0.14036999642848969, 0.5565999746322632, 0.26116999983787537, -0.5244600176811218, 0.37852001190185547, -0.529259979724884, 0.24800999462604523, -0.4345000088214874, -0.48517999053001404, 
-0.1941699981689453, 0.3940500020980835, -0.06754899770021439, 0.9402199983596802, -0.4588499963283539, -0.36030998826026917, 0.22881999611854553, 0.3225800096988678, 0.6101999878883362, -0.15681999921798706], u'cord': [-0.24864999949932098, 0.3299199938774109, -0.5297899842262268, -0.09530699998140335, -0.24772000312805176, -0.03909900039434433, -0.6102100014686584, -0.830780029296875, -0.301580011844635, -0.8588100075721741, -0.33228999376296997, 0.22620999813079834, 0.587939977645874, -0.2147500067949295, 0.11657000333070755, 0.4295499920845032, -0.9771900177001953, 0.04922199994325638, -0.47635000944137573, 0.41058000922203064, -0.16554999351501465, -0.6871899962425232, -0.5161299705505371, -0.10068999975919724, -0.20369000732898712, 0.32401999831199646, 0.06427200138568878, -0.6664299964904785, 0.18703000247478485, 0.3870700001716614, -0.7597399950027466, 0.35493001341819763, 0.2137099951505661, -0.08628900349140167, 0.14688999950885773, -0.42590999603271484, 0.46452999114990234, 0.3422499895095825, 0.3147599995136261, 0.6021100282669067, -0.12647999823093414, -0.20839999616146088, -0.2764599919319153, -1.0609999895095825, -0.025637999176979065, 0.2190299928188324, 0.2804099917411804, -0.20464999973773956, -0.21528999507427216, 0.012152000330388546, -0.22046999633312225, 0.47617998719215393, 0.004209999926388264, 0.29646000266075134, -0.1607999950647354, -0.7165899872779846, -0.40845000743865967, -0.24969999492168427, -1.1507999897003174, 0.6179500222206116, -0.08275800198316574, 0.6838499903678894, 0.3486100137233734, 0.019113000482320786, 0.970579981803894, 0.05671299993991852, -0.088748998939991, 0.4836600124835968, -0.10006999969482422, 0.49132001399993896, -0.04460800066590309, -0.38558000326156616, 0.2552500069141388, 0.8979499936103821, 0.2567799985408783, -0.33557000756263733, -0.06754700094461441, -0.3551500141620636, -0.1276800036430359, -0.31380999088287354, 0.8993300199508667, -0.12511000037193298, 0.398470014333725, 0.08698400110006332, -0.7666900157928467, 0.33588001132011414, -0.3947100043296814, 0.3236199915409088, -0.3588699996471405, 0.7911800146102905, 0.23235000669956207, 0.12890000641345978, 0.29541000723838806, -0.2922700047492981, 0.7346199750900269, -0.2928999960422516, 0.1775899976491928, 0.7889999747276306, 0.7107899785041809, -0.7410600185394287, 0.3976899981498718, -0.20630000531673431, 0.1265300065279007, -0.2634899914264679, 0.4997999966144562, 0.29815998673439026, 0.1407500058412552, -0.15360000729560852, -0.7604900002479553, 0.36667001247406006, -0.606939971446991, 0.576200008392334, -0.11445999890565872, -0.1962900012731552, 0.13294999301433563, 0.3315899968147278, 0.012381000444293022, 0.14113999903202057, 0.213469997048378, 0.15331000089645386, -0.22657999396324158, -0.49667999148368835, 0.48267999291419983, 0.04088500142097473, 0.30094000697135925, -0.30577000975608826, -0.6141300201416016, -0.40108001232147217, -0.43830999732017517, 0.3979800045490265, 0.9110000133514404, 0.2332800030708313, 0.14172999560832977, -0.24212999641895294, -0.6077499985694885, -0.22280000150203705, 0.44036000967025757, 0.20385999977588654, -0.2087700068950653, -0.08657699823379517, 0.1519699990749359, 0.3779599964618683, 0.536899983882904, -0.4737899899482727, 0.4300900101661682, 0.41231000423431396, -0.585669994354248, -1.1030000448226929, 0.07473199814558029, 0.48539999127388, -0.5699099898338318, -0.28551000356674194, 0.17903999984264374, -0.30066999793052673, 0.013120000250637531, 0.11623000353574753, 1.021399974822998, 0.1792300045490265, -0.3674300014972687, 
0.7610599994659424, -0.30724000930786133, -0.42271000146865845, 0.06936299800872803, -0.02635199949145317, 0.7444499731063843, 0.08062300086021423, -0.09613599628210068, -0.7997999787330627, 0.2893500030040741, 0.4618000090122223, -0.17059999704360962, 0.4316500127315521, -0.1687999963760376, 0.44266998767852783, 0.084757000207901, -0.7951200008392334, -0.1941699981689453, 0.29971998929977417, 0.08904200047254562, -0.3711400032043457, 0.9644700288772583, -0.09007900208234787, 0.02833699993789196, -0.01935099996626377, -0.2826800048351288, 0.04558800160884857, 0.6031000018119812, -0.34665998816490173, -0.1344199925661087, -0.04288100078701973, 0.13165999948978424, 0.6812099814414978, -0.01824299991130829, -0.048151999711990356, 0.12225999683141708, 0.21367000043392181, 0.1801699995994568, -0.4775800108909607, -0.17903999984264374, 0.23824000358581543, 0.19067999720573425, 0.46720001101493835, 0.5825700163841248, 0.025080999359488487, 0.10392999649047852, 0.4314900040626526, -0.14191000163555145, 0.6472399830818176, -0.3689900040626526, 0.17893999814987183, 0.41541001200675964, -0.2875699996948242, -0.11556000262498856, -0.010889999568462372, 0.3436700105667114, 0.09372500330209732, 0.1681399941444397, -0.7315000295639038, -0.15435999631881714, -0.4636099934577942, -0.12444999814033508, -0.9769399762153625, 0.1603900045156479, -0.2079000025987625, 0.40015000104904175, 0.3882400095462799, -0.7297300100326538, 0.08138000220060349, 0.16651000082492828, -0.38065001368522644, 0.07708299905061722, -0.05558599904179573, -0.18748000264167786, 0.12943999469280243, -0.03918499872088432, -0.22487999498844147, -0.0689229965209961, 0.07273600250482559, 0.001776800025254488, -0.43108001351356506, 0.6598899960517883, -0.4712100028991699, 0.13037000596523285, 0.180759996175766, -0.6986600160598755, 0.3875199854373932, 0.1396999955177307, -0.02223300002515316, -0.04366200044751167, -0.36726000905036926, 0.020387999713420868, 0.7383300065994263, -0.27742999792099, -1.24399995803833, 0.5054299831390381, 0.09108000248670578, -0.0865359976887703, 0.2034199982881546, 0.46292001008987427, 0.10823000222444534, 0.31084001064300537, -0.7923200130462646, 0.06381099671125412, 0.5147500038146973, 0.4559600055217743, 0.20930999517440796, 0.3718000054359436, 0.07374799996614456, -0.3856799900531769, 0.48816001415252686, -0.45945000648498535, -0.1802700012922287, -0.04652699828147888, -0.026645999401807785, -0.7184799909591675, -0.24145999550819397, -0.6974300146102905, -0.18761999905109406, -0.5015000104904175, 0.22967000305652618, -0.4561600089073181, 0.044454000890254974, 0.4328100085258484, -0.039009999483823776, -0.515250027179718, 0.3909499943256378, -0.42715001106262207, 0.055987000465393066, -0.31387001276016235, -0.18472999334335327, 0.1584399938583374, -0.3763900101184845, 0.2688499987125397, -0.2814500033855438, 0.008802900090813637, -0.03647699952125549, 0.35940998792648315, 0.31446000933647156, 0.8064299821853638, -0.316210001707077], u'cabinet': [0.15560999512672424, 0.29030999541282654, -0.1756500005722046, -0.17847999930381775, 0.15143999457359314, 0.05660400167107582, 0.0073401001282036304, -0.3377799987792969, -0.4787600040435791, -1.74590003490448, -0.7050399780273438, -0.053217001259326935, -0.386680006980896, 0.20066000521183014, 0.0716560035943985, 0.07966700196266174, 0.09726399928331375, -0.20555000007152557, 0.6959800124168396, 0.1604200005531311, 0.14970000088214874, 0.080144003033638, 0.4943299889564514, -0.39820000529289246, -0.25290998816490173, -0.14811000227928162, -0.1915999948978424, 
-0.2764900028705597, 0.39928001165390015, 0.09875699877738953, -0.17671999335289001, 0.00793640036135912, 0.2355699986219406, 0.5211600065231323, -0.5923500061035156, 0.17403000593185425, 0.4692699909210205, 0.11407999694347382, -0.39941999316215515, 0.10713999718427658, -0.27083998918533325, 0.681119978427887, 0.062015000730752945, -0.3264400064945221, -0.46015000343322754, -0.12500999867916107, -0.7040500044822693, -0.9616699814796448, -0.2704299986362457, 0.03281699866056442, -0.5166599750518799, 0.21303999423980713, -0.4388499855995178, -0.11772999912500381, -0.6096699833869934, -0.0910160019993782, 0.001853400026448071, -0.026218999177217484, -0.19423000514507294, 0.7198200225830078, -0.09395500272512436, 0.3318899869918823, -0.737339973449707, 0.22561000287532806, 0.13175000250339508, -0.9971399903297424, 0.3010300099849701, -0.17732000350952148, -0.5843600034713745, 0.25325000286102295, 0.2238599956035614, 0.16654999554157257, 0.1426600068807602, -0.16208000481128693, 0.5889599919319153, -0.008844099938869476, -0.3717299997806549, -0.007860800251364708, -0.09458599984645844, -0.018528999760746956, -0.19228999316692352, 0.2438499927520752, 0.10474000126123428, 0.4465799927711487, 0.8977000117301941, 0.004924700129777193, -0.37643998861312866, -0.07427900284528732, -0.19547000527381897, -0.1546899974346161, -0.45489001274108887, -0.34711000323295593, -0.12671999633312225, 0.15443000197410583, -0.18637999892234802, -0.295879989862442, -0.3280999958515167, -0.4374600052833557, 0.44343000650405884, -0.6559100151062012, -0.4477800130844116, -0.06641799956560135, 0.35486000776290894, -0.27893000841140747, -0.23492999374866486, -0.08145900070667267, -0.3066999912261963, -0.28022998571395874, 0.31512999534606934, 0.12255000323057175, 0.5340800285339355, 0.27684998512268066, -0.3008500039577484, -0.5284000039100647, -0.01430600043386221, 0.6962400078773499, 0.08408199995756149, -0.6604099869728088, 0.19555999338626862, -0.7281100153923035, 0.1013299971818924, -0.07909499853849411, -0.7322900295257568, -0.33285000920295715, -0.20788000524044037, -0.04591900110244751, 0.0282600000500679, 0.412990003824234, 0.2968299984931946, 0.2332099974155426, -0.26440998911857605, -0.4272899925708771, 0.3736799955368042, -0.07227200269699097, 0.5138099789619446, 0.22228999435901642, -0.2108599990606308, 0.06628300249576569, 0.33671998977661133, 0.2281000018119812, 0.3356800079345703, 0.7344800233840942, -0.13328999280929565, 0.27726998925209045, -0.1011200025677681, 0.14753000438213348, 0.5641700029373169, 0.251800000667572, 0.2823199927806854, -0.35635000467300415, 0.9232100248336792, 0.29875999689102173, 0.16011999547481537, -0.0337349995970726, -0.021165000274777412, 0.416130006313324, -0.01312199980020523, -0.05900900065898895, 0.25224998593330383, -0.016919000074267387, -0.2878299951553345, 0.5735700130462646, -0.07199200242757797, 0.35468000173568726, -0.4520699977874756, 0.24511000514030457, -0.05984000116586685, 0.4426000118255615, -0.04325899854302406, 0.03365299850702286, -0.24437999725341797, 0.15074999630451202, -0.5023900270462036, -0.3338199853897095, -0.2589400112628937, 0.29934999346733093, -0.17038999497890472, 0.01133199967443943, 0.03790200129151344, 0.11102999746799469, 0.05024600028991699, 0.35141998529434204, 0.34786999225616455, 0.21119999885559082, 0.048158999532461166, 0.10401999950408936, -0.38040000200271606, 0.17238999903202057, -0.5078200101852417, 0.11118000000715256, 0.5057700276374817, -0.2381799966096878, 0.135220006108284, -0.4229699969291687, 0.12231999635696411, 
0.6105999946594238, 0.1286199986934662, 0.20145000517368317, -0.17744000256061554, 0.16423000395298004, 0.37505000829696655, -0.07539699971675873, -0.36035001277923584, -0.17170000076293945, -0.1763100028038025, -0.2974900007247925, 0.30316999554634094, -0.8185200095176697, 0.35095998644828796, -0.2151300013065338, 0.3833799958229065, -0.6172299981117249, 0.4664599895477295, -0.19269999861717224, 0.25231000781059265, 0.034320998936891556, -0.08621799945831299, -0.37108999490737915, 0.9943199753761292, 0.05904100090265274, 0.6292399764060974, -0.09062299877405167, -0.4836199879646301, 0.5852400064468384, 0.17103999853134155, 0.5414900183677673, 0.4770300090312958, 0.47540000081062317, 0.45855000615119934, -0.06436199694871902, -0.18318000435829163, -0.30605000257492065, 0.027389999479055405, 0.1598300039768219, 0.7506899833679199, -0.12592999637126923, 0.11236000061035156, -0.44655001163482666, 0.6491299867630005, 0.5090600252151489, 0.26447999477386475, -0.010619999840855598, -0.11992999911308289, 0.3318600058555603, 0.16015000641345978, 0.7781699895858765, 0.5385100245475769, -0.10347999632358551, 0.27542001008987427, -0.2327899932861328, -0.16468000411987305, -0.7376700043678284, 0.6130200028419495, 0.24041999876499176, 0.08789700269699097, -0.20875999331474304, -0.1800300031900406, 0.3287599980831146, 0.301829993724823, 0.2764500081539154, -0.3558900058269501, -0.13747000694274902, 0.25786998867988586, -0.059856001287698746, 0.24774999916553497, -0.5677700042724609, -0.03894200176000595, 0.8575000166893005, -0.1596599966287613, 0.1917099952697754, -0.7373800277709961, 0.32284998893737793, 0.3628300130367279, -0.017872000113129616, 0.08971499651670456, 0.36844000220298767, -1.2723000049591064, 0.6621900200843811, 0.824150025844574, -0.4605099856853485, 1.0901999473571777, -0.03717200085520744, -0.23136000335216522, -0.18330000340938568, 0.3889099955558777, -0.09551600366830826, -0.1571899950504303, 0.9632099866867065, 0.2514899969100952, -0.29416999220848083, 0.10328999906778336, -0.675000011920929, 0.03931500017642975, -0.6732400059700012, 0.15082000195980072, 0.9156799912452698, -0.1823900043964386, -0.7811400294303894, -1.000100016593933, 0.10068000108003616], u'hose': [0.39485999941825867, -0.41405999660491943, 0.06214800104498863, -0.3012999892234802, -0.4568600058555603, 0.38534000515937805, 0.017940999940037727, -0.08021000027656555, 0.014995000325143337, -0.03534200042486191, 0.330020010471344, 0.17181000113487244, 0.36566999554634094, -0.3738099932670593, -0.0400330014526844, 0.5159500241279602, -0.30469998717308044, 0.24859000742435455, -0.010765000246465206, -0.08354099839925766, -0.48930999636650085, -0.1826999932527542, 0.01853499934077263, 0.22939999401569366, -0.281139999628067, 0.005629200022667646, 0.3232400119304657, 0.2563999891281128, 0.011212999932467937, 0.017774999141693115, 0.624809980392456, -0.5237500071525574, 0.20051999390125275, -0.04021399840712547, 0.245169997215271, 0.3813000023365021, 0.025181999430060387, -0.02733200043439865, 0.06506600230932236, 0.4537700116634369, -0.10750000178813934, 0.5352500081062317, 0.3763999938964844, -0.3410800099372864, -0.07503599673509598, 0.21472999453544617, 0.9287099838256836, 0.3752000033855438, 0.3256700038909912, 0.3605799973011017, -0.33048000931739807, 0.35659998655319214, -0.24741999804973602, 0.11163000017404556, 0.3748700022697449, 0.23637999594211578, 0.33414000272750854, 0.47453999519348145, -0.2012300044298172, 0.17687000334262848, 0.30338001251220703, -0.5295600295066833, 0.4202899932861328, 
0.5702400207519531, 0.30072999000549316, -0.09310399740934372, -0.5429700016975403, 0.023521000519394875, 0.15919999778270721, 0.3427799940109253, 0.298770010471344, -0.6177399754524231, 0.1418599933385849, 0.37595999240875244, 0.1973000019788742, 0.4156000018119812, -0.6070299744606018, -0.3092299997806549, -0.6968899965286255, -0.8571699857711792, -0.3723199963569641, -0.07445000112056732, 0.5433300137519836, -0.36445000767707825, 0.03206599876284599, 0.2760300040245056, 0.26868000626564026, 0.4020799994468689, -0.4447399973869324, -0.06296700239181519, 0.004897600039839745, -0.39535000920295715, 0.26197999715805054, -0.2582400143146515, 0.16324999928474426, -0.2295999974012375, -0.17215000092983246, 0.5604900121688843, 0.17444999516010284, -0.6330400109291077, -0.07690100371837616, 0.612559974193573, -0.7958300113677979, -0.414000004529953, 0.386790007352829, -0.23226000368595123, -0.037842001765966415, 0.13007000088691711, -0.40408000349998474, 0.1878499984741211, -0.2461100071668625, 0.13853999972343445, 0.4185200035572052, -0.18920999765396118, -0.3960700035095215, 0.13565999269485474, -0.2805599868297577, 0.321399986743927, -0.11913999915122986, -0.6291599869728088, 0.3910500109195709, -0.5346099734306335, 0.8060799837112427, -0.30184999108314514, -0.40411001443862915, -0.22266000509262085, 0.11838000267744064, 0.3121800124645233, -0.12827999889850616, 0.16187000274658203, 0.6504499912261963, 0.3903299868106842, 0.3079800009727478, 0.22697000205516815, -0.28363001346588135, 0.12365999817848206, 0.49386000633239746, 0.37988001108169556, 0.176269993185997, 0.6072800159454346, -0.08199899643659592, -0.1434600055217743, 0.3724600076675415, -0.601580023765564, -0.3776000142097473, 0.28714999556541443, -0.0767270028591156, 0.08353099972009659, 0.03622400015592575, -0.0003630699939094484, -0.39322999119758606, 0.4906100034713745, 0.32771000266075134, -0.3597100079059601, 0.8656200170516968, -0.30000999569892883, 1.0496000051498413, 0.05278199911117554, 0.08252699673175812, 0.8296099901199341, -0.32006001472473145, -0.361050009727478, 0.2561100125312805, -0.07623299956321716, 0.2323099970817566, -0.25951001048088074, 0.01830100081861019, 0.22608999907970428, 0.6097599864006042, 0.017354000359773636, 0.05273500084877014, 0.39972999691963196, 0.02676999941468239, 0.3707900047302246, 0.11279000341892242, 0.10587000101804733, -0.21639999747276306, 0.899399995803833, 0.389739990234375, -0.05805699899792671, 0.04801100119948387, -0.17276999354362488, 0.35043999552726746, 0.3735699951648712, -0.05366099998354912, -0.03747599944472313, 1.1648000478744507, 0.5095700025558472, 0.8375999927520752, -0.5667499899864197, 0.3767299950122833, 0.17279000580310822, -0.28060999512672424, 0.10548000037670135, -0.30199000239372253, -0.3659600019454956, -0.2907699942588806, -0.31630000472068787, 0.03972499817609787, -0.3486100137233734, -0.5915700197219849, 0.45065000653266907, 0.5824699997901917, -0.04300500079989433, 0.46946999430656433, 0.23366999626159668, -0.41172000765800476, -0.2904900014400482, -0.32903000712394714, 0.03724600002169609, -0.5085200071334839, 0.22293999791145325, 0.04202999919652939, -0.0904029980301857, 1.1202000379562378, -0.17483000457286835, 0.2048500031232834, -0.24537000060081482, 0.15198999643325806, -0.5633800029754639, 0.2551800012588501, -0.2013300061225891, 0.3849700093269348, 0.09361500293016434, 0.2770099937915802, -0.2538999915122986, -0.6652600169181824, -0.32245001196861267, 0.1551699936389923, 0.14914000034332275, 0.46250998973846436, -0.4436199963092804, 
0.013249999843537807, -0.1064400002360344, 0.4980199933052063, 0.09310699999332428, -0.21046000719070435, -0.5338500142097473, 0.014818999916315079, 0.0010244000004604459, -0.1430799961090088, -0.4599300026893616, -0.5293099880218506, 0.20980000495910645, 0.3463299870491028, -0.19228999316692352, 0.6941099762916565, -0.13786999881267548, 0.42034998536109924, -0.4876300096511841, 0.11388000100851059, -0.4901300072669983, 0.2789599895477295, -0.998009979724884, 0.7904599905014038, -0.1583700031042099, -0.7837399840354919, 0.5008999705314636, 0.08983699977397919, -0.42048001289367676, -0.5694299936294556, -0.3032599985599518, -0.41703999042510986, -0.4509199857711792, -0.29815998673439026, -0.3441300094127655, -0.4686700105667114, 0.4998599886894226, -0.3253999948501587, -0.07227800041437149, 0.27678000926971436, 0.40661999583244324, 0.0628649964928627, -0.042153000831604004, -0.29104000329971313, 0.041398998349905014, -0.1673399955034256, -0.3031499981880188, -0.8053200244903564, 0.4876999855041504, 0.13429999351501465, -0.18283000588417053, 0.4818800091743469, 0.43147000670433044, -0.19771000742912292, 0.4127900004386902, 0.039709001779556274, 0.5107899904251099, 0.03602999821305275, -0.12125000357627869, -0.027689000591635704, 0.19059999287128448, 0.5329300165176392, -0.11473999917507172, 0.7837700247764587, -0.23973999917507172, 0.15095999836921692, -0.14045000076293945, 0.3878999948501587, 0.6085500121116638], u'banana': [0.4214099943637848, 0.0204670000821352, 0.12666000425815582, 0.39761999249458313, -0.11016000062227249, -0.0359559990465641, -0.47214001417160034, -0.13916000723838806, 0.568120002746582, -0.3496899902820587, -0.09323199838399887, -0.17035000026226044, -0.38677000999450684, -0.16810999810695648, -0.1015700027346611, -0.26611998677253723, 0.0480940006673336, -0.4677099883556366, -0.6072499752044678, 0.4095200002193451, 0.3177100121974945, 0.500980019569397, 0.6636800169944763, -0.11827000230550766, -0.7426699995994568, -0.10471999645233154, -0.643530011177063, -0.44023001194000244, -0.39100998640060425, 0.35694000124931335, -0.9348899722099304, 0.4831700026988983, 0.15222999453544617, 0.07933899760246277, -0.2511099874973297, 0.3996799886226654, -0.1798200011253357, -0.28874000906944275, -0.10891000181436539, 0.3882099986076355, -0.2314700037240982, -0.5033699870109558, -0.25231000781059265, -0.02218399941921234, -0.2787399888038635, -0.24192999303340912, 0.05746600031852722, -0.5395500063896179, -0.03487500175833702, -0.4048199951648712, -0.03806700184941292, -0.42337000370025635, 0.4286099970340729, 0.35166001319885254, -0.18164999783039093, -0.3113099932670593, -0.5327600240707397, -0.050953999161720276, 0.6677899956703186, -0.40077000856399536, 0.21402999758720398, -0.29861000180244446, -0.36636999249458313, 0.28488999605178833, -0.3766300082206726, 0.05960400030016899, -0.3179500102996826, 0.2546299993991852, -0.22184999287128448, 0.230320006608963, -0.1230200007557869, 0.24175000190734863, -0.10706000030040741, -0.08659899979829788, -0.037363000214099884, -0.10402999818325043, 0.24492000043392181, -0.8406299948692322, -0.1535000056028366, -0.19362999498844147, -0.01854100078344345, 0.10937999933958054, -0.29401999711990356, -0.11270999908447266, -0.38885998725891113, -0.4283599853515625, -0.44859999418258667, -0.24650999903678894, -0.09497100114822388, -0.6327499747276306, 0.20590999722480774, -0.7771199941635132, -0.23887999355793, -0.7999399900436401, -0.36994001269340515, 0.37863001227378845, 0.27856001257896423, -0.10061000287532806, 0.06472799926996231, 
0.09121400117874146, 0.2432200014591217, 0.3931899964809418, 0.2713800072669983, -0.6938999891281128, -0.37602999806404114, 0.19322000443935394, -0.28887999057769775, -0.0138330003246665, -0.2009200006723404, 0.2065100073814392, 1.1442999839782715, 0.20412999391555786, 0.07750300318002701, 0.3686800003051758, 0.26763999462127686, -0.1920499950647354, 0.12437000125646591, 0.7253999710083008, -0.4039199948310852, 0.20171000063419342, 0.02761100046336651, -0.7072700262069702, 0.7335299849510193, -0.2891800105571747, -0.07686500251293182, 0.16481000185012817, 0.47929999232292175, 1.0437999963760376, -0.012671000324189663, 0.21649999916553497, -0.5456299781799316, 0.7460299730300903, 0.05053500086069107, 0.43027999997138977, 0.28582999110221863, -0.22622999548912048, -0.10047999769449234, 0.021872999146580696, -0.015193000435829163, -0.36967000365257263, -0.01257999986410141, -0.033952999860048294, -0.08764400333166122, 0.06780900061130524, 0.07579299807548523, 0.7751399874687195, 0.36430999636650085, -0.3149400055408478, 0.44822999835014343, -0.49691998958587646, -0.395220011472702, 0.1720000058412552, 0.3274399936199188, 0.2806999981403351, -0.22450999915599823, 0.016309000551700592, -0.4960399866104126, -0.07068400084972382, 0.4458799958229065, 0.698360025882721, 0.5778599977493286, -0.08613300323486328, 0.0885000005364418, -0.130280002951622, -0.47819000482559204, -0.5680999755859375, -0.35058000683784485, 0.45372000336647034, -0.07701899856328964, -0.3914699852466583, -0.00625990005210042, -0.008847000077366829, -0.5588799715042114, -0.2786499857902527, 0.4582099914550781, 0.0404990017414093, 0.09659700095653534, 0.7932900190353394, -0.008188299834728241, -0.22360999882221222, 0.13948999345302582, 0.06399700045585632, -0.04814000055193901, -0.8996999859809875, 0.32938000559806824, -0.7324299812316895, 0.4952000081539154, 0.43428999185562134, 0.3959200084209442, -0.3604699969291687, -0.44325000047683716, 1.187399983406067, -0.14529000222682953, -0.2461400032043457, 0.1634799987077713, 0.24299000203609467, -0.08686599880456924, -0.3142000138759613, -0.10316000133752823, 0.44773998856544495, 0.12476000189781189, 0.29401999711990356, 0.05658499896526337, -0.008133400231599808, 0.4144800007343292, 0.07806500047445297, 0.42368999123573303, 0.5951399803161621, -0.18196000158786774, -0.11807999759912491, -0.16229000687599182, -0.3704400062561035, -0.455049991607666, 0.2321300059556961, -0.188060000538826, -0.057700999081134796, 0.3567799925804138, -0.29693999886512756, 0.4071100056171417, 0.2931399941444397, 0.5099700093269348, -0.49059998989105225, -0.03844200074672699, 0.27698999643325806, -0.17813999950885773, 0.561959981918335, -0.257750004529953, 0.16301999986171722, -0.12892000377178192, 0.18511000275611877, 0.044475000351667404, -0.05000400170683861, 0.0034030000679194927, 0.7243599891662598, 0.7284899950027466, -0.05083199962973595, 0.6350700259208679, -0.5197399854660034, -0.018574999645352364, -0.040821000933647156, -0.06515499949455261, -0.47369998693466187, 0.031050000339746475, -0.29190000891685486, -1.06850004196167, 0.1915699988603592, 0.3510400056838989, 0.65447998046875, 0.09460200369358063, -0.749530017375946, 0.2777099907398224, 0.8520299792289734, -0.13937999308109283, -0.0026958000380545855, 0.7590500116348267, 0.1525000035762787, -0.18057000637054443, 0.35791999101638794, 0.2209399938583374, -0.0026223999448120594, 0.2435699999332428, -0.04444900155067444, 0.024306999519467354, -0.18554000556468964, 0.5472999811172485, 0.06756199896335602, -0.17508000135421753, 
-0.4966999888420105, 0.19099999964237213, -0.12052000313997269, 0.0005621400196105242, -0.042702000588178635, 0.08379500359296799, 0.4198000133037567, 0.14462000131607056, 0.14565999805927277, -0.33083000779151917, -0.2593599855899811, -0.5849900245666504, 0.08274500072002411, -0.49893999099731445, -0.24472999572753906, -0.31758999824523926, -0.6223000288009644, -0.41370001435279846, 0.10231000185012817, 0.562529981136322, -0.411080002784729, 0.15782000124454498, 0.09359999746084213, -0.0667869970202446, -0.6504999995231628, 0.43919000029563904, -0.07727000117301941, -0.1035899966955185, 0.2017199993133545, -0.6401900053024292, 0.09386900067329407, 0.23951999843120575, 0.30140000581741333], u'dirt': [-0.43751001358032227, 0.20236000418663025, -0.023413000628352165, 0.42260000109672546, -0.32732999324798584, -0.2511399984359741, 0.03773999959230423, 0.008562499657273293, 0.8539800047874451, -0.18592000007629395, -0.35662001371383667, -0.05635000020265579, -0.07650700211524963, 0.22476999461650848, -0.2094700038433075, -0.08790100365877151, -0.8471500277519226, 0.06546899676322937, 0.7195500135421753, 0.2868799865245819, -0.1757899969816208, 0.23601999878883362, 0.2112399935722351, 0.4162200093269348, -0.3646799921989441, -0.6839200258255005, 0.26934999227523804, -0.024390000849962234, 0.1978600025177002, 0.24866999685764313, -0.3102099895477295, 0.24467000365257263, -0.26030001044273376, -0.07488799840211868, -0.2908700108528137, 1.03410005569458, -0.8112599849700928, 0.1872600018978119, 0.10379000008106232, 0.043533001095056534, -0.11423999816179276, 0.2687999904155731, 0.0875990018248558, -0.21336999535560608, 0.6154900193214417, 0.4585399925708771, 0.389739990234375, 0.13519999384880066, 0.33302000164985657, -0.4449799954891205, -0.2946000099182129, 0.31134000420570374, -0.26131001114845276, 0.16976000368595123, 0.15715999901294708, -0.01508800033479929, -0.01729699969291687, -1.1019999980926514, 0.40042001008987427, -0.2370299994945526, 0.1424199938774109, 0.07078400254249573, 0.4339599907398224, 0.1506499946117401, 0.03079500049352646, -0.5133200287818909, 0.1756799966096878, 0.10405000299215317, 0.04862299934029579, -0.3567799925804138, 0.36070001125335693, 0.6423500180244446, -0.09931699931621552, 0.23720000684261322, -0.37678998708724976, -0.1409599930047989, -0.045830998569726944, -0.44962000846862793, 0.6169700026512146, -0.6000199913978577, 0.40626001358032227, -0.04443899914622307, 0.4527600109577179, 0.2553499937057495, -0.8144699931144714, -0.1548600047826767, 0.12296999990940094, -0.08765900135040283, 0.3710800111293793, 0.23680000007152557, 0.7673699855804443, 0.01761000044643879, 0.46963000297546387, -0.6071900129318237, -0.10296999663114548, -0.49810001254081726, -0.19550000131130219, -0.3453100025653839, 0.6309300065040588, 0.17207999527454376, 0.21334999799728394, 0.17696000635623932, -0.47481000423431396, -0.014675999991595745, -0.4889200031757355, 0.12202999740839005, 0.5047699809074402, -0.5514600276947021, 0.14270000159740448, -0.488209992647171, -0.5903499722480774, -0.7287999987602234, -0.09192900359630585, -0.5440199971199036, -0.37696000933647156, 0.5186499953269958, 0.27814000844955444, 0.24053999781608582, 0.22800999879837036, -0.31839999556541443, -0.04325399920344353, 0.15821999311447144, 0.3152100145816803, -0.010089999996125698, -0.45583999156951904, 0.6175600290298462, -0.30967000126838684, 0.35214999318122864, -0.4807499945163727, 0.4636799991130829, 0.11632999777793884, 0.47571998834609985, 0.5779500007629395, 1.0983999967575073, 0.2744700014591217, 
-0.2492700070142746, 0.1563200056552887, 0.4180600047111511, -0.18051999807357788, -0.5913000106811523, 0.36882999539375305, 0.01720600016415119, -0.2018900066614151, -0.3333300054073334, -0.365090012550354, 0.2837199866771698, 0.19473999738693237, 0.4923200011253357, -0.27046000957489014, 0.08921399712562561, -0.06791999936103821, 0.49355000257492065, -0.7487300038337708, 0.0399399995803833, 0.259909987449646, 0.09633699804544449, 0.4144900143146515, 0.32479000091552734, 1.1176999807357788, 0.07241000235080719, 0.22107000648975372, -0.6722699999809265, -0.21860000491142273, 0.416020005941391, 0.43832001090049744, -0.2300100028514862, 0.38872000575065613, 0.46268999576568604, 0.710919976234436, -0.6340100169181824, -0.22103999555110931, -0.07221200317144394, 0.522819995880127, -0.5188500285148621, -0.2829900085926056, -0.22702999413013458, 0.005746299866586924, 0.15583999454975128, -0.5002999901771545, -0.35460999608039856, 0.1239200010895729, 0.37198999524116516, 0.3224799931049347, 0.45587000250816345, -0.23966999351978302, -0.23767000436782837, 1.1004999876022339, -0.40790998935699463, 0.5098199844360352, 0.28314000368118286, 0.11754000186920166, 0.20886999368667603, -0.029417000710964203, -0.11896999925374985, 0.20550000667572021, 0.09294799715280533, -0.1502700001001358, 0.08731500059366226, 0.015709999948740005, -0.12943999469280243, 0.9405900239944458, 0.40281999111175537, 0.40946000814437866, 0.2639800012111664, 0.5851500034332275, 0.2385600060224533, 0.538129985332489, -0.43323999643325806, -0.22717000544071198, 0.2572900056838989, -0.4472599923610687, 0.06193799898028374, 0.01828400045633316, -0.20218999683856964, -0.49538999795913696, -0.2912200093269348, -0.043396998196840286, -0.15663999319076538, -0.17844000458717346, -0.17737999558448792, 0.5352500081062317, -0.14854000508785248, -0.012392999604344368, -0.07962500303983688, 0.7431899905204773, -0.016884999349713326, 0.027688000351190567, -0.017565999180078506, -0.04420299828052521, -0.08822599798440933, 0.29600998759269714, -0.07883100211620331, -0.2911500036716461, 0.11918999999761581, 0.07238300144672394, 0.07003200054168701, -0.03177899867296219, -0.07889500260353088, -0.33649998903274536, 0.025963999330997467, 0.10261999815702438, -0.28738000988960266, -0.07051599770784378, -0.7975000143051147, -0.7711499929428101, -0.2838599979877472, 0.05637200176715851, 0.300029993057251, 0.15508000552654266, 0.1644199937582016, 0.12314999848604202, -0.385919988155365, -0.42357000708580017, -0.7495800256729126, 0.7329699993133545, -0.003159899963065982, -0.395579993724823, -0.2578499913215637, -0.33893999457359314, 0.15112000703811646, -0.009800899773836136, -0.5906599760055542, -0.33246999979019165, -0.07045599818229675, 0.5637199878692627, 0.19668999314308167, -0.07431100308895111, -0.025191999971866608, -0.13683000206947327, -0.190870001912117, -0.40338000655174255, 0.19819000363349915, -0.16665999591350555, -0.16008999943733215, -0.19925999641418457, -0.05905900150537491, -1.4867000579833984, -0.15133999288082123, -0.3502100110054016, -0.28810998797416687, 0.08647400140762329, -0.37117999792099, 0.1674100011587143, 0.5683199763298035, 0.03170200064778328, -0.014108999632298946, 0.4018799960613251, -0.11678999662399292, 0.13634000718593597, -0.45963001251220703, 0.0729679986834526, 0.3732900023460388, -0.31334999203681946, 0.7886499762535095, -0.3041999936103821, -0.07723300158977509, 0.5037199854850769, 0.2347099930047989, 0.07028000056743622, 0.24216000735759735], u'tree': [-0.5643500089645386, -0.12970000505447388, 
0.1440799981355667, -0.44018998742103577, -0.33667999505996704, 0.4963900148868561, -0.05313500016927719, 0.22653000056743622, 0.10723000019788742, -0.7889400124549866, -0.3960599899291992, 0.7908400297164917, -0.08420299738645554, -0.05186599865555763, -0.16708000004291534, 0.18568000197410583, -0.4081000089645386, -0.05243200063705444, -0.42302000522613525, 0.23910999298095703, 0.015258999541401863, 0.19099000096321106, 0.2681800127029419, 0.06303399801254272, -0.06215300038456917, -0.10055000334978104, -0.49125999212265015, -0.41791999340057373, -0.6251800060272217, 0.8988699913024902, 0.08024100214242935, 0.350600004196167, -0.6131600141525269, -0.028335999697446823, -0.282150000333786, 0.12443999946117401, 0.8479700088500977, -0.3575400114059448, -0.37786999344825745, -0.36796000599861145, -0.36041000485420227, 0.1259399950504303, -0.06362199783325195, -0.2610900104045868, -0.1790899932384491, -0.3325999975204468, 0.7383300065994263, 0.05986899882555008, 0.25457000732421875, 0.13120999932289124, -0.19317999482154846, -0.5885900259017944, -0.23607000708580017, -0.2946400046348572, -0.4911800026893616, -0.11710000038146973, -0.3790299892425537, -0.17792999744415283, 0.48561999201774597, 0.0743580013513565, -0.0763780027627945, -0.12283000349998474, 0.3995400071144104, 0.8114399909973145, -0.22988000512123108, -0.23312999308109283, -0.06237899884581566, 0.9281299710273743, -0.039347000420093536, 0.3003300130367279, -0.9701300263404846, 0.4124299883842468, -0.9692699909210205, 0.5532500147819519, -0.8715900182723999, 0.2101999968290329, -0.038391999900341034, -0.3067600131034851, 0.22386999428272247, -0.07781700044870377, -0.40667998790740967, 0.2727400064468384, 0.3737199902534485, -0.11734999716281891, -0.13707000017166138, 0.2868799865245819, 0.008095799945294857, 0.3456900119781494, 0.1329299956560135, -0.023826999589800835, 0.16759000718593597, -0.2894600033760071, 0.1421000063419342, -0.49660998582839966, 0.2863200008869171, -0.3184399902820587, 0.42945000529289246, -0.36423999071121216, -0.264849990606308, -0.6266999840736389, 0.23427000641822815, 1.0377000570297241, 0.05925700068473816, 0.2542800009250641, 0.1869799941778183, -0.0750890001654625, 0.450300008058548, -0.3640100061893463, -0.2217099964618683, 0.18016000092029572, -0.04165700078010559, -0.2369299978017807, 0.2243800014257431, 0.3051399886608124, 0.5090799927711487, 0.07172399759292603, -0.10343000292778015, 0.5204600095748901, -0.28975000977516174, -0.5386899709701538, -0.40661999583244324, -0.11785999685525894, 0.6291999816894531, 0.13708999752998352, -0.4218499958515167, -0.014721999876201153, 0.061051998287439346, 0.17121000587940216, 0.14815999567508698, -0.2480199933052063, 0.1375100016593933, 0.6860700249671936, 0.5846199989318848, 0.5920000076293945, 0.12884999811649323, -0.2482600063085556, -0.14802999794483185, -0.13401000201702118, -0.3944000005722046, -0.2798199951648712, 0.9546899795532227, 0.615559995174408, -0.20237000286579132, -0.3582000136375427, 0.10751999914646149, 0.5261600017547607, 0.3650200068950653, -0.07371599972248077, -0.2789500057697296, -0.08061999827623367, 0.012601000256836414, -0.16496999561786652, -0.12163999676704407, 0.19826999306678772, -0.19991999864578247, -0.2991600036621094, 0.5088499784469604, -0.1912499964237213, -0.4416300058364868, 0.9007200002670288, -0.21377000212669373, 0.15911999344825745, 0.5800899863243103, 0.2237199991941452, 0.07000300288200378, -0.014042000286281109, -0.2401999980211258, 0.0560000017285347, -0.3109999895095825, -0.3831300139427185, 
-0.28600001335144043, -0.5686399936676025, 0.3478899896144867, 0.3955700099468231, 0.23172999918460846, 0.15285000205039978, 0.47383999824523926, 0.623740017414093, 0.13650000095367432, -0.8034399747848511, -0.2666099965572357, -0.1003900021314621, 0.6055200099945068, -0.7130500078201294, -0.05212600156664848, -0.29750001430511475, 0.31169000267982483, 0.1632400006055832, 0.4106999933719635, 0.27167001366615295, 0.1522199958562851, 0.09276899695396423, 0.1652899980545044, -0.25762999057769775, -0.45669999718666077, -0.00813239999115467, 0.4777800142765045, 0.15782999992370605, -0.021043000742793083, 0.3424600064754486, 1.0261000394821167, 0.20904000103473663, -0.1672700047492981, -0.04920399934053421, 0.047175999730825424, 0.181209996342659, -0.4126800000667572, 0.34681999683380127, 0.17674000561237335, 0.08312500268220901, -0.25703001022338867, -0.4511300027370453, 0.12679000198841095, -0.6566600203514099, 0.35139000415802, 0.11653000116348267, 0.1720000058412552, 0.26489999890327454, 0.5645599961280823, 0.0018132999539375305, 0.47595998644828796, -0.08804500102996826, -0.7088099718093872, -0.19062000513076782, -0.3620400130748749, 0.44005000591278076, -0.02842400036752224, 0.39779001474380493, 0.2567700147628784, 0.5215799808502197, -0.15253999829292297, 0.018074000254273415, -0.31516000628471375, 0.1226700022816658, 0.5056700110435486, -0.2184000015258789, -0.7624499797821045, -0.5314300060272217, -0.1516299992799759, -0.7990700006484985, 0.08384999632835388, -0.34650999307632446, -0.5321300029754639, -0.11588999629020691, -1.1181000471115112, 0.35541999340057373, 0.17688000202178955, 0.2788800001144409, 0.09194699674844742, -0.1151299998164177, 0.3136399984359741, 0.4215799868106842, -0.1584700047969818, -1.030500054359436, 1.2319999933242798, -0.09030800312757492, -0.03532499819993973, 0.0935640037059784, 0.44953998923301697, 0.2663300037384033, 0.19227999448776245, -0.0003578700125217438, 0.1420300006866455, -0.04377099871635437, -0.1436000019311905, 0.32043999433517456, 0.3376300036907196, -0.32284000515937805, 0.4618400037288666, -0.11823000013828278, -0.5093700289726257, -0.1748100072145462, 0.04470200091600418, -0.4547500014305115, -0.4753499925136566, -0.23446999490261078, -1.6026999950408936, -0.09156399965286255, -0.6283299922943115, -0.22317999601364136, -0.5234599709510803, -0.5900499820709229, -0.22773000597953796, -0.6454899907112122, -0.7945700287818909, 0.6166099905967712, 0.801289975643158, -0.5550500154495239, 0.1781499981880188, -0.1393599957227707, 0.5565299987792969, 0.07595200091600418, 0.21905000507831573, 0.08973100036382675, 0.26486000418663025, 0.11190000176429749, -0.18649999797344208, -0.14347000420093536, 0.11146000027656555, -0.02756199985742569], u'rock': [-0.14924000203609467, 0.021244000643491745, -0.3424000144004822, 0.13079999387264252, 0.3289400041103363, -0.3736099898815155, 0.18362000584602356, -0.1916400045156479, -0.06142999976873398, -0.3649100065231323, -0.07151699811220169, -0.4808399975299835, 0.5245800018310547, 0.6572099924087524, 0.5957599878311157, -0.43623998761177063, -0.47282999753952026, 0.0028810000512748957, 0.8374900221824646, 0.4743500053882599, 0.003642899915575981, 0.7466599941253662, -0.35537999868392944, 0.7269799709320068, -0.18288999795913696, -0.14542999863624573, -0.24578000605106354, 0.06619100272655487, -0.4679799973964691, 0.4799799919128418, -0.08480799943208694, -0.0024431999772787094, -0.49145999550819397, -0.15998999774456024, -0.10290999710559845, -0.10498999804258347, -0.3283199965953827, -0.46779999136924744, 
0.4259999990463257, 0.5041999816894531, -0.049970999360084534, 0.23699000477790833, 0.2283799946308136, 0.4786199927330017, 0.8205900192260742, 0.796019971370697, 0.036733999848365784, -0.16439999639987946, -0.08344300091266632, 0.02761100046336651, -0.24106000363826752, 0.04969500005245209, -0.6582300066947937, 0.8054900169372559, -0.072332002222538, 0.5945199728012085, -0.753279983997345, 0.07180699706077576, 0.21376000344753265, -0.30292001366615295, 0.4744099974632263, 0.2221899926662445, 0.8807399868965149, 0.020281000062823296, 0.2296299934387207, 0.08228699862957001, 0.49948999285697937, 0.5645700097084045, -0.07046599686145782, 0.5507500171661377, -0.05661800131201744, 0.2922399938106537, -0.6312400102615356, 0.46619001030921936, -0.4676400125026703, -0.02842099964618683, 0.5287799835205078, 0.152319997549057, 0.2885900139808655, -0.5664100050926208, -0.45746999979019165, -0.21535000205039978, 0.26545000076293945, -0.12657000124454498, 0.2924000024795532, -0.16620999574661255, 0.41442999243736267, 0.5996599793434143, -0.5588799715042114, 0.020440999418497086, 0.4845600128173828, 0.8018800020217896, -0.07800699770450592, -0.04242999851703644, -0.2362699955701828, -0.21797999739646912, 0.3900600075721741, -0.5764200091362, 0.2874700129032135, -0.7298799753189087, -0.4214800000190735, 0.35624000430107117, 0.42260000109672546, -0.45972999930381775, -0.20791000127792358, 0.09794300049543381, -0.41051000356674194, 0.21299000084400177, 0.5526400208473206, -0.551859974861145, 0.3376300036907196, -0.6126400232315063, 0.3148599863052368, -0.24909000098705292, 0.48552000522613525, -0.08466800302267075, -0.2557699978351593, -0.039438001811504364, 0.12268999963998795, -0.6555100083351135, 0.16080999374389648, -0.05296599864959717, -0.5508599877357483, 0.14895999431610107, -0.18477000296115875, 0.032510001212358475, -0.027664000168442726, 0.4221000075340271, -0.19460999965667725, 0.039521001279354095, 0.020284000784158707, 0.3709700107574463, 0.5606200098991394, -0.5781599879264832, 0.2872900068759918, -0.43698999285697937, -0.16027000546455383, 0.5837399959564209, 0.24121999740600586, 0.2451999932527542, -0.09879200160503387, -0.47047001123428345, 0.08021800220012665, -0.5734900236129761, 0.07350800186395645, -0.28832000494003296, -0.029131000861525536, 0.26743000745773315, 0.053245000541210175, 0.3064199984073639, 0.6835100054740906, -0.04087100178003311, -0.45267999172210693, -0.07914599776268005, 0.8201799988746643, 0.7320399880409241, 0.3062799870967865, 0.021161999553442, 0.40149998664855957, -0.04770699888467789, -0.009555899538099766, -0.6060400009155273, -0.320609986782074, 0.1751900017261505, -0.29774999618530273, 0.3921799957752228, 0.12687000632286072, -0.07543499767780304, 0.3939099907875061, -0.5633900165557861, 0.578540027141571, 0.7305799722671509, 0.2731499969959259, -0.015254000201821327, 0.6136000156402588, 0.3523699939250946, 0.05799400061368942, 0.21626999974250793, 0.058605000376701355, 0.2213599979877472, 0.14385999739170074, 0.23177999258041382, 0.4700799882411957, -0.1077599972486496, -0.14722999930381775, -0.3502900004386902, 0.18793000280857086, -0.03472999855875969, 0.1309099942445755, 0.03489600121974945, 0.5054000020027161, 0.502020001411438, 0.11156000196933746, -0.12455999851226807, 0.25687000155448914, 0.06060900166630745, -0.09707300364971161, -0.3171499967575073, 0.2738400101661682, 0.31202998757362366, 2.2683000564575195, -0.2594200074672699, 0.3703800141811371, 0.2197200059890747, -0.47624000906944275, 0.25213998556137085, 0.389739990234375, 
0.009023499675095081, -0.32826998829841614, 0.29782000184059143, -0.4212599992752075, 0.012543999589979649, 0.3880400061607361, 0.48434001207351685, -0.08104699850082397, 0.6566900014877319, 0.3664099872112274, -0.23236000537872314, 0.057353001087903976, -0.06555800139904022, 0.20959000289440155, 0.2563999891281128, 0.05363599956035614, -0.38089001178741455, -0.4455699920654297, 0.1752600073814392, -0.41488000750541687, -0.2941800057888031, -0.009781000204384327, 0.1623699963092804, -0.1071000024676323, -0.012271000072360039, -0.5038999915122986, -0.47321000695228577, 0.82805997133255, -0.08516799658536911, -0.41903001070022583, -0.006747799925506115, -0.695580005645752, 0.03451500087976456, -0.3866899907588959, 0.1752600073814392, -0.012690000236034393, -0.3371799886226654, -0.8362699747085571, 0.12447000294923782, -0.192330002784729, -0.3840799927711487, -0.46748998761177063, 0.3028799891471863, -0.14595000445842743, 0.13767999410629272, -0.3746500015258789, -0.32624000310897827, 0.25655001401901245, 0.061948999762535095, 0.6371600031852722, -0.469539999961853, -0.17587000131607056, -0.2759299874305725, 0.2595599889755249, -0.15139999985694885, 0.10802999883890152, -0.1671600043773651, -0.1122799962759018, -0.1830900013446808, 0.17872999608516693, 0.24427999556064606, 0.263949990272522, -0.07078000158071518, -0.4630900025367737, -0.31235000491142273, 0.25336000323295593, -0.5337899923324585, 0.11985000222921371, 0.2918199896812439, -1.5221999883651733, -0.23306000232696533, -0.07754699885845184, -0.1423099935054779, -0.5426499843597412, 0.07871799916028976, -0.2697499990463257, 0.4823099970817566, -0.444350004196167, 0.005996699910610914, -0.748520016670227, 0.3490999937057495, 0.6703299880027771, -0.21821999549865723, 0.3438200056552887, 0.10349000245332718, -0.4274500012397766, 0.3472599983215332, 0.3622399866580963, 0.3379800021648407, 0.22623999416828156, 0.6467999815940857, -0.3723900020122528, -0.08505500108003616], u'horse': [0.3664399981498718, 0.0444910004734993, 0.15735000371932983, -0.09656299650669098, -0.28672999143600464, 0.3847399950027466, 0.014716999605298042, -0.032079000025987625, -0.5931000113487244, -0.7580000162124634, 0.37953999638557434, 0.19648000597953796, -0.12906000018119812, 0.47898000478744507, -0.22356000542640686, 0.8326900005340576, 0.03692600131034851, 0.6404399871826172, 0.0035343000199645758, -0.370279997587204, 0.09827499836683273, 0.3658899962902069, 0.06961800158023834, 0.4744200110435486, 0.21356000006198883, -0.7069299817085266, 0.4824399948120117, -0.7325699925422668, 0.4402399957180023, 0.37384000420570374, -0.20110000669956207, -0.33331000804901123, 0.05814800038933754, -0.480679988861084, -0.7004500031471252, 0.35381999611854553, -0.04894600063562393, 0.21267999708652496, -0.31970998644828796, -0.2729800045490265, -0.3803099989891052, -0.1442600041627884, 0.32440999150276184, -0.4859200119972229, 0.2582800090312958, -0.13681000471115112, 0.6760200262069702, 0.07864999771118164, 0.22718000411987305, -0.3958500027656555, -0.4665899872779846, 0.09379100054502487, 0.13610999286174774, 0.1598999947309494, 0.27121999859809875, 0.18914000689983368, -0.5264999866485596, 0.36636000871658325, -0.4139400124549866, -0.19470000267028809, 0.15396000444889069, 0.26030999422073364, 0.6525300145149231, 0.11919999867677689, -0.07086999714374542, -0.33096998929977417, -0.5035499930381775, -0.03689999878406525, 0.055362001061439514, -0.48271000385284424, 0.008782699704170227, 0.6467300057411194, -0.007243500091135502, -0.6378800272941589, -0.1422100067138672, 
-0.40525999665260315, -0.06091900169849396, -0.6981499791145325, 0.12782999873161316, -0.2263299971818924, 0.2931399941444397, 0.3623400032520294, 0.1400199979543686, -0.040380001068115234, 0.08846399933099747, -0.4430600106716156, -0.16378000378608704, 0.03810900077223778, 0.08234699815511703, 0.5178400278091431, 0.572160005569458, -0.1584700047969818, 0.3739199936389923, 0.23109999299049377, 0.06400500237941742, -0.02712099999189377, 0.053415000438690186, 0.2267100065946579, 0.09813600033521652, 0.19453999400138855, -0.053677998483181, 0.06761299818754196, -0.2853800058364868, -0.2508600056171417, -0.07051199674606323, 0.18140999972820282, -0.3024100065231323, -0.6175600290298462, -0.24724000692367554, 0.5923200249671936, -0.22673000395298004, 0.28033000230789185, -0.23973999917507172, -0.02555900067090988, -0.2506600022315979, 0.2560099959373474, -0.2474299967288971, 0.3243899941444397, 0.35436999797821045, -0.15191000699996948, 0.007995200343430042, 0.14700999855995178, -0.0010966999689117074, -0.3889099955558777, 0.18956999480724335, 0.17553000152111053, -0.29420000314712524, 0.9186599850654602, -0.5516200065612793, -0.218299999833107, 0.41183000802993774, 0.2067600041627884, -0.18071000277996063, 0.8364700078964233, 0.019068999215960503, -0.1381099969148636, 0.3321099877357483, -0.2125300019979477, 0.04744600132107735, 0.369049996137619, 0.30452999472618103, 0.153329998254776, -0.30717000365257263, -0.41001999378204346, 0.14688999950885773, -0.8175699710845947, -0.24732999503612518, -0.31988000869750977, -0.5859799981117249, 0.4527199864387512, -0.10939999669790268, 0.9276999831199646, -0.4464600086212158, -0.15615999698638916, 0.08428899943828583, -0.5162400007247925, -0.1272599995136261, -0.03754600137472153, 0.47567999362945557, 0.13030999898910522, 0.0676639974117279, -0.17915000021457672, -0.20409999787807465, 0.10553000122308731, 0.25394999980926514, -0.044307999312877655, 0.7120500206947327, 0.12580999732017517, -0.2740199863910675, 0.29444000124931335, 0.03515499830245972, 0.6832299828529358, 0.05013300105929375, -0.18256999552249908, 0.29659000039100647, -0.723550021648407, 0.07641399651765823, -0.021216999739408493, 0.19489000737667084, 0.06313499808311462, 0.1986600011587143, 0.26377999782562256, -0.36750999093055725, -0.1822499930858612, 0.23068000376224518, 0.03733599931001663, 0.19641000032424927, -0.2619599997997284, 0.508870005607605, -0.4184499979019165, -0.6754800081253052, -0.07235699892044067, 0.17010000348091125, -0.6565300226211548, 0.10724999755620956, -0.08921200037002563, -0.40814998745918274, -0.47786998748779297, -0.20753000676631927, -0.1129399985074997, 2.0469000339508057, 0.18842999637126923, -0.026985999196767807, -0.36118999123573303, 0.8035299777984619, 0.6304299831390381, 0.3050999939441681, 0.004429299850016832, -0.11349000036716461, -0.10417000204324722, -0.06007000058889389, -0.34463998675346375, 0.22452999651432037, -0.33838000893592834, -0.05766100063920021, -0.3222399950027466, -0.2833400070667267, -0.23184999823570251, 0.41328999400138855, -0.09291800111532211, -0.10947000235319138, -0.5701799988746643, -0.011695999652147293, -0.978950023651123, 0.19359000027179718, -0.08729399740695953, -0.14663000404834747, -0.3612000048160553, -0.0021043000742793083, 0.8479899764060974, -0.32888999581336975, -0.4500199854373932, -0.18276000022888184, -0.06263300031423569, 0.563480019569397, 0.3745799958705902, -0.4757699966430664, -0.5530099868774414, -0.34314998984336853, 0.6974300146102905, -0.2616400122642517, -0.3512299954891205, 0.2032800018787384, 
-0.26568999886512756, -0.7132300138473511, -0.4450500011444092, -0.5652499794960022, 0.5028200149536133, 0.015069999732077122, 0.0498879998922348, -0.6930699944496155, 0.12744000554084778, -0.3495199978351593, -0.24252000451087952, 1.4931999444961548, -0.4332300126552582, 0.17231999337673187, 0.3375999927520752, -0.06550300121307373, -0.088748998939991, -0.06938699632883072, 0.05888799950480461, 0.0052999998442828655, 0.2045699954032898, -0.04205299913883209, -0.47909998893737793, 0.37696000933647156, -0.2683500051498413, -0.42893001437187195, 0.24199999868869781, -0.4404599964618683, -0.1682399958372116, 0.8218299746513367, -0.2443300038576126, -0.23360000550746918, 0.007401700131595135, -1.5592999458312988, 0.07047700136899948, -0.10251999646425247, -0.0675949975848198, -0.2643199861049652, -0.2220200002193451, 0.5819799900054932, 0.42261001467704773, -0.08629100024700165, 0.3684700131416321, -0.24740999937057495, -0.10417000204324722, 0.20433999598026276, -0.45976999402046204, 0.14710000157356262, 0.6023300290107727, -0.5692200064659119, -0.0634239986538887, 0.009367899969220161, -0.12520000338554382, -0.07530300319194794, 0.21883000433444977, 0.36607998609542847, 0.319379985332489], u'water': [0.2220499962568283, -0.3240100145339966, -0.22822999954223633, -0.8057000041007996, -0.03768099844455719, 0.4047600030899048, 0.6771299839019775, 0.06001799926161766, 0.2975200116634369, -1.719099998474121, 0.22618000209331512, -0.2806900143623352, -0.4343799948692322, -0.05376699939370155, 0.1976500004529953, -0.034735001623630524, -0.6284999847412109, 0.18106000125408173, 0.06645900011062622, 0.41741999983787537, -0.9447500109672546, 0.1068200021982193, 0.1310099959373474, 0.30309998989105225, 0.027403000742197037, -0.19389000535011292, 0.0919250026345253, 0.6007400155067444, -0.6415600180625916, 0.38916000723838806, -0.21421000361442566, 0.33862999081611633, -0.4434199929237366, 0.021516000851988792, 0.0087952995672822, 0.10592000186443329, 0.15940000116825104, -0.2514500021934509, -0.26791998744010925, 0.2719799876213074, -0.3533399999141693, -0.0028737999964505434, 0.45506998896598816, 0.16615000367164612, -0.07860799878835678, -0.06027499958872795, 0.40577998757362366, 0.4479700028896332, 0.06009700149297714, 0.3328399956226349, -0.3251200020313263, 0.3783699870109558, -0.22607000172138214, -0.41082999110221863, -0.022586999461054802, 0.8766000270843506, 0.6075699925422668, 0.5102900266647339, 0.35760000348091125, 0.7089300155639648, -0.19237999618053436, 0.15464000403881073, 0.4066300094127655, -0.11723999679088593, -0.5412899851799011, -0.317330002784729, -0.5423099994659424, 0.0892840027809143, -0.6983100175857544, -0.3391000032424927, 0.674560010433197, -0.32019999623298645, -0.39219000935554504, 0.05428700149059296, -0.43915998935699463, 0.27136000990867615, 0.1525299996137619, 0.4311099946498871, -0.6559600234031677, -0.30831000208854675, 0.20937000215053558, -0.28380000591278076, 0.0654660016298294, 0.125900000333786, 0.28123000264167786, 0.03261600062251091, -0.2798900008201599, -0.18884000182151794, -0.13447000086307526, -0.7694799900054932, 0.26649001240730286, 0.18922999501228333, 0.383760005235672, -0.054836999624967575, -0.00812860019505024, -0.03652599826455116, 0.25902000069618225, -0.25051000714302063, 0.3027999997138977, 0.08599399775266647, 0.019022999331355095, 0.03834100067615509, -0.49974000453948975, -0.4657999873161316, 0.04726399853825569, 0.3469899892807007, 0.2900499999523163, -0.07607799768447876, -0.5847200155258179, 0.1350100040435791, -0.07446300238370895, 
-0.7692700028419495, -0.330949991941452, -0.026612000539898872, -0.4411099851131439, 0.08346299827098846, -0.03464600071310997, 0.1808599978685379, 0.13040000200271606, -0.20407000184059143, -0.42886999249458313, -0.1618099957704544, -0.13519999384880066, 0.05839399993419647, -0.42886000871658325, 0.010762999765574932, 0.29914000630378723, 0.4166499972343445, -0.17066000401973724, -0.07713700085878372, 0.2140900045633316, 0.4183799922466278, 1.0125000476837158, 0.4678100049495697, -0.1396300047636032, -0.019572999328374863, 0.3461500108242035, -0.03228199854493141, -0.852400004863739, 0.37132999300956726, 0.34501999616622925, -0.06418000161647797, 0.06002499908208847, -0.3438499867916107, -0.8762099742889404, 0.03416400030255318, 0.4313099980354309, 0.6140999794006348, 0.0759660005569458, 0.21119999885559082, 0.537339985370636, 0.2916800081729889, -0.1699099987745285, -0.27702000737190247, 0.7384399771690369, -0.036017000675201416, 0.282260000705719, -0.6301699876785278, 0.2345699965953827, 0.3092600107192993, -0.1561100035905838, -0.7055100202560425, 0.35881999135017395, -0.43088001012802124, -0.12399999797344208, -0.5047600269317627, 0.07696899771690369, 0.06056100130081177, -0.3304400146007538, 0.6010199785232544, 0.14983999729156494, 1.1690000295639038, -0.38444000482559204, 0.409060001373291, -0.23013000190258026, 0.0638199970126152, -0.13923999667167664, -0.13470999896526337, 0.3265100121498108, -0.0440870001912117, -0.30351001024246216, 0.17597000300884247, 0.07372800260782242, -0.02886500023305416, -0.002206699922680855, -0.3250400125980377, 1.1460000276565552, -0.09170500189065933, 0.773859977722168, -0.2533400058746338, 0.6042400002479553, 1.0687999725341797, -0.6120399832725525, -0.34084999561309814, -0.43105000257492065, 0.13291999697685242, -0.27981001138687134, -0.39875999093055725, -0.19056999683380127, 0.4637199938297272, 0.3933599889278412, 0.6018999814987183, 0.16147999465465546, -0.2572599947452545, 0.04972999915480614, -0.12302999943494797, -0.010131999850273132, -0.3766399919986725, -0.045747000724077225, 0.2647700011730194, -0.4751800000667572, 0.19294999539852142, -0.03350599855184555, -0.7696599960327148, 0.6013299822807312, 0.0020898000802844763, 0.3170599937438965, -0.3054400086402893, 0.25220999121665955, -0.019572999328374863, 0.7219499945640564, -0.16319000720977783, 0.19155000150203705, -0.037105001509189606, 0.18615999817848206, -0.2165600061416626, -0.33924999833106995, -0.1399800032377243, -0.07266899943351746, -0.21922999620437622, 0.5192199945449829, 0.3541499972343445, -0.17231999337673187, -0.24327999353408813, 0.37116000056266785, -0.06547500193119049, -0.30715999007225037, -0.4252699911594391, 0.28077998757362366, 0.09956999868154526, -0.47909000515937805, 0.1532900035381317, -0.20140999555587769, -0.28286001086235046, -1.3442000150680542, 0.1044899970293045, 0.3509500026702881, -0.18156999349594116, -0.38624000549316406, 0.038182999938726425, 0.05276300013065338, -0.5024700164794922, 0.37810999155044556, -0.7225300073623657, 0.03748900070786476, -0.14437000453472137, -0.08874800056219101, -0.24034999310970306, 0.1064400002360344, 0.02821199968457222, -0.1739100068807602, -0.3609899878501892, -0.32631999254226685, -0.6368799805641174, -0.061330001801252365, 0.1550000011920929, -0.19269999861717224, 0.1465200036764145, -0.34178000688552856, 0.2491600066423416, -0.11614000052213669, 0.3970400094985962, -0.6134300231933594, 0.5999299883842468, -0.1428699940443039, 0.39445000886917114, -2.53439998626709, -0.23270000517368317, -0.17958000302314758, 
-0.30612999200820923, -0.4004400074481964, 0.2884899973869324, 0.5987899899482727, 0.049949001520872116, -0.08252199739217758, 0.08968900144100189, 0.09996499866247177, -0.5506500005722046, 0.08451700210571289, 0.4393799901008606, -1.0611000061035156, 0.14042000472545624, 0.03806599974632263, 0.14395999908447266, 0.5077099800109863, -0.2781200110912323, 0.04969099909067154, -0.616890013217926, -0.026677999645471573, 0.29267001152038574], u'newspaper': [-0.30730998516082764, 0.4069800078868866, 0.15977999567985535, -0.23850999772548676, 0.1147800013422966, -0.3314799964427948, -0.16374999284744263, 0.09027600288391113, -0.30546000599861145, -1.593000054359436, 0.08101200312376022, 0.04630399867892265, 0.6517099738121033, 0.14675000309944153, 0.5857499837875366, 0.4944300055503845, 0.3641299903392792, 0.7697200179100037, 0.05895499885082245, -0.5461999773979187, 0.0782879963517189, 0.08325500041246414, 0.8342199921607971, 0.6284400224685669, -0.19912000000476837, -0.6009299755096436, 0.20201000571250916, 0.15696999430656433, -0.006639199797064066, -0.31632000207901, -0.7090700268745422, -0.02462800033390522, 0.003017100039869547, 0.5487499833106995, -1.0358999967575073, -0.6599000096321106, -0.4434700012207031, -0.17237000167369843, -0.14670999348163605, 0.45142999291419983, -0.2068299949169159, -0.1766500025987625, 0.4735499918460846, 0.3378399908542633, 0.26969999074935913, 0.06473299860954285, 0.6533899903297424, 0.5038400292396545, -0.6890400052070618, 0.015181000344455242, 0.003863200079649687, 0.17815999686717987, 0.2026599943637848, -0.08516400307416916, 0.4244300127029419, 0.5546299815177917, -0.13420000672340393, 0.5739099979400635, 0.04264599829912186, -0.8879299759864807, 0.1914999932050705, 0.24921000003814697, -0.09513500332832336, -0.23045000433921814, -0.033994000405073166, -0.10944999754428864, 0.7758899927139282, -0.45032998919487, -0.05662799999117851, -0.18127000331878662, 0.21140000224113464, 0.08044499903917313, -0.4262399971485138, -0.22808000445365906, -0.19197000563144684, 0.2637999951839447, 0.4447999894618988, 0.2626200020313263, 0.0118410000577569, 0.7133100032806396, -0.39969000220298767, -0.07854700088500977, 0.11367999762296677, -0.10891000181436539, -0.26517000794410706, 0.0896110013127327, -0.8382200002670288, -0.3838199973106384, 0.11282999813556671, 0.6587499976158142, -0.28251999616622925, -0.08085399866104126, -0.13617999851703644, 0.6560199856758118, -0.11629000306129456, 0.3495500087738037, -0.09835900366306305, -0.12183000147342682, 0.0582440011203289, -0.7501599788665771, 0.28130999207496643, 0.3628700077533722, 0.03192500025033951, -0.015713000670075417, 0.5168200135231018, 0.15996000170707703, 0.04761099815368652, 0.13165000081062317, 0.9247699975967407, 0.5555400252342224, -0.22891999781131744, 0.1950400024652481, 0.20635999739170074, -0.6573899984359741, 0.49584999680519104, -0.17538000643253326, -0.33855000138282776, 0.15071000158786774, 0.47088000178337097, -0.5957800149917603, 0.6048799753189087, -0.19862000644207, 0.6237599849700928, -0.07865100353956223, 0.17323000729084015, -0.033201999962329865, 0.14733999967575073, 0.057454001158475876, -0.1983799934387207, -0.0945110023021698, 0.059822000563144684, -0.8823800086975098, 0.06293900310993195, 0.10600999742746353, -0.16006000339984894, 0.42930999398231506, -0.83160001039505, -0.15157000720500946, 0.8477200269699097, 0.08618699759244919, 0.2556299865245819, 0.10948000103235245, -0.4045200049877167, -0.26058998703956604, 0.07059399783611298, 0.10687000304460526, -0.09649199992418289, 
0.63714998960495, 0.006535999942570925, -0.02540699951350689, 0.8781999945640564, 0.10561999678611755, -0.3404799997806549, 0.09895099699497223, -0.2423499971628189, -0.2103700041770935, -0.10920999944210052, 0.34536001086235046, 0.23657000064849854, -0.27087000012397766, 0.37863999605178833, 0.5069900155067444, -0.05421999841928482, -0.052319999784231186, -0.35791000723838806, 0.05279399827122688, -0.1286499947309494, -0.3989799916744232, -0.21326999366283417, 0.02012999914586544, -0.492900013923645, -0.42364001274108887, -1.2933000326156616, -0.013176999986171722, 0.22578999400138855, 0.038339998573064804, 0.29934000968933105, 0.31652000546455383, 0.1512800008058548, 0.4879400134086609, 0.5558300018310547, -0.009119000285863876, -0.299699991941452, 0.15796999633312225, -0.024838000535964966, -0.49873998761177063, -0.3823699951171875, -0.5590400099754333, -0.23284000158309937, -0.30028998851776123, -0.18291999399662018, 0.41640999913215637, 0.29659000039100647, 0.2767300009727478, 0.029954999685287476, 0.5847899913787842, -0.01028400007635355, -0.6733999848365784, -0.6505299806594849, 0.23691999912261963, 0.01497500017285347, 0.17851999402046204, -0.7197200059890747, 0.13078999519348145, -0.5083900094032288, -0.12117999792098999, -0.635200023651123, 0.15898999571800232, 0.1434199959039688, -0.2835499942302704, -0.16263000667095184, 0.2022700011730194, 0.041193000972270966, 0.4404599964618683, 0.4150699973106384, -0.2956799864768982, -0.41547998785972595, -0.5278000235557556, 0.31738999485969543, 0.36789000034332275, 0.5189399719238281, 0.6626099944114685, -0.27195999026298523, 0.41113999485969543, 0.08542200177907944, 0.24936999380588531, -0.4868600070476532, 0.4317300021648407, 0.5577099919319153, -0.266759991645813, -0.2057500034570694, 0.026064999401569366, -0.4562999904155731, -0.16312000155448914, 0.42076998949050903, -0.11445999890565872, 0.013466999866068363, -0.06432099640369415, -0.5005499720573425, -0.6849700212478638, -0.14895999431610107, -0.24595999717712402, -0.20864999294281006, -0.5787100195884705, -0.13812999427318573, 0.5131999850273132, 0.15458999574184418, -0.22178000211715698, 0.0756480023264885, 0.4549799859523773, -0.06296300143003464, -0.18161000311374664, -0.22012999653816223, 0.1345600038766861, 1.3818000555038452, 0.03735300153493881, 0.5512700080871582, 0.25512999296188354, 0.22307999432086945, -1.2972999811172485, 0.3921099901199341, 0.20302000641822815, 0.07729899883270264, -0.3026300072669983, 0.27959001064300537, -0.6104400157928467, 0.1965000033378601, 0.3115200102329254, -0.2716600000858307, -0.2371699959039688, -0.39434999227523804, 0.4754599928855896, 0.19776999950408936, 0.09195899963378906, 0.20183999836444855, 0.15929999947547913, -1.4239000082015991, 0.18322999775409698, 0.39730000495910645, 0.4549500048160553, 0.09599199891090393, 0.04534099996089935, -0.33219999074935913, -0.3828299939632416, 0.1700499951839447, -0.2982499897480011, 0.8048999905586243, -0.028690999373793602, -0.127920001745224, 0.2847500145435333, 0.001383200054988265, 0.40525001287460327, -0.4374699890613556, 0.00181639997754246, -0.09492199867963791, 0.7497000098228455, 0.04679600149393082, 0.20434999465942383, -0.5412399768829346, -0.2263599932193756], u'cookie': [-0.19312000274658203, 0.4603999853134155, -0.22982999682426453, -0.43105000257492065, -0.4259999990463257, 0.19241000711917877, -0.444130003452301, 0.10390999913215637, 0.021369000896811485, -0.18184000253677368, -0.5744100213050842, -0.4541899859905243, -0.09634800255298615, 0.6724100112915039, -0.579509973526001, 
0.1088000014424324, -0.19061000645160675, -0.2016099989414215, -0.3626999855041504, -0.28310999274253845, 0.8441600203514099, 0.23201000690460205, 0.44898998737335205, 0.8156200051307678, 0.3794400095939636, 0.6345999836921692, -0.43015000224113464, 0.03784099966287613, 0.39173999428749084, -0.3254599869251251, -0.5137500166893005, 0.136230006814003, 0.340829998254776, -0.25227001309394836, -0.9487199783325195, 0.6352900266647339, -0.02355399914085865, 0.30480000376701355, -0.36559998989105225, -0.23430000245571136, 0.22145000100135803, -0.22565999627113342, 0.7607600092887878, 0.28373000025749207, -0.37198999524116516, -0.16868999600410461, 0.8530099987983704, -0.31310999393463135, -0.06389900296926498, 0.23062999546527863, 0.23749999701976776, -0.36118999123573303, 0.5897300243377686, 0.4185500144958496, 0.213919997215271, -0.6872900128364563, -0.4117000102996826, 0.2723099887371063, 0.5122500061988831, 0.019411999732255936, 0.13086000084877014, 0.2325800061225891, 0.10219000279903412, -0.10999999940395355, 0.7842199802398682, 0.07413800060749054, -0.027083000168204308, -0.031571000814437866, -0.43942999839782715, 0.04695099964737892, -0.3271600008010864, 0.04274100065231323, -0.1661600023508072, -0.05157899856567383, -0.6259099841117859, 0.4038499891757965, 0.25183001160621643, -0.6873900294303894, -0.2541300058364868, -0.05022900179028511, 0.18398000299930573, 0.03080499917268753, -0.042743999511003494, 0.006098899990320206, 0.17611999809741974, -0.5625200271606445, -0.6109799742698669, 0.1747799962759018, -0.37946000695228577, -0.1539199948310852, -0.8941199779510498, -0.20889000594615936, -0.24435999989509583, -0.510640025138855, -0.28832998871803284, 0.2729499936103821, 0.06664799898862839, 0.9880399703979492, -0.20895999670028687, -0.2776699960231781, -0.086326003074646, 0.1354999989271164, 0.3558500111103058, -0.5968400239944458, -0.0029287999495863914, -0.2555699944496155, -0.06059199944138527, 0.03816099837422371, -0.46731001138687134, 0.4988200068473816, 0.4927000105381012, 0.14930999279022217, -0.36157000064849854, -0.4413500130176544, 0.05173699930310249, 0.26589998602867126, -0.7897199988365173, 0.5476300120353699, -0.03520200029015541, 0.27063998579978943, -0.030632000416517258, 0.022648999467492104, 0.36932000517845154, -0.1276800036430359, -0.3059999942779541, 0.23184999823570251, -0.30024001002311707, 0.14688999950885773, -0.23601000010967255, 0.44312000274658203, 0.21800999343395233, 0.9530400037765503, -0.5293899774551392, 0.2936300039291382, 0.700659990310669, -0.057319000363349915, 0.05388500168919563, 0.2997100055217743, -0.7717599868774414, -0.24028000235557556, 0.48802000284194946, 0.25262999534606934, -0.9348800182342529, -0.23711000382900238, -0.40935999155044556, 0.3003000020980835, -0.3479599952697754, -0.3511500060558319, 0.2804900109767914, -0.3193100094795227, -0.8633300065994263, 0.5430200099945068, 0.5330600142478943, -0.33226001262664795, -0.2011599987745285, -0.24083000421524048, 0.2398499995470047, -0.32554998993873596, 0.06014300137758255, -0.1357399970293045, -0.1284099966287613, -0.3605400025844574, 0.13742999732494354, -0.4205799996852875, -0.10392999649047852, -0.03007899969816208, 0.3254700005054474, 0.565339982509613, 0.42131999135017395, -0.2387000024318695, -0.8555899858474731, -0.05720600113272667, -0.21942999958992004, -0.23904000222682953, -0.2225400060415268, -0.058166999369859695, 0.330049991607666, -0.11000999808311462, 0.2695100009441376, -0.8573600053787231, 0.0352960005402565, -0.23005999624729156, 1.0612000226974487, 
0.08039899915456772, -0.1515199989080429, -0.04024999961256981, 0.8029500246047974, 0.09463900327682495, 0.35683000087738037, 0.43160000443458557, 0.10693000257015228, 0.5728499889373779, 0.01358100026845932, -0.20408999919891357, -0.29350998997688293, -0.2784300148487091, -0.1984499990940094, -0.2492399960756302, 0.3260599970817566, -0.06715799868106842, 0.07972099632024765, -0.26124998927116394, 1.1312999725341797, 0.2468000054359436, -0.26208001375198364, -0.6906499862670898, 0.16997000575065613, -0.02559500001370907, 0.15183000266551971, -0.13259999454021454, 0.02239999920129776, -0.30234000086784363, 0.10091999918222427, 0.0378899984061718, -0.5838900208473206, 0.7681300044059753, -0.3319700062274933, -0.00601059990003705, 0.18002000451087952, 0.15067000687122345, -0.2787800133228302, 0.9355700016021729, 0.43220001459121704, 0.2856200039386749, -0.4436500072479248, 0.08560500293970108, 0.6610900163650513, -0.04906199872493744, 0.6442899703979492, -0.526669979095459, -0.22610999643802643, -0.14316999912261963, -0.5916699767112732, 0.11477000266313553, 0.5470100045204163, -0.09187500178813934, -0.10326000303030014, 0.3004699945449829, -0.784250020980835, 0.10803999751806259, 0.17802999913692474, -0.25582000613212585, -0.37494999170303345, -0.0933379977941513, -0.8957700133323669, -0.135220006108284, -0.14359000325202942, 0.19271999597549438, 0.6233000159263611, -0.048215001821517944, 0.4028100073337555, -0.06640099734067917, 0.2523699998855591, 0.5831999778747559, 0.6037200093269348, -0.13360999524593353, 0.8317599892616272, -0.7384799718856812, 0.06101600080728531, 0.3824400007724762, -0.6099799871444702, -0.015277000144124031, -0.43230998516082764, 0.200080007314682, 0.6631699800491333, 0.41819000244140625, -0.3377299904823303, 0.04100799933075905, 0.5042499899864197, -0.14079000055789948, -0.27048999071121216, -0.2095700055360794, 0.4482100009918213, 0.27651000022888184, 0.7685400247573853, -0.11249999701976776, -0.2320300042629242, -0.39076998829841614, -1.0170999765396118, -0.6118199825286865, -0.07584399729967117, 0.21800999343395233, -0.8177499771118164, -0.1507200002670288, 0.0581820011138916, 0.35839998722076416, 0.4097900092601776, -0.10546000301837921, 0.15092000365257263, 0.059450000524520874, -0.19833999872207642, -0.08398199826478958, 0.02351599931716919, 0.2957499921321869, -0.18714000284671783, -0.2366199940443039, -0.09665899723768234, -0.016333000734448433, 0.07109799981117249, -0.4509899914264679], u'key': [0.20242999494075775, 0.08856700360774994, 0.04771500080823898, -0.6781299710273743, 0.40773001313209534, -0.23218999803066254, -0.31255999207496643, -0.011064000427722931, -0.6118299961090088, -1.5499999523162842, -0.4703499972820282, -0.2791900038719177, -0.17414000630378723, 0.2552199959754944, -0.018821999430656433, -0.22347000241279602, -0.03523100167512894, -0.3171600103378296, -0.08700200170278549, 0.23196999728679657, -0.28148001432418823, -0.19469000399112701, 0.358489990234375, -0.4288800060749054, -0.17925000190734863, -0.20884999632835388, -0.3470500111579895, 0.2455500066280365, -0.2823899984359741, 0.16143999993801117, 0.122809998691082, 0.19155000150203705, -0.579509973526001, 0.4797700047492981, -0.7302299737930298, 0.15448999404907227, 0.3006100058555603, 0.347680002450943, -0.5254899859428406, -0.5952600240707397, -0.03514900058507919, -0.24679000675678253, -0.48576998710632324, 0.014063999988138676, -0.6512899994850159, 0.19732999801635742, -0.20478999614715576, 0.040998999029397964, -0.7447699904441833, 0.37334001064300537, 0.5678099989891052, 
0.010342000052332878, -0.07988899946212769, -0.0544469989836216, -0.34665998816490173, 0.025978999212384224, 0.1705400049686432, 0.19091999530792236, 0.22036999464035034, 0.4023500084877014, 0.2953700125217438, 0.3255400061607361, -0.1555899977684021, 0.12338999658823013, 0.10397999733686447, 0.006201600190252066, 0.30967000126838684, 0.30160000920295715, 0.12060999870300293, 0.2734600007534027, -0.5830000042915344, 0.34995999932289124, 0.11905000358819962, -0.40386998653411865, 0.13321000337600708, 0.010333999991416931, 0.22220000624656677, 0.1343899965286255, -0.4211300015449524, -0.004214299842715263, 0.19167999923229218, -0.4560700058937073, 0.11488000303506851, 0.22594000399112701, 0.2519800066947937, 0.15317000448703766, -0.2848699986934662, 0.1420699954032898, -0.4506100118160248, -0.36493998765945435, 0.33153000473976135, 0.2414499968290329, -0.40867000818252563, -0.21794000267982483, 0.3165600001811981, 0.03226400166749954, -0.05770900100469589, -0.29653000831604004, 0.24018000066280365, -0.46303001046180725, -0.4139699935913086, 0.4143800139427185, 0.25821998715400696, 0.05364200100302696, 0.045226000249385834, -0.35705000162124634, -0.1741899996995926, 0.05075899884104729, -0.29357999563217163, -0.1712000072002411, 0.2890799939632416, -0.11612000316381454, -0.06818100064992905, 0.20592999458312988, -0.020170999690890312, 0.20300999283790588, 0.24045999348163605, 0.11889000236988068, -0.2276100069284439, -0.38453999161720276, -0.00886279996484518, -0.2614699900150299, 0.2725299894809723, -0.36002999544143677, -0.2535899877548218, 0.08224000036716461, -0.15126000344753265, -0.3108699917793274, -0.041437000036239624, -0.34161001443862915, -0.11196999996900558, 0.09959100186824799, -0.4554300010204315, -0.13485999405384064, 0.2209099978208542, -0.10107000172138214, 0.41690000891685486, -0.3590700030326843, 0.5817899703979492, 0.19002999365329742, 0.2584500014781952, -0.3565399944782257, 0.2542800009250641, 0.32433000206947327, -0.6231499910354614, 0.3699600100517273, -0.34970998764038086, -0.04836500063538551, -0.473580002784729, -0.23916000127792358, 0.2921000123023987, 0.10814999788999557, -0.009099000133574009, 0.08623600006103516, 0.2246599942445755, -0.6055300235748291, -0.19307999312877655, -0.049550000578165054, -0.004373900126665831, 0.3918200135231018, -0.16825999319553375, -0.007255000062286854, -0.044165000319480896, -0.05655699968338013, -0.1771100014448166, -0.030894000083208084, 0.2610599994659424, -0.08443699777126312, -0.11716999858617783, 0.7537099719047546, 0.26872000098228455, -0.5215399861335754, -0.620140016078949, 0.3102000057697296, 0.1468999981880188, 0.18977999687194824, 0.42719000577926636, -0.0903559997677803, -0.3095400035381317, 0.35607999563217163, -0.22380000352859497, 0.056999001652002335, 0.0814799964427948, -0.14680999517440796, -0.4054200053215027, -0.0602790005505085, -0.3032500147819519, 0.017544999718666077, -0.17704999446868896, 0.13269999623298645, 0.17598000168800354, 0.08682499825954437, -0.06350699812173843, 0.25701001286506653, 0.5196599960327148, -0.4370500147342682, -0.11231999844312668, -0.3031499981880188, -0.18966999650001526, -0.06819300353527069, 0.938539981842041, -0.12520000338554382, 0.055911000818014145, -0.15692000091075897, -0.3989199995994568, -0.05561500042676926, -0.03918899968266487, -0.18455000221729279, 0.12713000178337097, 0.36103999614715576, 0.3839299976825714, -0.2980700135231018, 0.5496500134468079, -0.06895899772644043, 0.23598000407218933, 0.6028100252151489, 0.015204000286757946, 0.42243000864982605, 
0.3174299895763397, 0.29861000180244446, 0.039354998618364334, -0.3468700051307678, 0.23781999945640564, 0.45629000663757324, 0.3182699978351593, 0.06888800114393234, -0.3374300003051758, 0.6126899719238281, -0.310699999332428, 0.08322200179100037, 0.3137199878692627, -0.1592700034379959, 0.22485999763011932, 0.3975600004196167, 0.3168199956417084, 0.6861900091171265, 0.16592000424861908, 0.12291999906301498, 0.23845000565052032, 0.20995000004768372, -0.03892400115728378, 0.396369993686676, 0.297760009765625, -0.2258799970149994, -0.46171998977661133, -0.11958999931812286, -0.0032132999040186405, 0.35899999737739563, -0.22176000475883484, -0.26927998661994934, -0.14256000518798828, -0.43773001432418823, -0.0037086999509483576, -0.35328999161720276, -0.31953999400138855, -0.4985499978065491, -0.17091000080108643, -0.11553999781608582, -0.04817099869251251, 0.44444000720977783, 0.2610900104045868, -0.16493000090122223, 0.3541199862957001, 0.302949994802475, -0.4073199927806854, 0.11166000366210938, -0.013590999878942966, 0.06391599774360657, -0.13605999946594238, 0.16518999636173248, 0.09781300276517868, 0.06664899736642838, -0.2898100018501282, 0.3146600127220154, -0.552619993686676, 0.0030686999671161175, -1.6970000267028809, -0.09374500066041946, 0.76214998960495, -0.08006999641656876, -0.25, 0.31786999106407166, -0.2547599971294403, -0.24864999949932098, 0.2868799865245819, -0.6273699998855591, 0.10931000113487244, -0.3528900146484375, -0.012397999875247478, -0.3555299937725067, 0.16419999301433563, -0.5167499780654907, 0.20555000007152557, 0.15859000384807587, 0.03828300163149834, 1.080199956893921, -0.14722999930381775, -0.8141000270843506, -1.0226999521255493, 0.16731999814510345], u'pasta': [-0.2415499985218048, 0.2994900047779083, 0.5929399728775024, 0.057443998754024506, 0.09962400048971176, 0.11980000138282776, -0.6064500212669373, 0.16256000101566315, 0.16268999874591827, -0.4401400089263916, -0.07220099866390228, -0.10023000091314316, 0.19926999509334564, 0.8849499821662903, 0.03638700023293495, -0.7399399876594543, -0.3799299895763397, 0.32760998606681824, -0.05830800160765648, 0.711080014705658, -0.30404001474380493, -0.1667499989271164, 0.13189999759197235, 0.22293999791145325, 0.0689380019903183, 0.015533000230789185, -0.4648599922657013, -0.19754000008106232, 0.1606599986553192, -0.9891700148582458, -0.35954999923706055, 0.6069999933242798, -0.16214999556541443, 0.2637700140476227, -0.29646000266075134, 0.9060099720954895, -0.027939999476075172, -0.39017000794410706, -0.4977000057697296, 0.36768001317977905, 0.09586399793624878, -0.05053900182247162, 0.31624001264572144, 0.16898000240325928, 0.23176999390125275, -0.2014400064945221, 0.7877200245857239, 0.2132599949836731, 0.5105000138282776, 0.5596399903297424, 0.10876999795436859, 0.3078399896621704, 0.4131999909877777, 0.12150000035762787, -0.25064998865127563, -0.26554998755455017, -0.30316001176834106, 0.08587200194597244, 0.6567999720573425, 0.13922999799251556, 0.5369499921798706, -0.5534499883651733, 0.30882999300956726, -0.469539999961853, -0.48945000767707825, -0.10815999656915665, -0.6630100011825562, 0.44756001234054565, -0.48532000184059143, 0.27998000383377075, 0.8120700120925903, -0.13412000238895416, 0.09437499940395355, -0.2904900014400482, -0.2407499998807907, 0.5233500003814697, 0.5052800178527832, 0.14248999953269958, -0.7584999799728394, -0.33779001235961914, -0.6112499833106995, 0.5449399948120117, 0.21336999535560608, 0.315310001373291, 0.7693799734115601, -0.050634998828172684, 0.08610700070858002, 
0.0752979964017868, -0.05889900028705597, -0.1871899962425232, 0.39256998896598816, -0.07105600088834763, 0.06014600023627281, -0.12039999663829803, -0.06995099782943726, 0.4339999854564667, -0.38078999519348145, 0.046796999871730804, -0.2084600031375885, 0.7945399880409241, 0.10444000363349915, -0.28407999873161316, 0.14620999991893768, -0.5472699999809265, -0.4702000021934509, -0.2402999997138977, -0.24612000584602356, 0.17659999430179596, -0.1762000024318695, 0.21829000115394592, 0.3522300124168396, 0.5623800158500671, -0.07896199822425842, -0.7702599763870239, 0.04942600056529045, -0.22325000166893005, -1.1533000469207764, 0.12555000185966492, 0.5914700031280518, 0.6055999994277954, -0.44402000308036804, 0.4128299951553345, 0.3674199879169464, 0.370959997177124, 0.15815000236034393, 0.264629989862442, -0.14753000438213348, 0.7218899726867676, -0.5721700191497803, 0.8036500215530396, 0.289029985666275, 0.3666200041770935, -0.0205329991877079, 0.3530200123786926, -0.09975700080394745, -0.290910005569458, 0.034233998507261276, -0.2932800054550171, -0.14403000473976135, 0.2559399902820587, 0.359499990940094, 0.3710399866104126, -0.11941999942064285, -0.15987999737262726, -0.5122799873352051, 0.05214200168848038, -0.1697400063276291, -0.16896000504493713, 0.5455800294876099, -0.4839699864387512, -0.7077900171279907, 0.5088000297546387, -0.1626800000667572, -0.8128499984741211, -0.483599990606308, -0.736549973487854, 0.054958999156951904, -0.5171399712562561, -0.21872000396251678, -0.1292400062084198, -0.16717000305652618, 0.04520399868488312, -0.3771600127220154, 0.2903900146484375, 0.47172001004219055, -0.026009999215602875, -0.1373099982738495, 0.027388999238610268, -0.13541999459266663, -0.6532400250434875, 0.16523000597953796, 0.6921200156211853, -0.3355099856853485, 0.11816000193357468, -0.13356000185012817, 0.050335001200437546, -0.5526599884033203, 0.1734900027513504, 0.49199000000953674, -0.6490600109100342, 0.32238999009132385, 0.3323400020599365, 0.4060100018978119, 0.06520500034093857, -0.26047998666763306, 0.623769998550415, 0.46849000453948975, 0.4299199879169464, 0.1628199964761734, -0.13860000669956207, -0.15198999643325806, 1.4271999597549438, -0.5270299911499023, 0.3458000123500824, -0.2841399908065796, 0.23190000653266907, -0.4833100140094757, -0.3743399977684021, -0.29877999424934387, 0.22479000687599182, -0.06955400109291077, -0.007711499929428101, 1.176300048828125, 0.5738300085067749, 0.1412699967622757, 0.17542000114917755, 0.735759973526001, -0.07966499775648117, -0.16891999542713165, -0.3059200048446655, 0.27542001008987427, -0.14865000545978546, 0.647159993648529, 0.05118300020694733, -0.272489994764328, 0.11174999922513962, 0.6114100217819214, -0.6471099853515625, -0.26447999477386475, 0.2320300042629242, 0.3822399973869324, 0.3841699957847595, -0.52156001329422, -0.33059000968933105, -0.0037146001122891903, -0.7662000060081482, 0.5524200201034546, -0.49503999948501587, -0.2354000061750412, -0.4162899851799011, -0.21671000123023987, -0.008435600437223911, -0.060054000467061996, -0.03999999910593033, 1.166200041770935, -0.14031000435352325, 0.45612001419067383, 0.32517001032829285, -0.5694800019264221, -0.5034400224685669, 0.13562999665737152, -0.31200000643730164, -0.0010083999950438738, 0.010883999988436699, -0.41376999020576477, 0.016592999920248985, -0.36059001088142395, 0.011615999974310398, -0.12861000001430511, -0.8875200152397156, 0.751550018787384, 0.28431999683380127, 0.3939000070095062, 0.3860599994659424, -0.21366000175476074, -0.07703900337219238, 
-0.19095000624656677, 0.34066998958587646, -0.3636400103569031, 0.4984399974346161, -0.028704000636935234, -0.9305400252342224, 0.08826299756765366, -0.4257600009441376, 0.6523699760437012, -0.06792300194501877, -0.38040000200271606, 0.3942300081253052, -0.005965900141745806, -0.28867998719215393, -0.9074199795722961, -0.273140013217926, -0.37999001145362854, 1.0231000185012817, 0.19292999804019928, -0.04098200052976608, -0.289139986038208, 0.03792700171470642, -0.9358000159263611, -0.4061500132083893, 0.6666399836540222, 0.3924799859523773, 0.21285000443458557, -0.23631000518798828, 0.0671909973025322, 0.14517000317573547, 0.8532500267028809, -0.15503999590873718, 0.642579972743988, 0.49178001284599304, -0.11457999795675278, -0.23726999759674072, 0.2781899869441986, 0.4338200092315674, -0.16825999319553375, -1.0714000463485718, 0.049424998462200165, -0.748449981212616, -0.3986400067806244, 0.037801001220941544], u'paste': [0.07474400103092194, 0.059773001819849014, 0.4459100067615509, -0.6015300154685974, -0.667169988155365, -0.19972999393939972, 0.00398290017619729, 0.2663399875164032, 0.6725599765777588, -0.2047100067138672, -0.0312809981405735, -0.08782300353050232, 0.5356900095939636, 0.3782300055027008, -0.13169999420642853, -0.2048799991607666, -0.7049000263214111, 0.4551900029182434, -0.7262399792671204, -0.27959001064300537, -0.3297500014305115, -0.3117299973964691, -0.05013199895620346, 0.3831000030040741, -0.8070499897003174, -0.1581999957561493, -0.22190000116825104, 0.23759999871253967, -0.15431000292301178, -0.5090199708938599, -0.3876200020313263, -0.13513000309467316, 0.02454799972474575, 0.040049001574516296, -0.19990000128746033, 0.7958099842071533, -0.1090800017118454, -0.06096800044178963, 0.10525999963283539, -0.312389999628067, -0.09356500208377838, -0.23431000113487244, -0.049501001834869385, 0.06668300181627274, 1.1450999975204468, -0.6010500192642212, 0.367000013589859, 0.7564399838447571, -0.5087900161743164, -0.3775100111961365, 0.4742799997329712, -0.058035001158714294, 0.5221800208091736, 0.046838000416755676, 0.16506999731063843, -0.27678999304771423, 0.21216000616550446, -0.4522300064563751, 0.046792998909950256, 0.0024244000669568777, -0.14111000299453735, 0.13624000549316406, 0.553820013999939, 0.34393998980522156, 0.005389600060880184, 0.19357000291347504, -0.04738900065422058, 0.022009000182151794, 0.7805399894714355, -0.27814000844955444, -0.15976999700069427, -0.7082099914550781, -0.8752700090408325, 0.10593000054359436, -0.30404001474380493, 0.1898300051689148, 0.9958699941635132, -0.2556599974632263, 0.23191000521183014, -0.3417600095272064, -0.22753000259399414, -0.05607600137591362, -0.10028000175952911, -0.745959997177124, -0.26719000935554504, 0.04391000047326088, 0.12013000249862671, -0.42430999875068665, -0.08682800084352493, -0.3877899944782257, -0.4499500095844269, 0.09358300268650055, -0.03013800084590912, -0.45118001103401184, -0.4161199927330017, -0.31345000863075256, -0.44255000352859497, 0.5108500123023987, 0.16046999394893646, 0.1449500024318695, 0.5322399735450745, -0.32260000705718994, 0.6275699734687805, -0.7385600209236145, -0.469870001077652, -0.3183700144290924, -0.2290399968624115, -0.17598000168800354, 0.0022704999428242445, 0.038001999258995056, 0.4696800112724304, 0.3089900016784668, -0.1222200021147728, -0.4627299904823303, -0.41018998622894287, 0.4081000089645386, -0.5718200206756592, 0.8895800113677979, 0.06181799992918968, -0.3072499930858612, -0.0137320002540946, -0.6422899961471558, 0.2350499927997589, 
0.09070199728012085, 0.049222998321056366, -0.28925999999046326, 0.15052999556064606, 0.38453999161720276, -0.1368899941444397, 0.7184699773788452, 0.2993200123310089, 0.6431900262832642, -0.2749600112438202, 0.1739400029182434, -0.6269299983978271, -0.5732899904251099, -0.3161799907684326, 0.022075999528169632, 0.03407299891114235, 0.760890007019043, 0.44762998819351196, 0.6376500129699707, -0.50559002161026, -1.1581000089645386, -0.16249999403953552, 0.41999998688697815, -0.01975500024855137, 0.0977109968662262, 0.3932499885559082, -0.2644999921321869, -1.0095000267028809, 0.5809199810028076, 0.008154500275850296, -0.35929998755455017, -0.8395299911499023, -0.5126299858093262, -0.03095499984920025, -0.5033699870109558, 0.2913399934768677, 0.1796099990606308, 0.3535099923610687, -0.20488999783992767, 0.4033200144767761, -0.03164200112223625, 0.3745799958705902, -0.3501499891281128, 0.14657999575138092, 0.7221300005912781, -0.1985500007867813, -0.7648800015449524, -0.3501800000667572, 0.10745000094175339, -0.17101000249385834, 0.2573600113391876, -0.5101900100708008, -0.15669000148773193, 0.048516999930143356, -0.1410199999809265, 0.606939971446991, -0.569320023059845, 0.06452900171279907, 0.39726001024246216, -0.25887998938560486, -0.3904399871826172, 0.030879000201821327, -0.7800300121307373, 0.9207299947738647, 0.3490000069141388, 0.3681600093841553, -0.009889299981296062, 0.3337000012397766, 1.2927000522613525, 0.05779600143432617, 0.3101100027561188, 0.6121399998664856, -0.18725000321865082, -0.16957999765872955, 0.6626999974250793, -0.30726999044418335, -0.20392000675201416, -0.2452400028705597, -0.08204899728298187, 0.6903899908065796, 1.1955000162124634, 0.007128399796783924, 0.4507000148296356, -0.011869999580085278, 0.18125000596046448, -0.7352200150489807, 0.01553799957036972, 0.14151999354362488, -0.14069999754428864, -0.3285199999809265, 0.45524001121520996, -0.3980900049209595, 0.47628000378608704, -0.12880000472068787, -0.13968999683856964, -0.23261000216007233, 0.5061399936676025, -0.421779990196228, 0.11940000206232071, -0.5628200173377991, -0.3119499981403351, -0.7191299796104431, -0.09365399926900864, 0.2624799907207489, 0.1066799983382225, 0.2688800096511841, -0.328139990568161, -0.09262599796056747, 0.06772799789905548, 0.19652000069618225, -0.13470999896526337, 0.6363700032234192, 0.18053999543190002, 0.13496999442577362, 0.022274000570178032, -0.5343000292778015, -0.5853000283241272, -0.16868999600410461, -0.14055000245571136, -0.8491899967193604, -0.23486000299453735, -0.5081599950790405, 0.47268998622894287, 0.5321499705314636, 0.14910000562667847, 0.02863200008869171, -0.9589800238609314, 0.1738000065088272, -0.6042900085449219, -0.5362899899482727, -0.2865299880504608, -0.028243999928236008, 0.649590015411377, -0.3788500130176544, 0.3166100084781647, 0.03891900181770325, 0.4622200131416321, -0.8024700284004211, 0.40421000123023987, -0.10805000364780426, -0.4042699933052063, 0.5980600118637085, -0.6228899955749512, -0.49889999628067017, 0.22148999571800232, -0.12466999888420105, -0.31022000312805176, 0.18285000324249268, -0.5053600072860718, 0.10255999863147736, 0.4614199995994568, -0.20055000483989716, 0.862529993057251, -0.010471999645233154, -0.6659200191497803, -0.6873300075531006, -0.36024001240730286, -0.007354999892413616, 0.16354000568389893, -0.350490003824234, 0.8148199915885925, -0.31433001160621643, 0.03402699902653694, 0.39535000920295715, -0.28593000769615173, 0.09355200082063675, 0.7666400074958801, -0.3010300099849701, 0.00407630018889904, 
0.1791200041770935, -0.10089000314474106, 0.11641000211238861, -0.1368899941444397, 0.1704699993133545, -0.35514000058174133, -0.5421000123023987, -0.1954600065946579], u'card': [0.19724999368190765, 0.363070011138916, -0.006536299828439951, -0.13093000650405884, 0.09788099676370621, 0.10672000050544739, -0.1543000042438507, -0.031741999089717865, -0.051711998879909515, -0.9383299946784973, 0.4140799939632416, 0.20886999368667603, 0.6369900107383728, -0.6332100033760071, -0.17270000278949738, -0.22407999634742737, 0.20128999650478363, -0.43268999457359314, -0.2550700008869171, -0.13044999539852142, 0.44514000415802, -0.5927900075912476, -0.6276500225067139, -0.415149986743927, 0.40790998935699463, -0.06016099825501442, 0.1886100023984909, 0.5677099823951721, 0.16857999563217163, 0.06992600113153458, -0.31033000349998474, 0.3191100060939789, 0.20722000300884247, -0.418179988861084, -1.8162000179290771, -0.22084000706672668, 0.09088499844074249, -0.6320899724960327, -0.756850004196167, -0.011296999640762806, -0.10835999995470047, -0.162540003657341, 0.43498000502586365, 0.6968500018119812, -0.012919999659061432, -0.19296999275684357, 0.4190399944782257, -0.33156999945640564, -0.3321700096130371, -0.5793300271034241, 0.13878999650478363, 0.36684998869895935, 0.3218899965286255, 0.16017000377178192, -0.1809300035238266, -0.790149986743927, -0.5271099805831909, 0.12996000051498413, 0.05316599830985069, -0.5544700026512146, 0.4474300146102905, -0.003952099941670895, -0.375, 0.11905000358819962, -0.04835699871182442, -0.18988999724388123, 0.022412000223994255, -0.5875899791717529, 0.25898000597953796, -0.3413099944591522, 0.6186299920082092, 0.10907000303268433, 0.48197001218795776, -0.11946000158786774, 0.14188000559806824, 0.1074799969792366, 0.10221999883651733, -0.5980799794197083, 0.6488699913024902, -0.36847999691963196, 0.249099999666214, -0.04358899965882301, 0.1609400063753128, -0.21638000011444092, 0.6061699986457825, -0.532829999923706, -0.9720900058746338, -0.31505998969078064, -0.28929999470710754, 0.47762998938560486, -0.29416999220848083, 0.22115999460220337, -0.29864999651908875, -0.3831300139427185, 0.5752900242805481, 0.6204599738121033, -0.5260499715805054, 0.2750200033187866, 0.08338599652051926, -1.4884999990463257, -0.08667699992656708, -0.11037000268697739, 0.2700299918651581, -0.17166000604629517, 0.2218399941921234, -0.06063000112771988, 0.04717100039124489, 0.45385000109672546, -0.5358899831771851, -0.1408499926328659, 0.3043400049209595, 0.19571000337600708, 0.2436700016260147, 0.5813900232315063, 0.33246999979019165, 0.26798000931739807, -0.028144000098109245, 0.4720200002193451, -0.4269999861717224, -0.12646999955177307, 0.00361949997022748, -0.12772999703884125, 0.09014099836349487, 0.6222699880599976, 0.2743400037288666, 0.2474599927663803, 0.18348999321460724, -0.5975900292396545, 0.08799099922180176, -0.3322199881076813, 0.3427700102329254, 0.20140999555587769, 0.2762100100517273, 0.006245899945497513, -0.006750499829649925, 0.031975001096725464, 0.7275199890136719, -0.059519000351428986, 0.3593200147151947, 0.44916000962257385, -0.18613000214099884, 0.16562999784946442, 0.3513700067996979, 0.24275000393390656, 0.631380021572113, 0.16064000129699707, -0.5336899757385254, -0.45750999450683594, -0.08601000159978867, -0.18639999628067017, 0.12156999856233597, 0.03339599817991257, 0.3703100085258484, -0.6147699952125549, 0.1200300008058548, -0.11274000257253647, -0.14285999536514282, -0.3878200054168701, -0.7418400049209595, -0.2835899889469147, 0.49974000453948975, 
0.11546999961137772, -0.013394000008702278, 0.08040899783372879, 0.4836899936199188, 0.3345699906349182, -0.1299699991941452, 0.09009599685668945, -0.05691299960017204, -0.2110999971628189, 0.10664000362157822, 0.014271000400185585, -0.7516400218009949, 0.15053999423980713, -0.06260699778795242, -0.7819300293922424, -0.35249000787734985, -0.14232000708580017, -0.003310699947178364, 0.2946299910545349, 0.29027000069618225, 0.03114200010895729, 0.6246899962425232, 0.3472500145435333, -0.00025993998860940337, -0.5097100138664246, 0.07053499668836594, 0.2267400026321411, 0.323199987411499, 0.3244200050830841, 0.26177000999450684, 0.866320013999939, 0.5278900265693665, 0.16214999556541443, -0.1512400060892105, -0.2312300056219101, 0.07147300243377686, 0.5040000081062317, -0.2674799859523773, 0.21177999675273895, 0.5737900137901306, 0.09273099899291992, 0.39645999670028687, -0.4047900140285492, 0.22015999257564545, -0.7876499891281128, 0.26660001277923584, 0.43261000514030457, 0.017820000648498535, -0.12839999794960022, 0.019742999225854874, -0.5356900095939636, 0.6377099752426147, -0.37459999322891235, -0.14381000399589539, -0.12707999348640442, 0.043278999626636505, 0.23930999636650085, -0.021555999293923378, 0.8281599879264832, -0.39998000860214233, -0.1785999983549118, -0.4397200047969818, 0.3400700092315674, 0.3428100049495697, 0.020316999405622482, -0.08626099675893784, -0.37553998827934265, 0.34929999709129333, 0.2809999883174896, -0.8827400207519531, 0.0777750015258789, -0.20419999957084656, 0.1775899976491928, 0.35124000906944275, -0.0652410015463829, -0.1084199994802475, -0.09232600033283234, 0.4076800048351288, 0.3095000088214874, -0.003758900100365281, 0.8249800205230713, 0.6673399806022644, -0.4135099947452545, -0.2339400053024292, -0.3540099859237671, -0.006112799979746342, -0.14883999526500702, 0.36263999342918396, -0.351610004901886, 0.3976899981498718, -0.07095500081777573, -0.1633799970149994, -0.05338900163769722, 0.21074999868869781, -0.17486000061035156, -0.291810005903244, 0.32537001371383667, 0.16092999279499054, -0.14063000679016113, -0.6914799809455872, 0.11873000115156174, -0.058118999004364014, 0.5812199711799622, 0.022280000150203705, -0.05697999894618988, -0.26631999015808105, -0.19961999356746674, -0.16053999960422516, -0.08043599873781204, 0.31516000628471375, 0.052629999816417694, 0.09787199646234512, 0.00622300012037158, -0.19530999660491943, -0.14122000336647034, -1.7589000463485718, -0.042010001838207245, -0.08443500101566315, 0.053300999104976654, -0.0720909982919693, 0.19603000581264496, -0.5868899822235107, 0.01584099978208542, -0.766730010509491, -0.042433999478816986, 0.057757001370191574, -0.05964000150561333, -0.3288300037384033, -0.41967999935150146, 0.4133700132369995, 0.10408999770879745, -0.38339000940322876, -0.057829998433589935, 0.24494999647140503, 0.049598000943660736, 0.9495499730110168, 0.11214999854564667, 0.0611019991338253, -1.093999981880188], u'kitchen': [0.1882999986410141, 0.2495799958705902, 0.04334399849176407, -0.7210699915885925, 0.2934100031852722, 0.32433000206947327, -0.2713100016117096, -0.39779001474380493, 0.13071000576019287, -0.778980016708374, -0.07216600328683853, 0.0131029998883605, 0.5432500243186951, 0.28507000207901, 0.05520499870181084, -0.03379099816083908, -0.012879000045359135, 0.01983099989593029, 0.17389999330043793, 0.27636000514030457, 0.07988499850034714, 0.4157699942588806, -0.06105799973011017, -0.042344000190496445, 0.14100000262260437, -0.27893999218940735, -0.04294100031256676, 0.15639999508857727, 
0.5557799935340881, -0.11274000257253647, -0.03282000124454498, 0.6266999840736389, -0.41617000102996826, 0.24958999454975128, -0.650950014591217, 0.8596900105476379, -0.25023001432418823, -0.15288999676704407, -0.21809999644756317, -0.25115999579429626, 0.24252000451087952, -0.12785999476909637, -0.05236800014972687, -0.10830999910831451, 0.13925999402999878, 0.1805800050497055, 0.35780999064445496, -0.17010000348091125, 0.10245999693870544, -0.17610999941825867, 0.20520000159740448, -0.027307000011205673, 0.5910400152206421, -0.1344199925661087, -0.4666700065135956, -0.2737399935722351, 0.00825599953532219, -0.05016399919986725, -0.016307000070810318, 0.023679999634623528, 0.046658001840114594, -0.40542998909950256, 0.2463800013065338, 0.641260027885437, -0.5363100171089172, -0.825190007686615, 0.11849000304937363, 0.25953999161720276, -0.7239000201225281, -0.5614799857139587, 0.07575800269842148, -0.5997700095176697, -0.22429999709129333, 0.1774500012397766, -0.5881100296974182, 0.27496999502182007, -0.3549000024795532, 0.24111999571323395, -0.3387700021266937, -0.7327399849891663, -0.056706998497247696, 0.546720027923584, 0.16752000153064728, -0.20333999395370483, 0.47152000665664673, -0.261570006608963, -0.18310999870300293, 0.32100000977516174, -0.15699000656604767, -0.538569986820221, 0.4124799966812134, -0.19425000250339508, 0.4008699953556061, -0.19088999927043915, -0.300709992647171, -0.518310010433197, 0.0484049990773201, -0.3116999864578247, 0.2987099885940552, -0.6071199774742126, -0.286980003118515, 0.09414699673652649, -0.01996699906885624, -0.44453001022338867, 0.1403599977493286, -0.7457600235939026, 0.5160800218582153, 0.18178999423980713, -0.17441999912261963, 0.6547999978065491, -0.21053999662399292, 0.07117900252342224, 0.024986999109387398, -0.6736699938774109, -0.5777300000190735, -0.3114199936389923, -0.5563499927520752, -0.1548900008201599, -0.7103000283241272, -0.2806699872016907, -0.10220000147819519, 0.4419800043106079, -0.23559999465942383, 0.1196800023317337, -0.10656999796628952, 0.06384100019931793, -0.07205300033092499, -0.24406999349594116, -0.016788000240921974, 0.2318899929523468, 0.6017500162124634, 0.025885000824928284, 0.5277100205421448, -0.058660998940467834, 0.40077999234199524, -0.2781200110912323, 0.17430999875068665, 0.5049399733543396, -0.30542999505996704, 0.12939000129699707, 0.28321000933647156, 0.10249000042676926, 0.4066300094127655, -0.21074000000953674, -0.12331999838352203, 0.92330002784729, 0.37310001254081726, 0.24753999710083008, 0.025839999318122864, -0.4038200080394745, -0.3134300112724304, 0.5834000110626221, 0.32653000950813293, 0.005832499824464321, -0.16294999420642853, 0.24397000670433044, -0.6522300243377686, 0.35877999663352966, -0.12146999686956406, -0.022433999925851822, 0.6517599821090698, 0.562690019607544, 0.37907999753952026, 0.005012399982661009, 0.19349999725818634, 0.42976000905036926, 0.227400004863739, 0.006583999842405319, 0.4421899914741516, 0.19606000185012817, -0.4025300145149231, 0.23023000359535217, -0.19645999372005463, 0.19767999649047852, -0.7100899815559387, 0.4006499946117401, -0.21589000523090363, 0.24966000020503998, 0.5826299786567688, -0.7846900224685669, -0.11862000077962875, -0.14042000472545624, 0.21550999581813812, 0.16971999406814575, -0.000969930028077215, 0.2119700014591217, 0.6227800250053406, 0.7246999740600586, 0.31136998534202576, 0.002502199960872531, 0.517579972743988, 0.6261900067329407, -0.9153500199317932, 0.019060999155044556, -0.4207000136375427, 0.3684999942779541, 
-0.5485699772834778, 0.24741999804973602, -0.8594300150871277, 0.05429299920797348, 0.35326001048088074, 0.04369699954986572, 0.22725999355316162, 0.38113000988960266, 0.3151000142097473, -0.19860999286174774, 0.5117899775505066, -0.48445001244544983, -0.39416998624801636, -0.30737000703811646, -0.45100000500679016, -0.20221999287605286, -0.384799987077713, -0.08188900351524353, 0.44203001260757446, 0.32190999388694763, 0.14635999500751495, -0.45375001430511475, 0.1109900027513504, 0.1429000049829483, 0.264710009098053, 0.2534500062465668, -0.2648699879646301, -0.07571300119161606, 0.08168400079011917, -0.3759799897670746, -0.11631999909877777, 0.1399800032377243, 0.02987699955701828, -0.31174999475479126, -0.004738899879157543, -0.4505999982357025, -0.5667999982833862, -0.035314999520778656, 0.3040899932384491, 0.5339900255203247, 0.12283000349998474, 0.05163700133562088, -0.9492300152778625, -0.07885999977588654, 0.4176200032234192, -0.1673000007867813, -0.30164000391960144, 0.3057999908924103, -0.1828799992799759, -0.08071999996900558, 0.05821700021624565, 0.23405000567436218, -0.2560400068759918, -0.13062000274658203, 0.25099998712539673, -0.2232300043106079, 0.3101100027561188, 0.044982001185417175, 0.3117699921131134, 0.1423500031232834, -0.10217999666929245, -0.22226999700069427, 0.2648000121116638, 0.5758100152015686, 0.012672999873757362, -0.5259100198745728, -0.23756000399589539, -0.614870011806488, -0.17151999473571777, 0.2669999897480011, -0.005993899889290333, 0.5328599810600281, 0.4099400043487549, -0.3835099935531616, -0.24498000741004944, -0.04487200081348419, 0.23819999396800995, -0.2705399990081787, 0.206619992852211, 0.19021999835968018, -1.5609999895095825, 0.9348899722099304, -1.117900013923645, -0.31668001413345337, -0.05789899826049805, 0.4211699962615967, -0.13405999541282654, -0.20152999460697174, 0.28547999262809753, 0.4456999897956848, 0.18589000403881073, -0.015684999525547028, -0.1360899955034256, -0.2545199990272522, -0.2041199952363968, -0.10948999971151352, -0.28141000866889954, 0.18594999611377716, 0.1425199955701828, 0.00656609982252121, 0.21782000362873077, -0.0648529976606369, -0.1710200011730194, 0.6258900165557861], u'box': [-0.3104499876499176, 0.8351399898529053, -0.08395099639892578, 0.28532999753952026, 0.40733999013900757, 0.3388400077819824, -0.6449099779129028, 0.10490000247955322, -0.3458099961280823, -0.644569993019104, -0.009183299727737904, 0.2194100022315979, 0.14871999621391296, -0.13529999554157257, 0.08562599867582321, -0.08728300034999847, -0.19865000247955322, -0.2083600014448166, 0.16165000200271606, -0.04179200157523155, 0.48708000779151917, 0.11050999909639359, 0.08163999766111374, 0.06833899766206741, -0.4611699879169464, 0.154789999127388, 0.10162000358104706, -0.13061000406742096, 0.6540799736976624, 0.11920999735593796, -0.15320000052452087, -0.10272999852895737, -0.15017999708652496, 0.11231999844312668, -0.9524400234222412, 0.38405999541282654, -0.4893999993801117, 0.16067999601364136, -0.36774998903274536, 0.39035001397132874, -0.1844400018453598, -0.2187100052833557, -0.17876000702381134, 0.8737199902534485, 0.09975399821996689, -0.19158999621868134, 0.4818499982357025, -0.12870000302791595, 0.00680779991671443, 0.5019199848175049, 0.11867000162601471, -0.030571000650525093, -0.3529900014400482, -0.386790007352829, -0.11608000099658966, -0.14797000586986542, -0.4004800021648407, -0.055626001209020615, 0.17609000205993652, -0.04651600122451782, 0.4946900010108948, 0.596589982509613, 0.3900099992752075, 0.3062700033187866, 
0.4533900022506714, -0.4928300082683563, 0.2348400056362152, -0.30733001232147217, 0.4692699909210205, -0.023509999737143517, 0.17258000373840332, 0.025407999753952026, 0.44130000472068787, 0.6116499900817871, 0.2442300021648407, 0.11260999739170074, -0.2729699909687042, -0.49070999026298523, 0.057381998747587204, -0.0786060020327568, -0.12992000579833984, 0.3065299987792969, 0.24338999390602112, -0.10583999752998352, 0.24255000054836273, -0.5624499917030334, 0.5926899909973145, 0.04778800159692764, -0.09722699970006943, -0.14535999298095703, 0.40612998604774475, 0.013309000059962273, -0.41749000549316406, -0.14435000717639923, 0.8156800270080566, 0.7414799928665161, -0.3193199932575226, 0.330049991607666, -0.1934400051832199, -0.7319499850273132, -0.011253000237047672, 0.18740999698638916, -0.15248000621795654, -0.30491000413894653, -0.06820099800825119, -0.3774299919605255, -0.11582999676465988, 0.18357999622821808, -0.022853000089526176, -0.08924099802970886, 0.5874000191688538, 0.421970009803772, -0.3720400035381317, 0.04050000011920929, -0.3866400122642517, -0.3542400002479553, -0.5670599937438965, 0.38903000950813293, -0.1801699995994568, -0.7808099985122681, -0.046964000910520554, 0.07321099936962128, 0.7019000053405762, 0.09091100096702576, -0.4509199857711792, -0.153779998421669, 0.05752300098538399, 0.1066799983382225, -0.27173998951911926, 0.2886199951171875, 0.1343899965286255, -0.029776999726891518, -0.2949399948120117, 0.144679993391037, 0.14576999843120575, -0.36866000294685364, -0.02790199965238571, 0.2303999960422516, -0.36733001470565796, 0.15828999876976013, -0.009538800455629826, 0.17851999402046204, -0.14775000512599945, -0.6016899943351746, -0.17552000284194946, -0.8912100195884705, 0.0587569996714592, -0.24794000387191772, -0.0715859979391098, -0.897849977016449, 0.4325200021266937, -0.21931999921798706, -0.18499000370502472, -0.7972000241279602, 0.2843500077724457, 0.196710005402565, 0.39618000388145447, -0.006930099800229073, -0.36465999484062195, 0.017545999959111214, 0.20361000299453735, -0.403219997882843, 0.6689599752426147, -0.2582400143146515, 0.7055000066757202, 0.49783000349998474, 0.18918000161647797, -0.0866359993815422, -0.3273699879646301, 0.3050200045108795, -0.1735299974679947, 0.023267999291419983, -0.7671099901199341, -0.01338099967688322, 0.02868800051510334, -0.18122999370098114, -0.11907000094652176, 0.6609699726104736, 0.07361199706792831, -0.90829998254776, 0.07111799716949463, -0.0015788000309839845, 0.5783200263977051, 0.4189999997615814, -0.25146999955177307, -0.6855000257492065, 1.0077999830245972, 0.5164700150489807, 0.4113200008869171, 0.23935000598430634, 0.3138299882411957, 0.3776499927043915, 0.46786999702453613, 0.2815699875354767, -0.15047000348567963, -0.3578200042247772, -0.022628000006079674, -0.023754000663757324, -0.008507300168275833, 0.029148999601602554, 1.138800024986267, 0.2903999984264374, 0.3337799906730652, -0.5153099894523621, -0.09672500193119049, -0.43827998638153076, -0.05324999988079071, -0.2777000069618225, -0.5662099719047546, -0.04731199890375137, -0.3908500075340271, 0.027590999379754066, -0.18341000378131866, -0.27695998549461365, 0.27856001257896423, -0.05909999832510948, -0.1404699981212616, -0.12730999290943146, 0.27309998869895935, 0.3060699999332428, 0.4341999888420105, 0.055459000170230865, 0.3610300123691559, 0.1935099959373474, -0.03987700119614601, -0.3788900077342987, -0.2577100098133087, 0.2271600067615509, 0.33153000473976135, -0.25157999992370605, -0.3959200084209442, 0.3121599853038788, 
-0.6400099992752075, -0.062477000057697296, 0.38995999097824097, 0.25095000863075256, -0.029844999313354492, 0.4664100110530853, -0.2945399880409241, 0.00553550012409687, -0.4411199986934662, -0.22856000065803528, -0.1777999997138977, 0.14851999282836914, -0.4227299988269806, -0.18628999590873718, 0.7613300085067749, -0.009586899541318417, 0.02790600061416626, 0.5830199718475342, 0.5343000292778015, 0.3414900004863739, -0.09443400055170059, 0.3924899995326996, 0.3167699873447418, -0.3351700007915497, 0.19723999500274658, -0.6297900080680847, 0.5081999897956848, 0.07424599677324295, -0.19871999323368073, -0.2237199991941452, 0.20127999782562256, -0.36063000559806824, 0.02638700045645237, -0.5251200199127197, 0.34323999285697937, -0.11050999909639359, 0.09836400300264359, 0.10818000137805939, -0.32561999559402466, -0.2740499973297119, 0.2575699985027313, -0.13289999961853027, -0.033018000423908234, -0.8244699835777283, -1.4335999488830566, 0.6445199847221375, -0.6942700147628784, -0.2866300046443939, -0.2444400042295456, -0.13048000633716583, -0.11648000031709671, -0.11641000211238861, 0.19517000019550323, 0.31536999344825745, 0.13426999747753143, -0.450080007314682, -0.04886399954557419, 0.10621999949216843, 0.31088000535964966, -0.13761000335216522, -0.32148998975753784, 0.44624999165534973, -0.3490700125694275, 0.6074600219726562, -0.3911699950695038, -0.07796700298786163, 0.41402000188827515, -0.2385299950838089], u'stone': [-0.3109700083732605, -0.2604300081729889, 0.13266000151634216, -0.28692999482154846, 0.3675200045108795, -0.040644001215696335, 0.2750299870967865, 0.06059800088405609, -0.19447000324726105, -0.2576799988746643, -0.06263100355863571, -0.49856001138687134, -0.07257399708032608, 0.05936799943447113, -0.10016000270843506, -0.09462600201368332, -0.8614599704742432, 0.13958999514579773, -0.17494000494480133, -0.02078999951481819, -0.06781899929046631, 0.20757000148296356, -0.007022600155323744, 0.420089989900589, -0.48151999711990356, -0.8421099781990051, -0.3430500030517578, -0.07961499691009521, 0.014619999565184116, 0.613319993019104, 0.1779700070619583, 1.2510000467300415, -0.56836998462677, 0.38119998574256897, -0.4788599908351898, 0.5001299977302551, -0.20603999495506287, -0.26980000734329224, 0.6306099891662598, -0.29739999771118164, 0.10373000055551529, 0.29447001218795776, -0.04227200150489807, 0.3188999891281128, 0.1509000062942505, 0.2853100001811981, 0.034547001123428345, 0.3708600103855133, 0.26058000326156616, -0.06841400265693665, -0.23247000575065613, 0.22404000163078308, -0.20282000303268433, 0.36967000365257263, 0.08528900146484375, 0.15680000185966492, -0.4014100134372711, 0.23202000558376312, -0.36840999126434326, -0.4483399987220764, 0.5156000256538391, 0.44064000248908997, 0.5836399793624878, 0.12522999942302704, -0.0658079981803894, -0.6127499938011169, -0.08696900308132172, 0.2356400042772293, 0.3822599947452545, -0.3707199990749359, 0.16298000514507294, 0.028217000886797905, -0.28821998834609985, -0.26232999563217163, -0.14499999582767487, 0.6955100297927856, 0.35168999433517456, -0.14635999500751495, -0.006881400011479855, -0.29886001348495483, 0.009642800316214561, 0.22526000440120697, -0.02734600007534027, -0.15861999988555908, 0.12509000301361084, 0.5300800204277039, 0.38694998621940613, 0.04954899847507477, -0.07844000309705734, -0.04417800158262253, 0.3022100031375885, 0.08030200004577637, -0.17972999811172485, 0.14985999464988708, -0.012540999799966812, -0.18185999989509583, 0.16952000558376312, -0.239889994263649, 0.34349000453948975, 
-0.21198000013828278, -0.42985999584198, 0.3974800109863281, 0.3778400123119354, -0.2115900069475174, 0.1381700038909912, 0.10982000082731247, 0.04827599972486496, -0.019078999757766724, 0.6395800113677979, -0.5687599778175354, -0.6159800291061401, -0.09634900093078613, -0.17075000703334808, -0.8264899849891663, -0.42829999327659607, 0.11012999713420868, -0.43261000514030457, -0.11176999658346176, -0.05974600091576576, -0.7710899710655212, -0.43650999665260315, 0.14180000126361847, -0.4483399987220764, 0.3885299861431122, -0.35526999831199646, -0.18509000539779663, 0.32510000467300415, 0.221110001206398, -0.6965199708938599, -0.46358001232147217, -0.07767900079488754, 0.7225499749183655, 0.3912299871444702, 0.1704300045967102, 0.08834400027990341, -0.23276999592781067, -0.7417600154876709, -0.21782000362873077, 0.054687999188899994, 0.10450000315904617, 0.09340699762105942, 0.22258000075817108, -0.3212999999523163, -0.5660600066184998, -0.2573699951171875, -0.07449299842119217, 0.34380999207496643, 0.19670000672340393, -0.21784000098705292, -0.290010005235672, -0.1594800055027008, 0.34672999382019043, -0.49136999249458313, -0.1190200001001358, 0.05010199919342995, 0.27445000410079956, -0.10836999863386154, 0.11016999930143356, -0.04095900058746338, 0.01256600022315979, 0.3413600027561188, 0.351749986410141, 0.38940998911857605, 0.4556500017642975, 0.08427800238132477, 0.8594300150871277, 0.24048000574111938, 0.44451001286506653, 0.4791400134563446, -0.09993000328540802, 0.09257099777460098, 0.2195499986410141, 0.7678899765014648, -0.40099000930786133, 0.19994999468326569, 0.662850022315979, -0.5150200128555298, -0.19237999618053436, -0.369159996509552, -0.9892600178718567, -0.30316001176834106, 0.22123999893665314, 0.5817499756813049, -0.5468000173568726, 0.020949000492691994, -0.8914600014686584, -0.1935800015926361, -0.016245000064373016, 0.34551000595092773, 0.4202899932861328, 0.532010018825531, 0.869379997253418, 0.062286000698804855, 0.027164999395608902, 0.22306999564170837, 0.5002899765968323, -0.23409000039100647, -0.8748599886894226, 0.10118000209331512, -0.20664000511169434, 1.3401000499725342, -0.4141499996185303, -0.3155499994754791, 0.37338000535964966, -0.0844929963350296, -0.09093499928712845, 0.30090001225471497, -0.2796199917793274, -0.6866599917411804, -0.20814000070095062, 0.7873200178146362, 0.27834001183509827, 0.3459399938583374, -0.2658199965953827, -0.15008999407291412, -0.2316499948501587, 0.1127299964427948, -0.020250000059604645, -0.16325999796390533, -0.30730998516082764, -0.06028199940919876, -0.11732999980449677, 0.626479983329773, -0.0005617199931293726, -0.7676600217819214, -0.10931000113487244, 0.16389000415802002, 0.15631000697612762, -0.2841399908065796, -0.5531100034713745, -0.27414000034332275, -0.23319000005722046, -0.24932000041007996, -0.1824900060892105, -0.15161000192165375, 0.3734999895095825, -0.27421998977661133, -0.3497300148010254, -0.3540700078010559, -0.19199000298976898, -0.17733000218868256, -0.02900799922645092, -0.061792001128196716, -0.3458699882030487, -0.6237900257110596, 0.2643199861049652, -0.43612998723983765, 0.07784900069236755, 0.017176000401377678, 0.38631999492645264, 0.21823999285697937, -0.49187999963760376, -0.013826999813318253, -0.3868899941444397, 0.6782100200653076, 0.34198999404907227, 0.05956500023603439, -0.8073700070381165, -0.17941999435424805, -0.3131999969482422, -0.07802200317382812, 0.3641499876976013, -0.03115300089120865, -0.5418800115585327, 0.07586199790239334, 0.17861999571323395, 0.33285000920295715, 
0.0667319968342781, -0.1031700000166893, 0.10577999800443649, -0.18267999589443207, -0.14885999262332916, 0.4591600000858307, -0.4848800003528595, 0.07119899988174438, 0.23309999704360962, -1.570199966430664, 0.10712999850511551, -0.23986999690532684, -0.11913999915122986, -0.32245999574661255, -0.4098300039768219, -0.20754000544548035, 0.020099999383091927, 0.41227999329566956, 0.5045999884605408, -0.29315000772476196, 0.09919799864292145, 0.5278099775314331, 0.23382000625133514, 0.2187100052833557, -0.06102500110864639, -0.03415700048208237, 0.8191099762916565, -0.14328999817371368, 0.692110002040863, -0.15807999670505524, 0.21443000435829163, -0.07738599926233292, 0.48625999689102173], u'drum': [-0.15690000355243683, 0.4103200137615204, -0.8853200078010559, -0.12132000178098679, 0.13862000405788422, -0.19686000049114227, 0.47484999895095825, 0.6499000191688538, -0.1003199964761734, -0.2702000141143799, -0.10632999986410141, -0.1969899982213974, 0.21142999827861786, -0.014186999760568142, 0.5940799713134766, -0.24903999269008636, -0.24026000499725342, -0.0511539988219738, 0.04722899943590164, -0.14034000039100647, 0.5130599737167358, 0.23021000623703003, -0.6406800150871277, 0.34477001428604126, 0.16788999736309052, 0.08884800225496292, 0.4273500144481659, -0.46094998717308044, 0.14348000288009644, 0.6675400137901306, -0.36577001214027405, -0.6335899829864502, 0.005451700184494257, 0.1992499977350235, -0.4351699948310852, 0.5591999888420105, -0.09619499742984772, 0.09340299665927887, 0.07910799980163574, 0.8208799958229065, -0.021508999168872833, 0.04751700162887573, -0.6812199950218201, -0.18006999790668488, 0.3512899875640869, 0.7748799920082092, 0.7710700035095215, -0.3514400124549866, 0.4849799871444702, -0.09151600301265717, 0.6317999958992004, 1.1346999406814575, 0.24654999375343323, 0.5382699966430664, -0.4048599898815155, 0.09006199985742569, 0.23171000182628632, -0.06307800114154816, 0.21115000545978546, 0.6982399821281433, 0.6495599746704102, 0.06742800027132034, 0.891040027141571, 0.2211499959230423, 0.37171998620033264, 0.009001100435853004, 0.5187000036239624, -0.12592999637126923, 0.44624999165534973, 0.25115999579429626, -0.036042001098394394, 0.2096800059080124, 0.7221400141716003, 0.2486400008201599, 0.09607400000095367, 0.42734000086784363, 0.5309100151062012, -0.21548999845981598, 0.3595600128173828, 0.19548000395298004, 0.9301699995994568, -0.14234000444412231, 0.043164998292922974, -0.5745400190353394, -0.06234399974346161, -0.6322900056838989, 0.617900013923645, 0.5002099871635437, -0.9269000291824341, -0.14124999940395355, 1.5475000143051147, 0.03759099915623665, 0.35927000641822815, -0.280129998922348, -0.07076100260019302, 0.422650009393692, 0.08809500187635422, -0.1618099957704544, 0.35161998867988586, -0.20886999368667603, -0.5752500295639038, 0.4851900041103363, 0.07526899874210358, -0.7623699903488159, 0.3661699891090393, -0.052528999745845795, -0.22553999722003937, -0.3986400067806244, -0.40073999762535095, 0.2611599862575531, 0.478769987821579, -0.11014000326395035, -0.2288299947977066, 0.7471799850463867, -0.24560999870300293, -0.6845499873161316, -0.655019998550415, -0.07665900141000748, 0.03246600180864334, -0.2871899902820587, -0.1560799926519394, -0.2004300057888031, 0.15579000115394592, -0.35923999547958374, 0.12011999636888504, 0.059154998511075974, 0.30498000979423523, 0.03175399824976921, -0.2682200074195862, -0.1995999962091446, -0.25095000863075256, 0.3131999969482422, 0.400160014629364, -0.7584800124168396, 0.12630000710487366, 
0.03585100173950195, 0.517009973526001, 0.21358999609947205, 0.20385000109672546, -0.00965190026909113, 0.12366999685764313, -0.6408399939537048, -0.07307399809360504, -0.5116599798202515, 0.03682500123977661, 0.12734000384807587, -0.220770001411438, -0.14478999376296997, 0.1722699999809265, 0.2913999855518341, 0.034384001046419144, 0.5869200229644775, 0.2951900064945221, -0.16902999579906464, 0.5678799748420715, 0.017255999147892, 0.04371599853038788, -0.2004700005054474, -0.31591999530792236, 0.18133999407291412, 0.5404300093650818, -0.33632999658584595, -0.6675500273704529, -0.27674001455307007, 0.1250399947166443, 0.25181999802589417, 0.515030026435852, 0.31762999296188354, 0.12578000128269196, 0.26739001274108887, -0.04141499847173691, 0.04875300079584122, -0.031449999660253525, -0.013457000255584717, 0.125, -0.5412700176239014, 0.19119000434875488, -0.08750700205564499, -0.5596100091934204, 0.275519996881485, -0.7623999714851379, 0.1861799955368042, 0.15645000338554382, 0.009935400448739529, -0.3871900141239166, -0.35280001163482666, -0.09842800348997116, 0.30230000615119934, 0.2930999994277954, -0.6548500061035156, 0.6406499743461609, -0.25964999198913574, 0.1995300054550171, 0.49028000235557556, 0.29554998874664307, 0.19939999282360077, 0.3143100142478943, -0.2376600056886673, -0.03962000086903572, -0.21710999310016632, 0.5414900183677673, 0.6448299884796143, 0.5371299982070923, -0.057906001806259155, 0.09062699973583221, 0.2994300127029419, 0.15713000297546387, 0.6077399849891663, -0.20307999849319458, -0.2575100064277649, 0.0661889985203743, 0.245169997215271, 0.9202100038528442, 0.030107999220490456, -0.09227800369262695, -0.20981000363826752, -0.20476999878883362, -0.5736899971961975, 0.03297099843621254, 0.35631999373435974, 0.004540699999779463, 0.241689994931221, -0.28723999857902527, 0.32291001081466675, 0.3691999912261963, -0.21849000453948975, -0.27417999505996704, 0.4253999888896942, -0.06290499866008759, -0.47947999835014343, 0.41025999188423157, 0.15315000712871552, 0.01650100015103817, 0.00944020040333271, 0.2567099928855896, 0.003350399900227785, -0.5527099967002869, -0.23666000366210938, -0.5347899794578552, -0.46312999725341797, -0.13096000254154205, 0.12941999733448029, -0.2721099853515625, 0.1907999962568283, -0.3822399973869324, -0.21942999958992004, 0.266539990901947, 0.58406001329422, -0.31248000264167786, 0.16143999993801117, 0.30417999625205994, -0.3728399872779846, 0.8487600088119507, -0.3540300130844116, 0.5126699805259705, 0.08377599716186523, 0.37900999188423157, 0.09564600139856339, -0.1163799986243248, -0.5206800103187561, -0.05090099945664406, -0.08795300126075745, -0.1514499932527542, 0.23792999982833862, 0.11178000271320343, -0.5993300080299377, -0.5351499915122986, 0.34307000041007996, -0.06441199779510498, 0.20116999745368958, -0.17098000645637512, 0.20845000445842743, 0.618369996547699, -0.2417300045490265, 0.02954999916255474, -0.0011138999834656715, -1.0546000003814697, 0.1976500004529953, -0.5507400035858154, -0.5133299827575684, -0.13610999286174774, -0.35168999433517456, -0.4518199861049652, 0.4457400143146515, 0.08858700096607208, 0.1459600031375885, -0.6299999952316284, 0.2503499984741211, -0.07353799790143967, -0.3639099895954132, 0.42072999477386475, -0.2432900071144104, -0.26017001271247864, 0.5755699872970581, 0.11124999821186066, 0.7351700067520142, 0.5936599969863892, 0.18494999408721924, -0.21838000416755676, 0.19514000415802002], u'necklace': [-0.30625998973846436, -0.005408199969679117, -0.7090200185775757, 0.3628399968147278, 
-0.13535000383853912, 0.32666999101638794, 0.1349399983882904, -0.4118399918079376, -0.31433001160621643, -0.47516000270843506, 0.14410999417304993, 0.3166300058364868, -0.4557099938392639, 0.6847800016403198, -0.2865299880504608, -0.30074000358581543, -0.25172001123428345, 0.3738499879837036, -0.19686999917030334, -0.06088100001215935, 0.042514998465776443, -0.16005000472068787, -0.07356300204992294, -0.2152000069618225, -0.362529993057251, -0.7672200202941895, -0.5150200128555298, -0.23186999559402466, 0.13663999736309052, 0.2560099959373474, 0.34231001138687134, 0.39142999053001404, -0.484389990568161, 0.4772599935531616, 0.23904000222682953, 0.29951998591423035, -0.35657998919487, 0.36702999472618103, 0.178849995136261, 0.20343999564647675, -0.5028600096702576, -0.22904999554157257, -0.14519000053405762, 0.08377599716186523, 0.18254999816417694, -0.5334500074386597, 0.2322400063276291, -0.45921000838279724, 0.03985400125384331, 0.1546200066804886, -0.3241499960422516, -0.2655099928379059, 0.6336100101470947, -0.013005999848246574, -0.2561500072479248, -0.9204800128936768, -0.8134400248527527, 0.5597599744796753, -0.483599990606308, -0.2367900013923645, 0.004536500200629234, 0.12365999817848206, 0.09036099910736084, 0.12484999746084213, 0.7699000239372253, -0.15429000556468964, -0.6317399740219116, -0.07393400371074677, 0.7487499713897705, 0.22454999387264252, -0.09101399779319763, 0.028227999806404114, 0.24178999662399292, -0.0752829983830452, 0.2953299880027771, 0.08230999857187271, 0.7360900044441223, -0.6854599714279175, -0.414110004901886, -0.07485099881887436, -0.1740099936723709, 0.3129900097846985, 0.47510001063346863, 0.5490800142288208, -0.014182999730110168, -0.134770005941391, -0.19068999588489532, 0.02232000045478344, -0.11546000093221664, -0.20276999473571777, -0.20112000405788422, 0.20294000208377838, 0.3315899968147278, -0.32899999618530273, -0.1809300035238266, 0.006445200182497501, 0.5297899842262268, -0.0411049984395504, 0.6164500117301941, 0.08037099987268448, 0.2107200026512146, 0.5542700290679932, 0.19077999889850616, 0.2412700057029724, 0.13371999561786652, -0.24377000331878662, 0.6679700016975403, -0.07836800068616867, -0.2018900066614151, -0.8328700065612793, -0.13505999743938446, 0.7006499767303467, 0.25328999757766724, -0.052744001150131226, 0.4204699993133545, 0.07602500170469284, 0.07374799996614456, 0.3437800109386444, 0.6558899879455566, -0.22168000042438507, -0.300570011138916, 0.20857000350952148, 0.19002999365329742, -0.030837999656796455, -0.05273300036787987, -0.4368099868297577, 0.08021199703216553, 0.4257499873638153, -0.5875800251960754, -0.3866199851036072, -0.27981001138687134, -0.05789199844002724, -0.3191800117492676, -0.23405000567436218, -0.659280002117157, 0.20186999440193176, 0.3003599941730499, -0.1109900027513504, -0.4214699864387512, -0.19517000019550323, 0.008856499567627907, 0.7920600175857544, -0.06739600002765656, -0.18679000437259674, 0.36500999331474304, -0.6594700217247009, 0.014996999874711037, 0.027441000565886497, 0.4721499979496002, 0.05543399974703789, -0.27312999963760376, -0.0747779980301857, -0.26864999532699585, -0.2343199998140335, -0.3494099974632263, -0.0747779980301857, 0.04933900013566017, -0.49858999252319336, -0.5171399712562561, 0.42941001057624817, 0.48173999786376953, -0.29140999913215637, 0.6081699728965759, 0.3992300033569336, 0.034846000373363495, -0.08112899959087372, 0.5042200088500977, 0.027939999476075172, -0.1766200065612793, 0.04721200093626976, -0.7427300214767456, -0.009729400277137756, 
0.2978399991989136, 0.551859974861145, 0.21091000735759735, -0.517300009727478, -0.6195600032806396, 0.47088000178337097, 0.17145000398159027, -0.5319399833679199, -0.14700999855995178, 0.37400001287460327, 0.2198600023984909, -0.21602000296115875, 0.06343299895524979, -0.08468899875879288, 0.7007200121879578, 0.7634699940681458, -0.09494300186634064, -0.31262001395225525, -0.06472700089216232, -0.17121000587940216, 0.15431000292301178, -0.1592700034379959, -0.18487000465393066, 0.0019510999554768205, -0.33105000853538513, -0.8978000283241272, 0.25887998938560486, 0.034053999930620193, 0.7247099876403809, 0.06618300080299377, 0.31150999665260315, 0.27553001046180725, 0.633899986743927, 0.41527000069618225, -0.3162199854850769, 0.14996999502182007, -0.09278299659490585, 0.09026700258255005, 0.22834999859333038, 0.3939400017261505, 0.5316799879074097, -0.19137999415397644, 0.07091300189495087, -0.20479999482631683, 0.11393000185489655, -0.528410017490387, 0.28189998865127563, 0.4822100102901459, 0.1431400030851364, -0.38903000950813293, 0.4734500050544739, 0.891979992389679, -0.30987000465393066, 0.0017842999659478664, -0.1397700011730194, 0.15768000483512878, -0.08516500145196915, -0.3460499942302704, 0.5828400254249573, -0.1211400032043457, -0.353520005941391, 0.36390000581741333, -0.08957599848508835, 0.46529000997543335, 0.1002499982714653, -0.10899999737739563, 0.08137399703264236, -0.22156000137329102, 0.3968200087547302, -0.4056999981403351, -0.5262100100517273, 0.5019000172615051, 0.16962000727653503, 0.4772399961948395, -0.48210999369621277, -0.5260099768638611, 0.14959000051021576, -0.4542999863624573, -0.4704200029373169, -0.16801999509334564, 0.0787540003657341, -0.28073999285697937, 0.2525300085544586, 0.10350000113248825, -1.0149999980058055e-05, 0.013300999999046326, 0.3904699981212616, -0.526889979839325, -0.21041999757289886, -0.1395999938249588, 0.6901699900627136, 0.5108799934387207, 0.350849986076355, 0.20151999592781067, 0.10344000160694122, 0.39515000581741333, -0.08149699866771698, 0.5139600038528442, 0.08818899840116501, -0.3390600085258484, -0.06420999765396118, 0.3000999987125397, -0.26249000430107117, -0.49966999888420105, -0.21498000621795654, -0.30476999282836914, -0.6794099807739258, 0.35958999395370483, 0.18455000221729279, 0.714680016040802, -0.2059199959039688, -0.5919100046157837, -0.39500999450683594, -0.030448999255895615, -0.06861600279808044, -0.2732200026512146, 0.006375099997967482, -0.15647999942302704, 0.32833999395370483, -0.23733000457286835, -0.38144999742507935, 0.86913001537323, -0.25690001249313354, -1.163699984550476, 0.9863499999046326, 0.2032800018787384, 0.9477099776268005, -0.1793700009584427], u'thread': [-0.22910000383853912, -0.12601999938488007, 0.41582998633384705, -0.705560028553009, 0.08021499961614609, -0.1408900022506714, -0.3648200035095215, -0.18883000314235687, 0.1285800039768219, -0.9473999738693237, -0.10948999971151352, -0.40498000383377075, -0.17393000423908234, -0.2556999921798706, -0.6648799777030945, -0.2120400071144104, -0.513729989528656, -0.011361000128090382, -0.30753999948501587, -0.06629099696874619, -0.7246599793434143, -0.47143998742103577, 0.17699000239372253, 0.5509300231933594, 0.83024001121521, 0.04270400106906891, 0.15226000547409058, -0.26840001344680786, -0.4049299955368042, 0.4219200015068054, 0.5901600122451782, 0.3121800124645233, -0.31439000368118286, 0.4974600076675415, -0.3577300012111664, 0.7016000151634216, -0.27105000615119934, -0.026095999404788017, -0.06660400331020355, 0.343639999628067, 
-0.46830999851226807, -0.42949000000953674, -0.0977180004119873, -0.36847999691963196, 0.4025700092315674, -0.21984000504016876, 0.017775999382138252, -0.058917999267578125, -0.17547999322414398, 0.05720699951052666, -0.04317000135779381, 0.6095899939537048, 0.3938100039958954, -0.23984000086784363, -0.12216000258922577, -0.27239999175071716, -0.1815599948167801, -0.427839994430542, -0.03623000159859657, 0.3978300094604492, 0.1522500067949295, -0.08944900333881378, 0.3237699866294861, 0.16434000432491302, 0.7872200012207031, 0.3152100145816803, 0.10937999933958054, 0.31150001287460327, 0.5167700052261353, 0.5474799871444702, -0.034584999084472656, -0.318589985370636, -0.07220499962568283, -0.017264999449253082, 0.37902000546455383, 0.5690900087356567, 0.3284600079059601, -0.27858999371528625, -0.030559999868273735, -0.3849300146102905, -0.4134199917316437, -0.4941500127315521, 0.0148930000141263, -0.2962700128555298, -0.018936999142169952, 0.2703799903392792, -0.39006999135017395, -0.3544299900531769, -0.15870000422000885, 0.27042001485824585, 0.2954699993133545, -0.2604599893093109, 0.34624001383781433, -0.15304000675678253, 0.35266000032424927, 0.11646000295877457, 0.061521001160144806, 0.6220800280570984, -0.004860300105065107, -0.4832099974155426, 0.3456900119781494, 0.5846499800682068, 0.27632999420166016, -0.46386998891830444, -0.347460001707077, -0.1263899952173233, 0.6108099818229675, -0.5210599899291992, -0.6304000020027161, 0.11818999797105789, -0.5435299873352051, 0.18592999875545502, 0.192330002784729, 0.10943999886512756, -0.03729899972677231, 0.36796000599861145, -0.3201200067996979, 0.41995999217033386, 0.21908999979496002, -0.3209500014781952, 0.15387000143527985, -0.25578001141548157, 0.7136099934577942, 0.31075000762939453, 0.2862600088119507, -0.04438899829983711, -0.14444999396800995, 0.6966300010681152, -0.026200000196695328, 0.11480999737977982, 0.04049399867653847, 0.5097299814224243, -0.05225500091910362, -0.14332999289035797, -0.4060800075531006, 0.28766998648643494, -0.27375999093055725, 0.32455000281333923, 0.3218899965286255, -0.18985000252723694, -0.1452299952507019, 0.02865000069141388, 0.013910000212490559, -0.4715999960899353, -0.16163000464439392, 0.35378000140190125, 0.3361000120639801, -0.8298699855804443, 0.07018499821424484, 0.049111999571323395, -0.6013200283050537, 0.10028000175952911, 0.3676699995994568, -0.418969988822937, -0.23327000439167023, -0.3866199851036072, 0.6223599910736084, -0.6070799827575684, -0.16498999297618866, 0.2897300124168396, -0.11541999876499176, -0.3553699851036072, 0.2288299947977066, 0.047947000712156296, 0.08226799964904785, 0.02300800010561943, -0.1963299959897995, -0.11181999742984772, 0.32673999667167664, 0.4163399934768677, -0.11642000079154968, -1.023900032043457, -0.19160999357700348, 0.38363000750541687, 0.43950000405311584, -0.29916998744010925, -0.15908999741077423, 0.5910000205039978, -0.032345000654459, -0.7128700017929077, 0.49248000979423523, -0.13711999356746674, -0.05640900135040283, 0.11757999658584595, -0.29225999116897583, -0.08884300291538239, 0.1674100011587143, 0.11433999985456467, 0.10707999765872955, 0.1321599930524826, -0.007611399982124567, 0.1564600020647049, 0.03420199826359749, -0.15855999290943146, -0.24615000188350677, 0.10626000165939331, -0.4418799877166748, -0.2769800126552582, -0.1755400002002716, -0.26954999566078186, 0.0047193001955747604, 0.3314400017261505, 0.6454499959945679, 0.2804099917411804, 0.7819600105285645, 0.5001199841499329, 0.6108400225639343, -0.18389999866485596, 
-0.1847900003194809, 0.9117500185966492, -0.0980909988284111, -0.050030000507831573, 0.8803499937057495, 0.32245999574661255, 0.04338400065898895, -0.06580500304698944, 0.30188000202178955, -0.31297001242637634, -0.450219988822937, 0.06518500298261642, -1.1104999780654907, -0.18071000277996063, 0.055771999061107635, 0.5343999862670898, 0.11208000034093857, -0.24071000516414642, -0.1847500056028366, -0.35166001319885254, 0.17792999744415283, 0.3049199879169464, -0.017376000061631203, -0.12306000292301178, 0.14865000545978546, -0.2435300052165985, 0.5796800255775452, 0.0048628998920321465, 0.044449999928474426, -0.13607999682426453, 0.21206000447273254, -0.4124700129032135, -0.2659299969673157, -0.48232999444007874, -0.6491699814796448, -0.24192999303340912, -0.4954099953174591, -0.505620002746582, -0.05783899873495102, 0.504580020904541, 0.0702499970793724, -0.7088299989700317, -0.38284000754356384, 0.34567999839782715, 0.7394700050354004, -0.6599699854850769, 0.18648000061511993, 0.31725001335144043, 0.28470999002456665, 0.12756000459194183, 0.2621000111103058, 0.09949000179767609, 0.2834700047969818, -0.7630100250244141, 0.479310005903244, -0.3823699951171875, -0.29534998536109924, 0.7299200296401978, -0.79830002784729, 0.2716200053691864, -0.32554998993873596, 0.3219299912452698, 0.11406999826431274, 0.11913999915122986, 0.3043600022792816, 0.5139899849891663, -0.6384099721908569, 0.4830699861049652, -0.25095999240875244, -0.709559977054596, 0.4156399965286255, 0.044971998780965805, 0.5017799735069275, -0.07074200361967087, 0.26680999994277954, 0.5968499779701233, 0.12043000012636185, -0.11313000321388245, -0.17497999966144562, -0.2550399899482727, -0.5054299831390381, -0.38238999247550964, 0.6984800100326538, -0.21231000125408173, -0.2726700007915497, 0.23894000053405762, -0.4992600083351135, -0.39111000299453735, -0.07563500106334686, -0.13473999500274658, -0.404339998960495, 0.23109999299049377], u'column': [-0.12454000115394592, 0.24221999943256378, 0.23048999905586243, -0.43323999643325806, 0.3460099995136261, 0.5441399812698364, 0.36223000288009644, -0.8089699745178223, 0.27298998832702637, -0.6313599944114685, 0.11736000329256058, 0.44808998703956604, 0.673829972743988, 0.14710000157356262, 0.38565999269485474, 0.37244001030921936, 0.23354999721050262, 0.10785999894142151, 0.30028000473976135, -0.13761000335216522, -0.47470998764038086, 0.6132500171661377, 0.8553299903869629, 0.5646200180053711, -0.06295999884605408, -0.16076000034809113, 0.3778899908065796, 0.5093200206756592, 0.42184001207351685, 0.1560799926519394, -0.6615899801254272, 0.1921200007200241, 0.3364799916744232, -0.0524820014834404, -0.9946600198745728, -0.3080199956893921, -0.6591600179672241, -0.24586999416351318, 0.11315000057220459, 0.3169800043106079, 0.5256500244140625, -0.1617400050163269, -0.269430011510849, -0.2566800117492676, -0.2049199938774109, -0.29273998737335205, 0.21191999316215515, 0.36522001028060913, -0.5313299894332886, 0.42625999450683594, 0.5406500101089478, 0.8239799737930298, 0.2530899941921234, 0.19528000056743622, 0.11275999993085861, 0.23939000070095062, -0.19594000279903412, -0.04383299872279167, 0.34345999360084534, -0.7942900061607361, 0.27946001291275024, 0.055507998913526535, 0.27270999550819397, 0.23216000199317932, -0.25968998670578003, -0.2687700092792511, 0.019558999687433243, 0.6996099948883057, -0.0005324999801814556, -0.48816001415252686, 0.8296499848365784, -0.23962000012397766, -0.23517000675201416, -0.0037622000090777874, -0.625469982624054, 0.4796600043773651, 
0.4018799960613251, -0.046539001166820526, -0.0015844999579712749, -0.5258600115776062, 0.23658999800682068, 0.45629000663757324, -0.2779499888420105, -0.19407999515533447, -0.12556999921798706, 0.04620499908924103, -0.14483000338077545, 0.36910000443458557, 0.44991999864578247, -0.10852000117301941, 0.6674699783325195, 0.29201000928878784, -0.0520550012588501, -0.6201099753379822, 0.22488999366760254, 0.7808700203895569, -0.6933299899101257, 0.04058599844574928, 0.22026999294757843, -0.5527099967002869, -0.11437000334262848, 1.0609999895095825, 0.5690600275993347, 0.46066001057624817, 0.1019200012087822, 0.17295999825000763, 0.6460300087928772, -0.3006100058555603, 0.5911099910736084, -0.28992998600006104, -0.19744999706745148, 0.7205700278282166, 0.16243000328540802, -0.05648199841380119, -0.4377500116825104, -0.19756999611854553, -0.18098999559879303, -0.03048500046133995, -0.11059000343084335, -0.7363899946212769, -0.029548000544309616, -0.09379199892282486, -0.036462001502513885, 0.28154999017715454, -0.2965500056743622, -0.10486999899148941, 0.21005000174045563, -0.8222500085830688, -0.3819499909877777, 0.18959000706672668, 0.26945000886917114, -0.2513200044631958, -0.6039800047874451, 0.545740008354187, 0.026366999372839928, 0.16117000579833984, -0.4743100106716156, -0.037762001156806946, 0.22935999929904938, 0.15214000642299652, -0.11337000131607056, 0.2925100028514862, 0.2916100025177002, -0.4209499955177307, -0.35260000824928284, 0.03397800028324127, -0.06994599848985672, 0.5319499969482422, -0.318340003490448, 0.6869500279426575, 0.5312700271606445, 0.10175999999046326, -0.04741799831390381, 0.06923899799585342, 0.2870199978351593, -0.3785099983215332, 0.7224199771881104, -0.19670000672340393, 0.19668999314308167, 0.439520001411438, -0.26780998706817627, -0.1531199961900711, 0.6323300004005432, -0.2710700035095215, -0.4323999881744385, 0.38477998971939087, 0.24564999341964722, -0.01707400009036064, -0.3718799948692322, 0.2284799963235855, -0.8283500075340271, -0.011250000447034836, -0.6900399923324585, 0.23604999482631683, -0.4556399881839752, -0.21949000656604767, 0.0824500024318695, 0.034136999398469925, 0.23989999294281006, -0.2318899929523468, 0.02920299954712391, -0.3070499897003174, -0.41385000944137573, -0.3220300078392029, -0.10961999744176865, 0.005263600032776594, 0.24336999654769897, -0.4830999970436096, 0.20806999504566193, -0.2733500003814697, -0.22915999591350555, 1.0358999967575073, -0.1395999938249588, 0.5093100070953369, -0.2567099928855896, 0.2925100028514862, 0.6052799820899963, 0.5914000272750854, -0.13098999857902527, -0.14789000153541565, 0.45603999495506287, -0.13098999857902527, -0.4945099949836731, 0.22144000232219696, 0.20784999430179596, -0.44765999913215637, -0.19062000513076782, -0.08641800284385681, -0.060109999030828476, 0.15876999497413635, -0.34907999634742737, 0.03143500164151192, 0.12655000388622284, 0.39958998560905457, -0.4443100094795227, 0.05005599930882454, 0.20826999843120575, 0.2470400035381317, 0.6626700162887573, 0.24410000443458557, 0.4506300091743469, 0.529009997844696, 0.22551999986171722, 0.859529972076416, 0.6669800281524658, -0.1689700037240982, -0.24389000236988068, 0.6438000202178955, 0.5952699780464172, -0.16407999396324158, -0.7310199737548828, 0.24570000171661377, 0.4764699935913086, -0.5435799956321716, 0.16272999346256256, -0.45860999822616577, -0.05431000143289566, -0.5278599858283997, -0.6914700269699097, -0.2500799894332886, -0.3566800057888031, -0.07497099786996841, 0.4671500027179718, -0.3659000098705292, 
-0.6596400141716003, -0.3451099991798401, -0.775879979133606, 0.26353999972343445, 0.5995399951934814, 0.3975299894809723, -0.006630599964410067, -0.4006499946117401, -0.032910000532865524, -0.5327100157737732, 0.486160010099411, -0.03505700081586838, 0.34038999676704407, -0.8795400261878967, -0.20702999830245972, -0.4596399962902069, -0.06718199700117111, 0.002022600034251809, 0.23253999650478363, -0.6056100130081177, -0.45548999309539795, -0.7700700163841248, 0.04606899991631508, 0.46178001165390015, -0.07134000211954117, -0.06310699880123138, -0.5484300255775452, -0.7107800245285034, 0.7994099855422974, -0.4298900067806244, -0.360150009393692, 0.6895800232887268, -1.048799991607666, 0.5213199853897095, -0.09979700297117233, 0.023708999156951904, 0.1456100046634674, -0.11779999732971191, -0.39100998640060425, 0.24562999606132507, 0.06712900102138519, -0.41266998648643494, -0.14585000276565552, -0.3803800046443939, 0.4862299859523773, 0.28810998797416687, 0.46654000878334045, 0.23284000158309937, 0.016806000843644142, 0.199180006980896, -0.17810000479221344, 0.11163000017404556, -0.6148399710655212, -0.4562700092792511, 0.2021999955177307, 0.0019042999483644962], u'island': [0.008194000460207462, -0.7360299825668335, -0.5106800198554993, -0.1906599998474121, 0.6697099804878235, -0.17493000626564026, 0.07728499919176102, 0.18118000030517578, 0.2859500050544739, -1.2364000082015991, 0.076774001121521, -0.49564000964164734, 0.4530999958515167, 0.15234999358654022, 0.5528799891471863, 0.8861100077629089, 0.15253999829292297, 0.05722000077366829, -0.16975000500679016, -0.07122299820184708, 0.04006500169634819, 0.8360300064086914, -0.21730999648571014, -0.41617000102996826, -0.4192200005054474, 0.16722999513149261, -0.29600000381469727, 0.10374999791383743, -0.12349999696016312, 0.2773599922657013, 0.6247400045394897, 0.7972699999809265, -0.4071800112724304, 0.3031400144100189, 0.3034999966621399, 0.4652000069618225, 0.28874000906944275, 0.1997399926185608, 0.5365599989891052, -0.22936999797821045, -0.30924999713897705, -0.2470400035381317, -0.14940999448299408, 0.09591100364923477, -0.09642700105905533, 0.44200998544692993, 0.7685700058937073, -0.012449000030755997, -0.22743000090122223, 0.9256200194358826, 0.1731799989938736, 0.06351099908351898, 0.7198699712753296, -0.31567999720573425, 0.06452099978923798, 0.47749000787734985, -0.2523599863052368, 0.5385599732398987, -0.31949999928474426, -0.1087300032377243, -0.28001999855041504, -0.05089600011706352, 0.44780001044273376, 0.17488999664783478, 0.6731899976730347, -0.9652199745178223, -0.13619999587535858, 0.6040400266647339, -0.1376899927854538, 0.3361000120639801, -0.8127899765968323, 0.3930400013923645, -0.12946000695228577, -0.6726400256156921, -0.9120799899101257, 0.17365999519824982, 0.180759996175766, -0.12735000252723694, -0.02064500004053116, 0.040084999054670334, 0.0026012000162154436, 0.16192999482154846, -0.2477400004863739, 0.05441499873995781, -0.023080000653862953, -0.09592899680137634, 0.14036999642848969, -0.05898699909448624, 0.19291000068187714, -0.5482500195503235, -0.42261001467704773, -0.5431100130081177, -0.48506999015808105, -0.8677700161933899, -0.054857999086380005, 0.08037900179624557, 0.5115299820899963, -0.012403000146150589, -0.30320999026298523, 0.16358999907970428, -0.2217700034379959, 0.5250599980354309, -0.13954000174999237, 0.3164899945259094, -0.27577000856399536, -0.1760299950838089, -0.004204200115054846, -0.7503700256347656, 0.41888999938964844, 0.39294999837875366, 0.15580999851226807, 
-0.47683000564575195, 0.09928300231695175, -0.15992000699043274, 0.1445399969816208, -0.08892100304365158, 0.035743001848459244, 0.5501300096511841, -0.11668000370264053, 0.35519999265670776, -0.514680027961731, 0.030897000804543495, -0.7862899899482727, -0.2543500065803528, 0.05466499924659729, 0.42403000593185425, 0.5517899990081787, 0.13505999743938446, -0.022926999256014824, -0.1414099931716919, -0.012272999621927738, 0.44710999727249146, 0.18848000466823578, -0.4696800112724304, 0.8051999807357788, -0.18366000056266785, 0.2301899939775467, -0.04120999947190285, -0.19404000043869019, -0.5836600065231323, 0.18953000009059906, -0.3822399973869324, -0.4741699993610382, -0.130390003323555, -0.545520007610321, 0.06525299698114395, 0.0946270003914833, 0.43413999676704407, -0.2090499997138977, -0.832360029220581, 1.024999976158142, -0.1741500049829483, -0.3560500144958496, 0.4513700008392334, 0.6738799810409546, -0.3626599907875061, -0.3583100140094757, 0.17151999473571777, -0.3352400064468384, 0.2600399851799011, 0.7537999749183655, -0.6449400186538696, 0.3197399973869324, 0.17035000026226044, -0.2558700144290924, -0.5918599963188171, 0.32311999797821045, 0.1118599995970726, 0.20656999945640564, 0.025784999132156372, 0.1782499998807907, -0.15078000724315643, -0.3350900113582611, 0.4312500059604645, 0.1578799933195114, 0.08889999985694885, 0.042323999106884, 0.07056300342082977, 0.20749999582767487, 0.38315001130104065, 0.06591899693012238, 0.4361700117588043, 0.34182998538017273, -0.04408000037074089, 0.1905599981546402, -0.31178000569343567, -0.28102999925613403, 0.025155000388622284, -0.47808000445365906, 0.44168999791145325, -0.4130200147628784, 0.30605000257492065, 0.3613100051879883, -0.5452100038528442, 0.42976000905036926, 0.3179300129413605, 0.13884000480175018, -0.4379900097846985, -0.4171000123023987, 0.3177100121974945, 1.1711000204086304, 0.4796000123023987, -0.6604999899864197, -0.0699160024523735, -0.03134699910879135, 0.3479200005531311, 0.03491799905896187, 0.1424500048160553, 0.9710699915885925, 0.10254000127315521, -0.40845000743865967, 0.4041700065135956, 0.29085999727249146, 0.026590999215841293, 0.8681100010871887, 0.06986600160598755, 0.04259999841451645, 0.02546899951994419, 0.19842000305652618, -0.0037048999220132828, 0.8452100157737732, 0.5911099910736084, 0.81072998046875, -0.038787998259067535, -0.047394998371601105, -0.7134900093078613, 0.6461099982261658, -0.5044599771499634, -0.07654500007629395, -0.24637000262737274, 0.587939977645874, -0.35120999813079834, -0.46533000469207764, 0.3425300121307373, 0.2734600007534027, -0.41029998660087585, 0.07001899927854538, 0.2211499959230423, -0.06873899698257446, 0.12312000244855881, -0.24105000495910645, -0.3876200020313263, 0.5690400004386902, -0.14610999822616577, -0.5833500027656555, 0.11472000181674957, 0.09593400359153748, 0.3427799940109253, -0.043296001851558685, -0.551010012626648, -0.23013000190258026, 0.5212000012397766, 0.22098000347614288, 0.3770500123500824, 0.4090299904346466, -0.14591999351978302, 0.16797000169754028, 0.10705000162124634, 0.09009499847888947, 0.19971999526023865, -0.20893999934196472, 0.011512000113725662, 0.04224799945950508, -0.16443000733852386, 0.36134999990463257, -0.3325900137424469, 0.6550800204277039, -0.5527099967002869, 0.3227199912071228, -0.23984000086784363, -0.00608769990503788, -0.2069700062274933, 0.43136999011039734, -0.02826800011098385, 0.06916099786758423, 0.6098300218582153, -1.9033000469207764, 0.45142999291419983, -0.3044399917125702, -0.28227999806404114, 
-0.4945000112056732, -0.26833000779151917, -0.3615399897098541, -0.5441200137138367, -1.0400999784469604, -0.4742499887943268, -0.5581899881362915, 0.09731400012969971, 0.6100199818611145, -0.29736000299453735, -0.512499988079071, -0.11957000195980072, -0.10296999663114548, -0.43988001346588135, -0.8293499946594238, 0.8639799952507019, 0.2233700007200241, 0.16582000255584717, 0.249099999666214, -0.29980000853538513], u'tie': [0.0013453999999910593, 0.4298200011253357, -0.39478999376296997, -0.25213998556137085, -0.536300003528595, -0.24785999953746796, -0.4077099859714508, 0.17882999777793884, 0.4995099902153015, -0.44315001368522644, 0.5842800140380859, 0.08076900243759155, -0.3687500059604645, 0.06247900053858757, -0.15248000621795654, 0.00035484001273289323, -0.18901999294757843, 0.4373300075531006, -0.10296999663114548, 0.06379999965429306, -0.2778699994087219, -1.0636999607086182, 0.10862000286579132, -0.13670000433921814, -0.2399200052022934, -0.512660026550293, 0.18070000410079956, -0.0807270035147667, -0.251910001039505, -0.16407999396324158, 0.08984900265932083, -0.4058400094509125, 0.7603700160980225, 0.22832000255584717, -1.815600037574768, 0.15575000643730164, 0.4993799924850464, 0.15160000324249268, -0.11959999799728394, 0.2198300063610077, 0.19242000579833984, -0.3702700138092041, -0.008975699543952942, -0.2280299961566925, -0.0487390011548996, -0.12477999925613403, -0.15068000555038452, -0.6034700274467468, 0.010553999803960323, -0.17817999422550201, -0.5543699860572815, 0.3465900123119354, 0.02993899956345558, -0.43393999338150024, 0.03480000048875809, -0.14092999696731567, -0.6312699913978577, -0.2178100049495697, -0.20733000338077545, -0.05918699875473976, -0.1925099939107895, -0.33803001046180725, -0.5557699799537659, 0.10408999770879745, 0.3658599853515625, -0.1915300041437149, 0.056092001497745514, 0.4563399851322174, 0.3571299910545349, -0.14158999919891357, 0.20432999730110168, 0.17463000118732452, -0.17270000278949738, 0.4946399927139282, -0.05522400140762329, 0.19625000655651093, -0.7027699947357178, 0.386790007352829, -0.00661229994148016, -0.32638999819755554, -0.1288599967956543, -0.2835899889469147, -0.17705999314785004, -0.28507000207901, 0.1432799994945526, -0.44481000304222107, 0.13921000063419342, -0.12050999701023102, -0.4084399938583374, -0.012498999945819378, 0.14573000371456146, 0.35425999760627747, -0.12511999905109406, -0.15547999739646912, 0.41391998529434204, 0.2170500010251999, -0.07860100269317627, 0.561240017414093, -0.4340299963951111, -0.5694000124931335, 0.6644200086593628, 0.5016400218009949, 0.3042899966239929, -0.058802999556064606, 0.27619001269340515, 0.1861799955368042, 0.0866440013051033, -0.05596800148487091, -0.7440999746322632, -0.4273500144481659, 0.09361200034618378, -0.29072999954223633, -0.17809000611305237, 0.23433999717235565, 0.05984099954366684, 0.08280099928379059, 0.0790800005197525, -0.15074999630451202, 0.7446100115776062, -0.23865999281406403, -0.20941999554634094, -0.17677000164985657, 0.48717001080513, -0.39243999123573303, 0.11225000023841858, -0.019519999623298645, 0.06347200274467468, -0.16245000064373016, 0.018438000231981277, -0.13189999759197235, 0.13812999427318573, 0.3046000003814697, -0.2636600136756897, -0.1821800023317337, -0.20740999281406403, -0.009603899903595448, -0.17994999885559082, 0.36068999767303467, 0.01710199937224388, 0.3646799921989441, -0.24063000082969666, -0.010258999653160572, -0.007919900119304657, 0.17159000039100647, -0.2898299992084503, 0.07615800201892853, -0.013470999896526337, 
-0.11964999884366989, 0.19097000360488892, -0.08269400149583817, -0.13133999705314636, 0.16783000528812408, 0.13151000440120697, -0.5698300004005432, -0.013477999716997147, -0.31393998861312866, -0.05473500117659569, -0.29958000779151917, -0.46731001138687134, 0.19541999697685242, 0.2513499855995178, -0.049316998571157455, -0.3049300014972687, 0.03821700066328049, 0.6164799928665161, 0.5346599817276001, -0.4443399906158447, 0.3824999928474426, 0.0613740012049675, 0.3034999966621399, 0.14273999631404877, -0.24092000722885132, -0.43358999490737915, 0.133310005068779, -0.07678200304508209, -0.43790000677108765, -0.861240029335022, 0.40165001153945923, -0.15836000442504883, 0.31887999176979065, 0.697629988193512, 0.21232999861240387, 0.5900400280952454, -0.1488499939441681, -0.11911000311374664, -0.1903499960899353, -0.2770400047302246, 0.35328999161720276, 0.2878299951553345, 0.6452500224113464, 0.12734000384807587, 0.3710399866104126, 0.0907059982419014, 0.40171998739242554, 0.28911998867988586, -0.5687100291252136, -0.14158999919891357, -0.14600999653339386, -0.46983999013900757, 0.216389998793602, 1.6722999811172485, -0.06868100166320801, 0.4542900025844574, -0.0888499990105629, -0.11642999947071075, -0.3922800123691559, -0.10898999869823456, 0.480569988489151, -0.5447900295257568, -0.12189999967813492, -0.32183000445365906, 0.06932900100946426, 0.04336100071668625, 0.9078199863433838, 0.2021999955177307, 0.04403200000524521, 0.5388299822807312, -0.21432000398635864, -0.0021657999604940414, -0.1284399926662445, 0.37081998586654663, -0.12199000269174576, 0.1719599962234497, -0.07961799949407578, -0.4477899968624115, -0.45028001070022583, 0.37505999207496643, -0.04124699905514717, -0.37349000573158264, 0.12329000234603882, 0.019368000328540802, 0.010332999750971794, 0.3968999981880188, 0.2436700016260147, 0.30414000153541565, -0.24618999660015106, 0.17058999836444855, 0.10993000119924545, 0.7585700154304504, 0.9488099813461304, 0.24792000651359558, -0.2305999994277954, -0.2276100069284439, 0.4618000090122223, -0.28679001331329346, 0.46775999665260315, -0.012938999570906162, 0.2283399999141693, 0.430759996175766, -0.7554399967193604, 0.02602599933743477, 0.20382000505924225, 0.13391000032424927, -0.7946699857711792, 0.3662300109863281, -0.11014000326395035, 0.054381001740694046, -0.07241799682378769, -0.2074899971485138, 0.15098999440670013, 0.4499799907207489, -0.4870400130748749, 0.2578999996185303, -0.0686430037021637, -0.3634699881076813, 0.22175000607967377, -0.22527000308036804, -0.40151000022888184, 0.043740998953580856, -0.14297999441623688, -0.1036899983882904, 0.23134000599384308, 0.18211999535560608, 0.06301599740982056, -0.20353999733924866, -0.006374000106006861, -0.779699981212616, -0.5640299916267395, 0.17080000042915344, 0.04231100156903267, -0.20674000680446625, 1.1624000072479248, 0.3548400104045868, -0.4547500014305115, -0.36629000306129456, 0.25850000977516174, -0.005032000131905079, 0.16035999357700348, -0.2614800035953522, 0.23587000370025635, 0.13663999736309052, 0.5952200293540955, -0.732699990272522, 0.7150300145149231, -0.0704910010099411, 0.09743700176477432, -0.04052300006151199, -0.35655999183654785, 0.7499799728393555, 0.3616200089454651], u'berry': [-0.41596999764442444, 0.18752999603748322, -0.08136899769306183, 0.0228240005671978, 0.24190999567508698, -0.4054900109767914, 0.413349986076355, -0.17722000181674957, 0.36647000908851624, 0.08905500173568726, 0.4178299903869629, -0.364300012588501, -0.09594999998807907, 0.19754000008106232, 0.18378999829292297, 
[Word-embedding data: a Python dict literal mapping words (u'smoke', u'garlic', u'castle', u'glasses', u'book', u'road', u'cheese', u'apple', u'wall', u'pot', u'canyon', among others) to roughly 300-dimensional vectors of floats.]
-0.27188000082969666, 0.7025899887084961, -0.020896000787615776, 0.02082899957895279, -0.7577199935913086, -0.43338000774383545, 0.5478799939155579, -0.2388399988412857, -0.15454000234603882, 0.251800000667572, 0.19697000086307526, -0.10656999796628952, 0.07458899915218353, -0.8224300146102905, 0.2521600127220154, -0.04466899856925011, 0.4405199885368347, 0.4293999969959259, 0.2833099961280823, 0.3070099949836731, -0.38578000664711, -0.2357800006866455, 0.226500004529953, -0.761650025844574, -0.06182600185275078, -0.30410999059677124, 0.6492400169372559, 0.3963800072669983, -0.293040007352829, 0.1959500014781952, -0.35062000155448914, -0.6450799703598022, 0.7128300070762634, 0.4648300111293793, 0.5255900025367737, -0.3841499984264374, -0.35016998648643494, 0.09232600033283234, 0.04335800185799599, -0.2884399890899658, 0.5232599973678589, -0.008075299672782421, -0.10582999885082245, 0.2156199961900711, -0.5456500053405762, -0.40070000290870667, -0.005590999964624643, -0.36055999994277954, 0.21810999512672424, -0.3307799994945526, 0.22321000695228577, -0.33904001116752625, -0.5114700198173523, 0.5661600232124329, -0.18765999376773834, 0.06039299815893173, 0.04090199992060661, 0.5629900097846985, 0.4791400134563446, 0.34248998761177063], u'tomato': [0.00012323999544605613, 0.16989000141620636, 0.6841300129890442, -0.05591300129890442, -0.05171699821949005, -0.2835899889469147, -0.4241499900817871, -0.08672600239515305, 0.33351001143455505, 0.16857999563217163, -0.05616400018334389, 0.12058000266551971, -0.17020000517368317, 0.8130300045013428, -0.25679001212120056, -0.3809100091457367, -0.24729999899864197, 0.5611100196838379, -0.6093400120735168, 0.5872600078582764, -0.3638100028038025, 0.17069999873638153, -0.13510000705718994, -0.1612900048494339, 0.23927000164985657, -0.47113001346588135, -0.43588998913764954, 0.047022998332977295, -0.6501299738883972, -0.6676200032234192, -0.5403900146484375, 0.13683000206947327, -0.1368899941444397, -0.09548100084066391, 0.043428000062704086, 0.6247599720954895, 0.08243399858474731, -0.41791000962257385, 0.08307400345802307, 0.008580800145864487, 0.43768998980522156, -0.2641400098800659, 0.09447000175714493, 0.1717199981212616, 0.10018999874591827, -0.07764499634504318, 0.15557000041007996, 0.7396900057792664, -0.23803000152111053, -0.4867599904537201, -0.35280001163482666, -0.2157299965620041, 0.1710900068283081, -0.11144000291824341, -0.5041499733924866, -0.05042000114917755, -0.2810400128364563, -0.1352500021457672, 0.5760300159454346, -0.19083000719547272, 0.4499100148677826, -0.8385300040245056, 0.13956999778747559, 0.2933500111103058, -0.45882999897003174, 0.2739500105381012, -0.6987800002098083, 0.21110999584197998, -0.20419000089168549, -0.14642000198364258, 0.24172000586986542, 0.2598100006580353, 0.058795999735593796, -0.2548600137233734, -1.041200041770935, 0.04092400148510933, 0.646619975566864, -0.23747999966144562, -0.30908000469207764, 0.08539800345897675, -0.05808800086379051, 0.2693299949169159, -0.11799000203609467, -0.09245000034570694, 0.11145000159740448, -0.06930500268936157, -0.03245700150728226, 0.01961199939250946, -0.05620500072836876, -0.6439399719238281, 0.23890000581741333, -0.21863999962806702, 0.048889998346567154, -0.43439000844955444, -0.245619997382164, 0.49487999081611633, 0.02504199929535389, 0.5133100152015686, -0.3250499963760376, 0.3003999888896942, 0.4025000035762787, -0.01998800039291382, 0.586080014705658, -0.8011199831962585, -0.8271600008010864, -0.24754999577999115, -0.6194300055503845, 0.045329999178647995, 
-0.4081000089645386, 0.6614999771118164, 0.48431000113487244, 0.3459100127220154, -0.17208999395370483, -0.47606000304222107, -0.6940500140190125, -0.2707499861717224, -0.7260599732398987, 0.7622900009155273, 0.5876200199127197, 0.13833999633789062, -0.37953001260757446, -0.3191699981689453, 0.25224998593330383, 0.25192999839782715, -0.18104000389575958, -0.07645200192928314, 0.020004000514745712, 0.37288999557495117, -0.4978199899196625, 0.5290799736976624, 0.41631999611854553, 1.063599944114685, -0.19958999752998352, 0.3907899856567383, -0.20284000039100647, -0.8184700012207031, -0.4702099859714508, -0.1604200005531311, 0.313620001077652, 0.5222399830818176, 0.43907999992370605, 0.5079799890518188, -0.4714199900627136, -0.6390699744224548, -0.40946999192237854, 0.8994100093841553, 0.11080999672412872, -0.17489999532699585, 0.3797900080680847, -0.4682599902153015, -1.2525999546051025, -0.17086000740528107, 0.12594999372959137, -0.05062999948859215, -0.5598499774932861, -0.4000200033187866, -0.04924499988555908, -0.5987899899482727, -0.2952300012111664, 0.17428000271320343, 0.6156700253486633, 0.23380999267101288, 0.10188999772071838, 0.11584000289440155, 0.6598399877548218, -0.5494700074195862, -0.2262900024652481, 0.05720999836921692, -0.22901000082492828, -0.8382700085639954, -0.015420000068843365, 0.23438000679016113, -0.6172299981117249, 0.05475800111889839, -0.155689999461174, -0.45794999599456787, 0.2759299874305725, -0.43191999197006226, 0.8211600184440613, -0.2651500105857849, -0.21965999901294708, -0.13424000144004822, -0.09902799874544144, -0.3356499969959259, -0.11537999659776688, -0.7902100086212158, 0.28679999709129333, 0.035673998296260834, 0.4485499858856201, -0.017488999292254448, -0.4340600073337555, 1.089400053024292, -0.3917999863624573, -0.1436000019311905, 0.2784999907016754, 0.022319000214338303, 0.10081999748945236, 0.11349000036716461, -0.6227399706840515, 0.2113800048828125, 0.008100899867713451, -0.3587999939918518, 1.1435999870300293, 0.35034000873565674, -0.24295000731945038, 0.49838000535964966, 0.35023999214172363, 0.2662700116634369, 0.2624500095844269, -0.07592999935150146, -0.16993999481201172, -0.25394999980926514, -0.40119001269340515, -0.1964000016450882, 0.19185000658035278, 0.10937999933958054, 0.23752999305725098, -0.06031699851155281, -0.2042900025844574, 0.20830999314785004, 0.38708001375198364, -0.14952999353408813, -0.5768100023269653, 0.13395999372005463, -0.6581699848175049, -0.35558000206947327, 0.0803309977054596, -0.26190999150276184, -0.17889000475406647, -0.07965700328350067, -0.17308999598026276, 0.36546000838279724, 0.20096999406814575, -0.1904900074005127, 0.886650025844574, 0.2414499968290329, 0.22944000363349915, -0.2271299958229065, -0.3504999876022339, -0.07363799959421158, -0.20648999512195587, -0.42910000681877136, -0.2027300000190735, -0.32433000206947327, -0.8533499836921692, 0.09515900164842606, 0.3560200035572052, 0.07505100220441818, -0.6679999828338623, -1.2904000282287598, 0.5051199793815613, 0.36302998661994934, -0.36267998814582825, 0.3443700075149536, 0.6083199977874756, 0.1881600022315979, -0.3270300030708313, 0.28442999720573425, -0.1719599962234497, 0.3294000029563904, 0.2686299979686737, 0.13899999856948853, 0.42517000436782837, -0.4545699954032898, 0.27731001377105713, -0.6470000147819519, -0.38517001271247864, 0.10762999951839447, 0.15768000483512878, -0.16418999433517456, 0.18416999280452728, -0.052476998418569565, -0.1822499930858612, 0.47056999802589417, -0.055013999342918396, 0.4229699969291687, 
-0.617169976234436, -0.4837000072002411, -0.9350500106811523, -0.39381998777389526, -0.39423999190330505, -0.2851699888706207, -0.06003199890255928, 0.16607999801635742, -0.14687000215053558, -0.26203998923301697, 0.7261999845504761, -0.041958998888731, 0.2400200068950653, 0.048496998846530914, 0.32166001200675964, -0.22902999818325043, 0.7135000228881836, -0.18443000316619873, -0.3285900056362152, -0.4950999915599823, 0.1530500054359436, -0.7564299702644348, 0.36465999484062195, 0.37053000926971436], u'church': [-0.6523600220680237, -0.7469099760055542, -0.7139700055122375, -0.005951500032097101, -0.25791001319885254, -0.2159299999475479, -0.3519900143146515, -0.1160300001502037, -0.056035999208688736, -1.2818000316619873, -0.164110004901886, 0.7074800133705139, 0.5212299823760986, 0.3277899920940399, 0.1738699972629547, -0.5392000079154968, -0.18577000498771667, -0.47874999046325684, -0.24883000552654266, -0.6199399828910828, -0.023262999951839447, 0.1254899948835373, 0.04575200006365776, 0.11283999681472778, -0.02009899914264679, 0.23056000471115112, -0.41756001114845276, -0.25314998626708984, -0.697380006313324, 0.41157999634742737, 0.8880800008773804, 0.8385199904441833, -0.6606799960136414, 0.7071899771690369, -0.2674199938774109, 0.20890000462532043, -0.2784000039100647, -0.6108800172805786, -0.15424999594688416, -0.09471599757671356, -0.04770899936556816, 0.26721999049186707, -0.43974000215530396, 0.6377500295639038, 0.6514899730682373, 0.26826998591423035, 0.4333299994468689, -0.22958000004291534, 0.022456999868154526, -0.028403999283909798, -0.035516999661922455, 0.517989993095398, -0.48083001375198364, 0.47857001423835754, -0.05461199954152107, 0.2521199882030487, 0.18369999527931213, 0.6167899966239929, 0.013829999603331089, -0.14949999749660492, 0.9226499795913696, 0.17462000250816345, 0.46116000413894653, 1.1370999813079834, 0.0942080020904541, -1.12909996509552, 0.14715999364852905, -0.2794399857521057, -0.05511400103569031, -0.381740003824234, -0.773419976234436, -1.0166000127792358, -0.6523000001907349, 0.026551999151706696, 0.10459999740123749, -0.24240000545978546, -0.013206999748945236, 0.28134000301361084, -0.4312799870967865, -0.39136001467704773, -0.47672998905181885, -0.07255800068378448, -0.4636799991130829, 0.22186000645160675, 0.020483000203967094, 0.17291000485420227, 0.26030999422073364, 0.3154999911785126, 0.009201999753713608, 0.3549099862575531, 0.044172000139951706, -0.7893000245094299, -0.19731999933719635, 0.4070900082588196, -0.20552000403404236, -0.6404600143432617, 0.33855998516082764, 0.05195799842476845, -0.9975299835205078, 0.0970349982380867, -0.19654999673366547, 0.1561799943447113, 4.014799924334511e-05, 0.12240000069141388, 0.50559002161026, -0.09644100069999695, 0.1850699931383133, 0.6272500157356262, -0.23697000741958618, -0.3357200026512146, -0.22439000010490417, -0.3305400013923645, -0.07336600124835968, 0.23306000232696533, -0.38155001401901245, -0.46931999921798706, -0.15338000655174255, -0.4837000072002411, -0.47415000200271606, -0.3969300091266632, 0.07524800300598145, -0.07361199706792831, 0.536080002784729, -0.08191700279712677, 0.318230003118515, 0.03279300034046173, 0.46048998832702637, 0.3931399881839752, 0.060568999499082565, 0.03411899879574776, 0.5719699859619141, -0.4192599952220917, 0.1095300018787384, -0.06525500118732452, 0.7338500022888184, 0.041742000728845596, -0.11460000276565552, -0.16911999881267548, -0.42704999446868896, -0.5008299946784973, -0.09635499864816666, 0.10553000122308731, -0.3139899969100952, 
0.21461999416351318, -0.1709900051355362, -0.20016999542713165, -0.08077699691057205, 0.20192000269889832, -0.008065699599683285, -0.12904000282287598, 0.3595600128173828, 0.06463699787855148, -0.15577000379562378, -0.002956199925392866, -0.2575500011444092, 0.9733899831771851, 0.09395399689674377, 0.5885499715805054, 0.19377000629901886, 0.11879000067710876, 0.39291998744010925, 0.4776799976825714, 0.7250800132751465, -0.16696999967098236, -1.1800999641418457, 0.25349000096321106, -0.47512000799179077, -0.10154999792575836, 0.7827500104904175, -0.5056599974632263, 0.1100199967622757, -0.05635499954223633, -0.147599995136261, 0.2896699905395508, 0.44562000036239624, 0.7997900247573853, 0.12427999824285507, 0.1240599974989891, -0.47624000906944275, 0.20964999496936798, 0.09463000297546387, 0.7068399786949158, 0.028186999261379242, -0.3064799904823303, 0.645359992980957, 0.13357000052928925, -0.9009900093078613, -0.17190000414848328, -0.06190900132060051, 0.40128999948501587, 0.10412999987602234, -0.08274800330400467, 0.021158000454306602, 0.4402399957180023, 0.019293000921607018, -0.18424999713897705, 0.4696600139141083, -0.7213900089263916, -0.18505999445915222, 0.413239985704422, 0.6156799793243408, -0.6087300181388855, 0.0037964999210089445, -0.41350001096725464, 0.15126000344753265, -0.4802199900150299, -0.055268000811338425, 0.11078000068664551, -0.20642000436782837, 0.5179200172424316, 0.12861000001430511, 0.7346900105476379, -0.723609983921051, -0.6522200107574463, -0.38199999928474426, 0.2768099904060364, 0.1567700058221817, -0.23744000494480133, 0.03060699999332428, 0.5936300158500671, 0.26096999645233154, -0.11371000111103058, -0.25780001282691956, -0.31369999051094055, -0.19726000726222992, -0.3654400110244751, -0.17271000146865845, 0.20527000725269318, 0.20419999957084656, 0.2219099998474121, -0.328249990940094, 0.3379499912261963, -0.048170000314712524, 0.1277800053358078, 0.031029999256134033, -0.377920001745224, 0.697350025177002, 0.18844999372959137, -1.2842999696731567, 0.20494000613689423, -0.26502999663352966, -0.1425500065088272, 0.4518199861049652, -0.6078699827194214, -0.1877100020647049, 0.43860000371932983, 0.48596999049186707, 0.4012399911880493, 0.18559999763965607, 0.11582999676465988, -0.028147000819444656, 0.31505000591278076, 0.35986000299453735, 0.05311400070786476, 0.5838000178337097, 0.07142700254917145, 0.07099799811840057, -0.6643499732017517, 0.25722000002861023, -0.18163999915122986, -0.3209899961948395, 0.20141999423503876, 0.15926000475883484, 0.0007454900187440217, 0.007360899820923805, -0.6877800226211548, -0.2913599908351898, -0.11625999957323074, 0.0503619983792305, 0.1766899973154068, -0.8539199829101562, -0.30375999212265015, 0.17467999458312988, 0.25613000988960266, -0.19380000233650208, 0.09501899778842926, -2.126699924468994, -0.04545700177550316, 0.7124699950218201, 0.7228599786758423, 0.4528599977493286, -0.05670500174164772, 0.22522999346256256, -0.19072000682353973, -0.30487000942230225, 0.7323899865150452, -0.49744001030921936, 0.37551000714302063, 0.18303999304771423, -0.06440000236034393, -0.382889986038208, -0.15444999933242798, 0.22412000596523285, 0.6525800228118896, 0.18424999713897705, 0.014895999804139137, -0.15188999474048615, 0.30928000807762146, 0.10806000232696533, -0.12449000030755997], u'table': [-0.256089985370636, 0.5052700042724609, 0.6702399849891663, -0.48322999477386475, -0.16091999411582947, 0.4055199921131134, -0.577530026435852, -0.6172699928283691, 0.27748000621795654, -1.1638000011444092, -0.2771199941635132, 
-0.020294999703764915, 0.02877499908208847, 0.22266000509262085, -0.12470000237226486, 0.21623000502586365, -0.34953999519348145, 0.4387199878692627, -0.11428999900817871, -0.5208799839019775, -0.09877599775791168, 0.36173999309539795, -0.10907000303268433, -0.33722999691963196, 0.12477999925613403, -0.17475999891757965, -0.028060000389814377, 0.29811999201774597, 0.4519999921321869, -0.1525299996137619, -0.08335699886083603, 0.03476100042462349, -0.24092000722885132, 0.3274900019168854, -1.4544999599456787, 0.48006001114845276, 0.331959992647171, -0.1540299952030182, -0.6942099928855896, -0.2776699960231781, -0.11926999688148499, -0.43105000257492065, -0.47290000319480896, 0.3703399896621704, 0.3416900038719177, -0.05325400084257126, 0.15647000074386597, -0.10772000253200531, 0.0165180005133152, 0.38008999824523926, 0.3429499864578247, 0.24506999552249908, -0.38752999901771545, -0.5432299971580505, -0.46239998936653137, 0.3305099904537201, -0.5591099858283997, -0.27595001459121704, 0.08159200102090836, 0.6352400183677673, 0.49963998794555664, -0.43428999185562134, 0.06719899922609329, 0.25352001190185547, -0.5093299746513367, -0.7463799715042114, -0.25850000977516174, 0.2326900064945221, -0.25115999579429626, -0.10638999938964844, 0.18583999574184418, 0.12071999907493591, -0.30594000220298767, -0.3820500075817108, -0.7889400124549866, 0.19583000242710114, -0.24714000523090363, -0.07462400197982788, -0.1596899926662445, -0.4459199905395508, -0.08155199885368347, 0.31977999210357666, 0.10503000020980835, 0.0054445997811853886, 0.2655400037765503, -0.7573400139808655, -0.5335999727249146, 0.290120005607605, -0.06411799788475037, 0.07188999652862549, -0.10019999742507935, 0.18322999775409698, -0.3672100007534027, 0.051837000995874405, 0.2561500072479248, 0.11507999897003174, -0.20353999733924866, -0.024862000718712807, 0.17288999259471893, -0.567870020866394, -0.1949699968099594, 0.16598999500274658, -0.02274000085890293, -0.12867000699043274, -0.16200000047683716, -0.17047999799251556, -0.1043199971318245, -0.2535000145435333, 0.2204499989748001, -0.4566099941730499, -0.13744999468326569, -0.030223999172449112, 0.043699000030756, -0.18161000311374664, -0.45375001430511475, 0.06096599996089935, -0.5672600269317627, -0.039767999202013016, -0.2670300006866455, -0.06397400051355362, -0.2601200044155121, 0.29440999031066895, 0.30542999505996704, 0.3277300000190735, -0.31902000308036804, 0.31057998538017273, 0.20735999941825867, 0.020728999748826027, -0.3487299978733063, -0.16662999987602234, -0.11727999895811081, 0.3334200084209442, 0.4122700095176697, -0.08525200188159943, 0.5206500291824341, 0.1964700073003769, 0.3780199885368347, 0.2924799919128418, -0.4431400001049042, 0.19523000717163086, 0.08237099647521973, 0.27869999408721924, 0.13965000212192535, -0.06909800320863724, -0.11195000261068344, 0.5214300155639648, 0.43849000334739685, 0.03121500089764595, 0.08999300003051758, -0.20983999967575073, -0.460750013589859, 0.3215799927711487, 0.312610000371933, -0.08153899759054184, 0.06409899890422821, 0.020493999123573303, -0.06891299784183502, 0.022847000509500504, -0.018045000731945038, 0.47409000992774963, 0.2556000053882599, 0.2894200086593628, 0.005348999984562397, 0.2218399941921234, 0.5758799910545349, 0.2708500027656555, 0.029016999527812004, -0.09782099723815918, 0.13407999277114868, 0.08263599872589111, -0.6502599716186523, 0.3704099953174591, -0.1696999967098236, -0.5220299959182739, -0.07504499703645706, -0.4008600115776062, -0.1476999968290329, 0.9749500155448914, -0.098860003054142, 
-0.16053999960422516, -0.09223999828100204, -0.37929999828338623, 0.3605400025844574, -0.10380999743938446, 0.4490100145339966, 0.08289200067520142, -0.07997699826955795, 0.08435600250959396, 0.04224799945950508, -0.0964210033416748, 0.691789984703064, 0.4988600015640259, 0.40619000792503357, -0.07779999822378159, -0.14788000285625458, 0.05270899832248688, -0.09787700325250626, 0.07082799822092056, -0.5629600286483765, 0.0522180013358593, 0.9761000275611877, -0.5536500215530396, -0.044514000415802, -0.18416999280452728, 0.30776000022888184, -0.6362900137901306, -0.10328999906778336, -0.7368500232696533, -0.3794099986553192, -0.0570329986512661, -0.3435800075531006, -0.2622700035572052, -0.2140900045633316, 0.18885000050067902, 0.5408099889755249, 0.5431200265884399, -0.03865300118923187, -0.33017000555992126, 0.16678999364376068, 0.10520999878644943, 0.23586000502109528, 0.13128000497817993, -0.0023244000039994717, -0.14755000174045563, -0.6543999910354614, 0.2253199964761734, 0.2753300070762634, 0.44426000118255615, -0.5224400162696838, -0.12011999636888504, 0.06167899817228317, -0.12355999648571014, 0.3502799868583679, 0.033955998718738556, 0.5760300159454346, 0.26243001222610474, -0.3162499964237213, 0.027056999504566193, 0.06856200098991394, 0.36305001378059387, -0.15706999599933624, -0.18637999892234802, -0.15244999527931213, 0.5138499736785889, 0.003051399951800704, 0.3636299967765808, -0.5427899956703186, 0.5440000295639038, -0.3303300142288208, -0.018278000876307487, 0.034519001841545105, -0.38429999351501465, 0.4817200005054474, -0.24718999862670898, 0.17587000131607056, 0.3404200077056885, -0.2586599886417389, 0.1147100031375885, -0.4248400032520294, 0.7757700085639954, -0.47343000769615173, -0.24318000674247742, 0.6615300178527832, 0.11480999737977982, 0.027111999690532684, 0.0797630026936531, -0.10655999928712845, 0.30877000093460083, 0.1464499980211258, -0.12334000319242477, -0.7975500226020813, 0.0829790011048317, -0.10762999951839447, 0.2561599910259247, 0.6034299731254578, -0.26589998602867126, -1.6927000284194946, 0.16208000481128693, 0.27090999484062195, -0.287990003824234, -0.2205599993467331, -0.14431999623775482, -0.5978099703788757, -0.5589900016784668, 0.6231899857521057, 0.10158000141382217, -0.06904800236225128, -0.35172998905181885, 0.08186700195074081, -0.19860999286174774, -0.35076001286506653, 0.29545000195503235, -0.1848199963569641, -0.10118000209331512, 0.25696998834609985, -0.40751001238822937, 0.6938700079917908, -0.4472000002861023, 0.18863999843597412, 0.18275000154972076], u'ring': [-0.11721000075340271, 0.24539999663829803, 0.1185699999332428, -0.9813100099563599, -0.5585299730300903, -0.265390008687973, 0.2814599871635437, -0.37237998843193054, -0.6319100260734558, -0.9711899757385254, 0.206169992685318, 0.27074000239372253, -0.5082399845123291, 0.05072199925780296, -0.7851399779319763, -0.8908100128173828, -0.5038099884986877, 0.1565299928188324, -0.028643999248743057, -0.11034999787807465, -0.023887999355793, -0.3088400065898895, -0.18371999263763428, -0.3676699995994568, 0.031279999762773514, -0.43595001101493835, -0.0892219990491867, 0.10869000107049942, 0.08690500259399414, 0.10420999675989151, 0.19302000105381012, 0.27608999609947205, -0.7346000075340271, -0.06097099930047989, -0.42318999767303467, 0.4728800058364868, -0.15012000501155853, 0.2462099939584732, 0.06635499745607376, 0.19592000544071198, -0.3055799901485443, 0.21487000584602356, -0.4521099925041199, 0.26381999254226685, 0.08730000257492065, 0.04546700045466423, 0.005125400144606829, 
-0.3407000005245209, -0.2781299948692322, 0.3693400025367737, -0.14648999273777008, -0.41291001439094543, 0.12087000161409378, 0.20656000077724457, -0.09556599706411362, -0.18004000186920166, -0.17222000658512115, 0.3383600115776062, -0.364329993724823, -0.6360999941825867, 0.2517299950122833, -0.2662299871444702, 0.0668720006942749, 0.31411001086235046, 0.2858699858188629, 0.14345000684261322, -0.2507399916648865, 0.2829299867153168, 0.688319981098175, 0.15331999957561493, 0.09967300295829773, 0.012165999971330166, 0.7277399897575378, 0.06235000118613243, -0.15580999851226807, -0.0738300010561943, 0.5005599856376648, -0.7478399872779846, -0.24350999295711517, 0.3585900068283081, 0.8984500169754028, -0.04245699942111969, 0.4090900123119354, 0.22050000727176666, 0.06284099817276001, -0.3595399856567383, -0.20117999613285065, 0.05004400014877319, -0.19519999623298645, 0.41982001066207886, -0.13659000396728516, 0.31064000725746155, 0.3150100111961365, 0.1501699984073639, -0.6877999901771545, 0.29361000657081604, -0.32655999064445496, -0.040160998702049255, 0.1735299974679947, -1.003999948501587, 0.22032999992370605, 0.657039999961853, -0.07238099724054337, 0.29580000042915344, 0.1300400048494339, -0.3652999997138977, 0.6482499837875366, 0.24666999280452728, -0.46797001361846924, 0.2587699890136719, -0.20638999342918396, -0.1790499985218048, 0.8060399889945984, 0.12060999870300293, -0.09498199820518494, 0.6838899850845337, -0.17973999679088593, 0.2234400063753128, -0.6307899951934814, -0.49154001474380493, 0.16638000309467316, -0.2963100075721741, 0.13561999797821045, -0.033633001148700714, -0.30430999398231506, -0.3549099862575531, 0.530210018157959, 0.25575000047683716, -0.5904200077056885, -0.570609986782074, 0.04123000055551529, 0.5628299713134766, 0.3980900049209595, 0.697920024394989, -0.03344700112938881, -0.21338999271392822, 0.23027999699115753, -0.03723499923944473, 0.017712999135255814, -0.8129900097846985, 0.06655900180339813, -0.19269999861717224, -0.24094000458717346, -0.1323300004005432, 0.6904100179672241, -0.2145400047302246, -0.1379999965429306, -0.046762000769376755, -0.19035999476909637, 0.4251900017261505, 0.22189000248908997, 0.3678700029850006, -0.2915300130844116, 0.08759000152349472, 0.1812800019979477, -0.09586700052022934, -0.0021575000137090683, 0.04371599853038788, -0.24954000115394592, -0.30612000823020935, -0.2606399953365326, 0.08471900224685669, 0.8294199705123901, -0.10296999663114548, -0.11150000244379044, 0.04097599908709526, -0.05347999930381775, -0.20416000485420227, 0.6662300229072571, 0.008208200335502625, -0.24562999606132507, -0.22896000742912292, 0.23218999803066254, 0.040445998311042786, 0.38938000798225403, 0.06386099755764008, 0.1007699966430664, 0.634119987487793, -0.47374001145362854, -0.6658400297164917, 0.19740000367164612, 0.3503600060939789, -0.0595569983124733, -0.3882899880409241, -0.1383100003004074, -0.5329099893569946, -0.15633000433444977, 0.7968199849128723, 0.10649000108242035, -0.747730016708374, 0.314410001039505, 0.0491890013217926, -0.2676999866962433, 0.6406499743461609, 0.09485699981451035, -0.05956900119781494, -0.19781999289989471, -0.5209599733352661, 0.7099199891090393, 0.03569699823856354, 1.3890999555587769, 0.30827000737190247, 0.4113900065422058, 0.24232999980449677, 0.05834199860692024, 0.09405799955129623, 0.06991100311279297, -0.07291000336408615, 0.41648998856544495, -0.06450500339269638, -0.10525999963283539, 0.460099995136261, 0.035597000271081924, 0.24525000154972076, -0.4066999852657318, -0.26159000396728516, 
-0.16272999346256256, -0.4374200105667114, -0.38694000244140625, 0.307779997587204, 0.0005994500243104994, -0.09296700358390808, 0.4405600130558014, -0.31338998675346375, 0.34147000312805176, 0.5964999794960022, 0.249549999833107, -0.046831000596284866, -0.038353998214006424, 0.09016799926757812, 0.7121300101280212, 0.04819199815392494, -0.24130000174045563, -0.5074999928474426, -0.20276999473571777, -0.048875000327825546, -0.24048000574111938, -0.3046700060367584, 0.2142300009727478, 0.2787899971008301, -0.213809996843338, -0.5873000025749207, 0.3118799924850464, -0.08219499886035919, -0.02106899954378605, -0.5440800189971924, 0.0818450003862381, 0.19750000536441803, -0.05853300169110298, -0.07960300147533417, -0.4162299931049347, -0.008661599829792976, -0.3086499869823456, -0.06142500042915344, 0.3128899931907654, 0.27360999584198, -0.032954998314380646, -0.04640600085258484, -0.12547999620437622, 0.2877199947834015, -0.40676000714302063, -0.22439000010490417, 0.733780026435852, 0.6861100196838379, -0.19526000320911407, -0.21149000525474548, -0.06520300358533859, 0.13526999950408936, -0.1444299966096878, 0.008596899919211864, 0.3714100122451782, -0.2142699956893921, 0.610729992389679, -0.24814000725746155, -0.5663700103759766, 0.032058000564575195, -1.5297000408172607, -0.28797999024391174, -0.34272998571395874, 0.014008999802172184, -0.14601999521255493, 0.1500999927520752, -0.5503699779510498, 0.2118300050497055, -0.6473000049591064, 0.33965998888015747, -0.10457000136375427, 0.3290500044822693, -0.2425999939441681, 0.059866998344659805, 0.262939989566803, 0.050891000777482986, -0.64205002784729, 0.16913999617099762, 0.2588599920272827, 0.014948000200092793, 0.5822700262069702, 0.1255200058221817, 0.13782000541687012, -0.32085999846458435], u'brass': [0.06683100014925003, 0.19354000687599182, -0.3096100091934204, -0.9099500179290771, -0.2556400001049042, -0.23122000694274902, 0.2293200045824051, 0.6523200273513794, 0.07953500002622604, 0.013922999612987041, -0.4369800090789795, 0.49678000807762146, -0.08599899709224701, 0.17768999934196472, 0.03076300024986267, -0.2015800029039383, -0.2981700003147125, -0.22840000689029694, 0.5393099784851074, -0.4948900043964386, 0.21101999282836914, 0.145579993724823, -0.11953999847173691, 0.29875001311302185, -0.21567000448703766, -0.4662800133228302, 0.13821999728679657, 0.4407599866390228, -0.007426000200212002, 0.11558999866247177, 0.6671299934387207, -0.058274999260902405, -0.6467199921607971, 0.44029998779296875, -0.1940000057220459, -0.10926000028848648, -0.25679999589920044, -0.10315000265836716, -0.04254499822854996, -0.030553000047802925, -0.4496699869632721, -0.2515000104904175, -0.2865299880504608, 0.024546999484300613, 0.09769900143146515, 0.3851900100708008, -0.0381460003554821, -0.8352699875831604, -0.5616300106048584, -0.07894500344991684, 0.19399000704288483, 0.06607300043106079, -0.3241899907588959, 0.44850000739097595, -0.32041001319885254, -0.36974000930786133, -0.23638999462127686, 0.7663999795913696, 0.434689998626709, 0.0032887000124901533, 0.553820013999939, 0.20092999935150146, 0.624019980430603, 0.10174000263214111, 0.13381999731063843, -0.1968899965286255, 0.3712100088596344, 0.3923400044441223, -0.161190003156662, 0.2602800130844116, 0.5844200253486633, -0.15832999348640442, -0.12417999655008316, 0.09628500044345856, 0.04539699852466583, 0.6668499708175659, 0.01960800029337406, 0.5048900246620178, 0.07898499816656113, -0.1697400063276291, -0.2151300013065338, 0.05169599875807762, 0.007002099882811308, -0.5042600035667419, 
0.7182999849319458, -0.055309999734163284, -0.2759400010108948, -0.11298999935388565, -0.4856100082397461, 0.43031999468803406, 1.3759000301361084, 0.6440100073814392, -0.05973000079393387, 0.06098899990320206, -0.1326500028371811, 0.07191400229930878, -0.3809100091457367, -0.6963199973106384, -0.037650998681783676, -0.046404000371694565, -0.08046100288629532, 0.4794999957084656, -0.21842999756336212, -0.08115900307893753, 0.328139990568161, -0.4850499927997589, 0.23622000217437744, 0.08423200249671936, -0.8252500295639038, 0.2240999937057495, 0.5285000205039978, 0.3606800138950348, -0.10756999999284744, -0.04494199901819229, -0.37669000029563904, -0.27024000883102417, 0.08631099760532379, -0.083126001060009, 0.07261700183153152, -0.6345300078392029, -0.3484399914741516, -0.34380999207496643, 0.3760699927806854, -0.6257699728012085, -0.08703800290822983, 0.19248999655246735, 0.28271999955177307, 0.4333299994468689, -0.7499200105667114, 0.460750013589859, 0.15706999599933624, 0.1545799970626831, 0.13919000327587128, -0.41203001141548157, -0.06798899918794632, 0.29054999351501465, -0.1559000015258789, 0.37790000438690186, 0.36212000250816345, -0.3205600082874298, -0.11934000253677368, 0.32280001044273376, 0.0436829999089241, -0.07446099817752838, 0.4652999937534332, 0.0743279978632927, 0.2476000040769577, -0.029145000502467155, 0.22381000220775604, 0.20027999579906464, 0.05779699981212616, -0.16482999920845032, 0.2842499911785126, -0.17983999848365784, -0.1773100048303604, -0.16518999636173248, -0.27202001214027405, -0.47593000531196594, -0.8164299726486206, 0.4663800001144409, 0.44905000925064087, 0.29877999424934387, 0.21074000000953674, 0.09213600307703018, 0.3823400139808655, 0.06696899980306625, -0.18810999393463135, -0.046289000660181046, 0.12212000042200089, -0.4234200119972229, -0.4531799852848053, 0.25380998849868774, 0.03575199842453003, -0.044537998735904694, 0.36517998576164246, -0.39013001322746277, -0.4010699987411499, 0.12300000339746475, -0.5299000144004822, -0.5021600127220154, -0.6880000233650208, 0.5314599871635437, 0.3475300073623657, 0.18772000074386597, 0.23169000446796417, -0.391759991645813, -0.12408000230789185, 0.6614699959754944, 0.3137100040912628, -0.20192000269889832, 0.2597599923610687, 0.14837999641895294, 0.4694800078868866, 0.7822499871253967, 0.14055000245571136, 0.24483999609947205, -0.029682999476790428, 0.35135000944137573, -0.30994001030921936, 0.006032899953424931, 0.8123000264167786, 0.6439399719238281, 0.1543000042438507, -0.3754599988460541, 0.23072999715805054, 0.7886000275611877, 0.9674999713897705, -0.24942000210285187, -0.14199000597000122, -0.40261000394821167, 0.40985000133514404, 0.5825099945068359, 0.10412999987602234, -0.08302400261163712, 0.5371900200843811, -0.059324998408555984, 0.11680000275373459, -0.46494001150131226, -0.2578499913215637, 0.11508999764919281, -0.02079099975526333, -0.47971001267433167, 0.2759400010108948, -0.5087699890136719, -0.9694399833679199, 0.10216999799013138, -0.23442000150680542, -0.009116999804973602, 0.40661999583244324, -0.422760009765625, 0.2224300056695938, -0.15731999278068542, -0.24070000648498535, -0.4979499876499176, 0.6463199853897095, -0.23085999488830566, 0.20396000146865845, -0.9747999906539917, -0.30667001008987427, -0.2506200075149536, -0.10200999677181244, 0.29050999879837036, -0.2853499948978424, -0.29743000864982605, -0.00406369986012578, -0.643809974193573, -0.3960700035095215, -0.05259000137448311, -0.1685899943113327, 0.2871899902820587, -0.21810999512672424, -0.14338000118732452, 
0.24081000685691833, -0.34248000383377075, 0.4365899860858917, 0.09233500063419342, 0.3304100036621094, 0.19059999287128448, -0.3443300127983093, 0.1795399934053421, -0.41446998715400696, -0.1322699934244156, 0.5361499786376953, 0.6968399882316589, 0.10988999903202057, 0.3807600140571594, 0.0964839980006218, 0.4244599938392639, -0.2045000046491623, 0.06664799898862839, 0.30072999000549316, 0.03622899949550629, 0.48089998960494995, -0.42188000679016113, -0.3022199869155884, 0.7854099869728088, -0.8534700274467468, 0.22251999378204346, -0.6414499878883362, -0.1405699998140335, 0.6650199890136719, 0.14821000397205353, -0.4163999855518341, 0.3315599858760834, 0.32576000690460205, 0.22663000226020813, -0.5044500231742859, 0.5424699783325195, -0.002589199924841523, -0.2703399956226349, 0.11766000092029572, -0.3487899899482727, -0.12171000242233276, 0.8061400055885315, -0.13964000344276428, 0.23058000206947327, 0.2835099995136261, -0.42247000336647034, -0.47446000576019287, 0.16615000367164612], u'boat': [0.31198999285697937, -0.880079984664917, 0.031275998800992966, -0.4682900011539459, -0.619920015335083, 0.06645599752664566, 0.7583699822425842, 0.23555999994277954, 0.23833000659942627, -0.8353300094604492, 0.23565000295639038, -0.0561549998819828, 0.15561999380588531, 0.006190800108015537, 0.10621000081300735, 0.30838000774383545, 0.8294399976730347, -0.2683500051498413, -0.596530020236969, 0.17771999537944794, -0.37112000584602356, 0.38133999705314636, -0.016913000494241714, 0.4101699888706207, -0.13732999563217163, -0.06548599898815155, -0.282370001077652, 0.4426400065422058, -0.2093999981880188, 0.4422299861907959, -0.2259099930524826, 0.1867700070142746, 0.05156800150871277, -0.07770100235939026, -0.1785700023174286, 0.31431999802589417, 0.05525499954819679, -0.6581699848175049, 0.12696999311447144, 0.6543899774551392, -0.40786001086235046, -0.11202999949455261, 0.30289000272750854, 0.17674000561237335, -0.7798799872398376, 0.46779000759124756, 1.0225000381469727, -0.3849399983882904, -0.154339998960495, 0.3730500042438507, -0.4521600008010864, -0.19812999665737152, -0.15857000648975372, -0.006847099866718054, -0.2829500138759613, 0.5117800235748291, -0.3528200089931488, 0.22723999619483948, -0.16730999946594238, 0.45864999294281006, -0.12184000015258789, 0.357589989900589, 0.5476999878883362, -0.2103399932384491, 0.44143998622894287, -0.16152000427246094, -0.8348100185394287, 0.1276099979877472, -0.16362999379634857, -0.18393999338150024, -0.034630000591278076, 0.14601999521255493, 0.07745900005102158, -0.2423499971628189, -0.39621999859809875, 0.5975300073623657, 0.6622200012207031, 0.3167499899864197, 0.30847999453544617, -0.21176999807357788, -0.41530001163482666, 0.6564099788665771, 0.09775000065565109, 0.44176000356674194, 0.08079300075769424, -0.31953999400138855, -0.14935000240802765, -0.3473300039768219, -0.2571299970149994, -0.013408999890089035, 0.9803299903869629, 0.14775000512599945, -0.16448000073432922, -1.1549999713897705, 0.387470006942749, -0.2112099975347519, 0.2152400016784668, -0.30320999026298523, 0.07429099828004837, -0.34455999732017517, 0.44920000433921814, 0.48392000794410706, 0.33215001225471497, -0.17788000404834747, 0.2982499897480011, -0.05204499885439873, 0.5995699763298035, -0.5530099868774414, 0.07928700000047684, -0.03363899886608124, -0.32784000039100647, -0.2054699957370758, 0.35203999280929565, -0.4661799967288971, 0.1286499947309494, -0.26175999641418457, -0.015201999805867672, 0.26673001050949097, -0.11351999640464783, 0.42827001214027405, 
-0.4164600074291229, -0.5390200018882751, -0.08824600279331207, -0.06731999665498734, 0.6886699795722961, 0.18788999319076538, 0.19282999634742737, 0.21589000523090363, 0.4315199851989746, -0.04521699994802475, -0.11389999836683273, 0.686710000038147, 0.6590399742126465, -0.10377000272274017, 0.14847999811172485, 0.5102900266647339, 0.21476000547409058, -0.6627200245857239, -0.4862099885940552, -0.07992400228977203, -0.1125900000333786, 0.3638400137424469, 0.027170000597834587, 0.17749999463558197, -0.8420500159263611, 0.06893099844455719, 0.05920900031924248, -0.05105400085449219, -0.20223000645637512, 0.4695799946784973, 0.7872499823570251, 0.5557199716567993, 0.28426000475883484, -0.2989799976348877, 0.3303300142288208, -0.14645999670028687, -0.09136799722909927, -0.058107998222112656, 0.3461399972438812, 0.4650900065898895, 0.6243799924850464, -0.6585900187492371, 0.09365800023078918, -0.391759991645813, -0.0071968999691307545, -0.09932299703359604, 0.06035900115966797, 0.05458800122141838, 0.025181999430060387, 0.15567000210285187, -0.5879700183868408, 0.453000009059906, -0.24653999507427216, -0.19312000274658203, -0.30329999327659607, -0.11970999836921692, -0.578220009803772, 0.02315800078213215, 0.3306199908256531, -0.20990000665187836, 0.6121299862861633, 0.07096900045871735, 0.004685199819505215, 0.09511200338602066, 0.2655799984931946, -0.6444500088691711, -0.24018999934196472, 0.9901999831199646, 0.47672998905181885, 0.010259999893605709, 0.09715700149536133, 0.12025000154972076, -0.3156999945640564, -0.08093500137329102, 0.5450299978256226, 0.2227499932050705, -0.16387000679969788, -0.1958799958229065, -0.22246000170707703, -0.2668299973011017, 0.6523000001907349, 0.4126800000667572, -0.33388999104499817, -0.11038000136613846, 0.6517699956893921, 0.2887299954891205, -0.1141899973154068, 0.4290899932384491, -0.6169599890708923, -0.22071999311447144, -0.2615300118923187, 0.06571300327777863, -0.4230000078678131, -0.1389400064945221, -0.26282998919487, -0.2606799900531769, -0.3849399983882904, 0.04139000177383423, -0.29997000098228455, 0.2162500023841858, 1.2156000137329102, 0.37026000022888184, 0.3501800000667572, -0.44367000460624695, 0.3431699872016907, -0.1044899970293045, 0.2754000127315521, -1.100600004196167, -0.20803000032901764, 0.4448699951171875, 0.14821000397205353, -0.43097999691963196, -0.1694599986076355, 0.2132900059223175, 0.750540018081665, -0.145019993185997, -0.3998900055885315, 0.060756001621484756, 0.06068199872970581, 0.026758000254631042, 0.48497000336647034, 0.378030002117157, 0.1137700006365776, -0.21310999989509583, -0.08793699741363525, -0.47258999943733215, 0.3668000102043152, 0.45458999276161194, -0.4116100072860718, 0.6033999919891357, -0.6270400285720825, 0.33539000153541565, -0.024436000734567642, 0.28951001167297363, 0.5086699724197388, -0.20969000458717346, 0.08255399763584137, 0.2063799947500229, -0.13819000124931335, -0.002455499954521656, -0.2952600121498108, -0.6017299890518188, -0.25014999508857727, 0.08215799927711487, 0.41075000166893005, 0.25606000423431396, -0.19425000250339508, -0.6413400173187256, -0.8073499798774719, 0.029879000037908554, 0.25391000509262085, -0.03175799921154976, 0.5085399746894836, 0.48217999935150146, 0.43724000453948975, -0.15343999862670898, -1.5049999952316284, -0.02385699935257435, -0.30458998680114746, 0.44947001338005066, -0.1464100033044815, 0.5387899875640869, 0.054166000336408615, -0.3234499990940094, -0.2826400101184845, -0.5533000230789185, -0.21884000301361084, 0.06473899632692337, 0.38499000668525696, 
0.2071000039577484, -0.6473000049591064, -0.29802000522613525, 0.016973000019788742, -0.20329999923706055, -0.21879999339580536, 0.19306999444961548, -0.39688000082969666, 0.0688219964504242, 0.13479000329971313, 0.44435998797416687], u'belt': [0.007869799621403217, 0.06130500137805939, 0.185589998960495, 0.19442999362945557, -0.6005899906158447, -0.2858799993991852, -0.042514000087976456, -0.07513000071048737, -0.03147000074386597, -0.9010499715805054, -0.2439900040626526, 0.16580000519752502, -0.4849900007247925, 0.34233999252319336, -0.31553998589515686, -0.0446930006146431, -0.45638999342918396, 0.38117000460624695, -0.18573999404907227, 0.12929999828338623, 0.2664099931716919, -0.16218000650405884, -0.10869999974966049, -0.17326000332832336, -0.7199900150299072, -0.2094999998807907, 0.17760999500751495, 0.06070199981331825, -0.0955279991030693, 0.2575100064277649, 0.7762500047683716, 0.4901599884033203, -0.18084999918937683, 0.03974999859929085, -0.2562600076198578, 0.5105299949645996, -0.47304001450538635, 0.2076999992132187, -0.05882199853658676, 0.6611899733543396, -0.10554999858140945, -0.09921199828386307, -0.030977999791502953, -0.23286999762058258, 0.1867399960756302, 0.02920999936759472, 0.056001998484134674, -0.5640599727630615, -0.24985000491142273, -0.18192000687122345, -0.1974799931049347, 0.11918000131845474, 0.54475998878479, -0.04256200045347214, 0.07127399742603302, -0.32019999623298645, 0.11591999977827072, -0.8521900177001953, 0.37591999769210815, -0.17741000652313232, -0.10362999886274338, 0.13718999922275543, -0.10943999886512756, 0.4246799945831299, -0.17239999771118164, 0.4912799894809723, -0.7843999862670898, -0.015123999677598476, 0.36579999327659607, 0.44565001130104065, 0.35576000809669495, 0.12801000475883484, 0.23928000032901764, -0.3379499912261963, -0.5447999835014343, -0.26642999053001404, -0.20441000163555145, -0.7541599869728088, -0.3948200047016144, 0.047162000089883804, 0.6857100129127502, 0.3849300146102905, 0.06720300018787384, -0.5725700259208679, 0.04786499962210655, -0.01354300044476986, 0.014605999924242496, -0.0438929982483387, -0.5821200013160706, 0.654229998588562, -0.05036100000143051, 0.25464001297950745, 0.4906199872493744, 0.3915899991989136, -0.5347700119018555, 0.2828400135040283, -0.18100999295711517, 0.1278499960899353, 0.2828800082206726, -0.25442999601364136, 0.08684500306844711, 1.2223999500274658, -0.23837999999523163, 0.006146399769932032, -0.261929988861084, -0.3504999876022339, -0.28738999366760254, 0.5331500172615051, -0.13752000033855438, -0.6127399802207947, -0.20260000228881836, -0.21052999794483185, 0.03848600015044212, -0.030226999893784523, -0.19535000622272491, 0.7910299897193909, 0.21529999375343323, -0.11040999740362167, 0.70551997423172, -0.3736000061035156, 0.16259999573230743, -0.05664199963212013, 0.08076699823141098, 0.1800300031900406, -0.2722199857234955, 0.16613000631332397, -0.027316000312566757, 0.13594000041484833, 0.049911998212337494, -0.5149700045585632, 0.2116899937391281, 0.12937000393867493, 0.411080002784729, 0.1445399969816208, 0.0027914000675082207, -0.061351001262664795, -0.45660001039505005, 0.392659991979599, -0.09231799840927124, 0.2790699899196625, -0.21180999279022217, 0.4121600091457367, 0.11829999834299088, -0.3731499910354614, 0.42069000005722046, 0.21613000333309174, 0.04612400010228157, -0.5358999967575073, -0.14322000741958618, -0.17659999430179596, 0.27674999833106995, 0.4117099940776825, 0.12284000217914581, -0.19436000287532806, 0.7895799875259399, 0.006431899964809418, 
0.30834001302719116, -0.10617999732494354, 0.04140400141477585, 0.5522400140762329, 0.28856000304222107, -0.43571001291275024, 0.19662000238895416, 0.02543400041759014, 0.07062699645757675, -0.8938699960708618, -0.04587100073695183, 0.008043699897825718, 0.2196200042963028, -0.3139899969100952, 0.34975001215934753, 0.2133300006389618, -0.2267799973487854, -0.05283600091934204, 0.4510599970817566, -0.34237998723983765, -0.4473699927330017, 0.37046000361442566, -0.05128699913620949, -0.02763199992477894, 0.44488999247550964, 0.40856999158859253, 0.29534000158309937, -0.3820599913597107, 0.8227800130844116, 0.04107299819588661, 0.14384999871253967, 0.5519999861717224, 0.05039399862289429, -0.6472399830818176, 0.14271000027656555, 0.29008999466896057, 0.29528000950813293, 0.9873999953269958, -0.31560999155044556, -0.1158600002527237, 0.00025378999998793006, -0.8471400141716003, 0.49584999680519104, -0.0750569999217987, 0.9266800284385681, 0.03747899830341339, 0.6072400212287903, 0.17020000517368317, 0.05251799896359444, -0.5836300253868103, 0.5243099927902222, 0.010348999872803688, 0.12419000267982483, 0.14291000366210938, -0.1022299975156784, -0.48201000690460205, 0.1975499987602234, 0.22461000084877014, 0.005458099767565727, -0.3553900122642517, -0.33636999130249023, -0.5927199721336365, -0.2504200041294098, -0.5158399939537048, 0.29357001185417175, -0.14836999773979187, 0.3477199971675873, -0.05791100114583969, 0.11131999641656876, 0.5914700031280518, 0.014581999741494656, 0.10294000059366226, 0.10067000240087509, -0.5547999739646912, 0.48210999369621277, 0.6074399948120117, -0.02719300054013729, -0.3434700071811676, 0.1049100011587143, -0.5963299870491028, -0.03787200152873993, 0.08230599761009216, 0.22262999415397644, 0.07102999836206436, -0.004231899976730347, 0.247529998421669, 0.06776300072669983, -0.02371799945831299, -0.5689899921417236, -0.1653199940919876, -0.053105998784303665, 0.08142399787902832, 0.024237999692559242, -0.35126999020576477, -0.44391000270843506, 0.17821000516414642, 0.5324199795722961, -0.21788999438285828, 0.27612999081611633, 0.21946999430656433, 0.009747699834406376, 0.4205999970436096, 0.32646000385284424, -0.48065000772476196, -0.2121099978685379, -0.2554199993610382, 0.15650999546051025, 0.058862000703811646, -0.5316100120544434, -0.2805500030517578, -0.6659799814224243, 0.32719001173973083, -0.5168200135231018, 0.04680100083351135, -0.3165299892425537, 0.18704000115394592, -0.3777799904346466, 0.7100300192832947, -0.6643700003623962, -0.7106000185012817, -1.3384000062942505, 0.5241199731826782, -0.37288999557495117, 0.3184100091457367, 0.0006277799839153886, 0.4103200137615204, -0.45023998618125916, -0.3210900127887726, -0.8971400260925293, 0.05420399829745293, 0.1161699965596199, -0.5403100252151489, -0.6186800003051758, -0.3241199851036072, 0.9971699714660645, -0.15070000290870667, -0.19715000689029694, -0.07416299730539322, -0.03561199828982353, 1.2838000059127808, -0.3251500129699707, -0.28321999311447144, -0.08195800334215164, -0.4646799862384796], u'city': [-0.2865700125694275, -0.2559700012207031, -0.1766899973154068, -0.46088001132011414, 0.31700000166893005, 0.06430400162935257, 0.5245400071144104, 0.18382999300956726, 0.06290599703788757, -0.9312400221824646, 0.2327200025320053, -0.1600400060415268, 0.06944700330495834, 0.9079499840736389, 0.733680009841919, 0.40591999888420105, -0.10001999884843826, 0.07107000052928925, 0.4305500090122223, -0.013252000324428082, -0.3742699921131134, -0.30006998777389526, 0.15565000474452972, 0.16303999722003937, 
[Embedded data file, continued: this span of the added file is a Python-style dict literal mapping unicode tokens to fixed-length lists of floats (several hundred values per word), evidently pretrained word-embedding vectors. The keys visible in this slice are u'bathroom', u'toy', u'fabric', u'beef', u'window', u'plastic', u'paint', u'camera', u'bronze', u'tea', and u'valley'; the slice begins and ends mid-vector, and the raw numeric values remain in the source file rather than being reproduced here.]
0.3480899930000305, 0.19850000739097595, 0.043891001492738724, 0.016207000240683556, -0.33474001288414, 0.23544999957084656, -0.13346999883651733, 0.345660001039505, 0.1653199940919876, -0.2533800005912781, -0.1137000024318695, 0.12691999971866608, -0.362199991941452, -0.6144099831581116, 0.20823000371456146, 0.2409999966621399, 0.23323999345302582, 0.3256100118160248, 0.6127200126647949, -0.2404100000858307, -0.26093000173568726, 0.2549000084400177, -0.1222200021147728, 0.29881998896598816, -0.18698999285697937, -1.075700044631958, -0.032891999930143356, 0.30028998851776123, 0.37681999802589417, -0.0752670019865036, -0.30035001039505005, -0.4359300136566162, 0.24729999899864197, -0.8755000233650208, 0.3039900064468384, 0.7029200196266174, -0.2678399980068207, 0.05610800161957741, -0.4253099858760834, -0.03186799958348274, -0.6667299866676331, -0.35141998529434204, 0.711080014705658, 0.11434999853372574, 0.3171299993991852, 0.1157199963927269, -0.03302000090479851, 0.05623500049114227, 0.6865299940109253], u'bubble': [0.6553000211715698, 0.762470006942749, 0.16042999923229218, -0.07451900094747543, 0.2834799885749817, 0.28435999155044556, -0.24381999671459198, 0.2231599986553192, 0.6081100106239319, -1.0015000104904175, -0.08194199949502945, 0.15242999792099, -0.26631999015808105, -0.21622000634670258, 0.4750399887561798, 0.32684001326560974, -0.0924300029873848, -0.15446999669075012, 0.1542000025510788, 0.8057900071144104, -0.0435979999601841, 0.6380299925804138, -0.08216799795627594, 0.2481900006532669, -0.11907000094652176, -0.04458799958229065, 0.15821999311447144, -0.4088299870491028, -0.4376400113105774, -0.06958899646997452, -0.3605400025844574, -0.254040002822876, -0.2552199959754944, -0.35012000799179077, -0.4262099862098694, 0.615339994430542, -0.44889000058174133, 0.22134999930858612, 0.34957998991012573, 0.8263900279998779, 0.1571899950504303, -0.14257000386714935, 0.12981000542640686, 0.7549800276756287, -0.1563899964094162, -0.34191998839378357, -0.2588300108909607, 0.1746000051498413, 0.14535999298095703, 0.39702001214027405, 0.3544299900531769, -0.37099000811576843, -0.35418999195098877, -0.4393700063228607, -0.22785000503063202, -0.10260999947786331, -0.24775999784469604, 0.2868399918079376, -0.017548000440001488, -0.40165001153945923, 0.45809000730514526, -0.2783699929714203, -0.09805499762296677, -0.10768000036478043, 0.3467999994754791, 0.3137100040912628, 0.04356199875473976, 0.0181450005620718, -0.3700999915599823, -0.223130002617836, 0.17705999314785004, -0.4915199875831604, -0.14720000326633453, -0.007882200181484222, -0.1231900006532669, -0.21792000532150269, 0.7539299726486206, -0.4689300060272217, -0.2198999971151352, -0.6166099905967712, 0.1517000049352646, -0.19051000475883484, -0.03436199948191643, 0.30149999260902405, 0.7850599884986877, 0.48750001192092896, 0.34358999133110046, -0.15534000098705292, -0.0372220017015934, -0.13579000532627106, 0.3734000027179718, 0.8224599957466125, 0.06487999856472015, -0.2796800136566162, 0.12807999551296234, 0.8532299995422363, -0.2880600094795227, 0.5034499764442444, 0.35346001386642456, -0.843280017375946, -0.25892001390457153, 0.8576599955558777, -0.44310998916625977, 0.12514999508857727, -0.02606300078332424, 0.37288999557495117, -1.0856000185012817, 0.36632999777793884, -0.8039399981498718, 0.40128999948501587, 0.09942799806594849, 0.20250999927520752, -0.01839499920606613, 0.003910600207746029, 0.44143998622894287, 0.10846000164747238, 0.06516300141811371, 0.8238999843597412, -0.4521999955177307, -1.1131999492645264, 
0.6264299750328064, -1.2877999544143677, 0.1696300059556961, 0.36741000413894653, 0.4986400008201599, -0.3296799957752228, -0.5692200064659119, -0.10093999654054642, -0.2571299970149994, -0.11567000299692154, 0.4039900004863739, 1.0277999639511108, 0.4625900089740753, 0.30671000480651855, 0.27303001284599304, 0.24066999554634094, -0.5670999884605408, 0.0006810500053688884, -0.37077000737190247, 0.03775700181722641, -0.014330999925732613, -0.40939000248908997, -0.35931000113487244, 0.27685999870300293, 0.3948499858379364, -0.3013699948787689, 0.14452999830245972, 0.018275000154972076, 0.18689000606536865, 0.647159993648529, -0.5916299819946289, -0.3154599964618683, -0.2903900146484375, 0.20754000544548035, 0.5027599930763245, 0.5017600059509277, -0.027746999636292458, -0.019565999507904053, -0.1566700041294098, 0.6407300233840942, -0.2933399975299835, 0.12556999921798706, -0.4546799957752228, -0.36406999826431274, 0.5956400036811829, -0.6685299873352051, 0.3190400004386902, 0.5661600232124329, -0.1264200061559677, 0.10307999700307846, -0.3812499940395355, 0.1331699937582016, 0.17396000027656555, 0.4258899986743927, -0.5930399894714355, -0.7566499710083008, -0.48236000537872314, 0.2351900041103363, 0.5343899726867676, -0.05942400172352791, 0.388839989900589, -0.42142000794410706, 0.053849998861551285, -0.2998799979686737, 0.11607000231742859, 0.026233000680804253, 0.2960200011730194, -0.42267000675201416, -0.5613399744033813, -0.2325199991464615, 1.294600009918213, -0.010716999880969524, -0.04865799844264984, -0.3142000138759613, -0.3701300024986267, 0.1894800066947937, 0.14684000611305237, 0.26287999749183655, 0.14273999631404877, -0.4387499988079071, 0.4247399866580963, 0.3226900100708008, 0.6431699991226196, 0.17125000059604645, 0.2167000025510788, -0.42274999618530273, 0.028982000425457954, 0.37092000246047974, -0.8295400142669678, 0.24517999589443207, 0.06526599824428558, -0.6301000118255615, -0.026924999430775642, -0.4572800099849701, 0.11166000366210938, 0.11535999923944473, -0.1423099935054779, -0.16468000411987305, -0.2672100067138672, 0.48660001158714294, 0.4377500116825104, -0.018880000337958336, -0.2579500079154968, -0.3889800012111664, 0.057906001806259155, 0.3103399872779846, -0.9577400088310242, -0.07892200350761414, 0.5180400013923645, 0.15282000601291656, -0.2194499969482422, -0.12952999770641327, -0.29054000973701477, -0.0967240035533905, -0.569320023059845, -0.019871000200510025, -0.49439001083374023, -0.54271000623703, -0.14145000278949738, 0.02598400041460991, 0.4918400049209595, 0.32583001255989075, -0.37623000144958496, 0.35047999024391174, -0.5073599815368652, -0.44648998975753784, 0.5834699869155884, -0.5914199948310852, -0.07168500125408173, -0.19957999885082245, -0.3736700117588043, -0.1294800043106079, -0.03480900079011917, 0.10284999758005142, -0.5163900256156921, -0.10194999724626541, 0.04775699973106384, 0.21761000156402588, 0.2755100131034851, -0.17507000267505646, 0.03163599967956543, -0.6611300110816956, -0.33781999349594116, 0.008742200210690498, -0.0017080999678000808, -0.19541999697685242, -0.01916399970650673, -0.22405000030994415, 0.5041400194168091, 0.2136400043964386, -0.7603800296783447, 0.1887899935245514, 0.2496500015258789, -0.1467600017786026, 0.14575999975204468, -0.30810999870300293, -0.6258000135421753, 0.2629700005054474, -0.49136999249458313, -0.44350001215934753, -0.5262600183486938, -0.3024100065231323, -0.14891000092029572, -0.03240099921822548, 0.2910900115966797, 0.2444400042295456, 0.08959600329399109, 0.0704760029911995, 
0.0895640030503273, -0.39487001299858093, -0.16166000068187714, 0.12862999737262726, 0.34297001361846924, 0.15320000052452087, 0.6456199884414673, -0.19109000265598297, 0.03224800154566765, 0.38339000940322876, -0.17639000713825226, 0.0341310016810894], u'frame': [-0.07082299888134003, -0.22466999292373657, -0.4513300061225891, -0.5768499970436096, 0.0709180012345314, 0.2065100073814392, -0.19704000651836395, 0.014574999921023846, -0.23472000658512115, -0.8733100295066833, 0.09668699651956558, 0.2125999927520752, 0.20528000593185425, -0.016186999157071114, -0.38339999318122864, -0.07573799788951874, -0.06878200173377991, -0.7065100073814392, -0.1770700067281723, -0.4411099851131439, 0.10591000318527222, 0.022810999304056168, 0.4802899956703186, 0.44968000054359436, -0.1260399967432022, -0.22833000123500824, 0.2587699890136719, -0.3119800090789795, -0.19088000059127808, 0.40696999430656433, -0.10401000082492828, 0.4753299951553345, -0.06376799941062927, 0.36687999963760376, -0.4476200044155121, 0.6761299967765808, -0.1661899983882904, -0.23079000413417816, 0.11642000079154968, 0.6683400273323059, -0.3110699951648712, -0.24852000176906586, -0.5908600091934204, -0.09800300002098083, -0.31442999839782715, 0.05247600004076958, -0.27279001474380493, -0.41273000836372375, -0.020719999447464943, -0.0871649980545044, -0.1988700032234192, 0.5776799917221069, -0.03325200080871582, -0.30886998772621155, 0.48368000984191895, 0.09059999883174896, 0.2897999882698059, -0.18533000349998474, -0.39886000752449036, -0.034400999546051025, 0.7437000274658203, 0.5067300200462341, 0.3398999869823456, 0.05244699865579605, 0.12241999804973602, -0.34876999258995056, -0.0636489987373352, -0.4208900034427643, 0.6794099807739258, -0.12026999890804291, -0.16147999465465546, 0.13842999935150146, -0.10221000015735626, 0.3793700039386749, -0.06115499883890152, 0.17659999430179596, -0.3466799855232239, -0.04374900087714195, -0.4482400119304657, -0.04506700113415718, -0.13259999454021454, 0.36059999465942383, -0.008670199662446976, -0.17732000350952148, -0.45570001006126404, 0.27590999007225037, 0.060120001435279846, 0.2007800042629242, -0.3132300078868866, 0.5133799910545349, 0.4460099935531616, -0.1808999925851822, -0.2147199958562851, -0.07431299984455109, -0.3256700038909912, -0.7508900165557861, -0.30153998732566833, -0.03068700060248375, 0.05413300171494484, -0.6828600168228149, -0.7317100167274475, 0.5458099842071533, 0.05244699865579605, -0.196260005235672, 0.056203000247478485, 0.36469000577926636, 0.06657899916172028, 0.2941100001335144, -0.3767800033092499, -0.4503999948501587, -0.6781499981880188, 0.4528000056743622, -0.07714500278234482, -0.04206300154328346, 0.02683199942111969, -0.123989999294281, -0.24830999970436096, 0.4829300045967102, -0.270330011844635, -0.007078600116074085, 0.10181999951601028, -0.15514999628067017, 0.5138499736785889, 0.2606000006198883, 0.08155699819326401, 0.02300499938428402, -0.050971999764442444, 0.30292999744415283, 0.019929999485611916, 0.05179800093173981, -0.04720799997448921, 0.4720599949359894, 0.005219100043177605, 0.20654000341892242, 0.4016000032424927, 0.5625100135803223, -0.24169999361038208, 0.03171800076961517, -0.2771100103855133, -0.1383499950170517, 0.012303999625146389, 0.03411199897527695, -0.16085000336170197, -0.11530999839305878, -0.5639299750328064, -0.15810999274253845, 0.14393000304698944, -0.16152000427246094, -0.3142000138759613, -0.3339399993419647, -0.25777000188827515, 0.42298999428749084, -0.1262899935245514, -0.2683599889278412, 0.12541000545024872, 
-0.5087199807167053, -0.11038000136613846, 0.1440100073814392, -0.47971001267433167, 0.09928400069475174, 0.03286400064826012, 0.15068000555038452, 0.06438799947500229, -0.4413299858570099, 0.3474999964237213, 0.30788999795913696, -0.23577000200748444, 0.5541999936103821, 0.31363001465797424, 0.17924000322818756, -0.0028941999189555645, 0.16635000705718994, 0.1473499983549118, 0.12135999649763107, 0.07540400326251984, -0.24726000428199768, -0.4576199948787689, 0.0682080015540123, 0.08237099647521973, -1.3794000148773193, -0.20991000533103943, -0.620169997215271, 0.6427099704742432, 0.2546499967575073, -0.46452999114990234, -0.2872900068759918, 0.2629700005054474, 0.373199999332428, 0.911109983921051, 1.0247000455856323, 0.25277000665664673, 0.3182600140571594, 0.008228800259530544, 0.3127099871635437, 0.03807999938726425, -0.2932699918746948, 0.06998100131750107, 0.27035999298095703, -0.21747000515460968, -0.5457500219345093, 0.5329200029373169, -0.48243001103401184, 0.3841499984264374, -0.12189999967813492, 0.27355000376701355, -0.05169999971985817, -0.2720299959182739, 0.23816999793052673, -0.3924500048160553, 0.30928999185562134, 0.41525998711586, 0.10146000236272812, 0.265720009803772, 0.35745999217033386, -0.06183699890971184, 0.26423999667167664, -0.24640999734401703, -0.46239998936653137, 0.0720909982919693, 0.1682800054550171, -0.5080999732017517, -0.6924099922180176, 0.6335700154304504, 0.14148999750614166, 0.2665899991989136, 0.34022000432014465, -0.22658999264240265, 0.25266000628471375, -0.19248999655246735, 0.06769900023937225, 0.32207000255584717, -0.4146299958229065, 0.15217000246047974, -0.19981999695301056, 0.03045099973678589, 0.05659899860620499, -0.45364999771118164, -0.050397999584674835, -0.3709000051021576, 0.3797900080680847, 0.5266799926757812, 0.17860999703407288, -0.2828899919986725, -0.04454899951815605, -0.6803799867630005, -0.43435001373291016, -0.20343999564647675, 0.45743998885154724, -0.9657899737358093, -0.14441999793052673, -0.150409996509552, 0.2985000014305115, 0.49685001373291016, -0.9794800281524658, 0.2693299949169159, 0.4752500057220459, -0.13798999786376953, 0.021133000031113625, 0.1403599977493286, 0.10044000297784805, 0.1227400004863739, 0.19752000272274017, 0.04906700178980827, 0.6761400103569031, -0.5151699781417847, -0.26210999488830566, 0.08286499977111816, 0.26743999123573303, 0.5351999998092651, 0.2316800057888031, -0.26405999064445496, -0.11003000289201736, 0.2305999994277954, 0.5277799963951111, -0.08015900105237961, 0.3816699981689453, -1.2509000301361084, 0.35784998536109924, 0.09530600160360336, 0.1995300054550171, 0.24434000253677368, -0.5171700119972229, -0.4197100102901459, 0.17031000554561615, 0.22846999764442444, 0.25433000922203064, -0.2399899959564209, -0.16062000393867493, -0.32253000140190125, 0.2581999897956848, 0.3198600113391876, 0.03651899844408035, -0.3893600106239319, -0.08688399940729141, 0.30063000321388245, 0.8522899746894836, -0.22702999413013458, 0.15877999365329742, 0.23127000033855438, 0.41561999917030334], u'building': [-0.14013999700546265, -0.1463800072669983, -0.5455300211906433, -0.6182399988174438, 0.2556999921798706, 0.19731999933719635, 0.1452600061893463, 0.13760000467300415, -0.3567799925804138, -1.8515000343322754, 0.15605999529361725, -0.36131998896598816, 0.38477998971939087, 0.22585999965667725, 0.11607000231742859, 0.3585599958896637, -0.2709699869155884, -0.05026800185441971, 0.03404799848794937, 0.16178999841213226, -0.04425400123000145, -0.05593099817633629, 0.6391900181770325, 
0.5023900270462036, -0.03414800018072128, 0.054607998579740524, -0.08210299909114838, -0.11655999720096588, -0.59934002161026, 0.3470799922943115, 0.3967199921607971, 0.5220100283622742, -0.3116399943828583, 0.7624499797821045, 0.019247999414801598, 0.654229998588562, -0.17295999825000763, -0.4284299910068512, 0.5396400094032288, -0.08027199655771255, -0.29394999146461487, 0.10182999819517136, -0.5591999888420105, 0.5422599911689758, -0.12336000055074692, 0.14381000399589539, 0.44756999611854553, 0.42399001121520996, -0.003262300044298172, -0.25933998823165894, -0.29490000009536743, -0.008129199966788292, -0.16629000008106232, -0.09251199662685394, 0.530460000038147, 0.30612999200820923, 0.3000200092792511, 0.2553499937057495, -0.22261999547481537, -0.1820400059223175, 0.44854000210762024, 0.04427200183272362, 0.21807000041007996, -0.19434000551700592, 0.09023699909448624, -0.10029999911785126, 0.22558000683784485, 0.10898999869823456, 0.17378999292850494, -0.328220009803772, -0.1795700043439865, -0.07334200292825699, -0.1727299988269806, 0.3821200132369995, -0.3348200023174286, 0.25679999589920044, -0.33614999055862427, 0.17288999259471893, 0.22360999882221222, 0.08524999767541885, -0.08076299726963043, 0.0012095000129193068, -0.1447400003671646, 0.26436999440193176, 0.3089599907398224, 0.2284799963235855, -0.1135300025343895, -0.1692499965429306, 0.11903999745845795, -0.09701299667358398, 0.5356000065803528, 0.002848200034350157, 0.4676699936389923, 0.474839985370636, 0.08455599844455719, -0.5535899996757507, -0.2117300033569336, -0.2529599964618683, 0.1426900029182434, -0.33586999773979187, -0.5059400200843811, 0.5686699748039246, -0.14135999977588654, 0.01903500035405159, 0.03681299835443497, -0.0440949983894825, -0.035909999161958694, 0.002901799976825714, 0.08875399827957153, -0.07483600080013275, -0.3704099953174591, -0.3239699900150299, -0.3243100047111511, 0.08741000294685364, -0.38561001420021057, 0.07115600258111954, -0.23532000184059143, 0.2953900098800659, -0.1409900039434433, 0.013400999829173088, 0.3542099893093109, -0.14771999418735504, 0.3697200119495392, 0.4353100061416626, -0.07958199828863144, -0.6615899801254272, -0.07942599803209305, -0.33869001269340515, 0.07798899710178375, -0.45715999603271484, 0.27496999502182007, 0.6717000007629395, 0.18479999899864197, -0.0811690017580986, 0.09437599778175354, 0.2935599982738495, -0.33052000403404236, -0.04967600107192993, -0.1875, -0.01929200068116188, -0.43700000643730164, 0.04013599827885628, 0.30077001452445984, 0.7334799766540527, 0.09633400291204453, -0.3942599892616272, 0.04087100178003311, 0.3777799904346466, -0.4861699938774109, -0.18476000428199768, 0.4058699905872345, 0.414000004529953, -0.06283599883317947, -0.1303199976682663, 0.14225000143051147, 0.24244000017642975, -0.26183998584747314, 0.3017599880695343, -0.07235100120306015, -0.006480500102043152, 0.47450000047683716, 0.2512199878692627, 0.15990999341011047, -0.38756999373435974, -0.23367999494075775, 0.04874800145626068, -0.3237699866294861, 0.09564899653196335, 0.4573400020599365, 0.34874001145362854, 0.27682000398635864, -0.4275699853897095, 0.17215999960899353, -0.3614799976348877, 0.3462199866771698, 0.7900199890136719, -0.6648100018501282, -0.25084999203681946, 0.19074000418186188, -0.5186300277709961, -0.22564999759197235, -0.3371700048446655, 0.29412999749183655, -0.35802000761032104, -0.31894001364707947, 0.3839600086212158, -0.08231700211763382, 0.1839900016784668, -0.06706999987363815, 0.5265300273895264, 0.8694499731063843, 0.10657999664545059, 
-0.38475000858306885, 0.002615999896079302, -0.1561799943447113, 0.08438800275325775, -0.39342001080513, 0.12466000020503998, 0.08367700129747391, 0.09275799989700317, 0.40637001395225525, -0.49678000807762146, 0.04650900140404701, -0.17007000744342804, -0.301800012588501, -0.2879599928855896, 0.25527000427246094, -0.38258999586105347, 0.19752000272274017, 0.18796999752521515, 0.24400000274181366, 0.5716099739074707, -0.5894299745559692, -0.6353800296783447, 0.05121000111103058, 0.6137599945068359, -0.20079000294208527, 0.2721000015735626, 0.005741099826991558, 0.0021349999587982893, 0.5162299871444702, -0.5007299780845642, -0.07327699661254883, 0.32594001293182373, 0.002595700090751052, -0.07983600348234177, -0.18990999460220337, 0.19506999850273132, -0.032301001250743866, -0.03338700160384178, -0.059918999671936035, -0.03350000083446503, -0.2764599919319153, 0.45785000920295715, 0.19809000194072723, 0.01079300045967102, 0.5011000037193298, 0.1664000004529953, -0.22217999398708344, 0.28022998571395874, 0.33052998781204224, 0.0540120005607605, 0.22099000215530396, 0.005445399787276983, -0.5448899865150452, -0.03926999866962433, 0.20841999351978302, -0.22943000495433807, -0.018101999536156654, 0.10288000106811523, 0.4275600016117096, -0.27720001339912415, 0.09042000025510788, -0.23103000223636627, 0.38896000385284424, -0.14222000539302826, -0.036299001425504684, -0.12043000012636185, 0.3805600106716156, -0.0767270028591156, -0.14890000224113464, 0.12723000347614288, 0.07763099670410156, -0.1973000019788742, 0.4011499881744385, -0.043428998440504074, 0.016614999622106552, -0.1505099982023239, 0.29986000061035156, -0.19338999688625336, -0.3158000111579895, -0.07317200303077698, 0.19220000505447388, -0.2329999953508377, -0.12559999525547028, 0.1370999962091446, -2.4054999351501465, 0.32886001467704773, 0.2537899911403656, 0.3282899856567383, -0.06783399730920792, -0.26326000690460205, 0.15259000658988953, -0.3154999911785126, 0.47012999653816223, 0.9208599925041199, -0.5971199870109558, 0.7199100255966187, -0.04174700006842613, -0.376800000667572, 0.06751599907875061, -0.38293999433517456, -0.1724500060081482, 0.21085000038146973, 0.44398999214172363, 0.6731899976730347, -0.04161100089550018, -0.43939998745918274, 0.34351998567581177, 0.28992000222206116], u'ceiling': [0.022655000910162926, 0.19505999982357025, -0.5602499842643738, -0.4887399971485138, 0.05696199834346771, 0.4696199893951416, -0.05679599940776825, -0.19238999485969543, -0.4253000020980835, -1.4936000108718872, -0.12978999316692352, 0.6464300155639648, 0.3357299864292145, -0.2264000028371811, -0.23718999326229095, 0.521340012550354, 0.22630000114440918, 0.18100999295711517, -0.1726900041103363, -0.12556999921798706, 0.03142600134015083, 0.39421001076698303, 0.32502999901771545, -0.08521000295877457, 0.2070000022649765, -0.11880999803543091, -0.036364998668432236, -0.34158000349998474, -0.27678999304771423, 0.3204900026321411, 0.3064199984073639, 0.6265900135040283, -0.19853000342845917, 0.294189989566803, -0.07107599824666977, 0.37049001455307007, -0.37237000465393066, -0.3872300088405609, 0.6662899851799011, 0.5416100025177002, -0.313400000333786, 0.0642080008983612, -0.7456799745559692, 0.31485000252723694, 0.20632000267505646, 0.20946000516414642, -0.40077000856399536, -0.20358000695705414, -0.38411998748779297, -1.0390000343322754, -0.33278998732566833, 0.5157700181007385, -0.355540007352829, -0.19360999763011932, -0.11432000249624252, 0.35172000527381897, -0.27351000905036926, 0.45972999930381775, 0.1856199949979782, 
0.2558499872684479, 0.03265799954533577, -0.16189999878406525, 0.7867599725723267, 0.3461199998855591, -0.2657899856567383, -0.6579300165176392, 0.22281000018119812, -0.0057530999183654785, 0.3143700063228607, -0.041770998388528824, -0.3667599856853485, -0.06117299944162369, -0.1795099973678589, -0.2405499964952469, 0.2702299952507019, 0.2131199985742569, 0.21782000362873077, 0.05902300029993057, -0.09606599807739258, -1.027999997138977, -0.17351000010967255, -0.4436500072479248, 0.02474300004541874, 0.16777999699115753, 0.07767999917268753, 0.5436599850654602, -0.22856999933719635, -0.23016999661922455, 0.04751000180840492, 0.5452100038528442, 0.9258400201797485, -0.02687999978661537, -0.008662199601531029, 0.1741199940443039, -0.19120000302791595, -0.16756999492645264, -0.09513100236654282, -0.3307900130748749, 0.5097600221633911, -0.5700299739837646, -0.27886998653411865, 0.7755799889564514, 0.06497199833393097, -0.32839998602867126, 0.018015999346971512, 0.3937999904155731, -0.17945000529289246, 0.18639999628067017, -0.026624999940395355, 0.3485200107097626, -0.27862000465393066, -0.04284999892115593, -0.03208300098776817, -0.042295001447200775, -0.27204999327659607, -0.13979999721050262, 0.012122999876737595, -0.16309000551700592, -0.2545500099658966, -0.306549996137619, 0.31762000918388367, -0.19354000687599182, 0.17441999912261963, 1.2632999420166016, 0.41874998807907104, -0.3010199964046478, -0.06307400017976761, 0.17750999331474304, 0.46612000465393066, 0.7980599999427795, -0.11231999844312668, 0.151869997382164, 0.22311000525951385, 0.26701000332832336, 0.43342000246047974, 0.009669600054621696, -0.24931000173091888, 0.7716799974441528, -0.09061899781227112, -0.4684099853038788, -0.31617000699043274, 0.18316000699996948, -0.1621199995279312, -0.4209499955177307, 0.18082000315189362, -0.19589999318122864, -0.4069100022315979, -0.5205199718475342, -0.047784000635147095, -0.5808899998664856, -0.20347000658512115, 0.11864999681711197, -0.2114199995994568, -0.580079972743988, -0.41176000237464905, 0.7824900150299072, -0.31825000047683716, 0.22605000436306, -0.7843700051307678, 0.07221399992704391, -0.1982100009918213, 0.7307299971580505, 0.07005400210618973, 0.01565299928188324, 0.8736699819564819, -0.13964000344276428, 0.01666400022804737, -0.20223000645637512, 0.22086000442504883, 0.23104999959468842, -0.15707999467849731, 0.4572399854660034, 0.7698699831962585, -0.0073645999655127525, -0.05960199981927872, 0.33823999762535095, 0.122079998254776, 0.5541599988937378, -0.22190000116825104, 0.07331500202417374, 0.5774800181388855, -0.5675899982452393, -0.08460699766874313, 0.18279999494552612, 0.01003200002014637, -0.27279001474380493, 1.059000015258789, -0.1302099972963333, 0.16641999781131744, 0.671180009841919, 0.328029990196228, 0.36469000577926636, -0.29140999913215637, -0.12916000187397003, -0.09120900183916092, -0.5630800127983093, -0.3245899975299835, 1.0616999864578247, 0.06823600083589554, 0.012543999589979649, 0.8325499892234802, 0.27814000844955444, -0.3878200054168701, -0.05307300016283989, 0.5956199765205383, 0.27570000290870667, 0.1565600037574768, -0.3293899893760681, 0.02268199995160103, 0.19633999466896057, 0.23096999526023865, -0.13812999427318573, 0.32113999128341675, -0.07158699631690979, 0.043324001133441925, 0.019246000796556473, 0.28547999262809753, -0.45802998542785645, 0.3003000020980835, 0.3256300091743469, 0.3882099986076355, 0.10909999907016754, 0.14914999902248383, -0.12055999785661697, -0.3754900097846985, -0.004435599781572819, -0.5868600010871887, 
0.48061999678611755, 0.17893999814987183, -0.568149983882904, -0.05522599816322327, -0.5015599727630615, 0.027515999972820282, -0.09152399748563766, 0.4120999872684479, -0.4947200119495392, -0.10518000274896622, 0.16890999674797058, -0.22366000711917877, -0.37685999274253845, 0.5076199769973755, -0.7609800100326538, -0.3447900116443634, 0.3689199984073639, -0.07672300189733505, 0.2188200056552887, -0.04500100016593933, -0.30449000000953674, -0.2643499970436096, -0.25892001390457153, -0.4565199911594391, -0.456169992685318, 0.7052800059318542, -0.6798099875450134, 0.17476999759674072, -0.2531299889087677, -0.6579499840736389, -0.09599500149488449, 0.4737200140953064, 0.49028000235557556, 0.26673999428749084, -0.07473500072956085, 0.6661499738693237, -0.6043400168418884, -0.04832400009036064, -0.4578799903392792, 0.5304700136184692, 0.24488000571727753, -0.29978999495506287, -0.630050003528595, -0.14845000207424164, 0.34244999289512634, -0.3396799862384796, 0.2361699938774109, -0.9732400178909302, -0.2731800079345703, -1.3291000127792358, -0.3093099892139435, -0.7171000242233276, -0.2538500130176544, 0.16425000131130219, -0.07938399910926819, -0.2220499962568283, -0.8980699777603149, 0.030358999967575073, 0.6396999955177307, -0.17941999435424805, -0.38975998759269714, -0.48739001154899597, -0.410970002412796, -0.19423000514507294, 0.2578999996185303, -0.2536199986934662, 0.3167400062084198, 0.6815900206565857, 0.11020000278949738, -0.01042999979108572, -0.2243099957704544, -0.21706999838352203, 0.3790999948978424], u'diamond': [-0.14665000140666962, 0.21477000415325165, 0.05139100179076195, 0.44297999143600464, -0.6883299946784973, 0.42732998728752136, 0.1818999946117401, 0.07572299987077713, -0.4755299985408783, -0.9279900193214417, -0.21749000251293182, -0.29308000206947327, -0.1780800074338913, 0.3012099862098694, -0.49952998757362366, -0.6202600002288818, -0.07620099931955338, 0.43435999751091003, -0.5361899733543396, -0.5846400260925293, -0.10600999742746353, 0.40354999899864197, 0.03365800157189369, -0.006556299980729818, 0.2730900049209595, -0.1694899946451187, -0.08699300140142441, -0.07456400245428085, -0.1881600022315979, 0.0068280999548733234, 0.33726000785827637, 0.230320006608963, -0.1680700033903122, 0.5319300293922424, -0.4639100134372711, 0.158160001039505, 0.3149600028991699, 0.478769987821579, 0.18277999758720398, -0.38172999024391174, -0.5728899836540222, -0.08772300183773041, 0.18664999306201935, 0.5117800235748291, 0.4573900103569031, -0.32545000314712524, -0.14424000680446625, -0.1164499968290329, -0.04317700117826462, 0.005797300022095442, -0.09043099731206894, 0.03345600143074989, 0.29681000113487244, 0.6527100205421448, -0.12139999866485596, -0.19617000222206116, -1.1461999416351318, 0.3857499957084656, -0.24841000139713287, -0.4079500138759613, 0.14835000038146973, -0.09141000360250473, 0.265500009059906, -0.05576099827885628, 0.7226600050926208, 0.034696999937295914, -0.766319990158081, 0.08910500258207321, 0.35611000657081604, -0.16753999888896942, 0.21546000242233276, -0.30584999918937683, 0.3749299943447113, -0.1328199952840805, -0.15349000692367554, -0.11071000248193741, 0.4600200057029724, -0.7270900011062622, 0.015600999817252159, -0.3274900019168854, 0.33847999572753906, 0.062199998646974564, 0.3080900013446808, 0.0784199982881546, 0.6230300068855286, -1.25, 0.07237999886274338, -0.05417700111865997, 0.27011001110076904, 0.013124999590218067, -0.1820800006389618, 0.31266000866889954, -0.25936999917030334, 0.06369899958372116, -0.4949699938297272, 
0.4262700080871582, 0.1626800000667572, 0.2199299931526184, 0.7084000110626221, -0.8030400276184082, 0.36765000224113464, 0.4684799909591675, 0.06483600288629532, 0.3603299856185913, -0.3158299922943115, 0.2512499988079071, 0.4432699978351593, 0.16190999746322632, -0.15467000007629395, -0.41484999656677246, 0.20448000729084015, -0.1780800074338913, 0.37005001306533813, -0.019812999293208122, -0.30691999197006226, 0.24196000397205353, 0.611299991607666, 0.03448300063610077, 0.17858999967575073, 0.2775999903678894, -0.04507699981331825, -0.09872200340032578, -0.1335899978876114, -0.29499998688697815, 0.027950000017881393, -0.16378000378608704, 0.6187300086021423, 0.39609000086784363, -0.8174800276756287, -0.2902199923992157, -0.4116800129413605, 0.11196000128984451, -0.5467699766159058, 0.12365999817848206, -0.0055541000328958035, 0.4251500070095062, -0.19203999638557434, -0.02569199912250042, -0.12883000075817108, -0.3767400085926056, 0.15855999290943146, 0.2899099886417389, 0.2560800015926361, -0.7645400166511536, 0.6155400276184082, 0.04296199977397919, 0.41398000717163086, -0.4324199855327606, 0.08176399767398834, 0.2438800036907196, 0.18871000409126282, 0.05878699943423271, -0.003190400078892708, -0.11271999776363373, 0.6328700184822083, 0.1466600000858307, -0.30296000838279724, -0.164000004529953, -0.1984899938106537, -0.1490200012922287, 0.4186500012874603, -0.40376999974250793, 0.4234600067138672, 0.5349500179290771, 0.2981399893760681, -0.16870999336242676, 0.3327699899673462, -0.2873600125312805, 0.20843000710010529, 0.19487999379634857, -1.0568000078201294, 0.06796500086784363, 0.36847999691963196, -0.15873000025749207, 0.01231400016695261, -0.24560000002384186, 0.03371800109744072, 0.3050000071525574, -0.7369800209999084, 0.062070999294519424, 0.07707499712705612, -0.007908299565315247, 0.0660569965839386, 0.17803999781608582, 0.31725001335144043, -0.1806199997663498, 0.37637001276016235, 0.4897499978542328, -0.3396199941635132, -0.22777000069618225, -0.024893000721931458, 0.4962399899959564, 0.23454000055789948, 0.38675999641418457, 0.3620299994945526, 0.12042000144720078, 0.05740800127387047, -1.163699984550476, 0.3988899886608124, 0.3316200077533722, 1.3666000366210938, -0.014810999855399132, 0.08490599691867828, -0.28790000081062317, 0.16529999673366547, 0.2526699900627136, 0.10270000249147415, -0.016589999198913574, 0.0960569977760315, -0.10021000355482101, 0.45669999718666077, -0.06590399891138077, 0.3325900137424469, -0.1198199987411499, -0.2429399937391281, 0.210889995098114, -0.1241300031542778, -0.09685300290584564, -0.4016999900341034, 0.020930999889969826, -0.042583998292684555, -0.7356899976730347, 0.5564500093460083, 0.37540000677108765, -0.37648001313209534, 0.166360005736351, -0.4644399881362915, 0.012075000442564487, -0.6067100167274475, -0.03959299996495247, 0.35905998945236206, -0.13311000168323517, -0.16227999329566956, -0.03957200050354004, 0.24041999876499176, 0.4314799904823303, 0.43404000997543335, 0.014449000358581543, -0.17374999821186066, -1.0946999788284302, -0.27476999163627625, -0.042396001517772675, -0.31575000286102295, -0.050060998648405075, -0.39768001437187195, 0.3009899854660034, -0.9642999768257141, -0.19165000319480896, 0.127470001578331, -0.48853999376296997, 0.2507399916648865, -0.11969000101089478, -0.8325600028038025, 0.04950200021266937, 0.7075499892234802, -0.15887999534606934, -0.018060000613331795, -0.2815000116825104, 0.061778999865055084, 0.025439999997615814, -0.18723000586032867, 0.2561799883842468, 0.21961000561714172, 
0.29071998596191406, 0.384909987449646, 0.15182000398635864, -0.0006812100182287395, 0.17865000665187836, -0.27849000692367554, 0.33768001198768616, -0.14666999876499176, -0.6007699966430664, 0.34891998767852783, -0.36285001039505005, -0.010414999909698963, -0.23607000708580017, -0.8880400061607361, -0.2457599937915802, -0.6324499845504761, 0.04847799986600876, -0.08184699714183807, 0.13332000374794006, -0.83024001121521, -0.19607999920845032, -0.19505000114440918, -0.5726600289344788, -0.26688000559806824, -0.3237699866294861, -0.2126300036907196, -0.5323899984359741, -0.19266000390052795, -0.2851000130176544, -0.4788700044155121, 0.8149499893188477, -0.11314000189304352, 0.4853299856185913, 1.0247000455856323, -0.26759999990463257, 0.8132399916648865, 0.09302300214767456], u'door': [-0.012489999644458294, 0.10815999656915665, -0.24887999892234802, -0.6930500268936157, 0.3126699924468994, 0.12725000083446503, -0.5574600100517273, -0.12387000024318695, 0.06740999966859818, -1.395799994468689, -0.4479900002479553, 0.31584998965263367, 0.315420001745224, -0.20746999979019165, -0.48489001393318176, -0.21377000212669373, 0.07097700238227844, -0.4200800061225891, 0.059059999883174896, 0.19589999318122864, 0.4955900013446808, 0.5417100191116333, 0.27496999502182007, -0.30979999899864197, 0.00022559000353794545, -0.24526000022888184, 0.6417499780654907, -0.2055799961090088, 0.029498999938368797, -0.09467300027608871, 0.2482600063085556, -0.03787500038743019, 0.10341999679803848, 0.08182299882173538, -0.7453500032424927, 0.6909599900245667, -0.47304999828338623, -0.4354900121688843, -0.30689001083374023, -0.2794800102710724, -0.4773100018501282, -0.029867999255657196, -0.47119998931884766, 0.13644999265670776, -0.35482001304626465, 0.5277000069618225, 0.4647899866104126, -0.657509982585907, -0.796239972114563, -0.2315099984407425, -0.20946000516414642, 0.18559999763965607, 0.22272999584674835, -0.4546299874782562, 0.01732500083744526, 0.07587099820375443, 0.22356000542640686, 0.08441799879074097, 0.15647000074386597, -0.24355000257492065, 0.31470000743865967, 0.11246000230312347, 0.04543599858880043, 0.4281199872493744, -0.1495800018310547, -0.4829699993133545, 0.48416000604629517, -0.4300599992275238, -0.00018754000484477729, -0.4361700117588043, -0.4768899977207184, -0.49939998984336853, 0.14996999502182007, 0.43191999197006226, 0.3447299897670746, 0.40252000093460083, -0.3755300045013428, -0.38429000973701477, -0.00814330019056797, -0.5790200233459473, 0.07389800250530243, 0.4756700098514557, 0.16122999787330627, 0.4516200125217438, -0.5649200081825256, -0.35078999400138855, 0.11868999898433685, -0.04199900105595589, -0.48809999227523804, -0.24077999591827393, 0.1457500010728836, -0.2638300061225891, -0.26037999987602234, 0.2962700128555298, 0.15661999583244324, -0.40105998516082764, -0.4695500135421753, -0.37762001156806946, 0.020640000700950623, -0.43759000301361084, 0.08965100347995758, 0.5954399704933167, -0.0940219983458519, -0.2071399986743927, 0.1278200000524521, -0.44642001390457153, 0.23229999840259552, 0.06300300359725952, -0.23058000206947327, 0.0992560014128685, -0.24347999691963196, 0.22341999411582947, -0.12201999872922897, -0.09700100123882294, -0.2590799927711487, 0.3041599988937378, -0.4164699912071228, 0.06058799847960472, -0.33087000250816345, -0.11625999957323074, -0.18424999713897705, -0.5105199813842773, 0.278439998626709, -0.21616999804973602, -0.05030599981546402, -0.5468599796295166, 0.15711000561714172, -0.19323000311851501, 0.2827000021934509, -0.4751400053501129, 
0.7715799808502197, -0.034981001168489456, -0.019998999312520027, 0.09727499634027481, 0.16816000640392303, 0.12563000619411469, -0.05047500133514404, 0.180759996175766, -0.11141999810934067, -0.21615999937057495, 0.291949987411499, -0.0577859990298748, -0.25049999356269836, 0.0780860036611557, -0.6585400104522705, -0.20336000621318817, 0.1071700006723404, -0.13603000342845917, 0.06172399967908859, -0.11114999651908875, 0.15556000173091888, 0.0717179998755455, -0.00957849994301796, -0.2758699953556061, 0.1451600044965744, 0.19027000665664673, 0.3908799886703491, -0.12570999562740326, -0.19306999444961548, 0.41315001249313354, 0.47940000891685486, 0.6619600057601929, -0.22622999548912048, -0.008937699720263481, 0.3495199978351593, 0.41721999645233154, -0.07267999649047852, -0.0006026800256222486, 0.7322400212287903, -0.537630021572113, -0.26017001271247864, 0.571590006351471, 0.26017001271247864, -0.1634799987077713, -0.02622300013899803, -0.12132000178098679, -0.2652300000190735, 0.25731998682022095, 0.10632000118494034, -0.46511000394821167, 0.1740099936723709, -0.08062999695539474, 0.040344998240470886, 0.14809000492095947, 0.07149700075387955, -0.4767000079154968, -0.04698000103235245, 0.3091700077056885, 0.5011399984359741, 0.4981200098991394, 0.18568000197410583, -0.23740999400615692, -0.08829999715089798, 0.31509000062942505, 0.2332800030708313, 0.10717999935150146, -0.025940999388694763, 0.010982999578118324, -0.5223600268363953, -0.36500999331474304, 1.1399999856948853, -0.07895500212907791, 0.4366399943828583, -0.018633000552654266, 0.12696999311447144, -0.4086099863052368, -0.0971359983086586, -0.7093799710273743, -0.27445000410079956, 0.18805000185966492, 0.2280299961566925, -0.06808000057935715, -0.3114300072193146, -0.13470999896526337, 0.33667001128196716, -0.44143998622894287, -0.07765600085258484, 0.2732900083065033, -0.011749999597668648, 0.14856000244617462, 0.5389999747276306, 0.34871000051498413, 0.1694599986076355, -0.19878000020980835, -0.05582800135016441, -0.03048500046133995, -0.585860013961792, 0.26805999875068665, 0.32036998867988586, -0.4881399869918823, 0.21673999726772308, -0.8939300179481506, -0.27943000197410583, 0.08278500288724899, 0.5437300205230713, 0.306410014629364, 0.07943200320005417, -0.34968000650405884, -0.37836000323295593, -0.022549999877810478, 0.3216100037097931, 0.23336000740528107, -0.2784000039100647, 0.3476000130176544, 0.05794300138950348, -0.5573999881744385, 0.08899799734354019, -0.15988999605178833, -0.3575200140476227, 0.3434099853038788, 0.08123599737882614, -0.1528400033712387, 0.11744000017642975, -0.32670000195503235, -0.06800100207328796, 0.10999000072479248, 0.19564999639987946, -0.217739999294281, -0.12536999583244324, 0.30410999059677124, 0.0379600003361702, -0.39800000190734863, 0.4740999937057495, 0.06100400164723396, -0.08345899730920792, -0.3312099874019623, 0.26754000782966614, -0.18182000517845154, -0.2561900019645691, -0.24812999367713928, -0.3098500072956085, -0.1155799999833107, -0.47258999943733215, -0.15830999612808228, -0.13526999950408936, -0.06032099947333336, -2.1414999961853027, 0.33702000975608826, -0.11063999682664871, 0.14541999995708466, -0.5482699871063232, -0.09346099942922592, -0.17058999836444855, -0.25297001004219055, -0.03297799825668335, 0.6347900032997131, 0.10971000045537949, -0.13702000677585602, 0.2224300056695938, -0.11710000038146973, -0.23064999282360077, -0.36059001088142395, 0.2451999932527542, 0.3643200099468231, -0.002282199915498495, 0.6430299878120422, 0.12070000171661377, 
0.04955900087952614, 0.05159499868750572, 0.9793199896812439], u'gear': [0.5037400126457214, 0.17816999554634094, -0.22989000380039215, -0.4351400136947632, -0.05305600166320801, -0.35335999727249146, 0.06759999692440033, 0.5249000191688538, -0.3413499891757965, -0.9298099875450134, 0.2894099950790405, -0.3484399914741516, 0.6806300282478333, -0.08355700224637985, -0.008424599654972553, -0.42278000712394714, -0.08434099704027176, -0.5556399822235107, 0.28077998757362366, -0.23255999386310577, -0.14417000114917755, -0.014856000430881977, 0.47565001249313354, 0.09479200094938278, -0.04892300069332123, -0.13304999470710754, 0.3828299939632416, 0.02198600023984909, 0.34553998708724976, 0.9919999837875366, 0.13422000408172607, -0.5640000104904175, -0.022167999297380447, 0.23181000351905823, -0.2999500036239624, 0.12148000299930573, 0.008717799559235573, 0.13344000279903412, -0.09622199833393097, 0.36010000109672546, -0.3442699909210205, 0.005668300203979015, -0.2418700009584427, -0.08554500341415405, -0.16008000075817108, 0.455949991941452, 0.3622699975967407, 0.1326500028371811, 0.2967599928379059, 0.5727400183677673, -0.2618600130081177, -0.1144699975848198, 0.15028999745845795, -0.03812500089406967, -0.6593599915504456, 0.1294800043106079, 0.30663999915122986, -0.7105100154876709, -0.33827000856399536, -0.06368199735879898, -0.12060999870300293, 0.09521900117397308, 0.08301199972629547, 0.06569100171327591, -0.6656399965286255, -0.027271000668406487, -0.5217900276184082, 0.03249099850654602, -0.0613039992749691, -0.3672800064086914, 0.008583899587392807, 0.3457300066947937, 0.11883000284433365, 0.26506999135017395, 0.12891000509262085, 0.20880000293254852, -0.19241000711917877, -0.0945269986987114, 0.1897599995136261, -0.2660599946975708, 0.395689994096756, -0.02097800001502037, -0.05008599907159805, -0.2791900038719177, -0.38931000232696533, -0.371069997549057, 0.19803999364376068, 0.06004000082612038, -1.0480999946594238, -0.47547000646591187, 0.8921499848365784, 0.26159998774528503, -0.10655000060796738, 0.022676000371575356, 0.2185399979352951, 0.3314700126647949, -0.6916099786758423, 0.13744999468326569, 0.0859759971499443, -0.7552099823951721, -0.3192099928855896, 0.7473700046539307, -0.5734900236129761, 0.16086000204086304, 0.3785400092601776, -0.6962299942970276, 0.0886010006070137, 0.23717999458312988, -0.5604699850082397, -0.08429399877786636, -0.19030000269412994, -0.07454899698495865, -0.4400700032711029, 0.3137100040912628, -0.2051900029182434, 0.08582299947738647, 0.3757399916648865, 0.21525999903678894, -0.4424999952316284, -0.4391700029373169, 0.3341200053691864, -0.901669979095459, 0.26750999689102173, -0.6756399869918823, -0.873199999332428, 0.12835000455379486, 0.15257999300956726, -0.4788700044155121, 0.4015200138092041, 0.24404999613761902, -0.20457999408245087, 0.36399999260902405, -0.24469000101089478, 0.42089998722076416, 0.4878599941730499, 0.2516799867153168, 0.014156999997794628, -0.44530001282691956, 0.05601000040769577, 0.5206699967384338, -0.554639995098114, 0.07003699988126755, 0.25071999430656433, -0.2901099920272827, -0.3071500062942505, 0.20640000700950623, 0.23368999361991882, 0.10887999832630157, 0.44683998823165894, 0.0361969992518425, 0.2253500074148178, -0.04811900109052658, -0.6768800020217896, -0.5556100010871887, 0.9571499824523926, -0.7014099955558777, 0.18855999410152435, 0.18427999317646027, 0.48190000653266907, 0.3989900052547455, 0.4382300078868866, -0.8797000050544739, -0.5262500047683716, -0.2340099960565567, 0.3503299951553345, 
0.2919900119304657, 0.6620500087738037, 0.9470099806785583, 0.3930099904537201, 0.5387399792671204, -0.33518001437187195, 0.5969899892807007, -0.19011999666690826, 0.30386999249458313, 0.10726000368595123, 0.007686700206249952, 0.25817999243736267, 0.5314800143241882, -0.07534900307655334, -0.2612299919128418, 0.530460000038147, 0.21351000666618347, 0.33939000964164734, -0.1358799934387207, 0.2782000005245209, -0.3872399926185608, 0.5137900114059448, 0.4191800057888031, 0.12585000693798065, -0.5016800165176392, -0.18723000586032867, 0.011249000206589699, -0.4060800075531006, 0.4041300117969513, 0.43428999185562134, 0.31848999857902527, -0.7519199848175049, 0.4789400100708008, 0.16922999918460846, 0.04463899880647659, 0.02801400050520897, 0.18738999962806702, 0.8450700044631958, 0.42427998781204224, -0.026684999465942383, -0.06197600066661835, 0.2878200113773346, -0.3184399902820587, -0.1139800027012825, 0.21536000072956085, 0.30219998955726624, 0.09382999688386917, -0.08387099951505661, 0.3561300039291382, -0.03266400098800659, -0.47707000374794006, 0.1336199939250946, -0.12922999262809753, 0.20215000212192535, -0.12195999920368195, 0.42142000794410706, 0.1992100030183792, 0.49948999285697937, -0.32422998547554016, 0.9316200017929077, 0.01684500090777874, -0.34415000677108765, -0.1463399976491928, 0.24469000101089478, 0.3739500045776367, 0.02340400032699108, 0.3551599979400635, 0.23837999999523163, -0.47672998905181885, 0.26774001121520996, -0.027938000857830048, -0.36322999000549316, 0.3299199938774109, -0.03602200001478195, 0.29607000946998596, 0.9502800107002258, 0.23863999545574188, -0.13065999746322632, 0.18458999693393707, -0.5104100108146667, -0.20266999304294586, -0.03061099909245968, 0.2281399965286255, -0.08732599765062332, 0.19152000546455383, -0.21504999697208405, -0.14327000081539154, -0.5074700117111206, 0.05117199942469597, 0.09243299812078476, 0.5594499707221985, -0.45816999673843384, -0.09347599744796753, -0.2412099987268448, -0.6516000032424927, -0.17116999626159668, -0.5552800297737122, 0.1552799940109253, 0.48767998814582825, -0.2326200008392334, 0.05776600167155266, -0.5248200297355652, -0.05420999974012375, 0.41804999113082886, 0.25971999764442444, -0.0401809997856617, 0.5737900137901306, 0.3870300054550171, -0.2367600053548813, -0.5200499892234802, -0.1075500026345253, -1.287600040435791, 0.577530026435852, 0.12651999294757843, -0.33842000365257263, 0.444599986076355, -0.23047000169754028, 0.09528700262308121, 0.16436000168323517, -0.09548299759626389, 0.060961998999118805, -0.5475999712944031, -0.13744999468326569, 0.08775400370359421, -0.024664999917149544, -0.6070200204849243, -0.3797999918460846, -0.21897999942302704, 0.5104699730873108, -0.3201799988746643, 0.5031899809837341, -0.6902599930763245, 0.6498600244522095, 0.3444800078868866, -0.4130299985408783], u'shorts': [0.015432000160217285, -0.7280899882316589, -0.23763999342918396, 0.05696899816393852, -0.20694999396800995, -0.25220000743865967, -0.5059000253677368, 0.04409300163388252, -0.15396000444889069, 0.0310210008174181, 0.09548500180244446, 0.033296000212430954, 0.14936000108718872, 0.40852001309394836, -0.012643000110983849, 0.12172999978065491, 0.47328999638557434, -0.17478999495506287, 0.29624998569488525, 0.12428999692201614, -0.10300999879837036, 0.04762199893593788, -0.1671600043773651, -0.04746700078248978, -0.37073999643325806, -0.06233000010251999, 0.2772899866104126, 0.04270000010728836, 0.0523420013487339, 0.6022300124168396, 0.27312999963760376, -0.482230007648468, -0.046007998287677765, 
[Raw word-embedding data omitted. This span is a Python 2-style dict literal (u'…' string keys) mapping English words to long fixed-length lists of floats, apparently pre-trained word vectors. The entries visible in this span are u'fire', u'bus', u'wax', u'envelope', u'oil', u'chocolate', u'tiger', u'phone', u'nut', u'potato', and u'steel'; the span opens partway through an earlier entry whose key is not shown here, and the u'steel' vector is cut off at the end. The full numeric payload is not reproduced.]
0.7170600295066833, -0.2713499963283539, 0.10074000060558319, 0.033668000251054764, -0.24591000378131866, -0.18339000642299652, -0.47257000207901, -0.014127999544143677, -0.16413000226020813, -0.10610999912023544, 0.08431199938058853, -1.2825000286102295, -0.1765500009059906, -0.7361599802970886, 0.5198100209236145, 0.01566299982368946, -0.9108800292015076, -1.0413000583648682, 0.12436000257730484, 0.7915099859237671, 0.25558000802993774, -0.013685000129044056, 0.28036999702453613, 0.29829999804496765, -0.30720001459121704, 0.5525799989700317, 0.25773000717163086, 0.20492999255657196, 0.4621700048446655, 0.8587599992752075, 1.2311999797821045, 0.1827699989080429, -0.6470100283622742, -0.16865000128746033, 0.365449994802475], u'wood': [0.20473000407218933, 0.06044299900531769, -0.022324999794363976, -0.54475998878479, -0.09863100200891495, -0.1290699988603592, 0.1110600009560585, 0.25780999660491943, 0.13041000068187714, -0.9907299876213074, 0.043800000101327896, -0.18842999637126923, -0.5335699915885925, -0.06798200309276581, -0.33395999670028687, -0.019838999956846237, -0.4417800009250641, -0.03746600076556206, 0.27849000692367554, 0.007684300187975168, -0.16068999469280243, -0.12284000217914581, 0.2727600038051605, 0.10937999933958054, -0.05647699907422066, -0.1129399985074997, -0.10226999968290329, -0.2799000144004822, -0.4335399866104126, 0.8021199703216553, 0.054972000420093536, 0.6505600214004517, -0.3105500042438507, -0.2064799964427948, -0.663919985294342, 0.4864000082015991, -0.024215999990701675, -0.23529000580310822, 0.16444000601768494, -0.32475000619888306, -0.3043299913406372, -0.18419000506401062, 0.17642000317573547, -0.5603200197219849, 0.0992330014705658, 0.11048000305891037, -0.36844000220298767, -0.41877999901771545, 0.25277000665664673, -0.15199999511241913, -0.2621699869632721, 0.7472699880599976, -0.5492100119590759, 0.16192999482154846, 0.4070099890232086, 0.04297599941492081, -0.7007700204849243, 0.007965000346302986, -0.2856999933719635, -0.12529000639915466, 0.0022092999424785376, -0.20523999631404877, 0.5337799787521362, 0.2653000056743622, 0.38141998648643494, -0.9708200097084045, -0.0020242000464349985, -0.2708300054073334, 0.3623200058937073, -0.32708999514579773, -0.0605509988963604, -0.4450399875640869, -0.4947899878025055, 0.33371999859809875, -0.5537700057029724, 0.4646799862384796, 0.2436700016260147, 0.1147800013422966, -0.2014700025320053, -0.33774998784065247, -0.11858999729156494, 0.024614999070763588, -0.18950000405311584, -0.20326000452041626, -0.1430799961090088, 0.02622300013899803, -0.06931500136852264, 0.13797999918460846, 0.16277000308036804, 0.05818000063300133, 0.12928999960422516, 0.19112999737262726, 0.4440999925136566, -0.11897999793291092, 0.1889200061559677, -0.3607099950313568, -0.023012999445199966, -0.6148800253868103, -0.14568999409675598, -0.221110001206398, -0.5826699733734131, 0.5941299796104431, -0.23944999277591705, -0.32760000228881836, -0.07647500187158585, -0.002801199909299612, 0.0697460025548935, -0.01813500002026558, -0.1294099986553192, -0.07781100273132324, -0.4074999988079071, -0.3379400074481964, -0.15191000699996948, -0.32010000944137573, -0.44231000542640686, -0.46057000756263733, -0.10192999988794327, 0.826229989528656, -0.15098999440670013, -0.12482000142335892, -0.7246099710464478, -0.3940199911594391, -0.11969000101089478, 0.09431800246238708, -0.1672399938106537, 0.22168999910354614, 0.03207100182771683, -0.1787700057029724, 0.10617999732494354, 0.2669300138950348, -0.05816800147294998, 0.5578299760818481, 
0.635699987411499, 0.11023999750614166, 0.3353100121021271, 0.16545000672340393, -0.691510021686554, 0.26813000440597534, -0.38148000836372375, -0.064239002764225, 0.9302600026130676, 0.6129500269889832, 0.4000200033187866, -0.6495800018310547, -0.05417900159955025, 0.263619989156723, 0.39956000447273254, -0.06419999897480011, -0.6466400027275085, -0.779229998588562, 0.21899999678134918, 0.43580999970436096, -0.7004500031471252, -0.533519983291626, -0.4016900062561035, 0.1045600026845932, -0.27250999212265015, -0.47925999760627747, -0.162090003490448, -0.1392499953508377, 0.36177000403404236, -0.24455000460147858, 0.47769999504089355, 0.08391900360584259, 0.1431400030851364, 0.6758999824523926, 0.12402000278234482, 0.02694600075483322, 0.2624500095844269, -0.4012199938297272, -0.3346500098705292, 0.15598000586032867, 0.136570006608963, 0.6127899885177612, -0.247529998421669, 0.3645800054073334, -0.3991900086402893, -0.31477999687194824, -0.1707800030708313, -0.8764299750328064, -0.18675999343395233, 0.04339899867773056, 0.295989990234375, -0.5233700275421143, -0.03418799862265587, -0.6025999784469604, -0.18039000034332275, 0.2789500057697296, 0.3673200011253357, 0.6238399744033813, 0.12138000130653381, 0.841480016708374, 0.3666499853134155, -0.020848000422120094, 0.28753000497817993, 0.41527000069618225, 0.39302998781204224, -0.00843650009483099, -0.26037999987602234, -0.386029988527298, 0.5557000041007996, -0.4655100107192993, -0.15455999970436096, -0.43678998947143555, 0.46963998675346375, 0.7524499893188477, 0.42423999309539795, 0.2155900001525879, -0.607990026473999, -0.25918999314308167, 0.4617699980735779, -0.20126000046730042, 0.16381999850273132, -0.7096099853515625, 0.5870699882507324, 0.3163999915122986, 0.14320999383926392, 0.07456400245428085, 0.06979300081729889, -0.5702900290489197, 0.13098999857902527, -0.5442000031471252, -0.019108999520540237, -0.47964999079704285, -0.42441999912261963, 0.4409100115299225, -0.3355900049209595, 0.22076000273227692, 0.3705599904060364, -0.08490400016307831, 0.03687499836087227, 0.0008391599985770881, 0.005611099768429995, -0.06320600211620331, 0.3416599929332733, 0.14576999843120575, 0.09830500185489655, -0.002535599982365966, -0.7542499899864197, 0.33223000168800354, 0.04597900062799454, -0.3978100121021271, -0.2188200056552887, 0.14396999776363373, -0.2845799922943115, 0.35199999809265137, -0.033270999789237976, 0.06028600037097931, -0.11881999671459198, -0.11552000045776367, 0.2512899935245514, -0.5042300224304199, -0.0766960009932518, -0.5546000003814697, 1.1509000062942505, 0.38231000304222107, -0.17077000439167023, -0.3685699999332428, 0.37918999791145325, 0.09876800328493118, 0.02868499979376793, 0.05012999847531319, -0.5514699816703796, -0.2880200147628784, 0.3246900141239166, 0.1440100073814392, 0.19881999492645264, 0.21137000620365143, -0.03271999955177307, 0.027014000341296196, -0.25501999258995056, -0.012861000373959541, 0.2757200002670288, -0.35655999183654785, -0.8167799711227417, 0.10497000068426132, -1.2563999891281128, 0.12471000105142593, -0.7672299742698669, -0.295199990272522, -0.3349300026893616, -0.34937000274658203, -0.5535600185394287, -0.07169400155544281, 0.19130000472068787, 0.6084499955177307, 0.41126999258995056, -0.11823999881744385, 0.2504599988460541, -0.22473999857902527, -0.06733900308609009, 0.13936999440193176, 0.18393999338150024, 0.2067600041627884, -0.19910000264644623, 0.2862299978733063, 0.162090003490448, -0.08420000225305557, -0.127020001411438, 0.8120399713516235], u'wool': [-0.14305000007152557, 
-0.1031700000166893, -0.00836700014770031, -0.45399001240730286, 0.19032999873161316, -0.6324099898338318, -0.26642000675201416, 0.16666999459266663, -0.04538799822330475, -0.7112399935722351, 0.30647000670433044, -1.0413999557495117, 0.2306700050830841, 0.6582499742507935, 0.06593199819326401, -0.2180899977684021, -0.08231700211763382, -0.3385399878025055, -0.5003499984741211, 0.39372000098228455, -0.3156999945640564, -0.8389599919319153, 0.3412899971008301, 0.6111299991607666, -0.32387998700141907, -0.3589499890804291, 0.2498999983072281, -0.24637000262737274, -0.16899000108242035, 0.4431999921798706, -0.3062700033187866, 0.17552000284194946, -0.7307800054550171, -0.29982998967170715, -0.47925999760627747, 0.4534600079059601, 0.4155299961566925, 0.1252399981021881, 0.052545998245477676, 0.17714999616146088, -0.6453400254249573, -0.3243499994277954, 0.30265000462532043, -0.6115800142288208, 0.6375200152397156, -0.010604999959468842, -0.2653299868106842, -0.18432000279426575, -0.2835400104522705, -0.2879599928855896, 0.05666700005531311, -0.02175999991595745, -0.3169099986553192, 0.0057760002091526985, -0.08931700140237808, 0.10044000297784805, -0.6008899807929993, -0.4053399860858917, -0.44648000597953796, -0.31327998638153076, -0.11131999641656876, -0.4922100007534027, 0.23704999685287476, 0.19068999588489532, 0.15926000475883484, 0.09582500159740448, -0.21727000176906586, -0.1363999992609024, -0.23684999346733093, 0.20062999427318573, 0.3718299865722656, 0.031877998262643814, -0.12951000034809113, -0.4064300060272217, 0.10891000181436539, 0.148499995470047, 0.048601001501083374, -0.10913000255823135, -0.24053999781608582, -0.07919599860906601, -0.25117000937461853, 0.04990699887275696, -0.4094099998474121, -0.3641299903392792, -0.0015807000454515219, 0.2292499989271164, 0.3968200087547302, 0.0001828700042096898, -0.2995699942111969, 0.0244120005518198, 0.38411998748779297, -0.0994350016117096, -0.18411000072956085, 0.22970999777317047, -0.4173400104045868, 0.3351399898529053, 0.157260000705719, 1.0091999769210815, -0.15750999748706818, 1.2148000001907349, 0.31150001287460327, 0.5750399827957153, -0.6193699836730957, 0.2587699890136719, -0.39136001467704773, -0.2950400114059448, -0.19740000367164612, 0.051552001386880875, -0.46105000376701355, 0.5947099924087524, 0.175369992852211, 0.23725999891757965, -0.8650500178337097, -0.03474799916148186, -0.0040616001933813095, 0.32892000675201416, -0.09969300031661987, 0.7408400177955627, 0.24073000252246857, -0.6715800166130066, 0.05670100077986717, 0.21086999773979187, 0.8250399827957153, 0.42671000957489014, 0.4331800043582916, 0.22753000259399414, 0.051639001816511154, 0.2767600119113922, -0.19660000503063202, -0.4520699977874756, -0.02708899974822998, -0.038297999650239944, -0.6512399911880493, -0.2126999944448471, -0.09266600012779236, 0.5627999901771545, -0.6859700083732605, 0.44387000799179077, 0.5389800071716309, 0.2670300006866455, 0.050106000155210495, 0.6374199986457825, 0.2594499886035919, -0.7214000225067139, 0.13036000728607178, 0.3398900032043457, 0.3370000123977661, -0.6962800025939941, -0.036215998232364655, -0.27237001061439514, -0.06401500105857849, 0.14270000159740448, -0.11620999872684479, -0.9869400262832642, -0.0021869998890906572, -0.14904999732971191, -0.6129800081253052, -0.5414900183677673, 0.6324599981307983, -0.0550680011510849, -0.009775600396096706, 0.056752000004053116, -0.37483999133110046, 0.019007999449968338, 0.28817999362945557, -0.4242100119590759, 0.3003300130367279, -0.06247600167989731, 
0.7048900127410889, 0.47286999225616455, -0.43641000986099243, -0.1770399957895279, -0.16810999810695648, 0.46573999524116516, 0.20759999752044678, -0.09361399710178375, 0.12464000284671783, 0.457040011882782, -0.2999500036239624, -0.2073500007390976, 0.368149995803833, 0.09950599819421768, -0.29300999641418457, -0.3487299978733063, 0.6368100047111511, 0.08954799920320511, 0.8809000253677368, -0.10234999656677246, 0.12310999631881714, 0.5613800287246704, -0.15880000591278076, 0.718500018119812, 0.021624000743031502, -0.17169000208377838, -0.04642900079488754, 0.24404999613761902, -0.47822999954223633, -0.1735599935054779, 0.14024999737739563, -0.1837099939584732, 0.0020751000847667456, -0.2439499944448471, 0.7670999765396118, 0.21671999990940094, 1.1442999839782715, 0.44223999977111816, 0.5102499723434448, 0.5731199979782104, -0.5725100040435791, -0.42489001154899597, 0.07318899780511856, 0.154339998960495, -0.06763099879026413, 0.2852199971675873, 0.32161998748779297, 0.27904000878334045, -0.00907289981842041, -0.6517000198364258, 0.22152000665664673, -0.5297799706459045, 0.2744100093841553, -0.5460799932479858, -0.028550999239087105, -0.39193999767303467, 0.2463500052690506, 0.04070800170302391, -0.07644300162792206, -0.06331200152635574, -0.05159299820661545, 0.21713000535964966, 0.7168200016021729, -0.03386399894952774, -0.1444299966096878, 0.37448999285697937, 1.027899980545044, -0.3184199929237366, 0.8250799775123596, -0.21698999404907227, -0.5768300294876099, 0.21265999972820282, -0.4348500072956085, -0.11913999915122986, -1.024399995803833, 0.1763100028038025, -0.9293799996376038, 0.892009973526001, -0.08829399943351746, -0.31275999546051025, 0.07679399847984314, -0.6633999943733215, -0.3430899977684021, 0.1264200061559677, 0.4913400113582611, -0.5802199840545654, 0.48333999514579773, 0.35776999592781067, 0.030619999393820763, 0.36987999081611633, -0.1018500030040741, -0.02835099957883358, 0.18609000742435455, -0.06207900121808052, -0.03551600128412247, 0.509880006313324, -0.1149199977517128, 0.15730999410152435, 0.15514999628067017, -0.11040999740362167, -0.18769000470638275, -0.0158890001475811, -0.3264699876308441, -0.09814699739217758, 0.10791999846696854, -0.07166200131177902, -0.671750009059906, 0.14661000669002533, -0.21533000469207764, -0.017635999247431755, -0.6210500001907349, 0.41596999764442444, -0.31589001417160034, -0.08134199678897858, -0.03477700054645538, 0.5273699760437012, -0.032965999096632004, 0.2595599889755249, -0.0995120033621788, -0.17789000272750854, -0.014289000071585178, -0.29012998938560486, 0.0782570019364357, 0.5430200099945068, 0.14121000468730927, 0.4592899978160858, -0.2909800112247467, 0.2367199957370758, 0.27507999539375305, 0.12551000714302063, 0.7321299910545349, 0.5205399990081787], u'room': [-0.40577998757362366, 0.19103999435901642, -0.044477999210357666, -0.37595999240875244, -0.05220299959182739, 0.15817999839782715, -0.21863999962806702, -0.4975000023841858, 0.16392000019550323, -1.1629999876022339, -0.1303499937057495, 0.0014735000440850854, 0.20037999749183655, 0.08159500360488892, -0.18066999316215515, 0.32416999340057373, -0.1693200021982193, -0.12511999905109406, -0.18243999779224396, -0.027191000059247017, 0.04828000068664551, 0.24737000465393066, 0.0318479984998703, 0.0049044000916182995, 0.05538100004196167, -0.08629000186920166, 0.39594000577926636, -0.27219000458717346, 0.23002000153064728, -0.04913699999451637, 0.06025199964642525, 0.0061102998442947865, -0.30469000339508057, 0.23874999582767487, -0.983519971370697, 
0.6596999764442444, -0.2638300061225891, -0.022272000089287758, -0.41187000274658203, -0.4643000066280365, -0.4826500117778778, -0.19175000488758087, -0.3019700050354004, 0.22804999351501465, 0.3104099929332733, 0.2082200050354004, 0.3735499978065491, -0.05658699944615364, -0.42423999309539795, -0.20329000055789948, 0.08173000067472458, -0.24427999556064606, -0.2605699896812439, -0.26238998770713806, 0.042559001594781876, -0.23757000267505646, -0.27584999799728394, 0.27059000730514526, 0.24546000361442566, -0.0019692000932991505, 0.16269999742507935, -0.2535400092601776, 0.36625000834465027, 0.4212400019168854, -0.019516000524163246, -0.7644100189208984, 0.31707999110221863, -0.48910999298095703, -0.219200000166893, -0.22186000645160675, -0.3040199875831604, -0.10048999637365341, 0.04446699842810631, -0.06138300150632858, -0.017727000638842583, 0.21276000142097473, 0.07168400287628174, -0.1995300054550171, 0.09187600016593933, -0.5662800073623657, 0.2708800137042999, 0.10559000074863434, -0.31080999970436096, 0.2368600070476532, 0.2621699869632721, -0.22548000514507294, -0.38457998633384705, -0.12598000466823578, -0.10547000169754028, -0.007067199796438217, 0.06650800257921219, 0.08886700123548508, -0.2049199938774109, 0.47064998745918274, 0.10902000218629837, -0.043425001204013824, 0.24017000198364258, -0.42267000675201416, 0.48151999711990356, -0.5970799922943115, -0.15310999751091003, 0.009181699715554714, -0.37090998888015747, -0.46007999777793884, -0.15524999797344208, -0.11145000159740448, 0.36267998814582825, 0.23366999626159668, -0.17910000681877136, 0.7394099831581116, -0.6866499781608582, 0.09576699882745743, 0.10535000264644623, 0.28499001264572144, -0.47826001048088074, 0.3111799955368042, -0.298799991607666, 0.15578000247478485, -0.48104000091552734, -0.06731099635362625, -0.003555099945515394, 0.08779700100421906, -0.18595999479293823, 0.15793000161647797, -0.0908140018582344, -0.2675899863243103, -0.32815998792648315, -0.42572999000549316, 0.37046998739242554, 0.12439999729394913, 0.2755900025367737, 0.08629900217056274, -0.1287499964237213, -0.22529000043869019, 0.39215001463890076, 0.1684899926185608, -0.11664000153541565, -0.21573999524116516, -0.46897000074386597, -0.08827900141477585, -0.032669998705387115, 0.008366400375962257, 0.03700599819421768, 0.5086699724197388, -0.6120700240135193, -0.3397800028324127, 0.3635599911212921, -0.09999000281095505, 0.04839500039815903, 0.05116900056600571, -0.24041999876499176, 0.5418300032615662, -0.06466600298881531, -0.14674000442028046, 0.10298000276088715, 0.3209399878978729, 0.09587900340557098, 0.3934899866580963, -0.3126800060272217, -0.09804599732160568, 0.5123000144958496, -0.06594900041818619, 0.39904001355171204, -0.22826999425888062, 0.508870005607605, 0.3683300018310547, -0.18533000349998474, 0.30744001269340515, 0.14329999685287476, 0.4828900098800659, -0.2825700044631958, 0.39570000767707825, -0.1017799973487854, -0.20598000288009644, -0.44644999504089355, 0.06224200129508972, -0.28466999530792236, 0.6539999842643738, 0.4133700132369995, -0.4800400137901306, 0.25547999143600464, -0.0022388999350368977, 0.06291700154542923, -0.08112700283527374, -0.28613001108169556, -0.1462700068950653, 0.6862900257110596, 0.6134899854660034, -0.023699000477790833, 0.3354699909687042, 0.6494200229644775, 0.20527000725269318, -0.20200000703334808, -0.34244000911712646, -0.01042100042104721, 0.021594999358057976, -0.5601500272750854, 0.7417299747467041, -0.7127299904823303, -0.05606599897146225, 1.0528000593185425, -0.31088998913764954, 
0.034046001732349396, 0.032944001257419586, 0.40505000948905945, -0.43233001232147217, -0.04849499836564064, -0.07485999912023544, -0.27976998686790466, -0.39664000272750854, -0.051833998411893845, -0.016174999997019768, -0.19383999705314636, -0.359470009803772, 0.4510999917984009, -0.09709999710321426, -0.06397199630737305, -0.4106299877166748, -0.02995399944484234, 0.12421999871730804, 0.22824999690055847, 0.25224998593330383, -0.14249999821186066, 0.28600001335144043, -0.32194000482559204, 0.047658998519182205, -0.3534899950027466, -0.0759970024228096, 0.011567000299692154, -0.5386499762535095, 0.01620499975979328, -0.6837300062179565, 0.23601000010967255, -0.21040000021457672, 0.48058000206947327, -0.24201999604701996, -0.03928999975323677, 0.17812000215053558, -0.6167500019073486, -0.02782600000500679, 0.1642799973487854, 0.0826599970459938, -0.20948000252246857, 0.8801000118255615, -0.2484699934720993, -0.19362999498844147, 0.07501400262117386, 0.1543000042438507, -0.6323500275611877, -0.024469999596476555, 0.33901000022888184, -0.15857000648975372, 0.24643999338150024, -0.11607000231742859, 0.06772100180387497, 0.3760400116443634, 0.04660100117325783, -0.2970399856567383, 0.8518199920654297, 0.10860999673604965, -0.32120001316070557, -0.5496500134468079, 0.3246299922466278, -0.19648000597953796, -0.17513999342918396, -0.031282998621463776, -0.04176200181245804, 0.45938000082969666, -0.057401999831199646, -0.3728399872779846, -0.5277900099754333, -0.28393998742103577, -0.023428000509738922, -0.1724099963903427, 0.17685000598430634, 0.16875000298023224, -2.2720999717712402, 0.9146599769592285, -0.12009000033140182, -0.1205499991774559, -0.6441900134086609, 0.1938299983739853, -0.005151200108230114, -0.3253900110721588, 0.6905999779701233, 0.5983999967575073, -0.5943099856376648, 0.390749990940094, -0.41078001260757446, -0.30612999200820923, 0.1941400021314621, -0.09590599685907364, -0.424919992685318, 0.14192000031471252, -0.03543199971318245, -0.11980000138282776, 0.005836099851876497, -0.04307899996638298, -0.34068000316619873, 0.49445998668670654], u'salad': [-0.7224000096321106, -0.25589001178741455, 0.8113800287246704, 0.4369100034236908, 0.08902599662542343, -0.3762899935245514, -0.17702999711036682, -0.10194999724626541, 0.08892299979925156, -0.09556400030851364, -0.3154599964618683, 0.2258100062608719, -0.35760998725891113, 1.4259999990463257, -0.08298899978399277, -0.31373998522758484, 0.03311799839138985, 0.40125998854637146, -0.3597699999809265, 0.5601000189781189, -0.17956000566482544, 0.3668000102043152, -0.4321199953556061, 0.2935200035572052, 0.018581999465823174, -0.5865700244903564, -0.1196800023317337, -0.13970999419689178, -0.19322000443935394, -1.1536999940872192, -0.2977699935436249, -0.08500900119543076, -0.12245000153779984, 0.11495999991893768, -0.12846000492572784, 0.7139099836349487, -0.10388000309467316, 0.348690003156662, -0.7555999755859375, 0.08823099732398987, 0.33574000000953674, 0.08067300170660019, -0.04722899943590164, 0.10332000255584717, 0.09391599893569946, -0.23281000554561615, 0.6809700131416321, 0.3752500116825104, -0.5906500220298767, 0.27566999197006226, -0.2669599950313568, -0.5745800137519836, 0.5708000063896179, 0.4772300124168396, -0.5760800242424011, 0.07446400076150894, 0.061225999146699905, 0.04047999903559685, 0.5249099731445312, -0.02222900092601776, 0.6634799838066101, -0.25870999693870544, -0.23747000098228455, 0.29947999119758606, -0.48600998520851135, -0.1089399978518486, -0.8939999938011169, 0.2381799966096878, 
-0.13911999762058258, -0.0031753000803291798, 0.07965300232172012, -0.16007000207901, 0.09768100082874298, -0.2390899956226349, -0.6908699870109558, -0.1817599982023239, 1.18149995803833, -0.14949999749660492, -0.5439199805259705, 0.29155001044273376, 0.41067999601364136, 0.6071100234985352, -0.15277999639511108, 0.6151400208473206, 0.10254000127315521, -0.2970399856567383, -0.9823499917984009, 0.6295300126075745, -0.16729000210762024, -1.0898000001907349, -0.2730900049209595, -0.1731799989938736, -0.047171998769044876, -0.10007999837398529, -0.009697799570858479, 0.36059999465942383, -0.12494999915361404, 0.16315999627113342, 0.3342300057411194, 0.9980499744415283, -0.4228599965572357, -0.5538600087165833, 0.625469982624054, -0.3957499861717224, -0.3521600067615509, -0.41934001445770264, 0.08074399828910828, 0.30573999881744385, -0.6694300174713135, -0.4475499987602234, 0.8939599990844727, 0.8487300276756287, -0.5051900148391724, -0.6669300198554993, 0.37411001324653625, -0.07214400172233582, -0.9929699897766113, 0.19755999743938446, 0.4950999915599823, -0.21001000702381134, -0.23409000039100647, 0.36002999544143677, 0.9775599837303162, 0.7305899858474731, -0.2862800061702728, 0.22142000496387482, 0.13057999312877655, 0.22357000410556793, 0.05242599919438362, 1.1302000284194946, 0.25328001379966736, 0.8360199928283691, 0.0723470002412796, 0.7067199945449829, 0.08529999852180481, -0.4509600102901459, -0.46924999356269836, -0.06520699709653854, -0.6410499811172485, 0.07421199977397919, 0.0830639973282814, 0.5223100185394287, -0.08444999903440475, -0.0957920029759407, -0.03861499950289726, 0.5612800121307373, 0.24372999370098114, 0.11123999953269958, 0.36392998695373535, -0.39381998777389526, -0.3707199990749359, 0.08235800266265869, 0.6207600235939026, -0.0025265999138355255, -0.4868299961090088, -0.7813900113105774, -0.20201000571250916, -0.36542999744415283, 0.11896999925374985, -0.3226900100708008, 0.24004000425338745, -0.01245300006121397, -0.23898999392986298, 0.3348900079727173, -0.07612200081348419, 0.17570999264717102, -0.003097600070759654, -0.1738699972629547, -0.474480003118515, -1.1806999444961548, -0.3102000057697296, -0.2976999878883362, -0.6373900175094604, -0.4122900068759918, -0.17011000216007233, 0.24905000627040863, -0.2504099905490875, -0.0008810600265860558, 0.5473300218582153, -1.1274000406265259, 0.19839000701904297, 0.37222999334335327, 0.40244999527931213, -0.4593600034713745, -0.3481700122356415, 0.14302000403404236, 0.8007699847221375, 0.40615999698638916, -0.06579100340604782, -0.25699999928474426, -0.4520600140094757, 1.3027000427246094, -0.31095001101493835, -0.4045099914073944, 0.20017999410629272, 0.48278000950813293, -0.3147200047969818, -0.27803999185562134, -0.0514100007712841, 0.4326600134372711, 0.5199300050735474, -0.5315799713134766, 0.6587299704551697, 0.4207899868488312, 0.0727510005235672, 0.13124999403953552, 0.3600099980831146, -0.35879001021385193, 0.46097999811172485, -0.4528299868106842, 0.7547500133514404, 0.0355679988861084, 0.6093400120735168, -0.16151000559329987, 0.5051000118255615, 0.3185400068759918, 1.3289999961853027, -0.75941002368927, -0.023389000445604324, 0.18692000210285187, 0.45822998881340027, 0.10146000236272812, -0.9184799790382385, -0.05039599910378456, -0.4675300121307373, -0.6431000232696533, 0.4692699909210205, -0.23104000091552734, 0.0021732000168412924, 0.26243001222610474, -0.07631999999284744, 0.06700599938631058, -0.17653000354766846, 0.3035300076007843, 1.3287999629974365, 0.31685999035835266, -0.6515300273895264, 
-0.3852899968624115, -0.1926099956035614, -0.08438000082969666, -0.057534001767635345, -0.015304000116884708, -0.5209900140762329, 0.4248200058937073, -0.7811300158500671, -0.08550900220870972, -0.4769099950790405, 0.1887200027704239, -0.8924700021743774, -0.8856599926948547, 0.8788300156593323, -0.14834000170230865, 0.00863960012793541, 0.4300299882888794, 0.11218000203371048, -0.18505999445915222, -0.14490999281406403, 0.07955099642276764, 0.040483999997377396, 0.2117999941110611, 0.11229000240564346, -0.7865399718284607, 0.05451099947094917, -0.40762999653816223, 0.08624500036239624, -0.582859992980957, -0.2406499981880188, -0.0594870001077652, 0.3058600127696991, -0.0473489984869957, -0.5593799948692322, -0.055838000029325485, -0.46955999732017517, 0.16203999519348145, 0.1394300013780594, 0.1870100051164627, -0.34422001242637634, 0.5590800046920776, -1.0140999555587769, -0.5116699934005737, 0.1331699937582016, 0.10286000370979309, 0.11878000199794769, 0.07673099637031555, 0.4730899930000305, 0.33702000975608826, 0.5719199776649475, -0.05500499904155731, -0.17106999456882477, -0.18775999546051025, 0.6516900062561035, -0.5747699737548828, 0.6794899702072144, -0.4421299993991852, -0.4022800028324127, -0.9769099950790405, 0.42778998613357544, -1.0784000158309937, -0.019481999799609184, 0.05835999920964241], u'hat': [-0.1052900031208992, 0.2529299855232239, -0.282370001077652, -0.7393100261688232, -0.15889999270439148, -0.32291001081466675, -1.1823999881744385, -0.27974000573158264, -0.3887900114059448, -0.16147999465465546, 0.31022000312805176, 0.057041000574827194, 0.023659000173211098, 0.49889999628067017, -0.02979299984872341, 0.2043599933385849, 0.3151499927043915, 0.10465999692678452, 0.12915000319480896, 0.18971000611782074, 0.3215700089931488, -0.36987999081611633, 0.127470001578331, -0.08974699676036835, -0.7719299793243408, -0.1911199986934662, -0.011087000370025635, 0.27309998869895935, 0.23070000112056732, 0.26809999346733093, 0.15047000348567963, -0.34077000617980957, -0.10655999928712845, -0.29576998949050903, -0.9597200155258179, 0.29124000668525696, 0.017194999381899834, 0.0055010998621582985, -0.5932899713516235, 0.34466999769210815, -0.2576499879360199, -0.19156000018119812, -0.10114999860525131, 0.23966999351978302, -0.049956999719142914, 0.044449999928474426, 0.49948999285697937, -0.2667199969291687, -0.26440000534057617, -0.07066299766302109, -0.29958999156951904, 0.3468799889087677, 0.27639999985694885, 0.5266100168228149, -0.21994000673294067, 0.056616999208927155, -0.33838000893592834, -0.3683600127696991, -0.3070400059223175, -0.13669000566005707, -0.061935000121593475, -0.5914099812507629, -0.5632699728012085, 0.1905899941921234, -0.16526000201702118, -0.3245300054550171, -0.3149299919605255, 0.35850998759269714, 0.3450300097465515, -0.09475599974393845, 0.3625999987125397, 0.29493001103401184, 0.49386999011039734, 0.1619199961423874, 0.20000000298023224, -0.14414000511169434, 0.335099995136261, -0.24192999303340912, 0.14218999445438385, -0.16091999411582947, 0.20419000089168549, 0.4956800043582916, -0.556850016117096, -0.5880600214004517, 0.16158999502658844, 5.977500040899031e-05, -0.0032627000473439693, -0.03545600175857544, -0.33456000685691833, -0.23944999277591705, -0.20923000574111938, -0.3305700123310089, 0.054962001740932465, 0.2554599940776825, 0.3296999931335449, 0.15658999979496002, -0.015922000631690025, 0.13230000436306, -0.17205999791622162, -0.25536999106407166, 0.36636000871658325, 1.030500054359436, -0.1360200047492981, 0.012928999960422516, 
0.08024200052022934, -0.5058000087738037, 0.06422899663448334, 0.2542800009250641, -0.2910099923610687, 0.48260998725891113, 0.009446999989449978, 0.4647800028324127, -0.3304600119590759, 0.03238299861550331, 0.5136100053787231, 0.0008178799762390554, -0.23152999579906464, 0.19227999448776245, 0.17283999919891357, -0.47574999928474426, 0.185589998960495, -0.1659500002861023, 0.5468299984931946, 0.4867999851703644, -0.01245999988168478, 0.08104699850082397, -0.03782900050282478, 0.4384300112724304, -0.12699000537395477, 0.0070420000702142715, 0.28633999824523926, -0.186599999666214, -0.5782700181007385, 0.03203799948096275, 0.12456999719142914, -0.1492999941110611, -0.24246999621391296, -0.23930999636650085, 0.2751399874687195, -0.11554999649524689, 0.30215999484062195, 0.399509996175766, 0.005708599928766489, -0.620930016040802, 0.023218000307679176, 0.4713500142097473, 0.01978899911046028, 0.24334999918937683, 0.17510999739170074, -0.09729400277137756, -0.06140400096774101, 0.5547599792480469, -0.6529600024223328, -0.44828999042510986, -0.10956999659538269, 0.15396000444889069, 0.026079000905156136, -0.6395999789237976, 0.3242200016975403, 0.22197000682353973, 0.40275999903678894, -0.759440004825592, -0.12425000220537186, 0.1634799987077713, 0.09538500010967255, -0.1573600023984909, 0.27476000785827637, 0.5923799872398376, 0.40836000442504883, -0.18543000519275665, -0.5035499930381775, -0.05142299830913544, -0.26054999232292175, 0.21252000331878662, 0.29565998911857605, -0.7831900119781494, 0.06711900234222412, 0.30302000045776367, -0.2893500030040741, -0.09095899760723114, 0.27658000588417053, 0.1736299991607666, 0.6244300007820129, -0.021466000005602837, 0.4539799988269806, 0.2367600053548813, 0.06596300005912781, -0.04572699964046478, -0.10487999767065048, 0.07196299731731415, 0.3722200095653534, 0.11727999895811081, 0.15625999867916107, 0.45263001322746277, -0.5143300294876099, 0.11845000088214874, -0.5463600158691406, -0.3198600113391876, -0.02954999916255474, -0.19544999301433563, 1.312999963760376, 0.4660100042819977, 0.09550700336694717, -0.41530001163482666, -0.15716999769210815, -0.4171299934387207, 0.20541000366210938, 0.11869999766349792, -0.37323999404907227, -0.49153000116348267, -0.11248999834060669, -0.06238900125026703, 0.22652000188827515, 0.22879000008106232, 0.9372900128364563, -0.29892998933792114, 0.17866000533103943, -0.3302299976348877, -0.11663000285625458, -0.2212499976158142, 0.04409800097346306, -0.14253999292850494, -0.042114999145269394, 0.2928900122642517, 0.4472399950027466, -0.6723700165748596, -0.18505999445915222, -0.1050800010561943, 0.09074900299310684, 0.12736999988555908, 0.4351600110530853, 0.9441499710083008, -0.4054799973964691, 0.15490999817848206, 0.08432500064373016, -0.36563000082969666, -0.0007444299990311265, -0.32471001148223877, -0.09598299860954285, 0.9153599739074707, 0.5250300168991089, 0.25685998797416687, -1.027400016784668, 0.19009999930858612, -0.15466000139713287, 0.2989799976348877, 0.052067000418901443, -0.11530999839305878, 0.2936300039291382, -0.4507000148296356, -0.24185000360012054, -0.007310799788683653, 0.2851099967956543, -0.4383000135421753, 0.47220999002456665, -0.30726000666618347, 0.3001199960708618, -0.09144899994134903, -0.5471500158309937, -0.726419985294342, -0.1774899959564209, -0.0639130026102066, 0.7382199764251709, 0.6183599829673767, -0.7680799961090088, -0.42513999342918396, -0.09081699699163437, -0.9233099818229675, 0.14868000149726868, 0.23743000626564026, 0.06539099663496017, -0.2350199967622757, 
0.15737000107765198, -0.1850000023841858, -0.3892199993133545, -0.6275500059127808, -1.179800033569336, -0.46522000432014465, -0.7094399929046631, -0.3540300130844116, 0.2702000141143799, 0.21987999975681305, 0.1477299928665161, -0.0726189985871315, -0.023016000166535378, 0.6119800209999084, -0.18560999631881714, 0.37066999077796936, 0.1345899999141693, -0.23914000391960144, 0.03675699979066849, 0.18411000072956085, -0.6967399716377258, 0.5120499730110168, -0.02163200080394745, 0.07367099821567535, 0.31334999203681946, -0.4830099940299988, 0.6024600267410278, 0.24299000203609467], u'blade': [0.18366999924182892, 0.29580000042915344, -0.09348800033330917, -0.7945200204849243, 0.28929001092910767, 0.46873998641967773, 0.09809999912977219, 0.4543600082397461, -0.14695000648498535, -0.6094899773597717, -0.12168999761343002, 0.40939998626708984, 0.07287999987602234, 0.12953999638557434, -0.7973499894142151, 0.26337000727653503, -0.7953799962997437, 0.5475800037384033, -0.2481199949979782, 0.02543500065803528, -0.2045699954032898, 0.47940000891685486, 0.3612000048160553, 0.5066499710083008, 0.7892199754714966, 0.3537200093269348, -0.09932299703359604, -0.3774699866771698, 0.14985999464988708, 0.06541299819946289, -0.3795500099658966, 0.38280999660491943, 0.36618998646736145, 0.126910001039505, -0.6195899844169617, 0.27208998799324036, -0.3254300057888031, 0.3176499903202057, -0.2717300057411194, 0.6640700101852417, 0.13011999428272247, 0.30261000990867615, -0.3312000036239624, -0.7202699780464172, -0.07501299679279327, 0.5518199801445007, -0.03554299846291542, -0.05482799932360649, 0.08410800248384476, 0.43933001160621643, -0.20690999925136566, 0.1290999948978424, 0.3471899926662445, 0.030528999865055084, -0.23794999718666077, -0.4361099898815155, -0.21177999675273895, 0.5850899815559387, 0.8627899885177612, 0.37487998604774475, 0.20489999651908875, 0.08672100305557251, 0.12796999514102936, 0.4572100043296814, 0.46658000349998474, -0.4837599992752075, -0.4350700080394745, 0.35826998949050903, 0.6041399836540222, 0.007612000219523907, 0.74440997838974, 0.18939000368118286, -0.04752099886536598, 0.6708800196647644, -0.062286000698804855, 0.5826500058174133, -0.13196000456809998, -0.5605199933052063, -0.559220016002655, -0.1256600022315979, -0.27298998832702637, 0.3932499885559082, 0.25352001190185547, -0.5389599800109863, 0.015836000442504883, -0.21998000144958496, 0.06131400167942047, 0.19065000116825104, -0.5002300143241882, -0.28266000747680664, 0.7998999953269958, 0.2160699963569641, 0.6151900291442871, -0.30309000611305237, -0.1837500035762787, -0.4691399931907654, -0.20804999768733978, 0.05138299986720085, 0.709659993648529, -0.5618699789047241, -0.29124999046325684, -0.22381000220775604, -0.4325299859046936, -0.09405799955129623, -0.06845799833536148, -0.04349299892783165, 0.31268998980522156, -0.25380000472068787, -0.5563099980354309, 0.1198199987411499, -0.24546000361442566, 0.5662299990653992, 0.052622001618146896, -0.9033600091934204, -0.5521600246429443, -0.26673001050949097, -0.6436499953269958, 0.10245999693870544, -0.6602799892425537, -0.4467200040817261, -0.17816999554634094, -0.5823500156402588, -0.10018999874591827, -0.34915998578071594, 0.3245599865913391, 0.45691999793052673, 0.35242998600006104, -0.08070600032806396, -0.24956999719142914, 0.1412699967622757, -0.2198999971151352, 0.7298499941825867, 0.5852299928665161, 0.8436999917030334, 0.19582000374794006, -0.2621400058269501, -0.09652599692344666, -0.10916999727487564, 0.2520900070667267, 0.5087699890136719, 
0.7311800122261047, 0.2531700134277344, -0.488429993391037, -0.860069990158081, 0.185479998588562, 0.28317999839782715, 0.26249000430107117, -0.5073000192642212, -0.31512001156806946, -0.7043499946594238, 0.04496400058269501, -0.026868000626564026, -0.32519999146461487, -0.3102700114250183, 0.6035000085830688, -0.3734300136566162, 0.8177199959754944, -0.44964998960494995, 0.4546799957752228, -0.11479000002145767, 0.2953299880027771, -0.3081299960613251, -0.3534199893474579, -0.3488599956035614, -0.045809000730514526, 0.6624400019645691, 0.4830400049686432, 0.01612200029194355, -0.1852799952030182, 0.09733700007200241, -0.32510000467300415, -0.14283999800682068, 0.10409999638795853, 0.0044685001485049725, -0.1175599992275238, -0.13625000417232513, 0.5202999711036682, 0.161640003323555, 0.2515299916267395, -0.5092200040817261, 0.12155000120401382, -0.30428001284599304, -0.05877000093460083, 0.5718799829483032, 0.14962999522686005, -0.557669997215271, 0.25496000051498413, 0.20699000358581543, 0.8366400003433228, 0.40206998586654663, -0.5872700214385986, 0.33098000288009644, 0.19594000279903412, 0.28742000460624695, 0.18852999806404114, 0.8851000070571899, -0.4729599952697754, -0.33410000801086426, 0.012663999572396278, 0.8610299825668335, 0.19415999948978424, -0.4010699987411499, 0.7149199843406677, 0.7504400014877319, -0.04143499955534935, 0.6050599813461304, -0.12624000012874603, -0.29135000705718994, -0.3080100119113922, 0.23770000040531158, 0.5487499833106995, -0.027754999697208405, 0.40220001339912415, 0.0009435800020582974, -0.22540000081062317, -0.6213300228118896, 0.06448200345039368, 0.17619000375270844, -0.41398000717163086, -0.05132799968123436, -0.7033799886703491, 0.5594000220298767, 0.2625400125980377, 0.26372000575065613, -0.3873699903488159, -0.3819800019264221, -0.06887099891901016, -0.37483999133110046, 0.07418700307607651, -0.20720000565052032, 0.4698300063610077, -0.27344000339508057, -0.3420799970626831, -0.20420999825000763, -0.15173999965190887, 0.055716000497341156, -0.21104000508785248, 0.20369000732898712, 0.03523999825119972, -0.15171000361442566, 0.5879799723625183, -0.3926199972629547, -0.20701000094413757, -0.49268001317977905, -0.4422599971294403, -0.08004400134086609, 0.0788320004940033, 0.24672000110149384, 0.38960000872612, -0.06881699711084366, -0.3809700012207031, 0.23202000558376312, -0.42083999514579773, -0.1319900006055832, 0.1864600032567978, -0.5099899768829346, 0.3298400044441223, -0.28251001238822937, -0.10384000092744827, -0.46143001317977905, 0.0863720029592514, -0.16595999896526337, -0.02708500064909458, 0.23882000148296356, -0.46254000067710876, 0.269459992647171, -0.1416500061750412, 0.8848699927330017, -0.6469299793243408, 0.18477000296115875, -0.30066001415252686, 0.05571199953556061, 0.5433899760246277, 0.4351400136947632, -0.20137999951839447, 0.0201370008289814, -0.4365699887275696, 0.03222399950027466, -0.41670000553131104, -0.07747600227594376, 0.19949999451637268, -0.0646430030465126, -0.27577000856399536, 0.42671999335289, -0.11829999834299088, -0.07814200222492218, -0.44979000091552734, -0.020251000300049782, -0.25727999210357666, 0.00252470001578331, 0.5281599760055542, 0.2881399989128113, -0.48697999119758606, 0.10214000195264816, -0.10120999813079834, 0.4200800061225891, -0.036695998162031174, 0.34161999821662903, 0.19978000223636627, 0.2107899934053421], u'bucket': [-0.590470016002655, 0.1788100004196167, 0.0852380022406578, -0.46386000514030457, -0.48083001375198364, 0.21639999747276306, 0.42763999104499817, -0.3192799985408783, 
-0.0024482000153511763, 0.24664999544620514, -0.3860499858856201, -0.25435999035835266, -0.43136000633239746, 0.04254699870944023, 0.24457000195980072, -0.38927000761032104, -0.0828619971871376, 0.2984299957752228, -0.09281899780035019, 0.21453000605106354, 0.2581399977207184, 0.1014999970793724, 0.5908300280570984, 0.04114900156855583, 0.1237500011920929, -0.15793000161647797, 0.1276800036430359, 0.3470200002193451, 0.43083998560905457, -0.013540999963879585, -0.598360002040863, -0.3378300070762634, -0.2358900010585785, -0.20125000178813934, -0.25088998675346375, 0.6523299813270569, 0.2520799934864044, 0.1882600039243698, -0.4850899875164032, 0.5887600183486938, -0.18526999652385712, -0.013093000277876854, 0.21278999745845795, -0.7082499861717224, 0.192440003156662, 0.08594799786806107, 0.6274899840354919, -0.21657000482082367, -0.1849599927663803, 0.13523000478744507, -0.41468000411987305, 0.31679001450538635, -0.28902000188827515, -0.4535300135612488, 0.12826000154018402, -0.08858399838209152, 0.32304999232292175, 0.219310000538826, 0.008115800097584724, 0.37321001291275024, -0.0075750998221337795, -0.11975999921560287, -0.14380000531673431, -0.16586999595165253, -0.5218899846076965, -0.5959699749946594, -0.43998000025749207, 0.021904999390244484, -0.24344000220298767, 0.4029499888420105, -0.08596599847078323, -0.16506999731063843, 0.3880400061607361, -0.09253700077533722, -0.02424200065433979, -0.38995999097824097, 0.23950999975204468, 0.16403000056743622, -0.3199400007724762, -0.8490200042724609, 0.021865999326109886, 0.4663800001144409, 0.4842199981212616, -0.7796000242233276, -0.26050999760627747, -0.05278199911117554, -0.157150000333786, 0.30425000190734863, -0.2834300100803375, -0.5996599793434143, 0.7511500120162964, 0.17924000322818756, 0.6645299792289734, -0.5815200209617615, -0.25505000352859497, -0.12103000283241272, 0.18193000555038452, 0.33406001329421997, 0.021664999425411224, -0.20830999314785004, 0.00859680026769638, 0.6259999871253967, -0.3593499958515167, -0.7695599794387817, 0.4129199981689453, -0.6165000200271606, 0.3761900067329407, 0.15575000643730164, -0.0006282100221142173, -0.3068299889564514, 0.11170999705791473, 0.2621699869632721, -0.625980019569397, -0.13161000609397888, -0.07908199727535248, -0.12927000224590302, -0.27331000566482544, -0.05949300155043602, -0.05269499868154526, -0.33799999952316284, 0.3930000066757202, -0.29556000232696533, 0.47091999650001526, 0.651170015335083, -0.3721800148487091, -0.40130001306533813, 0.18182000517845154, 0.23388999700546265, -0.07276500016450882, -0.06168299913406372, 0.10655000060796738, 0.32062000036239624, 0.017674999311566353, -0.008029400371015072, 0.25137001276016235, -0.05513700097799301, 0.2831000089645386, -0.015949999913573265, -0.3434000015258789, 0.1720300018787384, -0.07633800059556961, 0.531029999256134, -0.07621899992227554, -0.7343199849128723, 0.19146999716758728, -0.5292900204658508, -0.3492000102996826, 0.3007200062274933, 0.43904998898506165, 0.20579999685287476, -0.284060001373291, 0.11703000217676163, -0.2314399927854538, -0.5006800293922424, -0.16469000279903412, -0.15926000475883484, 0.5258499979972839, -0.6541500091552734, 0.6363499760627747, 0.32006001472473145, 0.09695500135421753, -0.22620999813079834, 0.23312999308109283, 0.28703999519348145, 0.4934000074863434, 0.1830900013446808, 0.2836199998855591, 0.8216500282287598, 0.5564699769020081, 0.058396000415086746, -0.4557799994945526, 0.3150799870491028, 0.1681700050830841, 0.1950799971818924, -0.4192099869251251, -0.7907099723815918, 
-0.3449999988079071, 0.29210999608039856, 0.35554999113082886, -0.6499099731445312, 0.5112599730491638, 0.6408200263977051, 0.7992200255393982, 0.3671500086784363, 0.33063000440597534, -0.38646000623703003, 1.2268999814987183, -0.057287998497486115, 0.5094299912452698, -0.29142001271247864, 0.33546000719070435, 0.021177999675273895, -0.23921999335289001, -0.0690160021185875, -0.6284499764442444, 0.04693499952554703, -0.40467000007629395, 0.792739987373352, -0.3853299915790558, -0.1967500001192093, 0.06278900057077408, 0.13147999346256256, 0.40321001410484314, -0.2027599960565567, -0.11672999709844589, -0.37637001276016235, -0.1724500060081482, -0.10700999945402145, 0.10960999876260757, -0.08732999861240387, -0.5080900192260742, -0.05490599945187569, -0.08552400022745132, 0.13704000413417816, 0.3886300027370453, -0.06436499953269958, 0.02479800023138523, 0.06105700135231018, -0.20597000420093536, -0.2455500066280365, -0.04609600082039833, 0.29629001021385193, -0.03982200101017952, 0.15523000061511993, 0.7316100001335144, -0.2672500014305115, -0.27597999572753906, 0.1828799992799759, 0.44043999910354614, -0.2227099984884262, 0.32923999428749084, -0.4183399975299835, -0.14169999957084656, -0.29229000210762024, 0.20527000725269318, 0.3834100067615509, -0.10029000043869019, 0.05558599904179573, 0.14041000604629517, 0.2603200078010559, -0.47494998574256897, -0.3521200120449066, -0.2650800049304962, -0.3731899857521057, -0.1839900016784668, -0.3719100058078766, 0.831279993057251, 0.21703000366687775, -0.3838199973106384, -0.11225999891757965, -0.24884000420570374, -0.8558700084686279, 0.35095998644828796, -0.7122499942779541, 0.18603000044822693, 0.2597000002861023, -0.2952199876308441, 0.38425999879837036, -0.07567200064659119, -0.20356999337673187, -0.4885700047016144, -0.21320000290870667, -0.11032000184059143, 0.6810299754142761, 0.034908998757600784, -0.4590499997138977, -0.2009200006723404, 0.0061511998064816, -0.35060998797416687, 0.09332100301980972, -0.2318899929523468, 0.012114999815821648, -0.38558000326156616, 0.10927999764680862, -0.14553000032901764, -0.3552800118923187, -0.9319700002670288, -0.11884000152349472, -0.771369993686676, -0.30908000469207764, -0.2795799970626831, 0.42441999912261963, 0.02721799910068512, -0.5119600296020508, 0.6640700101852417, 0.7381299734115601, 0.1200999990105629, -0.34125998616218567, -0.2368600070476532, 0.334989994764328, -0.36379000544548035, -0.07307299971580505, 0.11691000312566757, 0.4012199938297272, -0.05649600178003311, -0.11428999900817871, 0.05226600170135498, -0.022709999233484268, -0.29638001322746277, -0.05584299936890602], u'bed': [-0.20441000163555145, -0.08241699635982513, -0.056366000324487686, -0.17798000574111938, -0.33011001348495483, 0.22672000527381897, 0.2983799874782562, -0.19279000163078308, 0.3087500035762787, -0.8318799734115601, -0.23714999854564667, -0.01905599981546402, -0.05654999986290932, 0.4060100018978119, -0.08846200257539749, 0.13436000049114227, 0.17642000317573547, -0.26912999153137207, 0.26743000745773315, 0.001304500037804246, -0.17152999341487885, 0.14414000511169434, -0.45096999406814575, 0.23472000658512115, -0.011610000394284725, -0.21644000709056854, 0.29732000827789307, -0.1106100007891655, 0.3204599916934967, -0.04416099935770035, -0.20622999966144562, 0.4507000148296356, 0.030282000079751015, -0.24265000224113464, -0.3868899941444397, 0.4339500069618225, -0.5033199787139893, 0.012601999565958977, -0.2564699947834015, -0.12818999588489532, -0.2842400074005127, -0.3689599931240082, 0.19886000454425812, 
-0.20928999781608582, 0.2831999957561493, 0.14226000010967255, 0.8471900224685669, -0.0657230019569397, 0.1693599969148636, -0.06725399941205978, 0.3401300013065338, 0.14643000066280365, -0.10262999683618546, -0.06190799921751022, -0.31589001417160034, -0.1544400006532669, 0.3181299865245819, -0.30562999844551086, -0.04297599941492081, 0.3069300055503845, 1.0147000551223755, -0.05498800054192543, 0.3304100036621094, 0.4958899915218353, -0.32104000449180603, -0.6015200018882751, -0.16177000105381012, -0.20192000269889832, -0.7809500098228455, 0.5239999890327454, -0.17222000658512115, 0.062206000089645386, -0.6988499760627747, -0.13636000454425812, -0.03346500173211098, -0.20192000269889832, 0.2064400017261505, -0.3057900071144104, 0.08963000029325485, -0.2777700126171112, -0.1706400066614151, -0.22822999954223633, -0.0768980011343956, 0.1433500051498413, -0.0018081000307574868, 0.39695000648498535, -0.11851999908685684, -0.08784600347280502, -0.39645999670028687, 0.028137000277638435, 0.7153499722480774, 0.4503999948501587, -0.27695000171661377, 0.4922899901866913, -0.22586999833583832, 0.017500000074505806, 0.21299999952316284, 0.0024180999025702477, 0.5722900032997131, -0.37185999751091003, 0.11366000026464462, -0.023166000843048096, -0.21358999609947205, -0.006822700146585703, 0.08216399699449539, 0.5239599943161011, 0.7094799876213074, 0.14340999722480774, -0.2258799970149994, -0.2725200057029724, -0.22363999485969543, 0.04168099910020828, -0.414110004901886, -0.4479199945926666, -0.12249000370502472, 0.30970999598503113, -0.11553999781608582, -0.06580699980258942, 0.2431900054216385, -0.34266000986099243, -0.34077000617980957, -0.18612000346183777, 0.03189399838447571, 0.18240000307559967, -0.10614000260829926, 0.10779000073671341, -0.20197999477386475, 0.0039623999036848545, -0.3413800001144409, 0.18653999269008636, 0.9513999819755554, -0.08602099865674973, 0.18653999269008636, 0.20607000589370728, -0.26245999336242676, -0.28321000933647156, -0.2781200110912323, 0.11388999968767166, -0.6629199981689453, 0.04699699953198433, 0.1876000016927719, -0.2974199950695038, 0.20945000648498535, 0.21125000715255737, -0.9660599827766418, 0.07895799726247787, -0.04649699851870537, 0.17539000511169434, 0.026295000687241554, -0.19446000456809998, 0.07756700366735458, 0.7570800185203552, -0.15892000496387482, 0.3225499987602234, 0.24556000530719757, 0.10220000147819519, 0.15841999650001526, 0.10226999968290329, 0.4970000088214874, 0.21543000638484955, 0.4234600067138672, -0.47692999243736267, 0.08551300317049026, 0.23091000318527222, 0.4683299958705902, 0.49581998586654663, 0.21895000338554382, 0.1770700067281723, 0.00995550025254488, 0.2789900004863739, -0.5443000197410583, 0.7873499989509583, 0.65065997838974, -0.0005991400103084743, -0.6934099793434143, -0.2117599993944168, -0.2596000134944916, 0.47523000836372375, -0.1406800001859665, -0.8155099749565125, 0.058556001633405685, -0.2576200067996979, 0.40318000316619873, -0.15714000165462494, 0.10362999886274338, 0.12525999546051025, 0.3558200001716614, 0.6542800068855286, 0.6251000165939331, 0.2399500012397766, 0.5432000160217285, 0.9260500073432922, -0.3815999925136566, -0.5050699710845947, -0.3199000060558319, 0.35811999440193176, -0.2606399953365326, 0.032248999923467636, -0.2747800052165985, 0.03418999910354614, 0.6150299906730652, -0.2681100070476532, 0.01224599964916706, 0.0027715000323951244, 0.3922399878501892, -0.45583999156951904, -0.09354200214147568, -0.5964000225067139, -0.4143199920654297, -0.2958900034427643, -0.42945998907089233, 
-0.4905799925327301, 0.44672998785972595, -0.24122999608516693, 0.522599995136261, 0.26420000195503235, 0.053704001009464264, -0.4118799865245819, -0.11574000120162964, -0.059675998985767365, 0.4803299903869629, 0.20760999619960785, 0.20919999480247498, 0.4545300006866455, -0.1954900026321411, 0.23307999968528748, -0.26015999913215637, -0.5038400292396545, -0.06966400146484375, -0.16674000024795532, 0.5260099768638611, -0.38846999406814575, 0.04602300003170967, -0.5001000165939331, 1.0733000040054321, -0.47005999088287354, -0.42298999428749084, 0.27518999576568604, -0.4365899860858917, -0.4448300004005432, 0.036219000816345215, 0.08017300069332123, -0.50382000207901, 0.4691399931907654, -0.6720499992370605, -0.1863899976015091, 0.2191299945116043, 0.08595100045204163, -0.6323400139808655, -0.07743799686431885, -0.40700000524520874, 0.7196900248527527, 0.5609899759292603, -0.5418999791145325, 0.7229999899864197, 0.29343000054359436, 0.3855400085449219, 0.10164999961853027, 0.28321999311447144, 0.0064789000898599625, -0.11845999956130981, -0.31299999356269836, 0.09041000157594681, -0.05831199884414673, -0.4856700003147125, -0.30149000883102417, 0.7588599920272827, -0.27480998635292053, -0.23522000014781952, 0.383760005235672, -0.2188200056552887, -0.7511299848556519, -0.4250899851322174, -0.0796549990773201, 0.2566800117492676, 0.056251998990774155, -1.5539000034332275, 0.24890999495983124, -0.695930004119873, 0.010844999924302101, -0.376910001039505, -0.18423999845981598, -0.29840001463890076, -0.34224000573158264, -0.05004600062966347, 0.36632999777793884, 0.06451400369405746, 0.25400999188423157, -0.3190700113773346, -0.5883700251579285, -0.029519999399781227, -0.09234700351953506, -0.0509600006043911, -0.3463500142097473, -0.1029599979519844, -0.315310001373291, 0.7278800010681152, 0.21167999505996704, 0.04718099907040596, 1.0277999639511108], u'cat': [-0.2935299873352051, 0.33246999979019165, -0.0473719984292984, -0.12246999889612198, 0.07195600122213364, -0.2340800017118454, -0.062380000948905945, -0.003719199914485216, -0.39462000131607056, -0.6941099762916565, 0.3673099875450134, -0.12140999734401703, -0.044484999030828476, -0.15267999470233917, 0.34863999485969543, 0.22925999760627747, 0.5436099767684937, 0.2521499991416931, 0.09797199815511703, -0.08730500191450119, 0.8705800175666809, -0.12211000174283981, -0.07982499897480011, 0.2871200144290924, -0.6856300234794617, -0.27265000343322754, 0.2205599993467331, -0.7575200200080872, 0.5629299879074097, 0.09137699753046036, -0.7100399732589722, -0.3142000138759613, -0.5682600140571594, -0.26684001088142395, -0.6010199785232544, 0.26958999037742615, -0.1799200028181076, 0.10700999945402145, -0.5785800218582153, 0.3816100060939789, -0.6712700128555298, 0.10926999896764755, 0.07942599803209305, 0.02237199991941452, -0.08114700019359589, 0.011181999929249287, 0.6708899736404419, -0.1909399926662445, -0.336760014295578, -0.4847100079059601, -0.35405999422073364, -0.15208999812602997, 0.44503000378608704, 0.4638499915599823, 0.38409000635147095, 0.0450810007750988, -0.5907899737358093, 0.21762999892234802, 0.38576000928878784, -0.4456700086593628, 0.009332000277936459, 0.44200000166893005, 0.09706199914216995, 0.38005000352859497, -0.11880999803543091, -0.4271799921989441, -0.31005001068115234, -0.025057999417185783, 0.12689000368118286, -0.13468000292778015, 0.11975999921560287, 0.7625300288200378, 0.2524000108242035, -0.26934000849723816, 0.06862899661064148, -0.10070999711751938, 0.01106599997729063, -0.18532000482082367, 
0.44982999563217163, -0.5750700235366821, 0.12278000265359879, -0.06487800180912018, 0.04445600137114525, -0.0209989994764328, -0.06983800232410431, -0.47328999638557434, -0.43073999881744385, 0.3915799856185913, -0.047814998775720596, -0.9365900158882141, -0.5512800216674805, -0.14219999313354492, -0.15828999876976013, 0.15623000264167786, 0.07046099752187729, 0.19891999661922455, 0.18941999971866608, -0.19338999688625336, -0.465939998626709, -0.028824999928474426, 0.005675199907273054, -0.005403799936175346, 0.43143999576568604, 0.12257000058889389, -0.26109999418258667, 0.04847000166773796, 0.3224399983882904, -0.31064000725746155, -0.10559000074863434, 0.9795399904251099, 0.06962600350379944, -0.023187000304460526, -0.8629299998283386, 0.4827300012111664, 0.23648999631404877, -0.0034704001154750586, -0.18931999802589417, 0.18588000535964966, 0.023211000487208366, -0.30643001198768616, -0.3571699857711792, 0.1960500031709671, -0.15839999914169312, -0.005862600170075893, 0.3524799942970276, 0.036052998155355453, -0.539330005645752, 0.49434998631477356, 0.4533199965953827, -0.18477000296115875, 0.040647998452186584, -0.09451700001955032, -0.07116000354290009, 0.7400500178337097, -0.11465000361204147, -0.2691600024700165, 0.08976499736309052, -0.2520500123500824, -0.21468999981880188, -0.38846999406814575, 0.3250899910926819, 0.25773000717163086, -0.5176399946212769, -0.38457000255584717, 0.02825400047004223, -0.2123199999332428, -0.2731100022792816, 0.6917799711227417, -0.3768100142478943, 0.14240999519824982, -0.24925999343395233, 0.40314000844955444, -0.05291600152850151, 0.07683999836444855, 0.2134999930858612, 0.10920999944210052, 0.049658000469207764, 0.02092999964952469, 0.11952999979257584, 0.28648000955581665, 0.8779100179672241, 0.08583799749612808, 0.3198300004005432, 0.518559992313385, -0.22628000378608704, 0.12402000278234482, 0.48805001378059387, 0.221110001206398, -0.5202100276947021, 0.0025106000248342752, -0.13304999470710754, -0.05256500095129013, 0.3274399936199188, 0.6498500108718872, 0.07242599874734879, -0.52742999792099, -0.20913000404834747, -0.27897000312805176, -0.10834000259637833, -0.10102999955415726, 0.15298999845981598, -0.36680999398231506, 0.08244500309228897, 0.17389999330043793, -0.28099000453948975, -0.06913600116968155, 0.7894999980926514, 0.06057099997997284, 0.386929988861084, -0.16494999825954437, -0.21800999343395233, 0.3328799903392792, -0.44567999243736267, -0.4989199936389923, -0.3443799912929535, -0.03560600057244301, -0.2423900067806244, -0.474700003862381, -0.17253999412059784, 0.07134900242090225, 1.40910005569458, 0.4616599977016449, 0.46546000242233276, -0.3097899854183197, 0.37202998995780945, 0.4789699912071228, -0.2887200117111206, -0.6551499962806702, -0.13628999888896942, -0.1428699940443039, -0.04842999950051308, -0.12785999476909637, 0.189410001039505, -0.0370509997010231, 0.5947099924087524, -0.0051617999561131, -0.008600899949669838, -0.33313000202178955, 0.2879999876022339, -0.05896500125527382, -0.6727499961853027, 0.15544000267982483, 0.07418700307607651, -0.36441001296043396, -0.021284999325871468, -0.06533700227737427, 0.13827000558376312, 0.008395000360906124, -0.041113000363111496, 0.2940100133419037, -0.10344000160694122, -0.05237099900841713, -0.630840003490448, 0.16311000287532806, 0.05282599851489067, -0.021796999499201775, -0.2811500132083893, -0.07836099714040756, -0.38124001026153564, 0.07808899879455566, 0.3841100037097931, -0.3462899923324585, -0.43220001459121704, 0.091730996966362, -0.6786699891090393, 
-0.041138000786304474, -0.5398100018501282, 0.10678000003099442, 0.03342999890446663, 0.8139600157737732, -0.19448000192642212, 0.02624800056219101, -0.14214999973773956, 0.2953999936580658, 0.6273800134658813, 0.2649900019168854, 0.6190999746322632, -0.04112999886274338, 0.12301000207662582, 0.3158000111579895, 0.10698000341653824, 0.023654000833630562, -0.4135499894618988, 0.03485200181603432, 0.2136099934577942, 0.045834001153707504, 0.053415000438690186, -0.36421000957489014, 0.19707000255584717, 0.5091599822044373, -0.1949000060558319, -0.1878799945116043, -0.24448999762535095, -0.6339700222015381, -0.23125000298023224, -0.1882299929857254, -1.0600999593734741, 0.47793999314308167, -1.010200023651123, 0.2460400015115738, -0.4875999987125397, 0.7914599776268005, -0.1104699969291687, -0.21762000024318695, -0.6177999973297119, 0.2781499922275543, -0.0981689989566803, -0.06320499628782272, 0.06606899946928024, -0.6930500268936157, -0.2592799961566925, 0.4459100067615509, -0.641979992389679, -0.33083999156951904, -0.30153998732566833, -0.5635899901390076, 0.6050099730491638, -0.09673000127077103, 0.44444000720977783, 0.2200700044631958], u'rope': [-0.0943169966340065, -0.2426300048828125, 0.2742699980735779, -0.7486699819564819, -0.35912999510765076, 0.028551999479532242, 0.18554000556468964, -0.19391000270843506, -0.3560599982738495, -0.40347999334335327, -0.1872200071811676, 0.2795700132846832, 0.26319000124931335, -0.4695799946784973, -0.14503000676631927, -0.040088001638650894, -0.6670699715614319, 0.18599000573158264, -0.24291999638080597, 0.0614980012178421, 0.021668000146746635, -0.8330100178718567, -0.19594000279903412, 0.34266000986099243, 0.44686999917030334, -0.15105000138282776, -0.19321000576019287, -0.1174200028181076, -0.5360000133514404, 1.2182999849319458, 0.09608999639749527, -0.21642999351024628, 0.21674999594688416, -0.19197000563144684, -0.19787999987602234, 0.45052000880241394, 0.04361899942159653, -0.06659399718046188, 0.6597700119018555, 0.40988001227378845, -0.35199999809265137, -0.3865300118923187, -0.13268999755382538, -0.6438000202178955, -0.12264999747276306, 0.5088300108909607, 0.6012200117111206, -0.6269000172615051, 0.1614599972963333, 0.06023100018501282, -0.15148000419139862, 0.10888999700546265, -0.1621900051832199, -0.5442600250244141, -0.38102999329566956, -0.20868000388145447, -0.754800021648407, -0.6194000244140625, -0.6729099750518799, 0.5089899897575378, 0.9332299828529358, 0.5684800148010254, 0.3211599886417389, 0.6805999875068665, 0.4037100076675415, -0.5570600032806396, -0.5030500292778015, 0.46198999881744385, 0.2575399875640869, 0.06407400220632553, -0.49994999170303345, 0.11547999829053879, -0.5806499719619751, 0.10123000293970108, 0.3411499857902527, 0.052427999675273895, 0.026820000261068344, 0.14122000336647034, -0.04787199944257736, -0.49625998735427856, 0.6945099830627441, -0.7625799775123596, 0.29526999592781067, -0.16087999939918518, -0.46713000535964966, -0.3280700147151947, 0.0545319989323616, -0.008987599983811378, -0.7785999774932861, 0.2588900029659271, 0.5420600175857544, 0.16708000004291534, 0.42337000370025635, -0.12931999564170837, 0.1440799981355667, -0.4071899950504303, -0.17399999499320984, 0.5814700126647949, 0.06638500094413757, -0.53329998254776, 0.13113999366760254, 0.5431699752807617, -0.16496999561786652, -0.22881999611854553, 0.3546200096607208, -0.004712900146842003, 0.3089599907398224, -0.11082000285387039, -0.6329399943351746, -0.2083200067281723, -0.40911999344825745, 0.3789600133895874, -0.3088900148868561, 
-0.25902000069618225, 0.3400999903678894, 0.12195999920368195, -0.1290300041437149, -0.3642899990081787, -0.1390800029039383, -0.3589699864387512, -0.1030300036072731, -0.6093199849128723, 0.9408199787139893, -0.1917400062084198, 0.07273200154304504, 0.0999009981751442, -0.003168300027027726, 0.22066999971866608, -0.02096400037407875, 0.07295499742031097, 0.41697001457214355, 0.36796998977661133, 0.42882001399993896, 0.37081998586654663, -0.5848699808120728, 0.14340999722480774, -0.14643000066280365, 0.22439000010490417, -0.417820006608963, -0.16467000544071198, -0.49955999851226807, 0.17942999303340912, 0.1281599998474121, -0.7712000012397766, -0.19433000683784485, 0.26614001393318176, 0.35837000608444214, -0.33981001377105713, 0.034873999655246735, -0.1531900018453598, -0.32471001148223877, 0.7478799819946289, 0.24693000316619873, -0.1969899982213974, 0.12681999802589417, -0.3294999897480011, 0.4483399987220764, -0.22696000337600708, 0.0741410031914711, 0.5535500049591064, 0.25949999690055847, -0.38231998682022095, -0.296999990940094, -0.07440099865198135, 0.1451999992132187, 0.16203999519348145, -0.12205000221729279, 0.02065199986100197, 0.23350000381469727, 0.1660500019788742, -0.4876900017261505, -0.15094999969005585, 0.04870299994945526, 0.07656899839639664, 0.23469999432563782, -0.032391998916864395, -0.25303998589515686, 0.7720199823379517, 0.15964999794960022, -0.7422999739646912, 0.7575600147247314, -0.06258600205183029, 0.06207599863409996, 0.10881000012159348, -0.019696999341249466, -0.12797999382019043, 0.322299987077713, 0.48860999941825867, 0.4690299928188324, 0.034189000725746155, -0.24453000724315643, 0.38144999742507935, 0.19614000618457794, -0.09182699769735336, -0.0894009992480278, -0.10523000359535217, -0.7825400233268738, -0.423009991645813, 0.6503999829292297, -0.34747999906539917, 0.7985600233078003, 0.8342400193214417, 0.7123299837112427, 0.3707599937915802, 0.2252500057220459, 0.4103499948978424, 0.2258400022983551, 0.2917900085449219, -0.11396999657154083, 0.17213000357151031, -0.40705999732017517, 0.3407000005245209, 0.1273300051689148, -0.12790000438690186, 0.053977999836206436, 0.08155400305986404, 0.08103100210428238, -0.5002099871635437, -0.2798900008201599, -0.36414000391960144, -0.18342000246047974, -0.07904700189828873, 0.2917799949645996, -0.5389000177383423, 0.24226999282836914, -0.15605999529361725, -0.05761399865150452, 0.08842699974775314, -0.518779993057251, -0.5829499959945679, 0.60794997215271, -0.35844001173973083, -0.034717001020908356, -0.05718399956822395, 0.8741199970245361, -0.1842000037431717, -0.43907999992370605, -0.41148999333381653, 0.3740200102329254, 0.06697600334882736, 0.32745999097824097, 0.1652200073003769, -0.2974100112915039, -0.23322999477386475, -0.31773000955581665, -0.09528499841690063, 0.12307000160217285, 0.5505599975585938, -0.021856000646948814, 0.280349999666214, -0.7208499908447266, -0.00233189994469285, 0.6764799952507019, -1.0499000549316406, 0.955299973487854, 0.48590001463890076, 0.06651300191879272, 0.0657849982380867, 0.6047999858856201, 0.05724100023508072, 0.19351999461650848, -0.4938499927520752, 0.14729000627994537, -0.3216499984264374, 0.03800300136208534, 0.25457000732421875, 0.1446399986743927, 0.07027299702167511, -0.5362300276756287, 0.24368999898433685, 0.23542000353336334, -0.1579200029373169, -0.2344300001859665, 0.3156299889087677, 0.04806600138545036, -0.2558000087738037, -0.6995999813079834, -0.6852499842643738, -0.8328700065612793, 0.1521099954843521, 0.188060000538826, -0.2169100046157837, 
0.24115000665187836, -0.4504700005054474, -0.28395000100135803, -0.06615500152111053, 0.12596000730991364, -0.337909996509552, -0.3996700048446655, 0.5011699795722961, 0.2969200015068054, 0.22491000592708588, -0.4664500057697296, 0.02107599936425686, 0.18061000108718872, 0.16234999895095825, 0.23746000230312347, -0.04300199821591377, 0.22734999656677246, -0.06294599920511246], u'soup': [-0.24124999344348907, -0.07225000113248825, 0.5400800108909607, -0.11918999999761581, 0.167930006980896, 0.10588999837636948, 0.39642998576164246, 0.2334199994802475, -0.4023900032043457, -0.3947199881076813, -0.058132000267505646, -0.013670000247657299, -0.386680006980896, 0.3858799934387207, -0.35752999782562256, -0.4929400086402893, -0.17556999623775482, 0.50382000207901, -0.33351001143455505, 0.5926300287246704, 0.12160000205039978, -0.10948000103235245, -0.5301899909973145, 0.4242100119590759, 0.10565000027418137, -0.22018000483512878, 0.1144300028681755, 0.13714000582695007, 0.01924699917435646, -0.4927299916744232, -0.9297500252723694, 0.3755599856376648, -0.41394999623298645, 0.25411999225616455, -0.18649999797344208, 0.516979992389679, -0.12279000133275986, -0.11196000128984451, -0.5784299969673157, 0.16637000441551208, -0.047520000487565994, -0.08978600054979324, 0.09975200146436691, -0.14835000038146973, 0.0751269981265068, -0.12399999797344208, 0.754360020160675, 0.27612000703811646, -0.8261100053787231, 0.5112400054931641, 0.25953999161720276, 0.15782000124454498, 0.6270999908447266, 0.4255000054836273, -0.5620499849319458, 0.06621299684047699, 0.34556999802589417, 0.4873200058937073, 0.14278000593185425, -0.011436999775469303, 0.3805299997329712, -0.4340299963951111, 0.4280500113964081, -0.06575600057840347, -0.2987099885940552, 0.1511099934577942, -0.047775998711586, 0.308459997177124, -0.20127999782562256, 0.24755999445915222, 0.02374199964106083, -0.16357000172138214, -0.15561999380588531, -0.6469100117683411, -0.5625200271606445, -0.1397400051355362, 0.7210400104522705, 0.15932999551296234, 0.04042999818921089, -0.27967000007629395, -0.008451799862086773, 0.40342000126838684, -0.19325999915599823, -0.6717900037765503, 0.4504700005054474, -0.36800000071525574, -0.21020999550819397, 0.04252300038933754, 0.31363001465797424, -0.8996599912643433, -0.036281999200582504, -0.06612399965524673, -0.2912600040435791, -0.3885500133037567, -0.2923299968242645, -0.1452299952507019, 0.25165998935699463, 0.409960001707077, -0.2048099935054779, -0.06570199877023697, -0.14414000511169434, -0.5838099718093872, 0.2963300049304962, -0.1668899953365326, 0.022579999640583992, -0.5938199758529663, -0.17745999991893768, -0.1456100046634674, -0.21977999806404114, 0.8690599799156189, -0.03943299874663353, 0.21164999902248383, -0.1274300068616867, -0.9873999953269958, -0.5030800104141235, -0.28942999243736267, -1.0918999910354614, 0.05583199858665466, 0.04199599847197533, -0.1522500067949295, 0.06229899823665619, 0.20087000727653503, 0.6196699738502502, 0.35815998911857605, -0.05196300148963928, -0.15750999748706818, 0.47374001145362854, 0.5663999915122986, -0.34393998980522156, 0.5394099950790405, 0.38113000988960266, 0.9171199798583984, -0.4641300141811371, 0.4078400135040283, 0.23303000628948212, -0.4322200119495392, -0.062258001416921616, -0.08753299713134766, -0.47407999634742737, 0.5983800292015076, 0.11248999834060669, 0.7466899752616882, 0.06058900058269501, -0.5537800192832947, -0.44686999917030334, 0.9240000247955322, 0.16809000074863434, 0.5189599990844727, 0.47652000188827515, -0.488209992647171, 
-0.9747700095176697, 0.6143900156021118, -0.14124000072479248, 0.08754000067710876, -0.3705599904060364, -0.20520000159740448, -0.4298799932003021, -0.9411200284957886, 0.2784099876880646, 0.2334900051355362, 0.18579000234603882, 0.24650000035762787, 0.18655000627040863, 0.055270999670028687, -0.0685959979891777, 0.5149700045585632, -0.015869999304413795, -0.12466999888420105, -0.4095099866390228, -0.4975599944591522, 0.39035001397132874, -0.08011800050735474, -0.8344900012016296, 0.44297999143600464, -0.39120998978614807, 0.13853000104427338, 0.26980000734329224, 0.17868000268936157, 0.9211000204086304, -0.19505000114440918, -0.0841509997844696, 0.029569000005722046, 0.44773998856544495, -0.14746999740600586, -0.08306200057268143, 0.18151000142097473, 0.1444000005722046, 0.033984001725912094, -0.18368999660015106, -0.6966000199317932, 0.24753999710083008, 0.8009999990463257, -0.7877900004386902, -0.23923000693321228, 0.02773899957537651, 0.08102600276470184, -0.3383300006389618, 0.3422299921512604, -0.8171700239181519, 0.14600999653339386, 0.6963300108909607, 0.16214999556541443, 0.8031899929046631, 0.680180013179779, -0.13840000331401825, 0.6422600150108337, 0.25703001022338867, -0.20175999402999878, -0.7224000096321106, 0.05287199839949608, -0.26023998856544495, -0.19871999323368073, -0.006503600161522627, -0.09502299875020981, 0.01925400085747242, 0.2820099890232086, 0.5191699862480164, -0.2443300038576126, -0.061434999108314514, 0.6361700296401978, 0.857729971408844, 0.44391000270843506, -0.4436799883842468, -0.6616700291633606, -0.31891998648643494, -0.5375999808311462, 0.3258900046348572, -0.4632599949836731, -0.0001904700038721785, -0.212009996175766, -0.11412999778985977, -0.23196999728679657, -0.16211000084877014, 0.39236998558044434, 0.5067700147628784, 0.3600600063800812, 0.4900299906730652, 0.036389999091625214, -0.8295999765396118, 0.18110999464988708, -0.3228900134563446, -0.35822001099586487, -0.3645099997520447, -0.27138999104499817, -0.990369975566864, 0.019089000299572945, 0.44051000475883484, -0.4411199986934662, -0.09274400025606155, -0.7043799757957458, 0.3942199945449829, -0.22700999677181244, 0.36959999799728394, 0.14868000149726868, -0.06671199947595596, -0.049754999577999115, -0.22411000728607178, -0.14708000421524048, -0.20340000092983246, 0.048009999096393585, -0.42184001207351685, -0.07154499739408493, -0.3747499883174896, -0.4988900125026703, -0.2049500048160553, -0.21908999979496002, -0.17674000561237335, 0.003136300016194582, 0.06972800195217133, -0.032795000821352005, -0.8071200251579285, -0.3148399889469147, -0.06529299914836884, 0.4102199971675873, 0.3098900020122528, 0.12009000033140182, -0.41187000274658203, 0.5752999782562256, -0.7513399720191956, -0.5771700143814087, -0.2777400016784668, -0.31571999192237854, 0.31878000497817993, 0.07886099815368652, 0.20760999619960785, 0.2493399977684021, 0.3023900091648102, 0.45219001173973083, 0.5583400130271912, 0.570900022983551, -0.121799997985363, -0.31918999552726746, 0.2205899953842163, 0.04063300043344498, -0.30649998784065247, -1.333400011062622, 0.7605199813842773, -0.5418199896812439, 0.09248699992895126, 0.30757999420166016], u'street': [-0.09340299665927887, -0.3751699924468994, -0.07293500006198883, 0.09871699661016464, -0.10385999828577042, -0.03142400085926056, -0.0818990021944046, 0.1925400048494339, 0.05237799882888794, -1.1166000366210938, 0.057732999324798584, 0.030688999220728874, 0.5312899947166443, 0.7343400120735168, 0.6517800092697144, 0.07451099902391434, -0.4781799912452698, 
0.5612900257110596, 0.4123600125312805, -0.1507200002670288, -0.3580099940299988, -0.059158001095056534, 0.5804700255393982, -0.15253999829292297, 0.11918000131845474, 0.22864000499248505, 0.20366999506950378, 0.035868000239133835, 0.32280001044273376, 0.5784199833869934, 0.43331998586654663, 0.264849990606308, 0.021720999851822853, 0.38051000237464905, -1.0134999752044678, -0.007625299971550703, -0.6291599869728088, -0.31022000312805176, 0.3050999939441681, -0.5843700170516968, -0.5089499950408936, 0.45006000995635986, -0.43244999647140503, 1.062399983406067, 0.35774001479148865, 0.6971700191497803, 0.4963400065898895, -0.06191299855709076, -0.5296000242233276, -0.01693600043654442, -0.32482001185417175, -0.10384999960660934, 0.5154899954795837, 0.32201001048088074, 1.0924999713897705, 0.007282999809831381, 0.0410039983689785, 0.44189000129699707, 0.02367899939417839, -0.6997399926185608, 0.6176300048828125, -0.5575399994850159, 0.6976100206375122, 0.24289000034332275, 0.2924099862575531, -0.2031400054693222, -0.024651000276207924, 0.05982299894094467, 0.5274400115013123, -0.5506100058555603, -0.744379997253418, -0.10087999701499939, -0.4115999937057495, -0.1345299929380417, 0.06098200008273125, -0.026722999289631844, -0.03638400137424469, 0.026151999831199646, 0.12219999730587006, -0.3629800081253052, 0.34244000911712646, -0.38447999954223633, 0.42153000831604004, 0.055757999420166016, 0.6331300139427185, -0.32006001472473145, 0.21567000448703766, 0.3677299916744232, 0.49790000915527344, 0.36570999026298523, 0.4107399880886078, -0.41363000869750977, 0.2842400074005127, -0.06956399977207184, -0.3991999924182892, 0.7069000005722046, -0.348580002784729, -0.2772800028324127, -0.10520000010728836, -0.5673499703407288, -0.2142699956893921, 0.4123699963092804, -0.3488300144672394, -0.274399995803833, 0.27246999740600586, 0.36041998863220215, 0.4012100100517273, -0.37606000900268555, 0.8827199935913086, 0.5166000127792358, -0.7599300146102905, -0.4565100073814392, 0.28365999460220337, -0.18393999338150024, 0.1745299994945526, 0.29058998823165894, 0.20866000652313232, -0.12138999998569489, -0.5905699729919434, -0.9176899790763855, 0.11474999785423279, -0.09734000265598297, 0.12886999547481537, -0.40393999218940735, 0.05816600099205971, 0.04464900121092796, -0.21568000316619873, -0.5340999960899353, 0.016290999948978424, 0.06921499967575073, -0.04368099942803383, 0.7878100275993347, -0.14388999342918396, -0.34676000475883484, 0.04280799999833107, 0.16571000218391418, -0.3120799958705902, -0.05790200084447861, -0.215829998254776, -0.08201800286769867, 0.3614700138568878, 0.7899100184440613, 0.19559000432491302, 0.5575699806213379, -0.45462000370025635, 0.4047999978065491, -0.087848000228405, -0.20295000076293945, -0.0996050015091896, -0.23586000502109528, -0.023814000189304352, 0.8494499921798706, -0.018542999401688576, 0.5113999843597412, 0.10001000016927719, 0.3342199921607971, 0.15584999322891235, 0.464709997177124, -0.03467100113630295, 0.0733880028128624, -0.03647900000214577, 0.18161000311374664, -0.2747800052165985, -0.5221800208091736, -0.9736899733543396, 0.0009524599881842732, 0.3664799928665161, 0.5465700030326843, 0.09746400266885757, 0.2995299994945526, 0.39570000767707825, -0.4799000024795532, 0.11484000086784363, -0.45440998673439026, 0.09460099786520004, -0.35269999504089355, -0.41034001111984253, -0.2278199940919876, -0.21780000627040863, -0.014624999836087227, -0.2830899953842163, 0.5631399750709534, 0.03543400019407272, 0.26785001158714294, 0.3087399899959564, 0.4593000113964081, 
0.2743000090122223, -0.3646399974822998, 0.04908899962902069, 0.30695998668670654, -0.15222999453544617, -0.22247999906539917, -0.6270099878311157, -0.2219099998474121, -0.5466499924659729, 0.2509799897670746, -0.08652400225400925, -0.8220099806785583, -0.5600500106811523, -0.21258999407291412, 1.1225999593734741, -0.2358900010585785, 0.22184999287128448, -0.21164999902248383, -0.20552000403404236, -0.1141899973154068, -0.24654999375343323, 0.2160699963569641, 0.36684998869895935, -0.10419999808073044, -0.2956100106239319, -0.12283000349998474, -0.22091999650001526, -0.494049996137619, -0.07245200127363205, 0.5978800058364868, 0.6240599751472473, -0.04027299955487251, -0.23277999460697174, -0.6026399731636047, 0.6902999877929688, -0.15354999899864197, -0.3448599874973297, -0.30630001425743103, 0.15425999462604523, 0.03054800070822239, 0.04005200043320656, -0.26177000999450684, 0.3799799978733063, 0.3834899961948395, -0.7534499764442444, 0.37843000888824463, -0.11178000271320343, -0.2313700020313263, 0.3098199963569641, 0.4133000075817108, 0.19480000436306, 0.3582800030708313, -0.4389300048351288, 0.14390000700950623, 0.1998099982738495, -0.8293399810791016, 0.2299100011587143, -0.19268999993801117, -0.4958899915218353, -0.6133599877357483, 0.267300009727478, -0.2990500032901764, 0.6001099944114685, 0.2202800065279007, 0.8896099925041199, -0.18786999583244324, -0.21850000321865082, -0.0031234000343829393, 0.04384300112724304, -0.049084000289440155, -0.3482699990272522, 0.273389995098114, 0.39746999740600586, 0.1783899962902069, -0.22721999883651733, 0.1722699999809265, 0.17744000256061554, 0.13313999772071838, 0.031132999807596207, -0.1627800017595291, -0.06624499708414078, 0.0786530002951622, -0.015424000099301338, -0.1324699968099594, -0.32969000935554504, 0.26208001375198364, 0.29818999767303467, -0.11495000123977661, -0.022954000160098076, 0.4925599992275238, -1.6806999444961548, 0.05832900106906891, -0.02462800033390522, 0.5034300088882446, -0.040640998631715775, -0.044162001460790634, 0.549560010433197, -0.17510999739170074, 0.08901099860668182, 0.6776400208473206, 0.2777000069618225, -0.0226610004901886, 0.2459699958562851, 0.03847799822688103, -0.42601001262664795, -0.21453000605106354, -0.41106000542640686, 0.7630599737167358, 0.14282000064849854, 0.25826001167297363, 0.11136999726295471, 0.21788999438285828, -0.12212999910116196, 0.390390008687973], u'flame': [0.27011001110076904, -0.2535099983215332, 0.36340001225471497, -0.5694000124931335, 0.0033855000510811806, 0.11475999653339386, 0.18322999775409698, 0.47321000695228577, -0.05187100172042847, -0.6174700260162354, 0.0012115000281482935, 0.4253399968147278, 0.13610999286174774, -1.1543999910354614, -0.021957000717520714, 0.16899999976158142, -0.7338799834251404, 0.18497000634670258, -0.4199199974536896, 0.5313000082969666, -0.89274001121521, 0.25029000639915466, 0.37623000144958496, 0.02785399928689003, 1.142799973487854, 0.17443999648094177, -0.04780599847435951, -0.3728399872779846, -0.3275499939918518, 0.2984899878501892, 0.6694200038909912, -0.11038000136613846, -0.3918899893760681, 0.5105699896812439, -0.02429799921810627, 0.46487000584602356, -0.8052300214767456, -0.4709100127220154, 0.5394399762153625, 0.33114001154899597, 0.43540000915527344, -0.3508000075817108, -0.09773100167512894, -0.643090009689331, -0.03891199827194214, -0.22563999891281128, -0.22366000711917877, 0.2913300096988678, 0.199180006980896, 0.1296200007200241, 0.14910000562667847, 0.1397700011730194, -0.14291000366210938, -0.5293899774551392, 
-0.5165200233459473, 0.3712800145149231, -0.49202001094818115, -0.2939999997615814, 0.260560005903244, 0.05286699905991554, -0.3811100125312805, 0.18921999633312225, 0.25920000672340393, 0.2106499969959259, 0.14451999962329865, -0.21164999902248383, 0.21254999935626984, 0.3184199929237366, 0.07379200309515, 0.16200999915599823, 0.006760099902749062, -0.4670099914073944, 0.028547000139951706, -0.21570999920368195, -0.633679986000061, 0.6564800143241882, 0.5082299709320068, -0.600849986076355, 0.1551699936389923, 0.23193000257015228, -0.4491899907588959, 0.15323999524116516, 0.23833000659942627, -0.3491399884223938, 0.062070999294519424, 0.0279690008610487, 0.2571899890899658, 0.17233000695705414, 0.3818100094795227, -0.6114199757575989, 0.2799200117588043, -0.5372999906539917, 0.09821999818086624, -0.059160999953746796, 0.38201001286506653, -0.11450999975204468, 0.19160999357700348, 0.6204699873924255, 0.11403000354766846, -0.43852001428604126, -0.18973000347614288, 0.07768599689006805, -0.35740000009536743, 0.26368001103401184, 0.5073000192642212, 0.07698799669742584, 0.5743499994277954, -0.25418999791145325, -0.1069599986076355, 0.4017300009727478, 0.06955999881029129, 0.3785800039768219, 0.3822900056838989, 0.018871000036597252, -0.21850000321865082, -0.23455999791622162, -0.48153001070022583, 0.5626500248908997, 0.24754999577999115, -0.6499800086021423, -0.28501999378204346, -0.13979999721050262, -0.3789699971675873, 0.14030000567436218, -0.14869000017642975, -0.17328999936580658, 0.37257999181747437, 0.06770999729633331, -0.1252399981021881, 0.03299799934029579, 0.5090100169181824, 0.5204399824142456, 0.5740699768066406, 0.3184199929237366, 0.4328399896621704, 0.05736299976706505, -0.12058000266551971, -0.20699000358581543, 0.3361299932003021, 0.2410700023174286, 0.13043999671936035, -0.5504099726676941, -0.296099990606308, -0.49410000443458557, -0.14771999418735504, -0.039712999016046524, 0.09362199902534485, 0.27741000056266785, -0.258109986782074, -0.19514000415802002, 0.21171000599861145, 0.16147999465465546, 0.10200999677181244, -0.18252000212669373, 0.5888000130653381, -0.15981000661849976, 0.11495000123977661, -0.5630999803543091, -0.39395999908447266, -0.45974001288414, 0.05944700166583061, -0.5942100286483765, -0.3058600127696991, 0.087677001953125, -0.022021999582648277, 0.21241000294685364, -0.13224999606609344, 0.4260700047016144, 0.03659699857234955, -0.2743000090122223, 0.1222900003194809, 0.07012300193309784, 0.49862000346183777, 0.13874000310897827, -0.48851001262664795, -0.45274001359939575, 0.09143199771642685, -0.30272001028060913, 0.4657500088214874, -0.006570599973201752, -0.056644000113010406, -0.08306100219488144, 0.3218599855899811, -0.05067000165581703, 0.1684499979019165, 0.29739001393318176, 0.6333500146865845, 0.29482999444007874, -0.24067999422550201, -0.5794000029563904, 0.39904001355171204, -0.4044399857521057, -0.46876999735832214, -0.1899300068616867, -0.24232999980449677, -0.3577899932861328, -0.1144300028681755, 0.017805000767111778, 0.14174999296665192, 0.5341399908065796, 0.666159987449646, 0.6035100221633911, 0.7715700268745422, 0.2217400074005127, 0.31703999638557434, -0.5551699995994568, -0.15443000197410583, -0.002710100030526519, 0.24672000110149384, 0.3799099922180176, 0.567550003528595, 0.14271999895572662, -0.342960000038147, -0.23844000697135925, 0.10559000074863434, -0.18525999784469604, 0.41117000579833984, -0.4416700005531311, 0.1667499989271164, -0.416049987077713, 0.5124599933624268, 0.11542999744415283, 0.0903640016913414, 
-0.2916699945926666, -0.11744000017642975, -0.5202400088310242, -0.2581599950790405, 0.18458999693393707, -0.16851000487804413, -0.15665000677108765, 0.09670700132846832, -0.23194000124931335, 0.043473001569509506, -0.33643999695777893, -0.3186900019645691, 0.02384999953210354, -0.2961199879646301, -0.40814998745918274, -0.17228999733924866, -0.16874000430107117, 0.08258800208568573, -0.09052400290966034, -0.8542799949645996, -0.11838000267744064, 0.2590700089931488, 0.3429900109767914, 0.575689971446991, -0.05475800111889839, -0.09476900100708008, -0.2534100115299225, 0.12764999270439148, -0.7781000137329102, -0.5520200133323669, -0.47334998846054077, 0.20058999955654144, 0.10068999975919724, -0.23681999742984772, -0.746209979057312, -0.31314000487327576, -0.43296000361442566, -0.06463900208473206, -0.10496000200510025, 0.4212999939918518, 0.23319000005722046, -0.10478000342845917, 0.5248100161552429, -0.4550600051879883, 0.21150000393390656, 0.3536899983882904, -0.22628000378608704, 0.2257400006055832, -0.15744000673294067, 0.5693399906158447, 0.4948599934577942, -0.5419300198554993, 0.21886999905109406, -1.054800033569336, -0.27566999197006226, -0.593529999256134, -0.5763000249862671, -0.017100999131798744, 0.06599800288677216, 0.14386999607086182, 0.5343800187110901, 0.13882000744342804, 0.10548999905586243, -0.10819999873638153, -0.002472599968314171, -0.19884000718593597, 0.007629800122231245, 0.8526800274848938, 0.4972800016403198, 0.08968500047922134, -0.24956999719142914, -0.24428999423980713, -0.01448499970138073, 0.5171200037002563, 0.45897001028060913, -0.010463000275194645, 0.5952799916267395], u'cake': [0.05165499821305275, 0.30052000284194946, -0.042413998395204544, -0.29840999841690063, -0.3255299925804138, 0.11941999942064285, -0.016083000227808952, -0.3040899932384491, 0.1571200042963028, -0.3636699914932251, -0.01546700019389391, -0.8920400142669678, -0.10530000180006027, 0.382999986410141, -0.7236999869346619, -0.7616400122642517, -0.1623300015926361, -0.09935099631547928, -0.1403300017118454, -0.13595999777317047, 0.33792999386787415, 0.10614000260829926, -0.1712699979543686, 0.6398299932479858, -0.3504199981689453, -0.14674000442028046, -0.3121100068092346, 0.1729000061750412, -0.4112499952316284, -0.7890999913215637, -0.04059800133109093, 0.376800000667572, -0.46110999584198, -0.24160000681877136, -0.7782300114631653, 0.5484700202941895, -0.180649995803833, 0.4404900074005127, -0.42348000407218933, 0.06128599867224693, -0.31683000922203064, 0.14680999517440796, -0.0002530700003262609, 0.2996099889278412, 0.07157599925994873, -0.2604300081729889, 0.5842099785804749, -0.4302400052547455, 0.2962299883365631, 0.18432000279426575, 0.31057000160217285, 0.10113999992609024, 0.4884200096130371, 0.2939099967479706, -0.7626199722290039, -0.22393999993801117, -0.23321999609470367, 0.07040499895811081, 0.6204900145530701, -0.15335999429225922, 0.3961400091648102, -0.10040999948978424, 0.4216499924659729, 0.003799600061029196, -0.05796699970960617, -0.30990999937057495, 0.3520300090312958, 0.18690000474452972, -0.37860000133514404, 0.19654999673366547, 0.2635599970817566, 0.003854600014165044, -0.735230028629303, -0.040915001183748245, -0.12581999599933624, 0.3235599994659424, 0.4012500047683716, -0.010360999964177608, -0.30803999304771423, -0.2360599935054779, 0.22924000024795532, 0.49990999698638916, 0.1143999993801117, -0.13440999388694763, 0.4365299940109253, -0.6776999831199646, -0.49015000462532043, 0.403219997882843, 0.20566999912261963, -0.2903200089931488, 
-0.46209999918937683, -0.31512001156806946, -0.373879998922348, -0.5097100138664246, -0.16394999623298645, 0.038878001272678375, 0.0791660025715828, 0.58051997423172, -0.04074399918317795, -0.19256000220775604, -0.10216999799013138, -0.10687000304460526, 0.04738499969244003, -0.7221900224685669, 0.1740099936723709, -0.25812000036239624, 0.18156999349594116, 0.6662300229072571, -0.6748700141906738, 0.29771000146865845, 0.4307500123977661, 0.5814499855041504, 0.020022999495267868, -0.6154800057411194, -0.18818999826908112, -0.10430999845266342, -0.5593799948692322, 0.6235100030899048, 0.0568850003182888, -0.45789000391960144, 0.05938899889588356, 0.18540999293327332, 0.11231999844312668, -0.19101999700069427, -0.5771499872207642, -0.5975599884986877, 0.3200800120830536, 0.5944300293922424, -0.4917599856853485, 0.5487899780273438, -0.2434699982404709, 1.1450999975204468, -0.030267000198364258, 0.341839998960495, 0.12408000230789185, -0.3720000088214874, -0.8089900016784668, 0.27913999557495117, -0.26892998814582825, -0.3256100118160248, 0.7345100045204163, 0.08876299858093262, -0.9727299809455872, -0.33392998576164246, 0.37060999870300293, 0.36774998903274536, -0.07408100366592407, 0.34769999980926514, -0.09224399924278259, -0.28022000193595886, -0.4024899899959564, 0.3966900110244751, 0.24538999795913696, 0.5858799815177917, -0.11376000195741653, 0.27893000841140747, -0.2013300061225891, -0.3446199893951416, -0.44029998779296875, 0.12347999960184097, -0.1106799989938736, -0.31134000420570374, -0.1568399965763092, -0.010912000201642513, -0.36924999952316284, -0.02811400033533573, 0.10745000094175339, 0.07188999652862549, 0.31630000472068787, -0.34275999665260315, -0.38012999296188354, -0.18440000712871552, 0.026226000860333443, -0.8007100224494934, -0.394540011882782, 0.16575999557971954, -0.20273999869823456, -0.005646599922329187, 0.1699099987745285, -0.774399995803833, -0.06998199969530106, 0.22481000423431396, 1.01419997215271, -0.26840001344680786, -0.18935999274253845, -0.12444999814033508, 1.0525000095367432, -0.04968100041151047, 0.12233000248670578, -0.38885000348091125, 0.15306000411510468, 0.7675999999046326, -0.34718000888824463, 0.0909539982676506, -0.05830100178718567, -0.014790000393986702, -0.40529999136924744, 0.2942200005054474, -0.046227000653743744, -0.2610599994659424, 0.7628999948501587, -0.2994399964809418, 0.5188500285148621, 0.5685799717903137, 0.06530400365591049, -0.208079993724823, 0.6866099834442139, 0.159620001912117, 0.15008999407291412, -0.3240799903869629, -0.32058000564575195, -0.39212000370025635, -0.2328300029039383, -0.1360200047492981, 0.042660001665353775, 0.1146399974822998, 0.3006199896335602, -0.8049100041389465, 0.39649999141693115, 0.4156700074672699, 0.527400016784668, 0.790910005569458, -0.26218000054359436, 0.28466999530792236, -0.5550100207328796, -0.44086000323295593, 0.2097499966621399, 0.18761000037193298, 0.8589000105857849, -0.07069499790668488, -0.010161999613046646, -0.14710000157356262, -0.2328999936580658, 0.22127999365329742, 0.7968000173568726, 0.1185000017285347, -0.21332000195980072, -0.0023898999206721783, 0.05649799853563309, -0.2697399854660034, -0.10976000130176544, -0.17506000399589539, -0.03359600156545639, -0.37738001346588135, -0.9251000285148621, 0.4249800145626068, -0.014429000206291676, 0.5238400101661682, 0.6987900137901306, -0.7031700015068054, 0.6293900012969971, -0.4490000009536743, 0.18272000551223755, 0.2749499976634979, 0.6234999895095825, 0.5501700043678284, -0.019632000476121902, -0.23991000652313232, 
0.3632600009441376, 0.4007999897003174, -0.5176100134849548, -0.20848000049591064, 0.3415299952030182, 0.09231700003147125, 0.1364399939775467, -0.04392699897289276, -0.3941099941730499, 0.16678999364376068, 0.8732699751853943, 0.07417000085115433, -0.17343999445438385, 0.0006527199875563383, 0.32471001148223877, 0.5379899740219116, -0.192890003323555, -0.3391900062561035, -0.4998300075531006, -0.3057200014591217, -1.4458999633789062, -0.7969300150871277, 0.1287499964237213, 0.4926599860191345, -0.6940699815750122, -0.6299899816513062, -0.06888899952173233, 0.608299970626831, 0.5944300293922424, -0.2410299926996231, 0.16859999299049377, 0.5113099813461304, 0.020448999479413033, -0.27373000979423523, -0.09400399774312973, 0.39094001054763794, 0.2915000021457672, -0.7745199799537659, 0.06734000146389008, 0.24913999438285828, -0.2928299903869629, -0.24523000419139862], u'bridge': [-0.028963999822735786, -0.7682099938392639, 0.014600999653339386, -0.8585799932479858, -0.08464200049638748, 0.23050999641418457, 0.2817400097846985, -0.19577999413013458, -0.19693000614643097, -1.0038000345230103, -0.18679000437259674, 0.06403400003910065, 0.058880001306533813, -0.22348999977111816, 0.47123000025749207, 0.03687499836087227, 0.16438999772071838, -0.01921899989247322, 0.0071701002307236195, -0.18643000721931458, 0.11789999902248383, -0.29398998618125916, 0.2016499936580658, -0.09070800244808197, -0.39316999912261963, 0.046278998255729675, -0.4514999985694885, -0.050887998193502426, 0.14318999648094177, 0.6433699727058411, 0.7815700173377991, 0.29082000255584717, 0.4349899888038635, 0.6314499974250793, -0.09473100304603577, 0.584630012512207, -0.09325399994850159, -0.3193199932575226, 0.7925000190734863, 0.18719999492168427, -0.23441000282764435, -0.03382499888539314, -0.863070011138916, 0.5428100228309631, -0.06108900159597397, 0.3599399924278259, 0.4016900062561035, 0.19957999885082245, -0.2484699934720993, 0.09409099817276001, -0.16380999982357025, 0.28022998571395874, -0.3201799988746643, -0.5060200095176697, 0.06139799952507019, 0.4602299928665161, 0.09547200053930283, -0.09888499975204468, -0.4156000018119812, 0.6577500104904175, 0.5956000089645386, -0.1276099979877472, 0.05279700085520744, -0.6479700207710266, 0.6184300184249878, 0.016536999493837357, -0.2300499975681305, 0.4034999907016754, -0.13109000027179718, -0.1507900059223175, -0.24977999925613403, 0.26058000326156616, -0.01796099916100502, -0.3397200107574463, -0.10819999873638153, 0.31084999442100525, 0.2446800023317337, 0.6037200093269348, -0.40064001083374023, -0.37872999906539917, 0.11693000048398972, -0.5261499881744385, 0.5442299842834473, -0.7388899922370911, -0.24110999703407288, -0.3032799959182739, 0.0878319963812828, 0.3756600022315979, -0.010517000220716, 0.32936999201774597, 0.8036900162696838, 0.38405001163482666, 0.6396999955177307, -0.046383000910282135, -0.11023999750614166, 0.29576998949050903, 0.14881999790668488, -0.8046299815177917, -0.01964299939572811, -0.035943999886512756, -0.3545700013637543, -0.03028300032019615, 0.38550999760627747, -0.16256999969482422, 0.5287600159645081, 0.587939977645874, 0.5998299717903137, -0.13088999688625336, 0.17475999891757965, -0.3307499885559082, -0.14289000630378723, -0.06173799932003021, 0.31516000628471375, -0.24265000224113464, -0.3719500005245209, -0.26754000782966614, -0.17625999450683594, 0.4436500072479248, -0.16412000358104706, -0.4501599967479706, -0.0772550031542778, -0.4683400094509125, 0.03674900159239769, -0.1574999988079071, 0.0870710015296936, -0.49171000719070435, 
0.15196000039577484, 0.22224000096321106, 0.17271000146865845, -0.335099995136261, 0.24357999861240387, 0.7141299843788147, 0.5093700289726257, 0.3208799958229065, -0.1987999975681305, -0.44802001118659973, 0.06250400096178055, -0.3916400074958801, -0.2988699972629547, -0.4920800030231476, -0.4788999855518341, 0.09705899655818939, 0.14687000215053558, 0.2207300066947937, -0.18174000084400177, -0.016950000077486038, 0.7101799845695496, 0.2177799940109253, -0.8066400289535522, -0.493120014667511, 0.1835000067949295, 0.39660999178886414, 0.06095900014042854, -0.9773300290107727, 1.0575000047683716, -0.13269999623298645, 0.16399000585079193, -0.26673999428749084, -0.5587999820709229, 0.13432000577449799, 0.4981600046157837, 0.13787999749183655, 0.5805500149726868, -0.6549599766731262, -0.24660000205039978, 0.1607999950647354, -0.1440500020980835, -0.2855899930000305, -0.385919988155365, -0.17319999635219574, -0.07713200151920319, -0.0638049989938736, 0.4934700131416321, -0.3681800067424774, 0.5429099798202515, 0.018146000802516937, -0.9972400069236755, -0.687470018863678, -0.22120000422000885, -0.2942200005054474, 0.498879998922348, 0.24657000601291656, -0.03415299952030182, 0.0954039990901947, -0.14731000363826752, -0.32958999276161194, -0.5827900171279907, 0.0034489999525249004, 0.4940600097179413, 0.6382200121879578, -0.15439000725746155, 0.9283000230789185, 0.0605820007622242, 0.1944199949502945, -0.6106500029563904, 0.02851800061762333, 0.3858799934387207, -0.6121000051498413, 0.30518999695777893, 0.01616699993610382, 1.1319999694824219, 0.4366700053215027, -0.01902100071310997, -0.34101998805999756, -0.5129600167274475, 0.24666999280452728, 0.25999000668525696, 0.07899200171232224, 0.79339998960495, 0.560699999332428, 0.24345999956130981, 0.4280500113964081, 0.3142099976539612, -0.13686999678611755, 0.028658999130129814, -0.14883999526500702, -0.5206599831581116, -0.05064300075173378, 0.18764999508857727, 0.19731000065803528, 0.6359300017356873, -0.23308999836444855, 0.6692900061607361, -0.2150699943304062, 0.4823800027370453, 0.08182399719953537, 0.37869998812675476, -0.10251999646425247, -0.10135000199079514, 0.097120001912117, -0.49066999554634094, 0.15836000442504883, -0.7389100193977356, -0.08947999775409698, 0.7362200021743774, 0.19676999747753143, -0.16710999608039856, 0.23015999794006348, 0.09224399924278259, -0.21825000643730164, 0.05372200161218643, -0.07554399967193604, 0.46417999267578125, -0.6451699733734131, -0.34797999262809753, 0.019091999158263206, 0.17579999566078186, 0.3451099991798401, 0.2497200071811676, 0.10963000357151031, 0.2625899910926819, 0.07511399686336517, 0.4757100045681, -0.21137000620365143, -0.00230560009367764, -0.4876999855041504, -0.12726999819278717, 0.37950998544692993, 0.5870199799537659, -0.08640799671411514, -0.16558000445365906, 0.34112000465393066, -0.03467100113630295, -0.36340999603271484, -0.06837700307369232, 0.19623999297618866, 0.44343000650405884, -0.5561500191688538, -0.24478000402450562, -0.47457998991012573, 0.36796000599861145, -0.010267999954521656, 0.7336699962615967, -0.022120000794529915, -0.07127899676561356, -0.14215999841690063, -1.2687000036239624, -0.47600001096725464, 0.12972000241279602, 0.4158399999141693, -0.2325199991464615, -0.33371999859809875, 0.8378999829292297, -0.8720899820327759, -0.22920000553131104, 0.47075000405311584, -0.2903999984264374, -0.18574999272823334, -0.12447000294923782, -0.34839001297950745, -0.0012029999634250998, -0.25617000460624695, -0.3572399914264679, 0.713670015335083, 0.05368399992585182, 
0.19735999405384064, 0.35155001282691956, 0.15432000160217285, 0.07666700333356857, 0.4769900143146515], u'stream': [-0.5605300068855286, -0.03855299949645996, 0.3884899914264679, -0.2689700126647949, -0.4534600079059601, -0.051114000380039215, 0.2267799973487854, 0.19411000609397888, 0.5222200155258179, -1.2773000001907349, -0.4081999957561493, -0.17874999344348907, 0.15689000487327576, -0.5267900228500366, -0.09446500241756439, -0.29433000087738037, -0.6101700067520142, 0.32833999395370483, 1.1527999639511108, 0.7024499773979187, -0.5852699875831604, 0.0006555099971592426, 0.11941000074148178, 0.45767998695373535, -0.2366899996995926, -0.0059238001704216, 0.49428001046180725, 0.27498000860214233, -0.02942500077188015, 0.23303000628948212, 0.20393000543117523, -0.05197399854660034, 0.39991000294685364, 0.4274500012397766, -0.08253999799489975, -0.11129999905824661, -0.34200000762939453, -0.11984000355005264, 0.32513999938964844, 0.08156800270080566, -0.3479599952697754, 0.455130010843277, -0.018050000071525574, 0.11858999729156494, 0.07482100278139114, 0.22605000436306, 0.3294200003147125, 0.07931400090456009, 0.6140199899673462, -0.02311200089752674, -0.15896999835968018, 0.27261999249458313, 0.22020000219345093, -0.7610800266265869, -0.1254200041294098, 0.08061599731445312, 0.0924379974603653, -0.3940599858760834, 0.4193100035190582, 0.9013800024986267, -0.09524500370025635, -0.14110000431537628, 0.9950299859046936, -0.03332599997520447, -0.09820999950170517, 0.23759999871253967, 0.0013683000579476357, 0.1961899995803833, -0.015754999592900276, 0.5192400217056274, 0.2942500114440918, 0.13524000346660614, -0.35576000809669495, 0.08534500002861023, -0.005106199998408556, 0.011114999651908875, 0.20048999786376953, -0.10520000010728836, 0.08270899951457977, -0.06406400352716446, -0.4220699965953827, -0.5355499982833862, -0.3878900110721588, -0.29962998628616333, 0.7181100249290466, 0.3094800114631653, -0.20754000544548035, -0.4388999938964844, 0.3099699914455414, -0.4556899964809418, 0.3942500054836273, 0.15727999806404114, 0.04503900185227394, -0.16929000616073608, 0.2785300016403198, 0.26802998781204224, 0.3386499881744385, -0.27243998646736145, 0.3529300093650818, 0.09421399980783463, 0.08945299685001373, 0.023643000051379204, 0.07203900068998337, -0.3667899966239929, 0.020681999623775482, 0.4683299958705902, 0.6855400204658508, -0.21491000056266785, -0.003924999851733446, -0.1734900027513504, -0.22985999286174774, -0.2517699897289276, -0.014178999699652195, 0.10270000249147415, -0.5442100167274475, 0.19514000415802002, 0.002555999904870987, -0.131740003824234, -0.1865299940109253, -0.26816999912261963, 0.07362399995326996, -0.03771800175309181, -0.1974399983882904, 0.4348500072956085, 0.2676999866962433, 0.10204999893903732, 0.16926999390125275, 0.2762700021266937, -0.06986299902200699, 0.08434999734163284, -0.50204998254776, 0.12494000047445297, 0.05428599938750267, -0.1951500028371811, 0.0703359991312027, -0.0291300006210804, 0.8208100199699402, 0.2805500030517578, -0.04708399996161461, -0.24247999489307404, 0.5475199818611145, -0.45028001070022583, -0.20940999686717987, -0.05659100040793419, -0.11292000114917755, 0.39131999015808105, 0.006696799769997597, 0.3685300052165985, 0.127020001411438, 0.25672000646591187, 0.24790999293327332, 0.09026800096035004, -0.30445998907089233, 0.02293499931693077, 0.7458299994468689, -0.29559001326560974, 0.6703199744224548, -0.6482700109481812, -0.2524699866771698, 0.0012874000240117311, -0.2338400036096573, -0.6284400224685669, 0.6971099972724915, 
-0.322409987449646, 0.029505999758839607, -0.015057000331580639, -0.13432000577449799, -0.09971100091934204, -0.3924199938774109, 0.18397000432014465, 0.12307000160217285, 0.3866899907588959, 0.009076399728655815, 0.3894599974155426, -0.41347000002861023, 0.10518000274896622, -0.13931000232696533, -0.07151299715042114, 0.32861000299453735, -0.1912900060415268, -0.11668000370264053, -0.2611500024795532, -0.26614999771118164, 0.2463800013065338, 0.2387399971485138, -0.4986000061035156, 0.09106700122356415, -0.06466999650001526, -0.21101999282836914, -0.17856000363826752, 0.03169799968600273, 0.8044300079345703, -0.3434999883174896, -0.13429999351501465, -0.03213300183415413, 0.14876000583171844, 0.15018999576568604, -0.8352599740028381, -0.2652899920940399, 0.24212999641895294, 0.2615399956703186, -0.176829993724823, 0.0683170035481453, -0.5370799899101257, 0.02790899947285652, -0.3368000090122223, -0.9563199877738953, 0.3292100131511688, -0.016110999509692192, 0.22307999432086945, -0.21886000037193298, -0.10938999801874161, 0.24884000420570374, -0.23814000189304352, -0.4886299967765808, 0.0306170005351305, -0.2445800006389618, 0.02648399956524372, 0.07889799773693085, 0.3494499921798706, 0.05829399824142456, 0.41319000720977783, 0.19990000128746033, -0.146139994263649, 0.2608799934387207, -0.19993999600410461, 0.048062000423669815, 0.1581999957561493, 0.5808699727058411, 0.18425999581813812, -0.28334999084472656, -0.28916001319885254, -0.17590999603271484, -0.7716799974441528, 0.6998299956321716, 0.1350799947977066, 0.027667999267578125, -0.5539600253105164, -0.28852999210357666, -0.6083400249481201, -0.14086000621318817, 0.5098099708557129, -0.10977999866008759, -0.3066900074481964, -0.8312399983406067, -0.3822000026702881, 0.6813499927520752, -0.18942999839782715, -0.3297500014305115, -0.2035199999809265, 0.098191998898983, -0.34380000829696655, 0.48541998863220215, -0.41923001408576965, 0.06131000071763992, -0.13104000687599182, 0.010394000448286533, 0.44534000754356384, -0.10491999983787537, 0.7337599992752075, 0.22939999401569366, -0.2354000061750412, -0.06226300075650215, 0.22754999995231628, -0.2503400146961212, -0.16306999325752258, 0.051642000675201416, -0.36333000659942627, 0.017449000850319862, 0.222120001912117, 0.21918000280857086, -0.3689500093460083, -0.2586100101470947, -0.1404000073671341, -0.07578299939632416, 0.30717000365257263, -1.1750999689102173, 0.23281000554561615, 0.517520010471344, 0.17565999925136566, -0.48774001002311707, 0.4843200147151947, -0.022307999432086945, -0.14962999522686005, -0.24368999898433685, -0.32690000534057617, 0.4369800090789795, -0.06211800128221512, 0.10552000254392624, 0.09494200348854065, -0.17295999825000763, 0.18626999855041504, 0.08025699853897095, -0.044220998883247375, 0.25828999280929565, -0.10980000346899033, 0.4389300048351288, 0.39465999603271484, 0.15834000706672668, 0.3891099989414215], u'well': [-0.13508999347686768, 0.3590700030326843, 0.1453000009059906, -0.12828999757766724, -0.05572500079870224, 0.40108001232147217, -0.09409800171852112, 0.23064999282360077, 0.06729499995708466, -1.9430999755859375, 0.28650999069213867, -0.003136599902063608, -0.04010000079870224, 0.28839001059532166, 0.03992899879813194, -0.008701699785888195, -0.5519800186157227, 0.1090800017118454, 0.15219999849796295, 0.01691799983382225, -0.03238600119948387, 0.21480000019073486, 0.2857699990272522, 0.1694200038909912, -0.36601999402046204, 0.056533001363277435, -0.0363910011947155, 0.05486200004816055, -0.18598000705242157, 0.23419000208377838, 
0.18544000387191772, 0.3515700101852417, -0.2849699854850769, 0.12725000083446503, -0.7767800092697144, 0.046404000371694565, 0.22439999878406525, 0.2636600136756897, 0.006994999945163727, -0.18312999606132507, -0.1819700002670288, -0.3688099980354309, -0.043602000921964645, -0.1315699964761734, 0.13996000587940216, 0.34029000997543335, 0.33357998728752136, 0.4576599895954132, -0.022175999358296394, 0.4245299994945526, -0.0587569996714592, -0.17735999822616577, 0.18038000166416168, -0.17124000191688538, -0.20202000439167023, -0.004610800184309483, -0.04361800104379654, 0.23202000558376312, 0.3636400103569031, 0.12620000541210175, 0.25446000695228577, 0.12602999806404114, 0.34198999404907227, 0.04902099817991257, -0.02047000080347061, -0.022835999727249146, 0.316210001707077, 0.09193500131368637, 0.07880699634552002, 0.06074399873614311, 0.05615000054240227, -0.16083000600337982, 0.19192999601364136, 0.16940000653266907, -0.0008594599785283208, -0.2679300010204315, 0.27722999453544617, 0.3763599991798401, -0.002921199891716242, -0.14226999878883362, -0.13579000532627106, -0.05649099871516228, 0.2852100133895874, -0.161190003156662, 0.05624400079250336, 0.149959996342659, -0.01235199999064207, 0.17037999629974365, 0.0010185999562963843, -0.10470999777317047, 0.015496999956667423, 0.5724300146102905, -0.2365799993276596, -0.08958700299263, -0.01829499937593937, 0.12591999769210815, -0.05895699933171272, -0.037105001509189606, 0.006194000132381916, -0.005189999938011169, 0.0037424000911414623, -0.13741999864578247, -0.16106000542640686, -0.34779998660087585, -0.07382900267839432, 0.25780999660491943, 0.06234800070524216, 0.26497000455856323, -0.06848800182342529, -0.14069999754428864, 0.1019200012087822, -0.3961000144481659, -0.06864999979734421, -0.2096100002527237, 0.044151999056339264, 0.27584999799728394, -0.27004000544548035, -0.17976999282836914, -0.05359400063753128, -0.0949229970574379, -0.1262899935245514, -0.06983699649572372, 0.08475100249052048, 0.053001999855041504, -0.007942699827253819, 0.1582300066947937, -0.2779200077056885, 0.4728499948978424, 0.07816699892282486, 0.12296000123023987, 0.1691800057888031, 0.06995999813079834, 0.07889500260353088, -0.2052599936723709, -0.07922700047492981, -0.163100004196167, -0.07286199927330017, 0.018296999856829643, -0.07424499839544296, 0.5598400235176086, 0.1446399986743927, -0.024622000753879547, 0.09196999669075012, 0.10530000180006027, 0.00649659987539053, 0.06115100160241127, 0.1278800070285797, -0.07334999740123749, -0.29047998785972595, 0.08899399638175964, 0.20569999516010284, -0.08042699843645096, -0.03003999963402748, 0.0434579998254776, 0.19232000410556793, -0.003431200049817562, -0.13763000071048737, 0.0061897998675704, 0.5346699953079224, -0.18904000520706177, 0.03123999945819378, -0.12250000238418579, -0.13425999879837036, -0.15898999571800232, -0.21818000078201294, 0.06116199865937233, 0.06193700060248375, -0.022285999730229378, 0.26952001452445984, 0.39945000410079956, 0.32945001125335693, 0.16737000644207, -0.7559999823570251, 0.08848699927330017, 0.041255999356508255, 0.19878999888896942, -0.13572999835014343, -0.16098999977111816, -0.09004099667072296, 0.27524998784065247, 0.11711999773979187, 0.04546799883246422, 0.4345099925994873, -0.11035999655723572, 0.29603999853134155, -0.18870000541210175, -0.042642999440431595, 0.13248999416828156, -0.060175999999046326, 0.07629299908876419, -0.014825000427663326, 0.10010000318288803, -0.1324400007724762, 0.10413999855518341, -0.13305999338626862, -0.35565999150276184, 
0.019883999601006508, -0.002648900030180812, -0.05190800130367279, -0.2585499882698059, 0.9628099799156189, 0.06612899899482727, 0.002629399998113513, 0.09554199874401093, 0.1292800009250641, 0.023806000128388405, 0.05456800013780594, 0.11450999975204468, -0.0015460000140592456, -0.4448600113391876, 0.28415998816490173, 0.06756500154733658, 0.13263000547885895, -0.27219000458717346, 0.249099999666214, -0.11021000146865845, -0.1343899965286255, 0.08101200312376022, 0.23074999451637268, -0.04936299845576286, 0.23496000468730927, -0.06776700168848038, -0.49022001028060913, -0.051649998873472214, -0.00029913001344539225, 0.1442900002002716, -0.15624000132083893, -0.07177700102329254, -0.2170799970626831, -0.23055000603199005, -0.15678000450134277, -0.03576600179076195, 0.06812000274658203, -0.008063700050115585, -0.04109000042080879, 0.3338100016117096, 0.19578999280929565, 0.036416999995708466, -0.45570001006126404, 0.26330000162124634, -0.11168999969959259, 0.3538399934768677, 0.26124000549316406, 0.25957000255584717, -0.718940019607544, 0.09662599861621857, 0.006437100004404783, 0.2173600047826767, 0.061778001487255096, 0.12582999467849731, 0.3488599956035614, -0.2850100100040436, -0.07272999733686447, 0.17845000326633453, 0.4495599865913391, -0.03617500141263008, 0.05040900036692619, -0.19929000735282898, 0.014173000119626522, 0.12809999287128448, -0.12745000422000885, -0.09446199983358383, 0.23976999521255493, 0.11376000195741653, -0.29271000623703003, -0.010955999605357647, -0.3495999872684479, -0.2847299873828888, 0.0688840001821518, 0.16047999262809753, 0.10939999669790268, -0.11225000023841858, -0.13835999369621277, 0.22139999270439148, 0.04048199951648712, 0.006709500215947628, -2.031599998474121, -0.031491998583078384, 0.001386999967508018, -0.12377999722957611, -0.1514499932527542, -0.04978200048208237, 0.08626600354909897, 0.24166999757289886, -0.034596998244524, 0.17589999735355377, -0.0013132999883964658, 0.11824999749660492, 0.04813800007104874, -0.20375999808311462, -0.011660999618470669, -0.06395799666643143, -0.10642000287771225, -0.23609000444412231, 0.188960000872612, 0.024149000644683838, -0.03993599861860275, -0.42100000381469727, -0.13339999318122864, 0.11190000176429749], u'penny': [0.15395000576972961, -0.3273000121116638, -0.45295000076293945, 0.6554399728775024, -0.09134799987077713, 0.3213199973106384, -0.20791999995708466, -0.22437000274658203, -0.14470000565052032, -0.3733699917793274, -0.28744998574256897, -0.34973999857902527, 0.1352500021457672, -0.15177999436855316, 0.059365998953580856, 0.48409000039100647, -0.06146800145506859, -0.2765499949455261, 0.03498600050806999, -0.16946999728679657, 0.20303000509738922, -0.0027385998982936144, 0.1033800020813942, 0.03196600079536438, 0.8603500127792358, 0.6114299893379211, -0.06970900297164917, 0.4322899878025055, 0.5381699800491333, -0.13649000227451324, 0.1911199986934662, 0.28426000475883484, 0.2182600051164627, -0.10162000358104706, -1.3203999996185303, 0.001958899898454547, -0.11836999654769897, 0.271340012550354, -0.5256999731063843, 0.5497000217437744, -0.009718700312077999, -0.06322299689054489, 0.009981599636375904, 0.46279001235961914, 0.1325799971818924, -0.00432930001989007, 0.16398000717163086, -0.061941999942064285, -0.2996399998664856, 0.23670999705791473, 0.18846000730991364, -0.003891000058501959, 0.040153998881578445, 0.29989001154899597, -0.7375699877738953, -0.16218000650405884, -0.03467300161719322, 0.011509999632835388, -0.3306800127029419, -0.8975899815559387, -0.48739001154899597, 
... [word-embedding data file continues: a Python-literal dict mapping words to fixed-length lists of floats (several hundred values per word); entries in this span include u'pie', u'glass', u'shell', u'pond', u'dress', u'car', u'mountain', u'lemon', u'shirt', u'concrete', and u'balloon', with the u'balloon' vector continuing past this span] ...
-0.02047399990260601, 0.14065000414848328, -0.329010009765625, -0.7194700241088867, -0.06845299899578094, 0.7716400027275085, -0.15556000173091888, 0.12952999770641327, 0.69691002368927, 0.04138199985027313, 0.1059499979019165, 0.06185400113463402, 0.3142299950122833, 0.3764300048351288, -0.2041199952363968, -0.5556300282478333, -0.6845999956130981, -0.13902999460697174, -0.5718200206756592, -0.6228799819946289, 0.09809199720621109, -0.36438998579978943, 0.19243000447750092, 0.041165001690387726, 0.1661600023508072, 0.13288000226020813, 0.5344700217247009, -0.1090800017118454, 0.4000700116157532, 0.6775299906730652, -0.208979994058609, 0.8883799910545349, 0.6623799800872803, 0.04258599877357483, 0.06662599742412567, -0.6099799871444702, 0.7633399963378906, 0.11886999756097794, -0.8979899883270264, 0.4458099901676178, -0.1832199990749359, 0.5488399863243103, -0.8668500185012817, -0.09507499635219574, 0.2884100079536438, -0.01879500038921833, -0.02343199960887432, -0.4556399881839752, 0.22335000336170197, 0.1771100014448166, -0.20306000113487244, -0.008656900376081467, -0.30188000202178955, -0.0382549986243248, -0.18119999766349792, 0.05647699907422066, 0.1271200031042099, 0.28683000802993774, -0.3521200120449066, 0.27204999327659607, 0.4937500059604645, 0.2037300020456314, -0.8180599808692932, 0.3755899965763092, 0.11999999731779099, -0.5254499912261963, -0.8037199974060059, 0.22211000323295593, 0.7376599907875061, -0.35995998978614807, 0.22022999823093414, -0.5815799832344055, 0.06252399832010269, 0.2141599953174591, 0.6596900224685669, 0.08104600012302399, -0.4066399931907654, 0.46643000841140747, -0.06612300127744675, -0.21594999730587006, -0.16308000683784485, 0.5895400047302246, 0.2379399985074997, -0.03724199905991554, 0.32565000653266907, 0.0026891001034528017, 0.9505000114440918, -0.19006000459194183, -0.4204599857330322, -0.049908000975847244, -0.5411700010299683, 0.19550999999046326, -0.09181399643421173, -0.4529699981212616, -0.3405100107192993, -0.012811999768018723, -0.2503800094127655, 0.6223400235176086, -0.19442999362945557, 0.04409100115299225, -0.3712199926376343, -0.19453999400138855, -0.26589998602867126, -0.9260900020599365, -0.01810400001704693, 0.25672000646591187, -0.4557900130748749, -0.013577999547123909, 0.13955000042915344, -0.09140799939632416, -0.5659099817276001, 0.05525900050997734, -0.09835399687290192, -0.3027999997138977, -0.6807500123977661, 0.10006999969482422, -0.28957998752593994, 0.14970000088214874, -0.16370999813079834, -0.04802300035953522, 0.017622999846935272, -0.23064999282360077, -0.4478699862957001, 0.2242400050163269, -0.12691999971866608, 0.40810999274253845, 0.37240999937057495, -0.5593600273132324, 0.03378299996256828, -0.1090100035071373, -0.5548700094223022, 0.048041000962257385, 0.4775800108909607, 0.013698999769985676, -0.332940012216568, 0.022166000679135323, 0.18197999894618988, 0.7768200039863586, -0.3347199857234955, 0.04888100177049637, 0.3042199909687042, -0.1367499977350235, 0.5104900002479553, 0.022646000608801842, 0.0508279986679554, 0.008611599914729595, 0.6365799903869629, -0.020534999668598175, 0.5776399970054626, 0.6060000061988831, 0.5644099712371826, -0.011220999993383884, -0.4963200092315674, -1.100100040435791, -0.7223600149154663, -0.6348000168800354, 0.1589300036430359, -0.24252000451087952, 0.12946000695228577, -0.2939099967479706, 0.1871500015258789, -0.09801000356674194, -0.10857000201940536, -0.024903999641537666, 0.17720000445842743, -0.9440500140190125, 0.4867500066757202, 0.31567999720573425, 
0.1843400001525879, -0.4359000027179718, -0.371969997882843, 0.12212000042200089, -0.43773001432418823, -0.252920001745224, 0.127470001578331, -0.021604999899864197, 0.3716900050640106], u'cave': [-0.5335299968719482, 0.3953799903392792, -0.6190900206565857, -0.12439999729394913, 0.3276199996471405, -0.08961699903011322, 0.40443000197410583, 0.23603999614715576, -0.055810000747442245, -0.31790998578071594, -0.18618999421596527, -0.33889999985694885, -0.12284000217914581, 0.3503899872303009, 0.16819000244140625, 0.006061300169676542, -0.2237199991941452, -0.08748099952936172, 0.2139900028705597, 0.6152499914169312, -0.16485999524593353, 0.3169899880886078, 0.6363499760627747, 0.2682400047779083, -0.22573000192642212, -0.2564300000667572, 0.2838599979877472, -0.4568699896335602, -0.4213300049304962, 1.3016999959945679, 0.5760999917984009, 0.44613000750541687, -0.5725600123405457, -0.1452600061893463, 0.6906499862670898, -0.09208299964666367, 0.33243000507354736, 0.026633000001311302, -0.19763000309467316, -0.37891000509262085, 0.05485000088810921, 0.4346599876880646, 0.5031700134277344, 0.5508300065994263, -0.4539799988269806, 0.14263999462127686, 0.42906999588012695, 0.29899001121520996, -0.12775999307632446, -0.1193000003695488, 0.10327000170946121, 0.011118999682366848, -0.36388999223709106, 0.46928998827934265, 0.36010000109672546, 0.6281999945640564, -0.32837000489234924, 0.04516100138425827, 0.09915000200271606, 0.3787600100040436, -0.443589985370636, -0.014360999688506126, 0.606939971446991, 0.3451800048351288, 0.14454999566078186, -0.3301199972629547, 0.16867999732494354, -0.2961699962615967, -0.28314998745918274, 0.02519799955189228, -0.14957000315189362, -0.25641000270843506, -0.3644999861717224, -0.4975999891757965, -1.0322999954223633, -0.18488000333309174, -0.08821199834346771, -0.30432000756263733, 0.3275200128555298, -0.4108799993991852, 0.29427000880241394, 0.052776999771595, -0.33191999793052673, -0.3121800124645233, -0.2692500054836273, 0.8627099990844727, 0.19257999956607819, 0.18479999899864197, -0.35054001212120056, -0.33463001251220703, -0.2051199972629547, 0.07444699853658676, -0.23907999694347382, 0.4095599949359894, 0.5648999810218811, 0.9209799766540527, 0.14170999825000763, 0.15803000330924988, 0.743910014629364, -0.08837399631738663, 0.09492599964141846, 0.6487399935722351, 0.3869900107383728, -0.1919800043106079, 0.23543000221252441, 0.15995000302791595, 0.483379989862442, 0.2730099856853485, 0.4449999928474426, -0.06980600208044052, -0.409060001373291, -0.6081600189208984, 0.15154999494552612, -0.027682000771164894, 0.19787999987602234, -0.038040000945329666, -0.31560999155044556, -0.22867999970912933, -0.03975199908018112, -0.08478099852800369, -0.26423999667167664, -0.37602001428604126, -0.675819993019104, -0.0857739970088005, -0.1634799987077713, -0.14147000014781952, -0.3180600106716156, 0.16026000678539276, 0.04141699895262718, -0.008233999833464622, -0.07121700048446655, 0.3866199851036072, -0.16439999639987946, 0.05613800138235092, -0.26715999841690063, 0.1530199944972992, -0.4221999943256378, -0.1979999989271164, -0.3169899880886078, -0.40727999806404114, -0.01017600018531084, -0.7992500066757202, -0.1999099999666214, -0.5950800180435181, -0.2987299859523773, -1.3544000387191772, 0.09706799685955048, 0.6063600182533264, -0.2501400113105774, -0.003122099908068776, -0.05805400013923645, 0.2574400007724762, -0.64205002784729, -0.573199987411499, 0.40077999234199524, 0.006764700170606375, -0.051398999989032745, 0.2071399986743927, -0.5836399793624878, 
0.15169000625610352, 0.17188000679016113, 0.11830999702215195, 0.4683299958705902, 0.20272000133991241, 0.2559800148010254, 0.509909987449646, 0.5097799897193909, 0.0848039984703064, 0.05987999960780144, -0.6876800060272217, 0.08966600149869919, 0.3485099971294403, 1.3431999683380127, 0.21250000596046448, -0.340719997882843, 1.2723000049591064, -0.35962000489234924, 0.030254999175667763, 0.25442999601364136, 0.13011999428272247, 0.4433700144290924, 0.27265000343322754, 0.4457699954509735, -0.03079799935221672, 0.10480999946594238, -0.5113800168037415, 0.1404999941587448, 0.2897999882698059, 0.20221999287605286, -0.09950099885463715, 0.0888189971446991, 0.44477999210357666, 0.08122900128364563, -0.08717100322246552, -0.2549999952316284, -0.11427000164985657, -0.24854999780654907, -0.25501999258995056, 0.24560999870300293, 0.49818000197410583, 1.2253999710083008, 0.2520099878311157, -0.17159999907016754, 0.02936200052499771, 0.29693999886512756, -0.022281000390648842, -1.0469000339508057, -0.34891998767852783, -0.074925996363163, 0.3399200141429901, -0.3566800057888031, -0.007269499823451042, 0.02860499918460846, -0.11077000200748444, 0.13721999526023865, -0.31610000133514404, -0.44593000411987305, 0.3770599961280823, 0.22245000302791595, 0.5653700232505798, 0.33496999740600586, 0.368369996547699, -0.376800000667572, -0.6230599880218506, -0.04745600000023842, -0.5401600003242493, -0.44179001450538635, 0.12655000388622284, -0.12992000579833984, -0.3224300146102905, -0.25661998987197876, -0.33741000294685364, -0.046716999262571335, -0.13871000707149506, -0.12996000051498413, -0.09997399896383286, -0.47383999824523926, -0.3530299961566925, -0.14249999821186066, -0.40467000007629395, -0.08125399798154831, 0.23115000128746033, -0.18626999855041504, -0.10233999788761139, -0.5616000294685364, 0.14232000708580017, -0.3218199908733368, -0.3055500090122223, -0.1798900067806244, 0.21142999827861786, -0.295199990272522, -0.01195400021970272, 0.48276999592781067, -0.17983999848365784, 0.40073999762535095, 0.3299899995326996, 1.0374000072479248, 0.015409000217914581, -0.3280400037765503, 0.1914999932050705, -0.3198600113391876, -0.3736000061035156, 0.24232999980449677, -0.4537999927997589, -0.130840003490448, -0.19439999759197235, -0.12634000182151794, -0.20288999378681183, 0.07146400213241577, 0.4336700141429901, -0.7508800029754639, 0.013110999949276447, 0.19030000269412994, 0.4419800043106079, -0.13127000629901886, -0.07745400071144104, -0.8182799816131592, -0.5735899806022644, -0.28200000524520874, 0.2917200028896332, -0.32914999127388, 0.04944400116801262, 0.2688699960708618, -0.1619900017976761, 0.026410000398755074, 0.25029000639915466, -0.042413998395204544, 0.25854000449180603, 0.41982999444007874, -0.2773900032043457, -0.12426000088453293, -0.00860149972140789, 0.2282399982213974, 0.20305000245571136, -0.10661999881267548, -0.14569999277591705, 0.2841799855232239, 0.34426000714302063, -0.22303999960422516, 0.049199000000953674], u'bowl': [-0.18025000393390656, 1.2187999486923218, 0.4313200116157532, -0.5559200048446655, 0.06584600359201431, -0.12246999889612198, 0.5308200120925903, 0.09643200039863586, -0.007739400025457144, -0.17044000327587128, -0.3754099905490875, 0.12296999990940094, 0.01617100089788437, -0.3746199905872345, 0.15078000724315643, -0.16962000727653503, -0.2948000133037567, 0.02876099944114685, -0.5062900185585022, -0.8512099981307983, -0.44370999932289124, -0.06564400345087051, -0.20541000366210938, 0.15230999886989594, 0.016898000612854958, -0.6497099995613098, 
-0.45291998982429504, 0.03203599900007248, -0.3763599991798401, -1.2274999618530273, 0.28395000100135803, -0.08025600016117096, -0.01841600053012371, -0.473470002412796, -1.6784000396728516, 0.5074099898338318, -0.28001001477241516, 0.6706799864768982, -0.23197999596595764, -0.4867500066757202, 0.03702099993824959, -0.16031000018119812, -0.12820999324321747, 0.09071200340986252, -0.1947299987077713, 0.14937999844551086, 0.1802700012922287, 0.04250200092792511, 0.00658239983022213, 0.38304001092910767, 0.3236599862575531, 0.4086199998855591, -0.4085899889469147, 0.1437399983406067, -0.5069100260734558, -0.0728359967470169, 0.14621999859809875, 0.07649999856948853, 0.4874500036239624, -0.2415499985218048, -0.6507899761199951, -0.4885599911212921, -0.7434599995613098, 0.8161900043487549, 0.07714799791574478, -0.19431999325752258, 0.38398998975753784, 0.06899300217628479, 0.4905500113964081, 0.04693799838423729, 0.7721199989318848, -0.15448999404907227, 0.21032999455928802, 0.25113001465797424, -1.0321999788284302, 0.4171200096607208, 0.7777699828147888, 0.16526000201702118, 0.403329998254776, -0.1440100073814392, 0.21773000061511993, 0.46977001428604126, -0.9059600234031677, 0.10852999985218048, 0.27300000190734863, -0.09594900161027908, -0.9193800091743469, -0.2119700014591217, 0.3841499984264374, -0.5120999813079834, 0.3198400139808655, 0.5980799794197083, 0.23736999928951263, 0.043234001845121384, -1.2872999906539917, 0.28474000096321106, 0.5694500207901001, -0.16203999519348145, -0.11900000274181366, -0.01879199966788292, 0.37966999411582947, 0.06698500365018845, 0.2525799870491028, 0.006842100061476231, 0.38670000433921814, 0.29666998982429504, -0.09932799637317657, 0.07898099720478058, -0.4984799921512604, 0.2184399962425232, 0.2184399962425232, -0.07414399832487106, -0.08093900233507156, -0.06850100308656693, -0.7369999885559082, 0.19226999580860138, 0.022235000506043434, 0.4856500029563904, -0.033771999180316925, -0.368149995803833, 0.3178800046443939, 0.22228999435901642, 0.5730299949645996, 0.1773100048303604, -0.37762999534606934, -0.43893998861312866, -0.11039000004529953, 0.6404500007629395, -0.5204799771308899, 0.2769699990749359, -0.20235000550746918, 0.7275300025939941, -0.04125500097870827, 0.3713099956512451, -0.18153999745845795, -0.12884999811649323, 0.0803539976477623, 0.2575100064277649, -0.44132000207901, 0.5235099792480469, 0.3737100064754486, 0.8080000281333923, -0.09662699699401855, 0.2220900058746338, -0.11507999897003174, 0.8595100045204163, -0.3598000109195709, 0.053304001688957214, 0.586650013923645, 0.2978599965572357, 0.032749999314546585, -0.04536399990320206, -0.18006999790668488, -0.529449999332428, 0.548039972782135, -0.2679699957370758, -0.30046001076698303, -0.44227999448776245, -0.5120900273323059, 0.3113200068473816, 0.05477700009942055, 0.11163999885320663, -0.5525500178337097, -0.2962000072002411, 0.38269999623298645, 0.42302000522613525, 0.4636099934577942, 0.4674699902534485, -0.257860004901886, -0.5882800221443176, 0.33024999499320984, -0.12785999476909637, -1.1598999500274658, -0.16856999695301056, 0.0692719966173172, 0.2942099869251251, 0.25196000933647156, 0.06645199656486511, 1.100600004196167, -0.2072100043296814, 0.5696700215339661, -0.048294998705387115, 0.2922399938106537, -0.2505300045013428, -0.3094399869441986, -0.7736499905586243, 0.2898299992084503, 0.1467999964952469, -0.4555799961090088, -0.04267600178718567, 0.5471000075340271, 1.1412999629974365, -0.31290000677108765, 0.628059983253479, -0.40250998735427856, -0.548770010471344, 
0.15699000656604767, 0.5175700187683105, 0.3945100009441376, 0.5166900157928467, 1.9617999792099, -0.07099699974060059, 0.5524200201034546, -0.3341299891471863, 0.04203199967741966, -0.4258100092411041, 0.5598999857902527, -0.14997999370098114, 0.44133999943733215, -0.0783109962940216, 0.3409999907016754, 0.9475899934768677, -0.19864000380039215, 0.4699400067329407, -1.1958999633789062, -0.5592100024223328, -0.5985100269317627, -0.6438000202178955, 0.20689000189304352, -0.3819800019264221, 0.8713399767875671, -0.06437800079584122, -0.3784799873828888, 0.07691799849271774, -0.3842400014400482, -0.5391600131988525, 0.5927199721336365, -0.4846299886703491, 0.2768299877643585, 0.04951300099492073, -0.04136300086975098, -0.3128499984741211, 0.2500799894332886, -0.5736799836158752, 0.19718000292778015, -0.5720000267028809, 0.4485900104045868, -0.7296500205993652, -0.21427999436855316, -0.828540027141571, -0.23883000016212463, -0.05116400122642517, 0.49399998784065247, 0.09742800146341324, -1.219499945640564, -0.3384400010108948, -0.0012721000239253044, -0.20866000652313232, -0.5800999999046326, -0.11130999773740768, 0.0781330019235611, -0.5368300080299377, -0.16872000694274902, -0.10730999708175659, -0.31852999329566956, 0.1601399928331375, 0.3602299988269806, -0.14278000593185425, 0.10345999896526337, 0.1055700033903122, -0.4044800102710724, -1.34660005569458, 0.4810500144958496, 0.05017700046300888, 0.07222799956798553, -0.37525999546051025, -0.22673000395298004, -0.28314998745918274, 0.33149001002311707, 0.23277999460697174, -0.5394799709320068, -0.32714998722076416, 0.46342000365257263, 0.652999997138977, 0.216839998960495, -0.7004500031471252, -0.98198002576828, 0.07991500198841095, -0.519540011882782, 0.12080000340938568, -0.3033199906349182, 0.008907600305974483, 0.4986099898815155, 0.34185999631881714, -0.13289999961853027, 0.1145000010728836, -0.5907700061798096, -0.7939000129699707, 0.0313429981470108, -0.0835380032658577, 0.02485400065779686, -0.17023000121116638, 0.06460800021886826, 0.14943000674247742, 0.5669699907302856, -0.3406200110912323, -0.18313999474048615, -0.37226998805999756, -0.623740017414093, 0.19429999589920044], u'snow': [-0.6960999965667725, -0.33390000462532043, -0.6654199957847595, -0.1645900011062622, -0.7028300166130066, 0.05326399952173233, 0.5750799775123596, 1.1246000528335571, -0.4114300012588501, -0.9333500266075134, -0.3970000147819519, -0.13948999345302582, -0.2172500044107437, 0.49382999539375305, -0.16481000185012817, -0.4367299973964691, -0.39998000860214233, -0.14701999723911285, 0.5827999711036682, 0.7312300205230713, -0.16808000206947327, 0.05009299889206886, 0.20340999960899353, 0.09328299760818481, -0.18943999707698822, -0.009279600344598293, 0.006421300116926432, -0.5586000084877014, 0.07970800250768661, 0.03417700156569481, 0.503000020980835, -0.08412300050258636, -0.15241000056266785, 0.04239799827337265, -0.9586499929428101, 0.13481999933719635, 0.10694999992847443, 0.222120001912117, 0.1638299971818924, 0.08141600340604782, -0.6143699884414673, 0.6029899716377258, 0.5384299755096436, 0.33915001153945923, -0.0600459985435009, -0.12329000234603882, 0.30417001247406006, 0.06783799827098846, -0.05832900106906891, -0.24790999293327332, -0.2817699909210205, 0.32273000478744507, -0.1263899952173233, -0.4066399931907654, -0.4257799983024597, 0.7136600017547607, 0.18675999343395233, -0.4957599937915802, 0.566349983215332, 0.3941099941730499, -0.11875999718904495, 0.6279799938201904, 0.5019299983978271, -0.38534000515937805, -0.32332998514175415, 
-0.2961300015449524, -0.19840000569820404, 0.08204200118780136, -0.6366599798202515, -0.2517699897289276, 0.07022500038146973, 0.23885999619960785, -0.353410005569458, -0.30614998936653137, -0.7897999882698059, -0.014515000395476818, -0.0966619998216629, 0.27063998579978943, 0.37095001339912415, -0.39160001277923584, 0.15589000284671783, 0.4017600119113922, -0.1231599971652031, -0.00693110004067421, -0.17538000643253326, 0.2931700050830841, -0.03566199913620949, -0.06250300258398056, -0.11821000277996063, -0.26708000898361206, 0.3343299925327301, -0.4103899896144867, -0.44940999150276184, -0.058538999408483505, -0.5972999930381775, -0.060832999646663666, 0.014623000286519527, 0.031390998512506485, 0.041092999279499054, 0.21222999691963196, 0.5430399775505066, 0.5144400000572205, -0.24469999969005585, -0.03493700176477432, -0.6158300042152405, 0.24116000533103943, 0.9361199736595154, 0.29662999510765076, -0.017330000177025795, 0.3986400067806244, -0.39899998903274536, -0.6992700099945068, 0.010898999869823456, 0.04480399936437607, 0.09644400328397751, 0.20555000007152557, 0.37108999490737915, 0.13219000399112701, 0.29941999912261963, -0.2849400043487549, -0.07110299915075302, -0.4533799886703491, -0.22125999629497528, -0.31672999262809753, -0.10643000155687332, 0.040453001856803894, -0.15323999524116516, 0.33191001415252686, 0.2780100107192993, -0.2514300048351288, -0.41784000396728516, 1.135200023651123, 0.18708999454975128, 0.5793200135231018, 0.1491200029850006, 0.4273099899291992, -0.8135300278663635, 0.355459988117218, 0.10287000238895416, -0.10858000069856644, 0.1369200050830841, 0.11450999975204468, -0.6860700249671936, -0.17114999890327454, -0.5270799994468689, 0.2895300090312958, 0.5146999955177307, 0.2554900050163269, -0.23138999938964844, -0.4427500069141388, 0.4267899990081787, -0.41475000977516174, 0.04118200019001961, -0.266400009393692, 0.6096699833869934, 0.03782999888062477, 0.2737100124359131, -0.5267000198364258, 0.12029000371694565, 0.520799994468689, 0.5951899886131287, -1.131500005722046, 0.19505000114440918, -0.25279998779296875, 0.3463599979877472, 0.8206499814987183, 0.6327099800109863, 0.09168200194835663, 0.38433000445365906, -0.8110799789428711, 0.1823199987411499, 0.19067999720573425, -0.13030999898910522, 0.213359996676445, 0.07445400208234787, -0.09449800103902817, 0.4759399890899658, -0.31025999784469604, -0.1171799972653389, 0.09289100021123886, 0.22066999971866608, -0.16720999777317047, 0.7170299887657166, 0.30142998695373535, -0.40608999133110046, -0.16231000423431396, 0.31314998865127563, -0.5932499766349792, -0.5340399742126465, -0.10869999974966049, -0.2302599996328354, 0.3650699853897095, 0.3064799904823303, -0.7557600140571594, -0.20767000317573547, -0.4696600139141083, -0.21035000681877136, 0.009192399680614471, 0.5056999921798706, 0.4556399881839752, 0.8414499759674072, -0.19412000477313995, 0.2396399974822998, 0.858519971370697, 0.05229000002145767, -0.0011898999800905585, -0.2938700020313263, 0.044186998158693314, -0.23885999619960785, 0.19207000732421875, -0.007945899851620197, -0.25773000717163086, 0.3114500045776367, -0.4761500060558319, -0.0005643100012093782, -0.89410001039505, -0.38666999340057373, -0.3790700137615204, 0.5282099843025208, -0.455130010843277, 0.5356699824333191, 0.1321599930524826, 0.39741000533103943, -0.4903999865055084, 0.24118000268936157, -0.11714000254869461, 0.27006998658180237, 0.1518400013446808, 0.42315998673439026, -0.3970800042152405, 0.13827000558376312, -0.2763800024986267, 0.2990800142288208, 
-0.7600799798965454, 0.06175199896097183, -0.44519999623298645, -0.5131999850273132, 0.12123999744653702, 0.1579200029373169, -0.5706700086593628, -0.6879299879074097, -0.33873000741004944, -0.4329099953174591, -0.4681699872016907, -0.8466699719429016, -0.658519983291626, -0.591159999370575, -0.04340599849820137, -0.013031000271439552, 0.11246000230312347, -0.35374000668525696, 0.392300009727478, 0.11720000207424164, -0.5626800060272217, 0.8347700238227844, -0.3467499911785126, 0.05456800013780594, -0.4849399924278259, 0.12108000367879868, -0.15503999590873718, -0.04700800031423569, -0.26649999618530273, 0.02459299936890602, 0.701229989528656, 0.2128400057554245, -0.07779599726200104, 0.05083499848842621, 0.3865000009536743, 0.3753400146961212, -0.4874899983406067, -0.01373900007456541, 0.5785199999809265, -0.9042500257492065, -0.006280600093305111, -0.2867400050163269, -0.01774900034070015, -1.0189000368118286, -0.7137100100517273, -0.3655700087547302, -0.7341200113296509, -0.02737100049853325, -0.07139600068330765, 0.6479200124740601, -0.057280998677015305, -0.25119999051094055, 0.0395670011639595, 0.0769760012626648, 0.34571999311447144, 0.34606000781059265, -0.3832300007343292, -0.07401099801063538, -0.14153000712394714, -0.03109000064432621, 0.5313699841499329, -0.35708001255989075, -0.28262999653816223, 0.09866300225257874, 0.1769299954175949, -0.3929699957370758, 0.2770799994468689], u'rubber': [0.2986299991607666, 0.06507299840450287, -0.11800999939441681, -0.013868999667465687, -0.33441999554634094, -0.6196799874305725, 0.0966470018029213, 0.688510000705719, -0.012130999937653542, -0.5205399990081787, -0.0633540004491806, -0.284280002117157, -0.3378799855709076, -0.5685399770736694, 0.2430499941110611, -0.3166300058364868, -0.19812999665737152, 0.7768800258636475, 0.06390400230884552, 0.38328999280929565, -0.3698500096797943, 0.002942899940535426, 0.2739099860191345, 0.4095200002193451, -0.7394000291824341, 0.13287000358104706, -0.2543799877166748, 0.1751600056886673, -0.34529998898506165, 0.7320299744606018, 0.35207998752593994, -0.40283000469207764, -0.22826999425888062, 0.33076998591423035, 0.03148899972438812, 0.4479300081729889, 0.14959000051021576, -0.34338998794555664, 0.5040799975395203, 0.6566600203514099, -0.13862000405788422, -0.3896700143814087, -0.10209999978542328, -0.003396800020709634, -0.3211899995803833, -0.21886000037193298, -0.29502999782562256, -0.4569700062274933, 0.014921000227332115, 1.4467999935150146, 0.192780002951622, 0.43167999386787415, -0.11180999875068665, 0.4858799874782562, 0.455159991979599, 0.1584099978208542, -0.07071900367736816, 0.1256999969482422, 0.02778399921953678, -0.7682499885559082, 0.13549000024795532, -0.3788999915122986, -0.8564000129699707, -0.48135998845100403, 0.7329999804496765, -0.05452600121498108, -0.5305899977684021, -0.20156000554561615, -0.4888800084590912, 0.4542999863624573, -0.330020010471344, 0.30017998814582825, -0.24556000530719757, 0.6433699727058411, 0.05084700137376785, 0.31984999775886536, 0.3308899998664856, -0.11275999993085861, 0.1404999941587448, -0.5774099826812744, 0.2191700041294098, 0.3236300051212311, 0.001630699960514903, 0.38207998871803284, -0.4682599902153015, -0.03551200032234192, 0.04054199904203415, -0.1838800013065338, -0.5474900007247925, 0.06058499962091446, 0.2213200032711029, -0.39937999844551086, -0.2192399948835373, -0.31856000423431396, 0.43827998638153076, 0.20409999787807465, -0.6905800104141235, 0.12713000178337097, -0.3513199985027313, -0.7056099772453308, 0.05018499866127968, 
0.9441800117492676, -0.46459999680519104, -0.8096699714660645, 0.2837499976158142, 0.31582000851631165, -0.45987001061439514, -0.22417999804019928, -0.3726600110530853, -0.046720001846551895, 0.6299300193786621, -0.0013876999728381634, -0.19347000122070312, -0.2539600133895874, 0.51146000623703, 0.19089999794960022, 0.28334999084472656, 0.6500300168991089, 0.1527000069618225, -0.24301999807357788, -0.29218998551368713, -0.20714999735355377, 0.07373400032520294, -0.3077999949455261, -0.5839499831199646, 0.5425500273704529, 0.3295600116252899, -0.18283000588417053, 0.5159100294113159, 0.052848998457193375, 0.10576999932527542, 1.0450999736785889, 0.08685000240802765, 0.7823299765586853, -0.41321998834609985, -0.27790001034736633, 0.09461499750614166, 0.16659000515937805, 0.7350800037384033, 0.5509099960327148, 0.29973000288009644, 0.47947001457214355, 0.15665000677108765, -0.31314000487327576, -0.02555599994957447, 0.7722399830818176, -0.14316000044345856, -0.2017199993133545, -0.01362099964171648, -0.3018600046634674, -0.12732000648975372, 0.14295999705791473, 0.17744000256061554, -0.27441999316215515, 0.6162099838256836, -0.20860999822616577, 0.0681539997458458, -0.6604499816894531, 0.36476001143455505, -0.2655400037765503, -0.14114999771118164, -0.1935800015926361, 0.015495999716222286, -0.07668200135231018, 0.6678000092506409, -0.3740699887275696, 0.03956000134348869, 1.0391000509262085, 0.24718999862670898, 0.03426099941134453, -0.5722299814224243, 0.4587399959564209, 0.37490999698638916, 0.32493001222610474, 0.1035899966955185, -0.4659099876880646, 0.0006781899719499052, 0.5013999938964844, 0.039684999734163284, -0.09893699735403061, 0.26076000928878784, 0.07077699899673462, -0.029262999072670937, -0.10665000230073929, -0.09894700348377228, -0.3204900026321411, 0.751010000705719, 0.7310400009155273, 0.15775999426841736, -0.03548799827694893, 0.43623000383377075, 0.8976899981498718, -0.3375000059604645, -0.041127998381853104, 0.18987999856472015, 0.6549800038337708, 0.3146199882030487, -0.17563000321388245, 0.3952699899673462, 0.30741000175476074, 0.16116000711917877, 0.5968599915504456, 0.1319900006055832, 0.010394999757409096, -0.25290998816490173, 0.35238000750541687, 0.6152399778366089, -0.25044000148773193, -1.347499966621399, -0.526960015296936, -0.13305999338626862, 0.3326599895954132, 0.11784999817609787, -0.46347999572753906, 0.6791800260543823, -0.10200999677181244, 0.510699987411499, -0.25613000988960266, 0.03677799925208092, -0.5928999781608582, 0.4637100100517273, -0.6692600250244141, 0.4978500008583069, 0.20206999778747559, -0.1068200021982193, 0.3999499976634979, -0.5158600211143494, 0.08559499680995941, -0.32339999079704285, 0.2120400071144104, 0.39921000599861145, -0.14725999534130096, -0.2765200138092041, -0.06712699681520462, 0.8468300104141235, 0.3279399871826172, 0.05340899899601936, -0.656470000743866, -0.265749990940094, 0.36757999658584595, 0.23101000487804413, -0.08473599702119827, -0.7307900190353394, 0.2536099851131439, -0.2222599983215332, 0.16269999742507935, 0.1887200027704239, -0.0710889995098114, 0.04977300018072128, -0.47350001335144043, 0.38672998547554016, -0.9177500009536743, 0.609000027179718, -0.694890022277832, 0.5725600123405457, -0.048909999430179596, -0.7856900095939636, 0.28022998571395874, 0.22181999683380127, -0.33755001425743103, -0.40898001194000244, -0.17364999651908875, -0.15484000742435455, -0.3169499933719635, 0.14271999895572662, 0.9213399887084961, -0.9323400259017944, -0.17607000470161438, -0.06615299731492996, 0.1668899953365326, 
0.07637500017881393, -0.18207000195980072, 0.7608100175857544, -0.36103999614715576, -1.0786999464035034, -0.13590000569820404, -1.2842999696731567, -0.12902000546455383, -0.7518600225448608, 0.7635599970817566, -0.23419000208377838, -1.184000015258789, -0.6232399940490723, 0.43474000692367554, -0.12745000422000885, 0.11439000070095062, 0.5758500099182129, 0.17409999668598175, -0.46950000524520874, -0.085037000477314, -0.4584600031375885, -0.3844900131225586, 0.43893998861312866, 0.5967900156974792, 0.3405100107192993, 0.911620020866394, -0.430620014667511, -0.46198999881744385, -0.1901800036430359, 0.3259100019931793], u'field': [-0.061406999826431274, 0.8113499879837036, -0.3444800078868866, 0.012179000303149223, -0.42010000348091125, -0.3826200067996979, -0.3380100131034851, 0.03444800153374672, 0.08486200124025345, -1.1705000400543213, 0.4835900068283081, 0.1919800043106079, 0.10066000372171402, -0.3658500015735626, -0.22750000655651093, 0.44995999336242676, -0.6097699999809265, 0.4548099935054779, -0.09268900007009506, -0.032719001173973083, -0.2374899983406067, -0.23265999555587769, -0.020376000553369522, -0.10805000364780426, 0.12275999784469604, -0.12861000001430511, -0.21507999300956726, 0.243149995803833, 0.5108000040054321, 0.166360005736351, 0.4152500033378601, -0.4619300067424774, 0.055257998406887054, 0.0824190005660057, -1.1270999908447266, 0.15453000366687775, 0.8820899724960327, 0.5530499815940857, 0.0829479992389679, 0.21703000366687775, 0.13032999634742737, 0.08916500210762024, 0.1488099992275238, -0.1695600003004074, 0.3852899968624115, 0.08438099920749664, 0.09793700277805328, 0.2893500030040741, -0.12529000639915466, -0.1669600009918213, -0.13670000433921814, -0.4176200032234192, -0.38374000787734985, 0.08410300314426422, -0.3728500008583069, -0.28988000750541687, 0.022122999653220177, 0.00963549967855215, -0.04306099936366081, 0.1006999984383583, 0.0935320034623146, 0.23000000417232513, 0.5463299751281738, -0.17354999482631683, -0.542739987373352, -0.2516700029373169, 0.13003000617027283, -0.1565999984741211, 0.0423399992287159, -0.23593999445438385, -0.04647599905729294, 0.25971999764442444, 0.09581699967384338, -0.029601000249385834, -0.6776300072669983, -0.1512099951505661, 0.3594299852848053, -0.06719499826431274, 0.15127000212669373, -0.07429300248622894, 0.2504799962043762, -0.4974699914455414, -0.1171799972653389, -0.030990000814199448, -0.20111000537872314, 0.01695300079882145, 0.18941999971866608, -0.3083699941635132, 0.702750027179718, -0.332720011472702, 0.38036999106407166, 0.7501699924468994, 0.07282800227403641, -0.020130999386310577, -0.1149199977517128, -0.1311500072479248, -0.7482200264930725, -0.474700003862381, -0.16464999318122864, -0.40435999631881714, -0.1106100007891655, 0.32975998520851135, -0.0256120003759861, 0.1242000013589859, -0.34981000423431396, 0.46998000144958496, 0.38332000374794006, 0.387470006942749, -0.6282899975776672, 0.5948799848556519, 0.11378999799489975, -0.6088100075721741, 0.16469000279903412, 0.2824699878692627, -0.2855300009250641, 0.431769996881485, 0.04568599909543991, 0.10518000274896622, 0.39361000061035156, 0.4465999901294708, 0.19784000515937805, 0.18156999349594116, 0.5763900279998779, -0.0047332001850008965, 0.11302000284194946, 0.3533500134944916, 0.4905500113964081, -0.46327999234199524, -0.4779199957847595, 0.1404999941587448, 0.09761299937963486, 0.2230599969625473, -0.390529990196228, 0.09979599714279175, 0.1488499939441681, 0.11817000061273575, 0.4496600031852722, 0.25900998711586, -0.3422299921512604, 
-0.06939200311899185, -0.24922999739646912, -0.318450003862381, 0.24714000523090363, -0.029732000082731247, 0.18209999799728394, 0.06750299781560898, 0.04010400176048279, 0.35892999172210693, 0.2265699952840805, 0.07471299916505814, 0.4026600122451782, -0.1146399974822998, -0.020865999162197113, -0.5087599754333496, 0.35677000880241394, 0.026360999792814255, 0.2780500054359436, -0.10739000141620636, 0.357450008392334, -0.18343999981880188, -0.31540998816490173, -0.2886599898338318, 0.22266000509262085, -0.3608199954032898, 0.28022998571395874, -0.2690500020980835, -0.20816999673843384, 0.7192500233650208, -0.2807900011539459, 0.06561899930238724, 0.5054200291633606, -0.023993000388145447, 0.025856999680399895, -0.6273000240325928, 0.15936000645160675, 0.04439299926161766, 0.6028900146484375, -0.058876000344753265, -0.5834699869155884, 0.13192999362945557, -0.02994300052523613, 0.7610999941825867, 0.1432500034570694, 0.43641000986099243, -0.19095000624656677, -0.28251999616622925, -0.21091000735759735, -0.06511499732732773, -0.63823002576828, 0.09100800007581711, -0.20243999361991882, 0.5629799962043762, 0.09887900203466415, -0.0426190011203289, -0.4925200045108795, 0.33518001437187195, 0.3081600069999695, 0.4332500100135803, 0.015955999493598938, -0.18276000022888184, 0.9791300296783447, 0.22793999314308167, -0.45155999064445496, -0.6323599815368652, 0.505649983882904, -0.11806000024080276, 0.37237000465393066, 0.18649999797344208, 0.43132999539375305, 0.1575399935245514, 0.24085000157356262, 0.18782000243663788, -0.5006700158119202, -0.3201499879360199, -0.4542199969291687, 0.1463100016117096, -0.4248799979686737, -0.1457200050354004, 0.2694399952888489, -0.3357900083065033, 0.41381001472473145, -0.16944999992847443, -0.06112400069832802, 0.019040999934077263, 0.06546200066804886, -0.5783200263977051, -0.032218001782894135, 0.1300099939107895, -0.1440500020980835, -0.2364100068807602, 0.45201998949050903, 0.30764999985694885, 0.511430025100708, 0.1645900011062622, -0.4985699951648712, 0.45267000794410706, 0.08387000113725662, -0.09980200231075287, -0.6741499900817871, 0.12794999778270721, 0.20916999876499176, -0.123989999294281, 0.20307999849319458, 0.14564000070095062, -0.8216599822044373, -0.6837999820709229, -0.4498400092124939, 0.6438900232315063, -0.2509700059890747, 0.25376999378204346, 0.1024399995803833, 0.21859000623226166, -0.5861799716949463, -0.9214800000190735, 0.9865900278091431, -0.1233299970626831, -0.06552600115537643, -0.3598400056362152, -0.1704999953508377, -0.007354999892413616, 0.22894999384880066, -0.5842499732971191, 0.5024799704551697, -0.38141000270843506, -0.31457000970840454, -0.5232700109481812, -0.697629988193512, -0.11604999750852585, 0.6168799996376038, 0.02544800005853176, 0.13319000601768494, -0.6249300241470337, -0.14690999686717987, -0.15919999778270721, -0.46000000834465027, 0.06009500101208687, -1.9464000463485718, 0.3186900019645691, 0.14740000665187836, 0.4817799925804138, -0.8748800158500671, -0.2878200113773346, 0.30281999707221985, -0.013024999760091305, 0.2805800139904022, -0.34237998723983765, -0.06404999643564224, 0.10233999788761139, 0.2633399963378906, -0.07237699627876282, -0.1907300055027008, -0.244609996676445, -0.5953699946403503, 0.07592800259590149, 0.2828100025653839, 0.4627699851989746, 0.45576000213623047, -0.5138900279998779, 0.1250399947166443, 0.29368001222610474], u'sword': [0.07666999846696854, -0.11688999831676483, 0.39563000202178955, -0.5665900111198425, 0.16203999519348145, 0.7889800071716309, -0.006830200087279081, 
0.7134900093078613, -0.0739549994468689, -0.5066099762916565, 0.32054001092910767, -0.013856000266969204, -0.4099099934101105, -0.0009621800272725523, -0.354310005903244, 0.34898000955581665, -0.13729000091552734, 0.6599500179290771, 0.16315999627113342, -0.10567999631166458, -0.1236800029873848, -0.12953999638557434, 0.21727000176906586, 0.21383999288082123, 0.8621000051498413, -0.1542699933052063, -0.1506199985742569, -0.43441998958587646, 0.12565000355243683, 0.555899977684021, 0.7495099902153015, 0.11501000076532364, 0.024744000285863876, -0.1220100000500679, -0.12140999734401703, -0.3611699938774109, 0.2881700098514557, 0.18470999598503113, -0.08506599813699722, 0.3754900097846985, 0.27285999059677124, -0.01800600066781044, -0.26166999340057373, -0.31659001111984253, 0.12117999792098999, -0.059783998876810074, 0.09412799775600433, -0.4507800042629242, 0.3974300026893616, 0.11253000050783157, -0.8394200205802917, -0.11249999701976776, 0.3630000054836273, 0.1255899965763092, -0.7459099888801575, -0.5253700017929077, -0.4276599884033203, 0.7964000105857849, 0.5190200209617615, 0.23075999319553375, 0.3870700001716614, 0.6206600069999695, 0.49702000617980957, 0.7643600106239319, -0.1279900074005127, -0.6298199892044067, -0.7659199833869934, 0.027566999197006226, 0.6222400069236755, 0.18964000046253204, 0.06543400138616562, 0.2609800100326538, -0.04850799962878227, 0.04970400035381317, 0.11343000084161758, 0.49935999512672424, 0.03454200178384781, -0.3218199908733368, -0.44179001450538635, -0.19518999755382538, 0.02573399990797043, 0.688040018081665, 0.48921999335289, 0.14916999638080597, -0.19990000128746033, -0.025529999285936356, -0.07955899834632874, 0.20096999406814575, -0.2683199942111969, 0.3049199879169464, 0.21979999542236328, -0.014713999815285206, 0.6851699948310852, -0.1678999960422516, -0.003047599922865629, -0.42305999994277954, 0.12190999835729599, 0.0600150004029274, 0.7619400024414062, 0.23339000344276428, -0.031686000525951385, 0.22362999618053436, -0.700190007686615, 0.3642599880695343, 0.7129600048065186, -0.12636999785900116, 0.11482000350952148, -0.05618499964475632, -0.2287999987602234, -0.4003799855709076, 0.4021500051021576, 0.6374800205230713, -0.15086999535560608, -0.31657999753952026, 0.3953999876976013, -0.00019159000657964498, -0.4250600039958954, 0.06533800065517426, 0.06228400021791458, -0.6771100163459778, -0.18758000433444977, -0.1684899926185608, -0.8660699725151062, -0.1132500022649765, -0.1799599975347519, 0.2047799974679947, -0.42785000801086426, -0.2826699912548065, -0.1944900006055832, -0.39914000034332275, 0.19708000123500824, 0.2333800047636032, 0.6750100255012512, 0.17925000190734863, -0.29124000668525696, -0.510860025882721, 0.22684000432491302, -0.17990000545978546, 0.5665599703788757, 0.20796999335289001, 0.3754099905490875, 0.12952999770641327, -0.387800008058548, -0.2992999851703644, 0.2874299883842468, 0.18756000697612762, -0.17228999733924866, -0.19222000241279602, -0.2815600037574768, -0.2372400015592575, 0.16283999383449554, 0.48723000288009644, -0.36256998777389526, -0.4303100109100342, 0.11941000074148178, -0.3292100131511688, 0.18609000742435455, -0.4478299915790558, -0.04190700128674507, -0.19810999929904938, 0.6362699866294861, 0.28790000081062317, 0.384880006313324, 0.6343700289726257, -0.19122999906539917, -0.0025696000084280968, 0.1671999990940094, 0.03349899873137474, -0.6250699758529663, 0.019896000623703003, -0.31887999176979065, -0.017836999148130417, -0.013794000260531902, 0.25266000628471375, 0.028946999460458755, 
-0.6083599925041199, -0.02837499976158142, 0.10074000060558319, 0.36169999837875366, -0.3591099977493286, 0.05906499922275543, 0.2468699961900711, 0.11879999935626984, 0.30465999245643616, 0.5799499750137329, -0.1244100034236908, -0.08124800026416779, 0.4625599980354309, -0.16931000351905823, -0.07104499638080597, -0.23401999473571777, -0.026427000761032104, 0.6953499913215637, 0.38245999813079834, 0.09155700355768204, 0.2662999927997589, -0.6012099981307983, -0.7508800029754639, 0.12071000039577484, -0.3053100109100342, 1.0339000225067139, 0.4977400004863739, -0.006051300093531609, 0.3640199899673462, 0.25920000672340393, 0.08138100057840347, 0.05954800173640251, 0.06489299982786179, -0.4673199951648712, -0.1613599956035614, -0.21529999375343323, 0.22359000146389008, 0.1094600036740303, -0.0653349980711937, 0.5081899762153625, -0.15209999680519104, 0.1183599978685379, -0.176269993185997, -0.8135600090026855, 0.11642000079154968, -0.7399600148200989, 0.2430800050497055, 0.09023900330066681, 0.07541900128126144, -0.8668500185012817, -0.8370500206947327, -0.4331200122833252, -0.11330000311136246, 0.11255999654531479, -0.47040998935699463, 0.13162000477313995, -0.21279999613761902, -0.43213000893592834, -0.11440999805927277, -0.3075900077819824, 0.4066999852657318, -0.8103200197219849, -0.3734000027179718, 0.15764999389648438, 0.43136999011039734, 0.3415899872779846, -0.32444000244140625, -0.30654001235961914, -0.19267000257968903, 0.16660000383853912, 0.47516998648643494, -0.7835299968719482, 0.3694100081920624, -0.3214299976825714, -0.24834999442100525, -0.7568299770355225, 0.35168999433517456, 0.09386900067329407, -0.46998000144958496, 0.45423999428749084, -0.1436000019311905, -0.1666100025177002, -0.05446700006723404, 0.09312699735164642, -0.1529799997806549, 0.16169999539852142, -0.09358800202608109, 0.27039000391960144, 0.02868800051510334, -0.5258899927139282, 0.03209799900650978, -0.717490017414093, 0.0042213997803628445, -0.7178000211715698, 0.3968000113964081, 0.008993299677968025, -0.6252400279045105, 0.43970999121665955, 0.6108400225639343, -0.7174199819564819, -0.15949000418186188, -1.111799955368042, -0.5652599930763245, -0.6171299815177917, -0.1292800009250641, 0.37279000878334045, 0.32166001200675964, -0.3732900023460388, -0.15442000329494476, 0.27110999822616577, -0.30614998936653137, -0.6612200140953064, 0.006881400011479855, -0.34595000743865967, 0.01002500019967556, 0.08417200297117233, 0.09989400207996368, -0.27105000615119934, 0.07768899947404861, 0.13753999769687653, 0.33754000067710876, 0.46939000487327576, 0.4787200093269348, -0.12190999835729599, -0.11784999817609787], u'forest': [-0.4553300142288208, 0.14614999294281006, -0.028433000668883324, 0.4449000060558319, 0.2285899966955185, -0.15681999921798706, 0.37571001052856445, 0.7640500068664551, 0.05597500130534172, -0.6198700070381165, 0.0308190006762743, 0.27226999402046204, -0.7272599935531616, -0.3653999865055084, -0.06481800228357315, 0.028324000537395477, -0.16965000331401825, -0.33952000737190247, -0.0022130999714136124, 0.6386100053787231, -0.027898000553250313, 0.16787000000476837, 0.36421000957489014, 0.3041499853134155, -0.5038700103759766, -0.19586999714374542, -0.18714000284671783, -0.7062699794769287, -0.7008100152015686, 0.6091700196266174, 1.118299961090088, -0.2337300032377243, -0.186489999294281, -0.532800018787384, -0.3072499930858612, -0.2736299932003021, 0.22200000286102295, -0.33090999722480774, 0.0373230017721653, -0.5599499940872192, -0.11685000360012054, -0.23660999536514282, 
0.1239200010895729, 0.25042998790740967, -0.13211999833583832, -0.7863600254058838, 0.3677999973297119, -0.08190499991178513, 0.2209099978208542, -0.15232999622821808, -0.28883999586105347, -0.08431000262498856, -0.31373998522758484, -0.027212999761104584, -0.34068000316619873, -0.004175299778580666, 0.45120999217033386, -0.6420199871063232, 0.4110899865627289, -0.3860200047492981, -0.5005999803543091, -0.16268999874591827, 0.3902199864387512, 0.39820998907089233, -0.06618300080299377, -0.3976300060749054, -0.04666300117969513, -0.025479000061750412, 0.17369000613689423, -0.4007900059223175, -0.1694899946451187, 0.0035663999151438475, -0.19540999829769135, 0.0956140011548996, -0.7635200023651123, 0.6553699970245361, 0.314300000667572, -0.09836799651384354, -0.16574999690055847, -0.25516998767852783, -0.13888999819755554, -0.19228999316692352, 0.043122999370098114, -0.5199199914932251, 0.06196900084614754, -0.1434199959039688, 0.07853200286626816, 0.4482100009918213, 0.31909000873565674, 0.1433500051498413, -0.05810900032520294, -0.6963800191879272, 1.070199966430664, 0.6493099927902222, -0.08873999863862991, -0.2679300010204315, 0.8350200057029724, 0.14541999995708466, -0.12591999769210815, -0.338019996881485, 0.26030001044273376, 0.7916399836540222, -0.3965199887752533, 0.1432500034570694, -0.5196200013160706, 0.011504000052809715, 0.7020000219345093, 0.3754799962043762, 0.07944600284099579, -0.14177000522613525, -0.09503400325775146, -1.180899977684021, 0.007367799989879131, 0.23003000020980835, -0.008278599940240383, -0.4730300009250641, 0.3493799865245819, 0.5358800292015076, 0.1505499929189682, 0.7030500173568726, -0.5055500268936157, -0.07188999652862549, -0.32444998621940613, 0.28999999165534973, -0.7345700263977051, -0.08603999763727188, 0.14427000284194946, -0.10975000262260437, -0.047182999551296234, -0.21708999574184418, 0.3119100034236908, 0.8053500056266785, 0.20618000626564026, 0.43595001101493835, 0.3619900047779083, 0.31613999605178833, -0.06443999707698822, 0.534280002117157, -0.17955000698566437, -0.08309999853372574, 0.9397799968719482, -0.2620899975299835, -0.4823099970817566, -0.38506001234054565, -0.5208600163459778, 0.1724500060081482, 0.358489990234375, -0.011175000108778477, -0.46772000193595886, -1.0341999530792236, 0.5318499803543091, 0.062052998691797256, -0.5852500200271606, 0.056696999818086624, 0.14509999752044678, -0.10209999978542328, 0.17246000468730927, 0.16620999574661255, -0.022647999227046967, 0.42379000782966614, 0.1846799999475479, -0.5909000039100647, 1.1274000406265259, 0.20784999430179596, -0.34793001413345337, -0.5600799918174744, 0.37942999601364136, -0.10337000340223312, -0.7009299993515015, -0.23818999528884888, 0.07017499953508377, -0.19878000020980835, 0.25356000661849976, 0.7009099721908569, -0.16054999828338623, 0.18550999462604523, 0.4791699945926666, 0.24014000594615936, -0.32120999693870544, -0.46852999925613403, -0.07366199791431427, -0.00921849999576807, -0.217849999666214, -0.7259299755096436, 0.08939900249242783, -0.6579300165176392, -0.5604699850082397, 0.05898800119757652, -0.10224000364542007, 0.21549999713897705, 0.1710900068283081, 0.6637899875640869, -0.22721999883651733, -0.3616900146007538, 0.23124000430107117, 0.03875900059938431, 0.30776000022888184, -0.6998100280761719, 0.03141399845480919, 0.3068299889564514, 0.7733200192451477, -0.044029999524354935, -0.34839001297950745, -0.40202000737190247, 0.26883000135421753, -0.0619329996407032, -0.48627999424934387, -0.11219000071287155, -0.30441999435424805, -1.0264999866485596, 
-0.2310599982738495, -0.3088200092315674, -0.38929998874664307, -0.9232800006866455, 0.25797000527381897, -0.09213099628686905, 0.044996000826358795, 0.3875199854373932, 0.7087399959564209, -0.1343899965286255, 0.23704999685287476, -0.8527200222015381, -0.25018998980522156, -0.4983600080013275, 0.13038000464439392, -0.047123998403549194, -0.40334999561309814, 0.3156200051307678, -0.25439000129699707, 0.5812000036239624, 0.3692600131034851, 0.43483999371528625, 0.08927299827337265, -0.3935000002384186, 0.42899999022483826, -0.33612000942230225, -0.14174999296665192, 0.19393999874591827, 0.27660998702049255, 0.17994999885559082, 0.2510800063610077, 0.4675700068473816, 0.49500998854637146, -0.013867000117897987, -0.7776200175285339, 0.25115999579429626, 0.48517999053001404, -0.15771999955177307, 0.10424000024795532, -0.20434999465942383, 0.22687000036239624, -0.40483999252319336, 0.23757000267505646, -0.2589600086212158, 0.9761099815368652, 0.2214599996805191, -0.037477001547813416, 0.38273000717163086, 0.05754299834370613, -0.32328000664711, 0.19115999341011047, 0.03038799948990345, 0.23113000392913818, -0.0014016999630257487, -0.4591600000858307, 0.3042599856853485, -0.5610600113868713, 0.4709300100803375, 0.5023699998855591, -0.0022698999382555485, 0.275519996881485, -0.007533799856901169, -0.3501800000667572, -0.14698000252246857, 0.2326200008392334, -0.06268099695444107, -1.4399000406265259, 0.20734000205993652, 0.18873000144958496, -0.33006998896598816, -0.4044699966907501, -0.15041999518871307, -0.12377999722957611, -0.3385300040245056, -0.6320000290870667, -0.3138900101184845, 0.25014999508857727, -0.5543100237846375, 0.35221999883651733, -0.30357998609542847, 0.07054299861192703, -0.11794000118970871, -0.5719199776649475, 0.02833300083875656, 0.2984200119972229, 0.7641199827194214, 0.09679900109767914, 0.13560999929904938, 0.16078999638557434, 0.18308000266551971], u'animal': [0.2565299868583679, 0.6659200191497803, -0.5313000082969666, 0.2034199982881546, 0.4004899859428406, -0.23473000526428223, 0.09909000247716904, 0.05783399939537048, -0.12076999992132187, -1.1297999620437622, 0.351639986038208, -0.3264999985694885, -0.6683300137519836, 0.1878799945116043, 0.22281000018119812, 0.054691001772880554, 0.02574400044977665, 0.31266000866889954, -0.28723999857902527, 0.23792999982833862, -0.040330998599529266, 0.32853999733924866, 0.22044000029563904, 0.4564700126647949, -0.3756299912929535, 0.4340200126171112, 0.3815400004386902, -0.3524799942970276, 0.19494999945163727, 0.4075999855995178, -0.3786500096321106, 0.23547999560832977, -0.8429099917411804, -0.49410998821258545, 0.052115000784397125, -0.16011999547481537, 0.6431400179862976, 0.3040499985218048, -0.4385499954223633, -0.24153999984264374, -0.2404399961233139, -0.23006999492645264, 0.14079999923706055, -0.5330700278282166, -0.5171999931335449, -0.06473399698734283, -0.12547999620437622, 0.07800299674272537, 0.36785000562667847, 0.24616000056266785, -0.1687600016593933, 0.509440004825592, -0.05587499961256981, 0.11483000218868256, 0.29214999079704285, -0.02961600013077259, 0.09481099992990494, 0.02406900003552437, -0.632610023021698, -0.40382999181747437, -0.20840999484062195, 0.039753999561071396, 0.5818899869918823, 0.11466000229120255, -0.33500000834465027, -0.2722499966621399, -0.2824000120162964, -0.5117999911308289, 0.22428999841213226, 0.4560000002384186, -0.15357999503612518, 0.7042800188064575, 0.3029400110244751, -0.20954999327659607, -0.011788999661803246, 0.2949199974536896, 0.189520001411438, 0.03159099817276001, 
0.12140999734401703, 0.6594799757003784, 0.14449000358581543, -0.17718000710010529, -0.3830699920654297, 0.1444700062274933, -0.10626000165939331, -0.43810999393463135, 0.04372499883174896, 0.20719000697135925, -0.527649998664856, -0.16038000583648682, -0.2649799883365631, -0.24921999871730804, 0.27570998668670654, 0.1018500030040741, -0.0028409999795258045, 0.44866999983787537, 0.14924000203609467, 0.0026418000925332308, -0.5196599960327148, -0.30382001399993896, 0.44991999864578247, 0.14053000509738922, -0.04528899863362312, 0.15192000567913055, -0.03029000014066696, 0.2046400010585785, -0.09852500259876251, 0.11546999961137772, 0.29337000846862793, 0.8753799796104431, 0.033805001527071, -0.023912999778985977, -1.073699951171875, 0.14057999849319458, 0.23330000042915344, 0.4397900104522705, 0.015073000453412533, 0.48339998722076416, -0.04738900065422058, 0.26837000250816345, -1.0795999765396118, 0.3658899962902069, -0.0595179982483387, 0.4625000059604645, -0.4977700114250183, 0.14284999668598175, -0.4919799864292145, 0.5266299843788147, -0.10239999741315842, -0.2236199975013733, 0.684499979019165, -0.2836399972438812, 0.20221999287605286, 0.4528000056743622, -0.371289998292923, -0.30313000082969666, 0.43129000067710876, -0.167820006608963, -0.1497199982404709, 0.7798699736595154, -0.052025001496076584, 0.1359899938106537, -0.02482300065457821, -0.6685600280761719, 0.15082000195980072, 0.12764999270439148, -0.02181600034236908, 0.03223999962210655, -0.22332000732421875, 0.021629000082612038, 0.06932500004768372, 0.5422000288963318, -0.19016000628471375, -0.30066999793052673, 0.17117999494075775, 0.23500999808311462, -0.2186799943447113, 0.053247999399900436, -0.2387399971485138, 0.0728830024600029, 0.41495001316070557, 0.4085800051689148, 0.27456000447273254, -0.16155999898910522, 0.2637999951839447, 0.014453000389039516, 0.2439199984073639, -0.5014500021934509, -0.31703001260757446, 0.21059000492095947, -0.04576199874281883, -0.039115000516176224, 0.19433000683784485, 0.5992000102996826, 0.042562998831272125, -0.12241999804973602, 0.32646000385284424, -0.40514999628067017, 0.013263000175356865, -0.5655800104141235, 0.1998399943113327, -0.1989700049161911, -0.4957900047302246, -0.36991000175476074, -0.1800999939441681, -0.22556999325752258, 0.0028444998897612095, -0.2533400058746338, 0.5590900182723999, -0.5683500170707703, -0.46966999769210815, 0.06097399815917015, -0.41321998834609985, -0.2499600052833557, -0.28459998965263367, 0.19744999706745148, -0.09938699752092361, -0.24677999317646027, -0.5394600033760071, 0.3160899877548218, 0.771049976348877, -0.32183000445365906, 0.36138999462127686, -0.0870250016450882, 0.5662599802017212, 0.7648599743843079, -0.14511999487876892, -0.11134999990463257, -0.14413000643253326, 0.5301600098609924, 0.133760005235672, -0.2758199870586395, -0.1320600062608719, -0.6573299765586853, 0.058187998831272125, 0.38300999999046326, 0.056685999035835266, 0.16166000068187714, 0.2745800018310547, 0.1724500060081482, -0.2945899963378906, 0.0962970033288002, -0.5051299929618835, -0.5278599858283997, -0.1012599989771843, 0.16703000664710999, -0.0690540000796318, -0.09359599649906158, -0.038444001227617264, 0.4320800006389618, 0.5994600057601929, 0.07432900369167328, 0.11862999945878983, -0.355320006608963, 0.20506000518798828, 0.20252999663352966, -0.029247000813484192, -0.27250000834465027, -0.5608400106430054, 0.13691000640392303, -0.3948900103569031, -0.07884600013494492, -0.2933799922466278, -0.193790003657341, -1.5217000246047974, 0.19235999882221222, 
0.13663999736309052, -0.21657000482082367, 0.02522999979555607, 0.6789699792861938, -0.33232998847961426, -0.3061099946498871, 0.09135899692773819, -0.012469000183045864, 0.9517999887466431, 0.7577199935913086, 0.2700499892234802, -0.4959399998188019, -0.44736000895500183, 0.03477900102734566, -0.21820999681949615, -0.44495001435279846, -0.04699299857020378, -0.21626000106334686, -0.05405300110578537, 0.49917998909950256, -0.33855000138282776, 0.42897000908851624, -0.5569900274276733, 0.4239799976348877, -0.06568100303411484, 0.24517999589443207, 0.006638399790972471, -0.22098000347614288, -0.2653200030326843, 0.4981299936771393, -1.9490000009536743, 0.2665799856185913, 0.11726000159978867, 0.03715899959206581, -0.5181800127029419, -0.14343999326229095, 0.005057500209659338, 0.32357001304626465, -0.7579299807548523, 0.12188000231981277, 0.3437800109386444, 0.015168000012636185, 0.20837999880313873, -0.0841199979186058, 0.0203079991042614, -0.5185800194740295, -0.23014000058174133, -0.532729983329773, -0.17937999963760376, -0.14921000599861145, 0.24040000140666962, 0.22181999683380127, 0.6888300180435181, -0.018990999087691307], u'elephant': [0.21552999317646027, 0.17279000580310822, -0.1679600030183792, 0.37042999267578125, 0.322519987821579, 0.337909996509552, 0.4079799950122833, 0.2532599866390228, -0.5283700227737427, -0.17699000239372253, -0.1487800031900406, -0.06152600049972534, -0.5151000022888184, 0.09721899777650833, 0.15772999823093414, -0.05737899988889694, 0.37674999237060547, -0.018866000697016716, -0.6496899724006653, -0.20750999450683594, -0.2642599940299988, 0.37092000246047974, 0.19485999643802643, 0.21626000106334686, -0.5140100121498108, 0.024080000817775726, 0.023307999595999718, -0.24848000705242157, 0.03832000121474266, 0.8095600008964539, 0.11740999668836594, -0.18875999748706818, -0.6737099885940552, -0.20262999832630157, 0.35005998611450195, 0.41065001487731934, 0.13683000206947327, 0.3804599940776825, -0.5178599953651428, -0.09970799833536148, -0.017020000144839287, -0.20879000425338745, -0.18681000173091888, -0.6404399871826172, -0.41538000106811523, -0.2214599996805191, 0.4196000099182129, -0.25001999735832214, 0.21875, -0.174919992685318, 0.5755299925804138, -0.17824000120162964, -0.46709001064300537, 0.2672100067138672, 0.34064000844955444, 0.13872000575065613, -0.008340500295162201, 0.6792799830436707, -0.6369699835777283, 0.027341999113559723, -0.38947001099586487, -0.42949000000953674, 0.4889099895954132, 0.21014000475406647, 0.5685799717903137, 0.44429001212120056, -0.5168399810791016, -0.3212299942970276, 0.4338099956512451, -0.11834999918937683, -0.3405100107192993, 0.5361499786376953, -0.18709999322891235, -0.2051900029182434, 0.43377000093460083, 0.08907099813222885, 1.080399990081787, -0.18560999631881714, -0.011586000211536884, -0.016495000571012497, 0.3427700102329254, 0.11123000085353851, -0.01979999989271164, -0.4001699984073639, -0.21428999304771423, -0.4786800146102905, -0.3737800121307373, 0.03560100123286247, -0.49206000566482544, -0.6378399729728699, 0.02162100002169609, -0.3672400116920471, -0.1472499966621399, 0.3010199964046478, -0.11917000263929367, 0.17238999903202057, 0.40389999747276306, 0.0138349998742342, -0.27035000920295715, 0.21749000251293182, 0.11377999931573868, 0.5786600112915039, 0.6745499968528748, -0.6456599831581116, -0.11213000118732452, -0.4332599937915802, -0.07657899707555771, -0.14372999966144562, 0.2677200138568878, 0.643779993057251, 0.03299200162291527, 0.38988998532295227, -1.0908000469207764, 
-0.11351999640464783, 0.3078500032424927, -0.13473999500274658, 0.1955299973487854, 0.5431200265884399, -0.23792999982833862, -0.4691300094127655, -0.4127900004386902, -0.11948999762535095, -0.4653399884700775, -0.21020999550819397, -0.2372100055217743, 0.14337000250816345, -0.04265499860048294, 0.3118300139904022, -0.3639200031757355, -0.8952500224113464, 0.43397998809814453, -0.4758000075817108, 0.06043199822306633, 0.28001999855041504, 0.6369799971580505, -0.027512000873684883, 0.4928300082683563, -0.43160998821258545, -0.05243799835443497, 0.2473600059747696, 0.1393599957227707, -0.04625599831342697, -0.5672399997711182, -0.06536000221967697, -0.013964000158011913, -0.09832099825143814, 0.5993899703025818, -0.13343000411987305, -0.1698099970817566, -0.04595800116658211, -0.5268200039863586, 0.761650025844574, 0.08529800176620483, 0.11107999831438065, -0.013930000364780426, 0.13439999520778656, -0.0014236000133678317, -0.16745999455451965, -0.11965999752283096, 0.49226999282836914, 0.49059998989105225, 0.21020999550819397, 0.8982300162315369, -0.2561900019645691, -0.048149000853300095, -0.2500799894332886, 0.31488001346588135, -0.018610000610351562, -0.15380999445915222, -0.4606899917125702, 0.28189998865127563, -0.015572000294923782, 0.7160599827766418, -0.06634599715471268, -0.1766500025987625, -0.27893999218940735, 0.707260012626648, 0.1498900055885315, 0.10491999983787537, -0.2029699981212616, -0.0018992000259459019, 0.09890399873256683, -0.3056100010871887, -0.16651999950408936, 0.20035000145435333, -0.6391699910163879, 0.18286000192165375, 0.5062000155448914, 0.5785300135612488, 0.21062999963760376, 0.38624998927116394, 0.20771999657154083, -0.034758999943733215, -0.10518000274896622, -0.35192999243736267, 0.27382999658584595, 0.15986000001430511, -0.03789300099015236, -0.25352999567985535, 0.16399000585079193, 1.573199987411499, -0.19678999483585358, 0.05821999907493591, -0.45618999004364014, 0.5029000043869019, 0.7412800192832947, -0.197720006108284, 0.16896000504493713, -0.13964000344276428, -0.17723999917507172, 0.013616999611258507, -0.12544000148773193, 0.04627000167965889, -0.6227899789810181, 0.3919000029563904, -0.7015399932861328, -0.23221999406814575, -0.3003700077533722, 0.020476000383496284, 0.34073999524116516, 0.02336600050330162, -0.10034000128507614, -0.03201499953866005, -0.3714900016784668, -0.42142999172210693, -0.38885998725891113, -0.16684000194072723, -0.09022299945354462, -0.12665000557899475, 0.02461099997162819, 0.1858299970626831, -0.34303000569343567, -0.28161999583244324, -0.02316500060260296, 0.2960500121116638, 0.3580299913883209, -0.6390900015830994, -0.4004499912261963, 0.1022299975156784, -0.2969299852848053, -0.4341900050640106, 0.30849000811576843, -0.22214999794960022, 0.19107000529766083, -1.2367000579833984, -0.1557299941778183, 0.028380999341607094, 0.20699000358581543, 0.25308001041412354, -0.012864000163972378, -0.23680999875068665, -0.3848400115966797, 0.0017180000431835651, -0.3661699891090393, 0.8171600103378296, -0.09626699984073639, -0.004539899993687868, 0.052250999957323074, -0.010517000220716, -0.15539999306201935, 0.06466100364923477, 0.3440699875354767, 0.18065999448299408, -0.10014999657869339, -0.3695099949836731, 0.11388999968767166, 0.3522000014781952, 0.4823499917984009, -0.5789099931716919, 0.4420900046825409, 0.11558999866247177, 0.06906700134277344, -0.1590300053358078, -0.5489599704742432, -0.2881999909877777, -0.24075999855995178, -1.0322999954223633, -0.049747999757528305, -0.3921700119972229, 0.22363999485969543, 
-0.6814799904823303, -0.4582900106906891, 0.3792699873447418, 0.24295000731945038, -0.9356300234794617, -0.22238999605178833, -0.36730000376701355, 0.1188800036907196, -0.30726000666618347, -0.6433299779891968, 0.015829000622034073, 0.0722069963812828, -0.4367299973964691, -0.31227999925613403, -0.12380000203847885, -0.08451999723911285, 0.14024999737739563, 0.16912999749183655, 0.490229994058609, 0.3298499882221222], u'tile': [0.6001999974250793, 0.17053000628948212, -0.16619999706745148, -0.6670799851417542, -0.4564499855041504, -0.4809199869632721, -0.5285300016403198, -0.32128000259399414, 0.08400300145149231, -0.29280000925064087, -0.1693899929523468, 0.29447001218795776, 0.17547999322414398, 0.20679999887943268, 0.3488599956035614, 0.12204000353813171, -0.23649999499320984, 0.13978999853134155, -0.13165000081062317, -0.24174000322818756, -0.17463000118732452, -0.015099000185728073, 0.24705000221729279, -0.051927000284194946, -0.24437999725341797, -1.0663000345230103, -0.2468400001525879, 0.43922001123428345, -0.5976600050926208, 0.4798699915409088, 0.33292001485824585, 0.9048900008201599, -0.4510500133037567, -0.18002000451087952, -0.073123998939991, 0.8874199986457825, 0.02958899922668934, -0.5759599804878235, 0.20047999918460846, 0.6314799785614014, 0.2536599934101105, 0.22859999537467957, 0.10248000174760818, 0.3689799904823303, -0.34975001215934753, 0.43257999420166016, 1.1075999736785889, 0.18688000738620758, -0.5537099838256836, -0.6680200099945068, -0.04153500124812126, 0.14016999304294586, 0.5905600190162659, 0.24280999600887299, 0.15063999593257904, 0.08496899902820587, -0.14530999958515167, 0.0730310007929802, 0.22258000075817108, 0.049515001475811005, 0.14278000593185425, -0.5780500173568726, 0.07090699672698975, 0.01050500012934208, 0.44040998816490173, -0.047449998557567596, 0.2761000096797943, -0.1885399967432022, 0.11807999759912491, -0.09035299718379974, 0.1738699972629547, -0.4203000068664551, 0.02973099984228611, -0.33776000142097473, -0.04159000143408775, 0.11740999668836594, -0.3521600067615509, -0.17794999480247498, -0.25731000304222107, -0.7303400039672852, -0.4615100026130676, -0.3135499954223633, -0.00828550010919571, -0.7390499711036682, -0.04143200069665909, 0.7415800094604492, -0.2100200057029724, -0.1832199990749359, 0.15985000133514404, 0.30753999948501587, 0.3444400131702423, 0.27941998839378357, 0.6055300235748291, 0.019756000488996506, 0.09118600189685822, -0.119159996509552, 0.01999399997293949, -0.37646999955177307, 0.19603000581264496, -0.5243099927902222, -0.23319000005722046, 0.5036900043487549, -0.33371999859809875, -0.36142998933792114, 0.2931100130081177, -0.157260000705719, -0.18756000697612762, -0.32030999660491943, -0.4327400028705597, 0.3194800019264221, 0.14959000051021576, -0.30647000670433044, -0.37672001123428345, -0.5545600056648254, -0.9908599853515625, -0.047912001609802246, -0.42135000228881836, 0.3009200096130371, -0.5954300165176392, 0.08677399903535843, 0.6757100224494934, 0.3372899889945984, 0.18347999453544617, 0.8644199967384338, 0.25067999958992004, -0.19193999469280243, 0.01312199980020523, 0.8333100080490112, 0.3314400017261505, 0.20038999617099762, 0.08136700093746185, 0.5044699907302856, 0.2585200071334839, 0.2905600070953369, 0.04566499963402748, -0.17871999740600586, -0.5347200036048889, 0.15921999514102936, 0.41624000668525696, -0.2746399939060211, 0.39866000413894653, -0.37404999136924744, -0.10498999804258347, -0.31283000111579895, 0.18244999647140503, 0.4928700029850006, -0.013718999922275543, 0.40529999136924744, 
-0.23266999423503876, -0.5177599787712097, -0.7662699818611145, -0.19701999425888062, 0.006811399944126606, -0.2224999964237213, -0.5084400177001953, 0.3776400089263916, -0.25321999192237854, -0.0020763000939041376, -0.09177599847316742, 0.32826998829841614, 0.056526001542806625, 0.18674999475479126, -0.11903999745845795, 0.774649977684021, 0.9607999920845032, 0.4987800121307373, 0.2811500132083893, 0.4399999976158142, 0.23976999521255493, 0.4075799882411957, -0.21318000555038452, 0.35666000843048096, 0.7394400238990784, 0.049529001116752625, 0.03681100159883499, 0.6115999817848206, -0.43171998858451843, 0.06000100076198578, -0.08939500153064728, -0.8176500201225281, -0.624239981174469, -0.4671899974346161, 0.4357199966907501, -0.032246001064777374, -0.7968800067901611, -0.8754500150680542, 0.9257400035858154, 0.013426000252366066, 0.322270005941391, 0.5725299715995789, 0.6836000084877014, 0.18601000308990479, -0.21028999984264374, -0.12816999852657318, 0.3546000123023987, 0.2416599988937378, -0.3279300034046173, 0.5623800158500671, -0.06316699832677841, -0.295960009098053, 0.2142699956893921, -0.029589999467134476, -0.16372999548912048, -0.04167500138282776, 0.6640999913215637, 0.10713999718427658, -0.2926599979400635, 0.4047499895095825, -0.6233900189399719, -0.13369999825954437, 0.29798001050949097, 0.06898699700832367, 0.03891000151634216, -0.02519500069320202, 0.6908299922943115, 0.13451999425888062, 0.24771000444889069, -0.48666998744010925, 0.0791110023856163, 0.21830999851226807, 0.4131999909877777, -0.03229200094938278, -0.07437600195407867, 0.2610200047492981, -0.025728000327944756, -0.42563000321388245, -0.17302000522613525, -0.027981000021100044, -0.056905001401901245, -0.04629499837756157, 0.19517000019550323, -0.2950100004673004, 0.2778399884700775, 0.4072900116443634, 0.144679993391037, -0.11986999958753586, 0.029916999861598015, -0.30048999190330505, 0.4122700095176697, -0.21995000541210175, 0.1981000006198883, -0.41343000531196594, -0.17583000659942627, -0.2349099963903427, -0.18619999289512634, 0.28001999855041504, -0.5840100049972534, -0.539330005645752, 0.20353999733924866, 0.4130299985408783, 0.7886499762535095, 0.10321000218391418, -0.3903200030326843, -0.6843000054359436, 0.7575299739837646, 0.32923001050949097, 0.12058000266551971, 0.15727999806404114, 0.2073799967765808, 0.47821998596191406, -0.10453999787569046, 0.005017200019210577, 0.33741000294685364, -0.18922999501228333, 0.2985300123691559, 0.02958899922668934, 0.18564000725746155, 0.5630699992179871, 0.30138999223709106, -0.5945199728012085, -0.3698599934577942, -0.47523999214172363, 0.3995400071144104, 0.003450399963185191, -0.6183900237083435, 0.381630003452301, -0.4850899875164032, 0.09867099672555923, -0.9874200224876404, 0.2839699983596802, 0.1130400002002716, -0.04036799818277359, -0.43981000781059265, 0.09119600057601929, 0.25488999485969543, 0.534280002117157, 0.26941999793052673, -0.24494999647140503, -0.10426999628543854, -0.5880799889564514, -0.1011900007724762, 0.4345700144767761, -0.013872000388801098, 1.1588000059127808, -0.093190997838974, 0.021796999499201775, 0.17124000191688538, -0.5399600267410278, 0.26197001338005066, 0.4557799994945526], u'gate': [0.3996799886226654, -0.5955299735069275, 0.22569000720977783, -0.29271000623703003, 0.5082299709320068, -0.45785000920295715, 0.2545900046825409, 0.876579999923706, -0.28988000750541687, -0.3119400143623352, -0.30226999521255493, 0.09816499799489975, 0.5979400277137756, -0.2483700066804886, -0.042121000587940216, -0.1698399931192398, 
-0.19107000529766083, -0.2056799978017807, -0.062015000730752945, 0.25586000084877014, 0.15997999906539917, -0.24055999517440796, 0.09464199841022491, -0.0841120034456253, -0.1428299993276596, 0.20114000141620636, -0.08004400134086609, -0.245169997215271, -0.4846700131893158, 0.3268199861049652, 0.5025500059127808, -0.3037700057029724, -0.18825000524520874, 0.5335699915885925, -0.2138800024986267, -0.05458600074052811, -0.704770028591156, -0.6595500111579895, -0.27720001339912415, 0.09921000152826309, -0.2148900032043457, 0.1599700003862381, -0.8911100029945374, 0.3718299865722656, -0.5793300271034241, -0.03818700090050697, 0.4235599935054779, 0.23214000463485718, 0.10480000078678131, 0.054795000702142715, 0.15511000156402588, -0.2307399958372116, 0.17329999804496765, -0.3439899981021881, 0.22487999498844147, -0.21872000396251678, 0.3643600046634674, 0.1370600014925003, -0.23017999529838562, 0.046553999185562134, 0.19791999459266663, -0.02616400085389614, 0.8801299929618835, 0.07353299856185913, 0.37724000215530396, -0.19089999794960022, -0.3423900008201599, -0.24562999606132507, -0.08086500316858292, -0.28327998518943787, 0.11795999854803085, 0.28971999883651733, -0.24083000421524048, -0.08575599640607834, -0.40132999420166016, 0.22296999394893646, 0.29962998628616333, -1.0490000247955322, 0.13987000286579132, -0.06527400016784668, -0.050085000693798065, 0.23725999891757965, 0.30733999609947205, -0.17472000420093536, -0.16992999613285065, -0.13158999383449554, -0.13952000439167023, 0.13389000296592712, -0.2331800013780594, -0.08512800186872482, 0.4364300072193146, -0.38067999482154846, 0.2581300139427185, 0.20779000222682953, -0.2654399871826172, 0.5976700186729431, 0.059661999344825745, -0.5661900043487549, 0.08889699727296829, -0.2126999944448471, -0.1594099998474121, 0.06695099920034409, -0.2847900092601776, -0.20985999703407288, 0.3952600061893463, -0.16258999705314636, 0.2554900050163269, 0.12225999683141708, 0.03291799873113632, -0.49351999163627625, -0.2707099914550781, 0.04324600100517273, 0.15272000432014465, -0.17465999722480774, 0.13300999999046326, 0.44189000129699707, -0.20430999994277954, -0.3608900010585785, -0.38778001070022583, -0.17357000708580017, 0.03385400027036667, -0.011292999610304832, 0.49053001403808594, -0.1958799958229065, 0.21900999546051025, -0.5515900254249573, 0.1028200015425682, -0.2235099971294403, -0.20677000284194946, -0.5308899879455566, 0.2903600037097931, 0.5557399988174438, 0.3983500003814697, 0.44258999824523926, -0.1109900027513504, 0.23660999536514282, 0.8073800206184387, -0.5673099756240845, -0.5155199766159058, -0.1496800035238266, -0.48723000288009644, -0.06276199966669083, -0.6146399974822998, -0.21884000301361084, 0.21039000153541565, -0.44868001341819763, 0.4016300141811371, -0.21613000333309174, -0.15990999341011047, -0.1423500031232834, 0.3220300078392029, 0.2990500032901764, -0.08293599635362625, -0.9413400292396545, 0.7079499959945679, 0.05948900058865547, 0.09956800192594528, 0.04857499897480011, 0.2801699936389923, -0.48489999771118164, 0.816789984703064, 0.011823000386357307, 0.24782000482082367, -1.0678000450134277, 0.15227000415325165, 0.5574399828910828, -0.1718900054693222, 0.24309000372886658, 0.08038099855184555, -0.08611799776554108, 0.5055099725723267, 0.0850439965724945, 0.8797699809074402, -0.7624599933624268, 0.0466420017182827, -0.005086500197649002, -0.3942599892616272, -0.08094199746847153, 0.22746999561786652, -0.460640013217926, -0.009284400381147861, 0.16357000172138214, -0.4682599902153015, -0.11045999825000763, 
0.2972399890422821, -0.1286199986934662, -0.43650999665260315, 0.07404600083827972, 0.4215199947357178, 0.5122399926185608, -0.0709569975733757, 0.3013699948787689, 0.5116000175476074, 0.08206500113010406, -0.33803001046180725, -0.23274999856948853, -0.20350000262260437, -0.12571999430656433, 0.0276699997484684, 0.10825999826192856, 1.2724000215530396, -0.41659000515937805, -0.004300999920815229, -0.25356000661849976, 0.18063999712467194, 0.2453099936246872, 0.27188000082969666, -0.32041001319885254, 0.16007000207901, -0.10735999792814255, 0.18581999838352203, 0.2401999980211258, -0.2432200014591217, -0.8283299803733826, -0.5375900268554688, -0.2171200066804886, -0.376800000667572, -0.26284998655319214, 0.6333799958229065, 0.43011000752449036, 0.7036399841308594, 0.4083400070667267, 0.3441700041294098, -0.19672000408172607, 0.08408399671316147, -0.8067299723625183, -0.8641600012779236, -0.06028800085186958, -0.5115799903869629, -0.6539599895477295, -0.6121600270271301, -0.8045799732208252, -0.3306899964809418, 0.31376999616622925, 0.45778000354766846, 0.5031700134277344, 0.24706000089645386, -0.23632000386714935, 0.15001000463962555, 0.05731699988245964, 0.12275999784469604, 0.24169999361038208, 0.123259998857975, -0.19125999510288239, -0.07759799808263779, -0.07353799790143967, 0.24021999537944794, 0.15102000534534454, 0.16526000201702118, -0.05602800101041794, -0.4875499904155731, 0.05228099972009659, 0.18012000620365143, -0.42796000838279724, -0.08601599931716919, 0.09433099627494812, 0.11851000040769577, -0.6880800127983093, 0.13444000482559204, 0.07052399963140488, -0.05891399830579758, -0.557640016078949, 0.774869978427887, -0.25742998719215393, -0.08215499669313431, -0.033806998282670975, 0.08020500093698502, -0.3290799856185913, -0.2553499937057495, -0.12922999262809753, 0.5054200291633606, -0.09712599962949753, 0.24062000215053558, -0.07909899950027466, -0.7465299963951111, -0.3760800063610077, -1.5347000360488892, 0.2808000147342682, -0.2810400128364563, 0.22874000668525696, -0.16721999645233154, -0.45489001274108887, 0.13050000369548798, -0.3679499924182892, -0.389490008354187, 0.33309999108314514, 0.2302599996328354, -0.41089001297950745, -0.14247000217437744, -0.14182999730110168, 0.4557400047779083, 0.05293799936771393, -0.32857999205589294, 0.4469299912452698, 0.43615999817848206, 0.2380799949169159, -0.009477199986577034, -0.16147999465465546, -0.24262000620365143, 0.4896099865436554], u'cotton': [-0.4855400025844574, -0.11411000043153763, 0.045823998749256134, -0.32666000723838806, -0.18908999860286713, -0.21142999827861786, 0.17017999291419983, -0.26513001322746277, 0.1303199976682663, -0.5010499954223633, -0.24133999645709991, -0.7208700180053711, 0.14618000388145447, 0.08438900113105774, 0.09275899827480316, -0.006888499949127436, -0.15324999392032623, -0.3196200132369995, -0.4148699939250946, -0.26739001274108887, -0.5365300178527832, -0.5952500104904175, 0.15410999953746796, 0.32923001050949097, 0.0034668000880628824, 0.15602000057697296, -0.3842799961566925, -0.5342400074005127, -0.7240300178527832, 0.13455000519752502, -0.37338998913764954, 0.35613998770713806, -0.7850599884986877, 0.030786000192165375, -0.7758299708366394, 0.7107499837875366, 0.5915200114250183, -0.2745800018310547, 0.22495999932289124, -0.07376500219106674, -0.48627999424934387, -0.6427900195121765, -0.14503000676631927, 0.20397000014781952, 0.23124000430107117, -0.28937000036239624, -0.10552000254392624, -0.23397000133991241, 0.07769100368022919, -0.17403000593185425, 0.8070099949836731, 
0.5702999830245972, -0.3167699873447418, -0.30235999822616577, -0.30469000339508057, -0.34318000078201294, -0.33410000801086426, -0.43689998984336853, 0.3951199948787689, -0.8269000053405762, -0.4124799966812134, -0.8205699920654297, -0.49733999371528625, 0.1007699966430664, 0.21347999572753906, 0.12196999788284302, 0.06375200301408768, -0.5659800171852112, -0.4268999993801117, 0.0028291998896747828, 0.6122400164604187, 0.22891999781131744, -0.2737799882888794, -0.13967999815940857, -0.17177000641822815, 0.04463899880647659, 0.035057999193668365, -0.42930999398231506, -0.22032999992370605, 0.2334199994802475, 0.2787199914455414, -0.09884999692440033, -0.7591800093650818, 0.023218000307679176, 0.1363700032234192, 0.17847999930381775, -0.19894999265670776, 0.11539000272750854, 0.4516800045967102, -0.3623400032520294, 0.48315998911857605, -0.10379000008106232, 0.2775700092315674, 0.12947000563144684, -0.2814500033855438, 0.4572800099849701, 0.30994001030921936, 0.36462000012397766, -0.2651199996471405, 0.3347100019454956, 0.10780999809503555, 0.8299099802970886, -0.5795400142669678, -0.1703599989414215, -0.6583700180053711, 0.2259799987077713, -0.06634899973869324, 0.16624000668525696, -0.6624400019645691, 0.23859000205993652, -0.15967999398708344, -0.09475299715995789, -0.26686999201774597, 0.1301099956035614, 0.19442999362945557, 0.22536000609397888, 0.6286399960517883, 1.0521999597549438, 0.44765999913215637, -0.12161999940872192, -0.40128999948501587, 0.12320999801158905, 0.7168400287628174, -0.0148930000141263, 0.10965999960899353, 0.5205600261688232, 0.03360699862241745, 0.4652000069618225, 0.5386599898338318, 0.2632800042629242, -0.03514999896287918, 0.7741100192070007, -0.43303999304771423, -0.3841499984264374, -0.5683900117874146, 0.022863000631332397, -0.20923000574111938, 0.6434999704360962, -0.4309700131416321, -0.23236000537872314, -0.20410999655723572, 0.012570999562740326, 0.2374899983406067, -0.7210400104522705, -0.31431999802589417, 1.0090999603271484, -0.07662300020456314, -0.8828799724578857, 0.3297800123691559, -0.3161900043487549, -0.46502000093460083, -0.06824900209903717, -0.00914829969406128, -0.9161400198936462, 0.075764000415802, 0.10509999841451645, -0.4009999930858612, -0.15745000541210175, 0.728879988193512, 0.2402999997138977, 0.05519099906086922, -0.11202000081539154, -0.4175899922847748, -0.12303999811410904, -0.11715000122785568, -0.6187999844551086, 0.006577900145202875, -0.11455000191926956, 0.07845199853181839, 0.3882899880409241, -0.46428999304771423, 0.34360000491142273, -0.7879199981689453, 0.23592999577522278, 0.8235999941825867, -0.032896000891923904, 0.2930299937725067, 0.5084199905395508, -0.36539000272750854, -0.03266099840402603, 0.021655000746250153, 0.49292999505996704, -0.4200200140476227, -0.2716499865055084, 0.016815999522805214, -0.43529000878334045, 0.08568400144577026, 0.28240999579429626, 0.021177999675273895, 0.35137999057769775, -0.22491000592708588, 1.1813000440597534, -0.16779999434947968, -0.14451999962329865, -0.3361000120639801, -0.012010999955236912, 0.22123999893665314, -0.4632500112056732, -0.1005999967455864, -0.37081000208854675, 0.06120600178837776, -0.13173000514507294, 1.0228999853134155, 0.08201699703931808, 0.7480400204658508, -0.3377000093460083, 0.5619099736213684, 0.5848699808120728, -0.29785001277923584, -0.18203000724315643, -0.11230000108480453, -0.29725998640060425, -0.45824000239372253, 0.41157999634742737, 0.22112999856472015, 0.024855000898241997, -0.03136000037193298, -0.13488000631332397, 0.12303999811410904, 
-0.6951299905776978, 0.1538199931383133, -0.8869900107383728, -0.20995000004768372, 0.001999499974772334, -0.10659000277519226, 0.0900299996137619, 0.1970600038766861, 0.539650022983551, -0.006381500046700239, 0.16357000172138214, 0.668470025062561, 0.19258999824523926, -0.22033999860286713, 0.5473999977111816, 0.5388100147247314, -0.07762199640274048, 0.6043699979782104, -0.539929986000061, -0.4928399920463562, 0.04162700101733208, -0.41266000270843506, 0.0008732699789106846, -0.4851199984550476, 0.5968400239944458, -0.7574999928474426, 0.026757000014185905, -0.12982000410556793, -0.6014900207519531, -0.16142000257968903, -0.38106998801231384, -0.10547000169754028, 0.25148001313209534, -0.08052700012922287, -0.345770001411438, 1.1341999769210815, 0.12383999675512314, -0.20691999793052673, 0.2670600116252899, 0.1441899985074997, -0.1682399958372116, -0.031022999435663223, -0.0888649970293045, 0.07929600030183792, -0.08092299848794937, -0.22544999420642853, -0.051600001752376556, -0.22554999589920044, -0.20680999755859375, -0.5492100119590759, 0.7500799894332886, -0.04782399907708168, 0.17794999480247498, -0.08101499825716019, 0.2015099972486496, -0.6751599907875061, -0.22316999733448029, -0.560259997844696, -0.5600299835205078, -0.39875999093055725, 0.5852699875831604, -0.7232800126075745, -0.5482500195503235, 0.15508000552654266, -0.05444199964404106, 0.20017999410629272, 0.06644300371408463, 0.45837000012397766, -0.456169992685318, -0.4622400104999542, -0.4316500127315521, -0.04379900172352791, -0.11607000231742859, -0.009190299548208714, 0.3391599953174591, -0.1487099975347519, 0.7196800112724304, -0.03236699849367142, -0.8023899793624878, 0.16742999851703644, 0.7063199877738953], u'beach': [-0.4320099949836731, -0.2933399975299835, -0.48465999960899353, 0.060446999967098236, -0.04103799909353256, -0.41811999678611755, 0.4274500012397766, -0.0069090998731553555, 0.5849499702453613, 0.015306999906897545, 0.15109999477863312, -0.25380000472068787, 0.22428999841213226, 0.08945100009441376, 0.21660999953746796, -0.263949990272522, 0.13710999488830566, -0.10920000076293945, -0.0519540011882782, 0.45311999320983887, -0.10172999650239944, 0.11129999905824661, -0.6325100064277649, -0.08830200135707855, -0.6087999939918518, 0.28492000699043274, -0.08144400268793106, 0.7396399974822998, 0.02466600015759468, 0.3647499978542328, 0.4773299992084503, 0.47595998644828796, -0.5397099852561951, 0.5702400207519531, -0.7512099742889404, 0.15308000147342682, -0.19144999980926514, 0.03715499863028526, 0.26752999424934387, -0.16556000709533691, -0.09781699627637863, 0.012021999806165695, -0.2955799996852875, 0.2582300007343292, 0.3024500012397766, -0.07694599777460098, 1.651900053024292, -0.0736910030245781, 0.6893699765205383, 0.5112800002098083, -0.4029099941253662, -0.2632899880409241, 0.29607999324798584, 0.006649199873209, 0.18619999289512634, 0.7170000076293945, -0.30483999848365784, 0.05480999872088432, 0.4508399963378906, -0.4000000059604645, 0.2762799859046936, 0.10085999965667725, 0.5221800208091736, 0.5269500017166138, 0.11255999654531479, -0.604390025138855, -0.1390099972486496, 0.4124999940395355, -0.23343999683856964, -0.8361700177192688, -0.46525999903678894, 0.03348600119352341, 0.056720998138189316, 0.6028199791908264, -0.8146700263023376, 0.03896699845790863, 0.2949199974536896, 0.2979300022125244, 0.10671000182628632, -0.22262999415397644, -0.21278999745845795, 0.3163299858570099, -0.28711000084877014, 0.39680999517440796, 0.06382600218057632, 0.010235000401735306, 0.15848000347614288, 
0.6916999816894531, 0.37470000982284546, -0.2079399973154068, -0.030262000858783722, -0.16917000710964203, -0.15689000487327576, -0.6185100078582764, 0.645579993724823, 0.3735499978065491, 0.10414999723434448, -0.4878999888896942, 0.07742299884557724, -0.21313999593257904, -0.1748799979686737, 0.23321999609470367, 0.34011998772621155, 0.09249299764633179, -0.019791999831795692, 0.17646999657154083, 0.08387099951505661, -0.5645700097084045, 0.04395199939608574, -0.6807199716567993, -0.19068999588489532, -0.2963399887084961, 0.32444998621940613, -0.26916998624801636, 0.3121899962425232, -0.0465410016477108, 0.1377599984407425, 0.27469998598098755, -1.0018999576568604, 0.21193000674247742, 0.2685999870300293, -0.03359999880194664, -0.07972200214862823, -0.1890600025653839, -0.20900000631809235, 0.3901900053024292, 0.0027022999711334705, -0.6487699747085571, 0.060228001326322556, -0.2026900053024292, 0.24323999881744385, 0.1521500051021576, -0.13381999731063843, 0.04261799901723862, -0.1891999989748001, -0.43114998936653137, 0.049038998782634735, -0.338809996843338, 0.5961099863052368, -0.049890998750925064, -0.0016232000198215246, -0.22680999338626862, 0.09941700100898743, 0.4470599889755249, -0.2772899866104126, -0.003742600092664361, 0.3906700015068054, 0.25892001390457153, -0.2991200089454651, 0.359279990196228, 0.9423800110816956, 0.20146000385284424, 0.16744999587535858, -0.36327001452445984, 0.38238999247550964, -0.6306599974632263, -0.1640699952840805, -0.0504629984498024, 0.17001000046730042, 0.82396000623703, 0.41703000664711, -0.5101600289344788, -0.011384000070393085, 0.07276800274848938, -0.23003999888896942, -0.09393200278282166, 0.6202300190925598, 0.4890100061893463, -0.02712400071322918, 0.515529990196228, -0.36204999685287476, 0.05561700090765953, -0.4457699954509735, 0.39511001110076904, -0.2547000050544739, -0.3721100091934204, 0.1747100055217743, 0.32460999488830566, -0.08579400181770325, -0.16609999537467957, 0.258760005235672, 0.3343299925327301, 0.08599700033664703, 0.4854699969291687, 0.22964000701904297, -0.05057799816131592, 0.06785999983549118, -0.6738399863243103, 0.2017199993133545, 0.39476001262664795, 0.04609899967908859, 0.8388400077819824, -0.5151299834251404, -0.4153499901294708, 0.03421200066804886, 0.4604800045490265, 0.8984900116920471, -0.3680500090122223, -0.49526000022888184, -0.27368998527526855, 1.2204999923706055, -0.03280000016093254, -0.490090012550354, -0.16898000240325928, -0.0035810000263154507, -0.20217999815940857, 1.079699993133545, 0.04023199900984764, 0.31272000074386597, -0.013697000220417976, -0.41780999302864075, 0.5163400173187256, -0.35982999205589294, 0.1544799953699112, 0.07246699929237366, 0.36827999353408813, -0.030090000480413437, -0.4812999963760376, -0.11006999760866165, -0.2189600020647049, 1.0987999439239502, -0.5626099705696106, 0.3695000112056732, -0.15896999835968018, -0.10290999710559845, -0.21150000393390656, -0.16057999432086945, -0.9566699862480164, -0.18880000710487366, -0.302619993686676, 0.7893700003623962, 0.0967240035533905, 0.20473000407218933, -0.14744000136852264, 0.48482000827789307, 0.01408699993044138, 0.03617300093173981, -0.17806999385356903, 8.60880027175881e-05, -0.1566299945116043, -0.3499999940395355, -0.020353000611066818, 0.3046500086784363, 0.054850999265909195, -0.7703999876976013, 0.12133999913930893, -0.297650009393692, -0.2593899965286255, -0.8634200096130371, -0.07730899751186371, 0.22423000633716583, 0.10516999661922455, 0.0985490009188652, 0.040196001529693604, -0.015992000699043274, 
-0.6761199831962585, 0.4442000091075897, -0.11573000252246857, 0.13741999864578247, -0.36337000131607056, -0.07703600078821182, 0.11898999661207199, -0.6297699809074402, -1.0720000267028809, -0.5944300293922424, -0.5244200229644775, 0.2601099908351898, -0.1494700014591217, 0.23684999346733093, 0.017371999099850655, -0.23494000732898712, -0.6330400109291077, 0.002254999941214919, 0.15509000420570374, 0.10461000353097916, -0.5629199743270874, -1.743299961090088, 0.7585099935531616, -0.6096199750900269, 0.31025999784469604, -0.03555300086736679, 0.0047785998322069645, -0.8283900022506714, -0.09606000036001205, -0.47832998633384705, -0.174919992685318, 0.20597000420093536, -0.41130000352859497, -0.14500999450683594, -0.18467000126838684, -0.6093500256538391, 0.1155799999833107, -0.46347999572753906, 0.1698800027370453, -0.3138599991798401, -0.09368299692869186, 0.08566500246524811, 0.30702999234199524, 0.3274500072002411, -0.6273000240325928], u'pizza': [0.2573400139808655, 0.48339998722076416, 0.3989500105381012, -0.021903999149799347, -0.23251000046730042, 0.19120000302791595, -0.06044600158929825, -0.2576099932193756, -0.4521099925041199, 0.04151400178670883, -0.26910001039505005, -0.7065899968147278, -0.19061000645160675, 0.6170300245285034, -0.3178200125694275, 0.02794099971652031, -0.16662000119686127, -0.10881000012159348, -0.34463998675346375, 0.42024001479148865, 0.36226001381874084, 0.724590003490448, -0.03792399913072586, -0.13018999993801117, 0.4104599952697754, -0.053523000329732895, 0.2442300021648407, 0.018025999888777733, 0.43281999230384827, -1.2633999586105347, -0.5408599972724915, 0.47268998622894287, 0.35892999172210693, -0.2677899897098541, -0.573710024356842, 0.35585999488830566, -0.3968999981880188, -0.35332000255584717, -0.48881998658180237, 0.24764999747276306, 0.15627999603748322, 0.03886000066995621, 0.007922300137579441, 0.5410500168800354, -0.13862000405788422, 0.3245899975299835, 1.0425000190734863, 0.12856000661849976, 0.036834001541137695, -0.22976000607013702, -0.08797299861907959, -0.6903899908065796, 0.35074999928474426, 0.568340003490448, -0.1960200071334839, -0.2433999925851822, -0.0757019966840744, 0.3730199933052063, 0.14014999568462372, -0.2434300035238266, 0.7911199927330017, -0.25911998748779297, -0.017420999705791473, -0.46355998516082764, -0.12606999278068542, -0.37097999453544617, -0.2365099936723709, 0.5047500133514404, -0.30573999881744385, 0.2805500030517578, 0.5291799902915955, 0.15871000289916992, -0.09864799678325653, -0.4066300094127655, 0.135220006108284, -0.23015999794006348, -0.1546500027179718, -0.10036999732255936, -0.06646999716758728, -0.3589499890804291, -0.1241300031542778, 0.4432699978351593, 0.08242399990558624, -0.44133999943733215, -0.1444700062274933, -0.4839499890804291, 0.29214999079704285, 0.24426999688148499, -0.3284499943256378, 0.05443299934267998, 0.25119999051094055, 0.13314999639987946, -0.162650004029274, -0.12270999699831009, -0.004196200054138899, 0.04838600009679794, -0.2868100106716156, 0.20424999296665192, -0.4036400020122528, 0.09727499634027481, 0.3317500054836273, -0.08769100159406662, 0.010023999959230423, -0.5127900242805481, -0.08306899666786194, -0.3919900059700012, 0.15068000555038452, 0.759190022945404, -0.21369999647140503, 0.1502400040626526, 0.36298999190330505, 0.031101999804377556, 0.28150999546051025, -0.5997999906539917, 0.44464001059532166, -0.6328200101852417, -0.612030029296875, 0.24945999681949615, -0.3671000003814697, -0.056995999068021774, -0.17517000436782837, 0.1625099927186966, 
0.8041499853134155, -0.10598000138998032, -0.3004800081253052, 0.18238000571727753, 0.04848499968647957, 0.13027000427246094, -0.3310900032520294, -0.13229000568389893, 0.04734300076961517, 0.43838000297546387, -0.18592999875545502, -0.018343999981880188, 0.19720999896526337, -0.23306000232696533, 0.0301510002464056, 0.19395999610424042, -0.14499999582767487, 0.38100001215934753, 0.15014000236988068, 0.05243200063705444, -0.27917999029159546, -0.39125001430511475, -0.29462000727653503, 0.15047000348567963, -0.08170600235462189, 0.4011000096797943, -0.057500001043081284, -0.5065500140190125, -0.47707000374794006, 1.0189000368118286, 0.31264999508857727, -0.04170700162649155, -0.471670001745224, -0.2994900047779083, -0.11953999847173691, 0.27206000685691833, -0.3297800123691559, -0.5152400135993958, 0.5026000142097473, 0.4451799988746643, -0.4128200113773346, 0.33215001225471497, -0.03264300152659416, 0.23382000625133514, 0.06802800297737122, -0.07913699746131897, 0.4782100021839142, -0.17513999342918396, 0.07776299864053726, 0.21427999436855316, -0.4674699902534485, 0.3134399950504303, -0.5692499876022339, 0.5978400111198425, 0.06689900159835815, 0.20826999843120575, 0.5441100001335144, -0.7247800230979919, -0.0035687999334186316, 0.2248699963092804, 0.350490003824234, -0.04080500081181526, 0.1125200018286705, -0.16416999697685242, 0.28461000323295593, 0.4342400133609772, 0.34097999334335327, -0.27204999327659607, 0.6425099968910217, 0.7712900042533875, -0.24036000669002533, -0.12202999740839005, -0.18943999707698822, 0.12602999806404114, -0.8521100282669067, -0.22276000678539276, -0.43415001034736633, 0.2337000072002411, 0.416049987077713, -0.6657299995422363, 1.1912000179290771, 0.2775999903678894, -0.8071500062942505, -0.23507000505924225, 0.7476900219917297, -0.006661499850451946, 0.2277500033378601, -0.007005400024354458, -0.3985599875450134, 0.2220200002193451, 0.43487000465393066, -0.5541099905967712, -0.6639000177383423, 0.13468000292778015, 0.20789000391960144, -0.17927999794483185, -0.20625999569892883, 0.7346699833869934, 0.5489400029182434, 0.40077000856399536, 0.06321600079536438, -0.02654300071299076, -0.7243499755859375, -0.6112899780273438, -0.07966499775648117, -0.6388599872589111, -0.273140013217926, -0.28610000014305115, 0.2628900110721588, 0.48561999201774597, -0.13763999938964844, -0.07879800349473953, 0.3697499930858612, -0.0556269995868206, 1.1484999656677246, 0.03950599953532219, -0.8982899785041809, 0.23681999742984772, -0.3614799976348877, -0.11355999857187271, -0.07284100353717804, -0.797569990158081, -0.4997600018978119, -0.2144699990749359, -0.04903699830174446, 0.39695999026298523, -0.17423999309539795, -0.4120500087738037, 1.1871000528335571, 0.516700029373169, -0.059790998697280884, 0.7142099738121033, 0.1655000001192093, 0.33952999114990234, -0.10357999801635742, 0.2929700016975403, -0.268310010433197, 0.17679999768733978, 0.32690000534057617, -0.42350998520851135, -0.11839000135660172, 0.16728000342845917, 0.06569000333547592, -0.431769996881485, 0.28575000166893005, 0.3914699852466583, 0.49807000160217285, -0.13639000058174133, -0.602400004863739, -0.507889986038208, 0.05228300020098686, 0.3962000012397766, 0.8692499995231628, -0.09685400128364563, -1.1965999603271484, 0.042479000985622406, -1.2523000240325928, -0.122359998524189, -0.1243399977684021, 0.31723999977111816, -0.2824299931526184, 0.18327000737190247, -0.08085700124502182, 0.14132000505924225, 0.5522099733352661, -0.27663999795913696, 0.3112199902534485, 0.0756089985370636, 0.22901000082492828, 
-0.007023199927061796, -0.19508999586105347, 0.26923999190330505, -0.48833999037742615, -0.3415899872779846, 0.34529000520706177, 0.03223099932074547, -0.3895699977874756, 0.050930000841617584], u'wheel': [0.2759299874305725, -0.14595000445842743, 0.2587699890136719, -0.7950800061225891, 0.07151799649000168, 0.23479999601840973, 0.3342300057411194, 0.10639999806880951, -0.0656609982252121, -0.7878299951553345, -0.0540350005030632, 0.026962999254465103, 0.13971999287605286, -0.11311999708414078, 0.36157000064849854, -0.011106999590992928, 0.18998000025749207, -0.08000099658966064, 0.12563000619411469, -0.3603900074958801, -0.07567500323057175, 0.7827900052070618, 0.31668001413345337, 0.17714999616146088, 0.18328000605106354, 0.23759999871253967, 0.4562999904155731, 0.23859000205993652, 0.2563000023365021, -0.0867369994521141, -0.041756000369787216, 0.2868900001049042, -0.06046999990940094, 0.278329998254776, -0.4923099875450134, 0.8466100096702576, -0.164450004696846, -0.5235599875450134, -0.4563100039958954, 0.7394099831581116, -0.5522099733352661, 0.06695199757814407, -0.57669997215271, -0.3629400134086609, 0.2809999883174896, 0.3633599877357483, 0.6704300045967102, -0.015852000564336777, -0.15604999661445618, 0.09413699805736542, 0.14016999304294586, 0.19995999336242676, 0.5587700009346008, -0.35798999667167664, 0.38940998911857605, 0.45291000604629517, 0.24153000116348267, -0.44203001260757446, -0.04218899831175804, 0.09017899632453918, 0.2556000053882599, 0.2652300000190735, 0.4681600034236908, -0.16030000150203705, -0.04883899912238121, 0.680679976940155, -0.5597900152206421, -0.27031999826431274, -0.11563000082969666, 0.5357199907302856, 0.24856999516487122, 0.10944999754428864, 0.5856800079345703, 0.330130010843277, 0.22070999443531036, 0.3361000120639801, -0.054347001016139984, -0.5460000038146973, -0.09393399953842163, -0.25492000579833984, 0.12479999661445618, 0.7410699725151062, -0.07918699830770493, 0.06223500147461891, -0.8714699745178223, -0.004662900231778622, 0.3948799967765808, 0.5266199707984924, -0.7446399927139282, 0.15633000433444977, 0.899869978427887, 0.2100600004196167, 0.014228999614715576, -0.24354000389575958, 0.3836599886417389, -0.3976399898529053, -0.3397600054740906, 0.2747200131416321, 0.13134999573230743, -0.4411500096321106, -0.29712000489234924, 0.893339991569519, -0.14778000116348267, 0.03788800165057182, 0.07701700180768967, -0.11896000057458878, -0.059025999158620834, 0.46338000893592834, -0.35986000299453735, -0.5819000005722046, -0.20446999371051788, 0.15033000707626343, 0.008111599832773209, -0.3310999870300293, -0.19596000015735626, 0.25387001037597656, -0.48443999886512756, 0.23115000128746033, -0.30768001079559326, 0.24944999814033508, -0.01306500006467104, -0.6860100030899048, 0.5038099884986877, -0.6721400022506714, -0.4609600007534027, -0.2940700054168701, 0.002195199951529503, 0.26412999629974365, 0.181209996342659, -0.042559001594781876, 0.3808499872684479, 0.6202800273895264, 0.36226001381874084, 0.30371999740600586, -0.5758299827575684, 0.6316499710083008, -0.183569997549057, -0.6938499808311462, -0.2619999945163727, 0.32910001277923584, -0.060458000749349594, 0.937690019607544, -0.05741000175476074, -0.2247599959373474, -0.004006700124591589, 0.1606599986553192, -0.25968998670578003, -0.6528300046920776, 0.41545000672340393, 0.8128600120544434, 0.3071199953556061, 0.36406001448631287, -0.5825099945068359, -0.2515200078487396, 0.7283599972724915, -0.3874399960041046, 0.7048400044441223, -0.10768000036478043, -0.07333599776029587, 
0.3342199921607971, 0.5435299873352051, -0.7444900274276733, -0.8700799942016602, 0.030141999945044518, 0.624459981918335, 0.40911999344825745, -0.3272800147533417, 0.7402899861335754, 0.6413800120353699, 0.08283200114965439, -1.0264999866485596, 0.27024999260902405, 0.4914399981498718, 0.5270699858665466, 0.26333001255989075, -0.29973000288009644, 0.224140003323555, 0.28584998846054077, -0.1354600042104721, -0.14270000159740448, 0.04947900027036667, 0.6960800290107727, 0.2581399977207184, 0.14757999777793884, 0.6477800011634827, -0.8575199842453003, 0.4005900025367737, -0.31793999671936035, 0.8876199722290039, -0.19543999433517456, 0.11873000115156174, -0.29993999004364014, -0.12950000166893005, 0.49226000905036926, 0.4603300094604492, 0.30702000856399536, -0.17813999950885773, -0.1711300015449524, 0.1780800074338913, 0.23436999320983887, 0.8830900192260742, 0.0812389999628067, 0.6271899938583374, 0.12168999761343002, 0.31099000573158264, -0.0077379001304507256, 0.21317000687122345, -0.6759399771690369, -0.2779900133609772, 0.16720999777317047, 0.46713998913764954, -0.43459999561309814, 0.4075700044631958, -0.11011999845504761, 0.5960500240325928, -0.48611998558044434, -0.1880899965763092, 0.5128999948501587, -0.31233999133110046, 0.003328500082716346, -0.0784510001540184, 0.23858000338077545, 0.051426000893116, -0.06432700157165527, 0.3215000033378601, 0.34095999598503113, 0.16946999728679657, 0.30465999245643616, -0.07160600274801254, 0.04430500045418739, -0.04865799844264984, -0.6667199730873108, 0.15484000742435455, -0.3364099860191345, 0.4032599925994873, 0.10266999900341034, -0.015453999862074852, 0.5509399771690369, -0.28595998883247375, -0.45188000798225403, 0.060940999537706375, 0.03178500011563301, -0.19762000441551208, -0.13431000709533691, -0.8467000126838684, -0.2853100001811981, -0.2804799973964691, 0.1796099990606308, 0.5372700095176697, 0.5415899753570557, -0.5266799926757812, -0.668969988822937, 0.016852999106049538, -0.4075700044631958, 0.2309200018644333, 0.2710300087928772, -0.5441700220108032, -0.39430001378059387, -0.35565000772476196, -0.6022599935531616, -0.27897998690605164, -0.8560799956321716, 0.40619000792503357, 0.6959699988365173, -0.20844000577926636, -0.6921799778938293, 0.44920000433921814, -0.03720499947667122, 0.18351000547409058, -0.01726200059056282, -0.5813599824905396, -0.08135800063610077, 0.07683499902486801, 0.09344200044870377, 0.31942999362945557, -0.197160005569458, -1.195199966430664, 0.10333999991416931, -0.25126999616622925, 0.2561199963092804, 0.14351999759674072, -0.27368998527526855, -0.02487899921834469, -0.19754000008106232, 0.16619999706745148, 0.17330999672412872, -0.5470700263977051, -0.9500899910926819, -0.17964999377727509, -0.10146000236272812, 0.2604599893093109, -0.014190999791026115, 0.08944199979305267, -0.10130000114440918, 0.24714000523090363, 0.1365399956703186, -0.34529000520706177, 0.3126699924468994, 0.3097499907016754, 0.050259001553058624], u'wave': [0.5001199841499329, -0.42381998896598816, 0.2573400139808655, -0.5990399718284607, -0.05979999899864197, -0.11302000284194946, 0.293179988861084, 0.0642940029501915, 0.1157199963927269, -1.187000036239624, 0.667110025882721, 0.46316999197006226, 0.4270699918270111, -0.08921799808740616, 0.8587200045585632, 0.14007000625133514, -0.13673000037670135, 0.3040100038051605, 0.29061999917030334, 0.6625099778175354, 0.09299399703741074, -0.3091700077056885, 0.22314999997615814, -0.3900099992752075, -0.048521000891923904, 0.3238700032234192, 0.5078999996185303, 0.4805600047111511, 
0.04185999929904938, -0.06443600356578827, -0.40459999442100525, -0.065481998026371, -0.692110002040863, 0.19806000590324402, -0.35135000944137573, 0.009594200178980827, -0.5069299936294556, -0.4900299906730652, 0.40097999572753906, 1.1581000089645386, 0.5234500169754028, 0.31832998991012573, 0.18027999997138977, 0.07230199873447418, -0.05121900141239166, -0.2351599931716919, 0.18366000056266785, -0.4948999881744385, 0.44209998846054077, 0.0692870020866394, -0.11984000355005264, -0.10851000249385834, 0.4180600047111511, -0.02568499930202961, 0.2807300090789795, -0.03892600163817406, -0.27004000544548035, 0.3466799855232239, 0.14383000135421753, 0.17027999460697174, -0.0885550007224083, 0.34711000323295593, 0.31415000557899475, 0.03242500126361847, -0.3068599998950958, 0.1805800050497055, -0.027482999488711357, -0.006241300143301487, 0.26949000358581543, 0.6316199898719788, 0.40731000900268555, 0.4143800139427185, -0.3472999930381775, -0.09607700258493423, -0.33959001302719116, -0.273140013217926, 0.05945200100541115, -0.5659599900245667, -0.07731200009584427, 0.4212400019168854, -0.6234599947929382, -0.18967999517917633, -0.22125999629497528, 0.13941000401973724, 0.3765200078487396, 0.2746700048446655, -0.19413000345230103, 0.15967999398708344, 0.20917999744415283, -0.4884899854660034, 0.4794900119304657, 0.16832000017166138, -0.2231999933719635, -0.5413699746131897, -0.19300000369548798, -0.3585900068283081, -0.15378999710083008, -0.2445099949836731, 0.7935799956321716, -0.0861319974064827, 0.021649999544024467, 0.3564800024032593, 0.302949994802475, -0.09188400208950043, -0.5082899928092957, 0.28240999579429626, -0.22891999781131744, 0.020641999319195747, 0.20461000502109528, 0.23749999701976776, -0.22777999937534332, -0.2293500006198883, -0.02213199995458126, -0.023429999127984047, 0.4108699858188629, 0.14034999907016754, 0.144679993391037, 0.05591300129890442, -0.5018600225448608, -0.9708300232887268, 0.5797299742698669, -0.47519999742507935, -0.3410399854183197, 0.2924000024795532, 0.6122400164604187, 0.04058599844574928, 0.35710999369621277, 0.28095000982284546, -0.052949998527765274, -0.016953999176621437, -0.24355000257492065, 0.8039000034332275, 0.5230299830436707, -0.13030999898910522, -0.2865700125694275, -0.24570000171661377, 0.20983000099658966, 0.3509399890899658, 0.11221999675035477, 0.009866399690508842, -0.381879985332489, 0.06155899912118912, -0.333869993686676, 0.6818699836730957, -0.2966899871826172, 0.004291300196200609, 0.10041999816894531, -0.3588100075721741, 0.29065001010894775, 0.517549991607666, 0.35743001103401184, 0.17027999460697174, -0.18045000731945038, 0.5854799747467041, 0.24900999665260315, 0.08962699770927429, 0.009221700020134449, -0.45072001218795776, -0.37457001209259033, -0.37386998534202576, -0.3095000088214874, -0.3872300088405609, -0.3088200092315674, -0.2117599993944168, -0.04984600096940994, -0.04993300139904022, 0.3815799951553345, 0.6490899920463562, -0.11862999945878983, 0.2274399995803833, 0.2783699929714203, 0.4365200102329254, 0.1713400036096573, -0.22134000062942505, -0.06638400256633759, -0.5854700207710266, -0.3296400010585785, -0.13061000406742096, -0.258650004863739, 0.27796998620033264, -0.28088000416755676, -0.7021399736404419, 0.5374699831008911, 0.2379699945449829, 0.2681399881839752, 0.2773900032043457, -0.0032238001003861427, 0.3416900038719177, -0.16579000651836395, -0.03590499982237816, 0.14090000092983246, -0.012141999788582325, -0.22099000215530396, -0.0013469000114127994, 0.0204050000756979, -0.011861000210046768, 
0.40507999062538147, -0.6504899859428406, -0.1427299976348877, 0.042725998908281326, 0.4505699872970581, 0.2729800045490265, -0.18087999522686005, 0.08686500042676926, 0.02433300018310547, -0.11858999729156494, -0.5384299755096436, 0.7267699837684631, -0.03382999822497368, 0.17649999260902405, 0.0498879998922348, 0.2563599944114685, 0.3742400109767914, 0.3648099899291992, -0.26872000098228455, 0.3270300030708313, 0.35572999715805054, -0.6718999743461609, 0.20761999487876892, -0.026823999360203743, 0.5041400194168091, 0.2296999990940094, -0.31891998648643494, -0.6070700287818909, 0.2826499938964844, -0.4781399965286255, 0.3629400134086609, -0.5569599866867065, 0.08066499978303909, -0.0225210003554821, 0.3683199882507324, 0.14354999363422394, -0.389739990234375, -0.1028899997472763, -0.31349998712539673, -0.15500999987125397, 0.36629000306129456, -0.3159100115299225, -0.04208200052380562, 0.058775000274181366, 0.4432399868965149, 0.3184100091457367, -0.2565000057220459, -0.3405900001525879, -0.3898699879646301, -0.583079993724823, 0.3019599914550781, 0.05978500097990036, -0.04086799919605255, 0.11694999784231186, -0.3362799882888794, 0.32791000604629517, 0.16324999928474426, 0.4349699914455414, 0.1159299984574318, -0.0794370025396347, 0.2965399920940399, -0.09745699912309647, -0.6246399879455566, 0.416810005903244, 0.16050000488758087, -0.483379989862442, 0.49911001324653625, 0.11993999779224396, -0.6136699914932251, -0.4256199896335602, -0.9376599788665771, 0.03558799996972084, 0.09043599665164948, -0.1449899971485138, -0.22164000570774078, -0.26291999220848083, -0.23939000070095062, -0.42833998799324036, 0.38787999749183655, 0.3599799871444702, -1.7719999551773071, 0.29923000931739807, 0.18494999408721924, 0.10591000318527222, -0.2593899965286255, -0.17035000026226044, -0.16475999355316162, 0.2889400124549866, -0.032722000032663345, -0.033959999680519104, -0.31619998812675476, 0.047263000160455704, 0.36577001214027405, -0.028714999556541443, 0.22401000559329987, -0.4629800021648407, -0.009511199779808521, -0.07730299979448318, 0.6854900121688843, 0.7343599796295166, -0.0906440019607544, 0.49803000688552856, 0.32892999053001404, -0.26135000586509705], u'plant': [0.09741000086069107, 1.0256999731063843, -0.2631100118160248, -0.6152099967002869, -0.29061999917030334, -0.2470400035381317, 0.04382000118494034, 0.09625600278377533, 0.6405199766159058, -1.079200029373169, 0.3714599907398224, -0.36142000555992126, 0.06916999816894531, 0.186489999294281, 0.4149700105190277, 0.10337000340223312, -0.20716999471187592, 0.23982000350952148, -0.10786999762058258, 0.13523000478744507, -0.451449990272522, -0.10633999854326248, 0.13139000535011292, 0.2851499915122986, -0.25440001487731934, 0.07759500294923782, -0.2671000063419342, 0.05597800016403198, -0.38100001215934753, 0.3443799912929535, 0.0929419994354248, 0.4361500144004822, -0.428710013628006, 0.21920999884605408, 0.4806399941444397, 0.45708999037742615, -0.07073599845170975, -0.0937110036611557, -0.40895000100135803, -0.1525000035762787, -0.5963699817657471, 0.5424799919128418, 0.22222000360488892, 0.5243600010871887, -0.4418799877166748, -0.3113200068473816, 0.38659998774528503, 0.3288100063800812, -0.057941000908613205, 0.42500999569892883, 0.1512800008058548, 0.2515600025653839, -0.2511799931526184, -0.3626900017261505, -0.23331999778747559, 0.46917998790740967, 0.8968799710273743, -0.026947999373078346, 0.4347899854183197, 0.13053999841213226, -0.2090499997138977, 0.043411001563072205, 0.584089994430542, -0.42092999815940857, 
-0.47828999161720276, -0.1535000056028366, 0.032653000205755234, 0.6963099837303162, -0.02829200029373169, 0.2923299968242645, -0.00691419979557395, 0.17935000360012054, 0.5007200241088867, 0.35058000683784485, -1.048699975013733, 0.2955299913883209, -1.1484999656677246, 0.13223999738693237, 0.36531001329421997, 0.346560001373291, -0.5180100202560425, -0.3747200071811676, -0.270689994096756, 0.09378299862146378, 0.32425999641418457, 0.3871000111103058, 0.36862999200820923, 0.01001300010830164, -0.17549000680446625, 0.17430999875068665, 0.25409001111984253, -0.3120900094509125, 0.4437299966812134, -0.1953199952840805, 0.10012000054121017, -0.3790000081062317, -0.10507000237703323, -0.6776900291442871, -0.1463100016117096, -0.6322299838066101, 0.05104700103402138, -0.15660999715328217, -0.23427000641822815, -0.378030002117157, 0.0027286000549793243, -0.2826499938964844, 0.39792999625205994, 0.3315199911594391, -0.5515199899673462, 0.6333400011062622, 0.9311299920082092, -0.2302200049161911, -0.19889000058174133, -0.6101400256156921, -0.23885999619960785, 0.6295400261878967, 0.29686999320983887, 0.7850099802017212, 0.4743199944496155, 0.6421599984169006, -0.6006799936294556, -0.48458001017570496, -0.2687999904155731, 0.07134199887514114, -0.23815999925136566, 0.3315899968147278, 0.4843499958515167, 0.45306000113487244, 0.42285001277923584, 0.15037000179290771, 1.117400050163269, 0.8786799907684326, 0.24445000290870667, 0.012347999960184097, -0.3763599991798401, -0.0953420028090477, -0.376010000705719, -0.7095999717712402, 0.6432600021362305, -0.14549000561237335, 0.20220999419689178, -0.49202001094818115, 0.6283699870109558, -0.8160799741744995, -0.08655399829149246, 0.3788599967956543, -0.22396999597549438, -0.721019983291626, 0.14313000440597534, 0.16819000244140625, 0.6593800187110901, -0.26089999079704285, -0.29179999232292175, -0.0806410014629364, 0.5364999771118164, 0.1231599971652031, -0.42089998722076416, -0.3343600034713745, -0.0518839992582798, 0.21964000165462494, -0.27274999022483826, -0.16954000294208527, 0.07787899672985077, 0.25734999775886536, -0.21706999838352203, -0.015860000625252724, -0.164000004529953, -0.11460000276565552, -0.07812900096178055, -0.21044999361038208, -0.4615199863910675, -0.093129001557827, 0.3083899915218353, -0.21153999865055084, 0.039333000779151917, 0.9534599781036377, 0.2593500018119812, 0.1477999985218048, 0.7738800048828125, -0.33456000685691833, -0.3283100128173828, -0.26774001121520996, -0.3431900143623352, -0.29170000553131104, 0.16843000054359436, 0.014205999672412872, 0.4023599922657013, 0.010219999589025974, 0.8045099973678589, -0.547789990901947, -0.4849900007247925, 0.6059399843215942, -0.30913999676704407, -0.06023800000548363, -0.29003000259399414, -0.04467500001192093, 0.286190003156662, -0.5440599918365479, -0.29482001066207886, 0.4436199963092804, 0.09249000251293182, 0.14063000679016113, 0.014295999892055988, -0.19009999930858612, -0.05278699845075607, -0.0455860011279583, 0.028527000918984413, -0.33254000544548035, 0.05485299974679947, -0.22803999483585358, 0.0008837599889375269, -0.2319899946451187, -0.08042900264263153, -0.9146599769592285, 0.6604599952697754, 0.46474000811576843, -0.11330000311136246, 0.47262001037597656, -0.040536001324653625, -0.022381000220775604, 0.36577001214027405, 0.014072000049054623, -0.39079999923706055, 0.014832999557256699, -0.13534000515937805, 0.1488800048828125, -0.024824000895023346, 0.3228699862957001, -0.33583998680114746, -0.032079000025987625, 0.4336099922657013, -0.2972800135612488, 
-0.017573000863194466, 0.5407500267028809, 0.28786998987197876, -0.21199999749660492, 0.3048900067806244, -0.2959499955177307, -0.2380799949169159, -0.06528200209140778, 0.25758999586105347, -0.5817499756813049, -0.04279499873518944, -0.6432600021362305, -0.8366400003433228, 0.13906000554561615, 0.7696999907493591, -0.5336400270462036, -0.008922499604523182, -0.337119996547699, 0.06817399710416794, 0.07225599884986877, -0.9782199859619141, 0.2107200026512146, 1.2705999612808228, -0.1907700002193451, 0.08536499738693237, 0.521619975566864, 0.27922001481056213, -0.4338200092315674, 0.5112599730491638, 0.31007999181747437, -0.0494219996035099, -0.35012999176979065, 0.0947749987244606, -0.0860079973936081, 0.1562899947166443, 0.38098999857902527, 0.5083600282669067, -0.1340699940919876, 0.019394999369978905, -0.07390599697828293, -0.4065999984741211, -0.15118999779224396, 0.03155599907040596, 0.40619999170303345, -1.7999999523162842, -0.4755299985408783, 0.10846000164747238, 0.2585799992084503, -0.8305500149726868, 0.01881900057196617, -0.16565999388694763, 0.060649000108242035, -0.05901399999856949, 0.45037001371383667, 0.46428000926971436, -0.29036998748779297, 0.08383700251579285, 0.2397100031375885, 0.41218000650405884, -0.3998199999332428, 0.5155100226402283, -0.38767001032829285, 0.6120700240135193, 0.7857000231742859, -0.09763800352811813, -0.670740008354187, 0.49882999062538147, 0.4616900086402893], u'sandwich': [0.3548800051212311, 0.003486400004476309, 0.46492999792099, -0.11963000148534775, -0.10588999837636948, -0.41804999113082886, 0.32155999541282654, -0.22725999355316162, 0.07297100126743317, -0.22272999584674835, -0.5388200283050537, -0.33243000507354736, -0.34964001178741455, 0.668179988861084, -0.02610200084745884, 0.11928000301122665, 0.19497999548912048, -0.015622000209987164, -0.4050000011920929, 0.15995000302791595, 0.14316000044345856, 0.4791199862957001, -0.5535699725151062, 0.07223699986934662, -0.19621999561786652, -0.34657999873161316, -0.051837000995874405, 0.10560999810695648, 0.12391000241041183, -0.5220199823379517, -0.3232100009918213, -0.03635900095105171, 0.3161099851131439, -0.02564300037920475, -0.15748000144958496, 0.5523200035095215, -0.41339001059532166, 0.06350299715995789, -0.44765999913215637, 0.4510200023651123, -0.2934100031852722, -0.42002999782562256, -0.04171299934387207, 0.5645700097084045, -0.5797399878501892, 0.21744999289512634, 0.7960799932479858, -0.7031300067901611, -0.38986000418663025, 0.2307099997997284, -0.3426800072193146, -0.4882499873638153, 0.5868899822235107, 0.7178900241851807, -0.016944000497460365, 0.057645998895168304, -0.17881999909877777, 0.5100700259208679, -0.09931699931621552, 0.1503800004720688, 0.34595000743865967, -0.04181100055575371, -0.05079000070691109, 0.16637000441551208, 0.23783999681472778, 0.197610005736351, -0.1697400063276291, 0.288100004196167, -0.492110013961792, 0.028519000858068466, 0.2296299934387207, -0.26058998703956604, -0.033500999212265015, -0.5253999829292297, -0.8575500249862671, -0.46202000975608826, 0.3197599947452545, 0.10248000174760818, -0.08442000299692154, 0.46149998903274536, -0.05385899916291237, 0.3531399965286255, 0.3012700080871582, -0.1621599942445755, -0.3534500002861023, -0.5343599915504456, 0.12860000133514404, -0.038040999323129654, -0.44144999980926514, -0.22834999859333038, -0.06407800316810608, -0.43323999643325806, 0.17452000081539154, -0.5506600141525269, -0.3209500014781952, -0.3122999966144562, -0.07531899958848953, 0.4161899983882904, -0.0017219999572262168, 
-0.27496999502182007, 0.016297999769449234, -0.00968869961798191, 0.44064000248908997, -0.7886300086975098, -0.1834300011396408, -0.2835899889469147, -0.03962799906730652, 0.41168999671936035, -0.5049200057983398, -0.13044999539852142, 0.7813599705696106, 0.5017200112342834, -0.3520900011062622, -0.7665799856185913, 0.21223999559879303, -0.275409996509552, -0.3068700134754181, 0.061128001660108566, 0.22603000700473785, 0.11448000371456146, 0.13861000537872314, -0.42188000679016113, 0.38944000005722046, -0.2816700041294098, -0.3166100084781647, -0.4205699861049652, 0.15035000443458557, -0.33469998836517334, 0.7515100240707397, 0.6812700033187866, -0.0937110036611557, 0.6333600282669067, -0.06463100016117096, 0.4992299973964691, 0.4772700071334839, -0.0659869983792305, -0.433789998292923, 0.4147599935531616, -0.3009699881076813, 0.3597699999809265, 0.15162000060081482, 0.06260199844837189, -0.031874001026153564, -0.08770199865102768, -0.31154000759124756, 0.001408900017850101, -0.14642000198364258, 0.04933999851346016, -0.33059999346733093, -0.8367999792098999, -0.6904299855232239, 0.1724099963903427, 0.6636800169944763, 0.08037099987268448, -0.46540001034736633, -0.2301200032234192, -0.8971999883651733, -0.6543400287628174, 0.11969000101089478, 0.05713199824094772, 0.10286000370979309, 0.14103999733924866, -0.7105000019073486, 0.33094000816345215, 0.0272659994661808, -0.2681500017642975, 0.42326000332832336, 0.06468000262975693, 0.4998700022697449, -0.793749988079071, -0.4390200078487396, -0.20324000716209412, -0.19154000282287598, 0.2566100060939789, -0.5926200151443481, 0.16216999292373657, -1.0069999694824219, 0.2581399977207184, 0.7570000290870667, -1.1818000078201294, 0.22746999561786652, 0.32273998856544495, -0.04674699902534485, -0.8636900186538696, 0.4233100116252899, -0.24643999338150024, 0.292059987783432, 0.5162699818611145, -0.21536999940872192, 0.2617200016975403, -0.007294099777936935, 1.0714999437332153, 0.2640100121498108, -0.26405999064445496, 0.046828001737594604, 0.07593599706888199, -0.3776099979877472, -0.45796999335289, 0.06155100092291832, -0.11642999947071075, 0.18799999356269836, -0.3350600004196167, 0.9335500001907349, 0.4312700033187866, 0.2524699866771698, -0.16498999297618866, -0.14875000715255737, -0.4215100109577179, 0.29569000005722046, 0.08646299690008163, -0.060940999537706375, 0.10260999947786331, 0.19088000059127808, -0.0017534999642521143, -0.24265000224113464, 0.28073999285697937, 0.30726999044418335, -0.5913199782371521, 0.007857900112867355, -0.12375999987125397, 0.4064599871635437, 1.1576999425888062, 0.03827900066971779, -0.16753999888896942, -0.1367499977350235, -0.5512300133705139, -0.2687099874019623, -0.33254000544548035, -0.4327000081539154, 0.47784000635147095, 0.11483000218868256, -0.010885999538004398, -0.34630998969078064, -0.021030999720096588, 0.9603999853134155, -0.2184700071811676, 0.31442999839782715, 0.23138000071048737, -0.3853200078010559, 0.5682799816131592, 0.0537169985473156, -0.10916999727487564, -0.14007000625133514, -0.3950200080871582, -0.8505399823188782, 0.056460000574588776, -0.14381000399589539, -0.2521499991416931, -0.2757900059223175, -0.6859700083732605, 0.7611100077629089, 0.30476999282836914, 0.27362000942230225, 0.6389099955558777, 0.22062000632286072, 0.581849992275238, 0.2228199988603592, 0.10392999649047852, 0.35409998893737793, -0.1081399992108345, 0.4932299852371216, -0.09683600068092346, 0.15067000687122345, 0.08052399754524231, 0.13987000286579132, -0.33009999990463257, 0.2072100043296814, 
-0.1878799945116043, 0.43966999650001526, -0.0943249985575676, -0.4506700038909912, -0.08480899780988693, 0.5097299814224243, 0.35547998547554016, -0.1956000030040741, 0.1638599932193756, -0.6731200218200684, 0.2399500012397766, -0.7892699837684631, 0.055716000497341156, -0.15532000362873077, 0.029252000153064728, -0.15487000346183777, -0.4427500069141388, 0.1374099999666214, 0.5573899745941162, 0.38618001341819763, -0.5750100016593933, 0.5557699799537659, -0.03477099910378456, -0.1459600031375885, -0.10950999706983566, -0.08009599894285202, 0.03567900136113167, -0.7380300164222717, -0.6485000252723694, 0.37731999158859253, -0.16937999427318573, 0.08927799761295319, 0.14252999424934387], u'mat': [0.155239999294281, -0.4860000014305115, -0.18129999935626984, -0.9030399918556213, -0.11396999657154083, -0.7607499957084656, -0.35839998722076416, -0.14061999320983887, -0.15584999322891235, -0.14473000168800354, -0.08105400204658508, -0.11655999720096588, -0.48653000593185425, 0.521589994430542, 0.7912300229072571, -0.14177000522613525, -0.3487600088119507, 0.9420499801635742, 0.15109999477863312, -0.12872999906539917, 0.13007000088691711, -0.23321999609470367, -0.1426199972629547, -0.0956289991736412, 0.22381000220775604, 0.1393900066614151, 0.2791300117969513, -0.03719300031661987, 0.8545200228691101, 0.11477000266313553, 0.07310699671506882, -0.10152000188827515, 0.4264200031757355, -0.3592199981212616, -1.0098999738693237, -0.062453001737594604, 0.6758999824523926, -0.08302100002765656, 0.09800899773836136, -0.09683000296354294, 0.19638000428676605, -0.37485000491142273, -0.3352400064468384, -0.2088800072669983, 0.4607299864292145, 0.3513000011444092, 0.4884899854660034, -0.24695999920368195, -0.321370005607605, -0.10426999628543854, -0.22915999591350555, 0.180759996175766, -0.3194600045681, -0.12529000639915466, -0.3096199929714203, 0.909850001335144, -0.22669999301433563, -0.3218599855899811, 0.5602499842643738, 0.31349000334739685, 0.47067999839782715, -0.025131000205874443, -0.3609200119972229, 0.34880998730659485, -0.34303000569343567, -0.23010000586509705, -0.15440000593662262, 0.11435999721288681, -0.16015000641345978, 0.36719998717308044, -0.08932799845933914, -0.40064001083374023, -0.6164799928665161, 0.46988001465797424, -0.575219988822937, 0.25536999106407166, 0.6398900151252747, -0.300570011138916, 0.3097899854183197, -0.3567099869251251, 0.7058500051498413, 0.06301400065422058, 0.19729000329971313, -0.15365999937057495, -0.607230007648468, -0.3205299973487854, 0.40123000741004944, -0.2289399951696396, -0.21096999943256378, -0.32844001054763794, -0.21403999626636505, -0.24296000599861145, 0.6474400162696838, 0.018623000010848045, -0.33254000544548035, -0.5284900069236755, 0.1524599939584732, 0.5245400071144104, 0.6116499900817871, 0.47617000341415405, 0.45205000042915344, 0.19791999459266663, -0.19589999318122864, 0.18764999508857727, -0.4388900101184845, -0.21038000285625458, 0.08809500187635422, 0.3608799874782562, -0.05716099962592125, -0.582859992980957, -0.07034599781036377, -0.012994999997317791, 0.030966000631451607, -0.0036943000741302967, -0.001816999982111156, 0.40095001459121704, -0.3635199964046478, 0.30338001251220703, -0.4750699996948242, -0.07031700015068054, -0.1555200070142746, -0.19787999987602234, 0.18914000689983368, 0.6169700026512146, -0.12036000192165375, -0.03240299969911575, -0.14258000254631042, -1.0361000299453735, 0.22731000185012817, 0.09427499771118164, 0.5007399916648865, 0.2845099866390228, -0.008825600147247314, 0.4955799877643585, 
0.047410998493433, -0.6842399835586548, -0.06452900171279907, -0.16312000155448914, -0.272350013256073, -0.46230000257492065, 0.4411099851131439, 0.5512300133705139, 0.2541300058364868, -0.5860499739646912, -0.4542199969291687, 0.02281000092625618, -0.6213300228118896, 0.003945699892938137, -0.16606000065803528, -0.5636199712753296, 0.14760999381542206, 0.07189299911260605, -0.005492500029504299, -0.3440299928188324, -0.3578299880027771, 0.2175700068473816, 0.445279985666275, -0.41446998715400696, 0.16572999954223633, 0.29186001420021057, 0.033048998564481735, -0.4320499897003174, -0.5220800042152405, 0.4805299937725067, 0.6166899800300598, -0.0838249996304512, -0.3074600100517273, 0.8657299876213074, 0.14410999417304993, 0.006243899930268526, -0.3980500102043152, -0.07772699743509293, 0.5359200239181519, -0.3826799988746643, -0.19470000267028809, 0.4821600019931793, 0.2785800099372864, -0.0021214999724179506, 0.31435999274253845, -0.14225000143051147, 0.3600899875164032, -0.04593300074338913, 0.28181999921798706, 0.6786800026893616, 0.0010376999853178859, 0.1662999987602234, 0.672819972038269, 0.09982199966907501, 0.5934799909591675, 0.3210799992084503, 0.08648999780416489, 0.35686999559402466, -0.17205999791622162, 0.4217199981212616, -0.4378800094127655, 0.5153700113296509, 0.28022000193595886, 0.4860199987888336, 0.42166000604629517, 0.2223300039768219, 0.12713000178337097, 0.18459999561309814, 0.05683499947190285, -0.12532000243663788, 0.37922999262809753, 0.322270005941391, 0.08299099653959274, -0.329800009727478, -0.6144999861717224, -0.4144800007343292, -0.2558499872684479, -0.0751200020313263, 0.7007799744606018, 0.27059000730514526, 0.48429998755455017, -0.15237000584602356, -0.25169000029563904, 0.024267999455332756, -0.6741300225257874, -0.1379700005054474, 0.06377600133419037, 0.38541001081466675, -0.13533000648021698, -0.24289999902248383, 0.6267300248146057, -0.2169400006532669, 0.21513999998569489, 0.20393000543117523, -0.10341999679803848, 0.39006999135017395, 0.8990200161933899, 0.2985000014305115, -0.1540900021791458, 0.4511300027370453, 0.7291399836540222, -0.3371399939060211, -0.3964900076389313, -0.39941999316215515, 0.031031999737024307, 0.48069000244140625, 0.40105998516082764, 0.36597999930381775, -0.39243000745773315, 0.15070000290870667, -0.31929001212120056, 0.09934200346469879, 0.2425599992275238, 0.19616000354290009, -0.16269999742507935, 0.2386000007390976, 0.33469998836517334, -0.18832999467849731, -0.06264399737119675, -0.40209001302719116, 0.4597899913787842, 0.0828000009059906, 0.35097000002861023, -0.337009996175766, 0.3402499854564667, 0.09957200288772583, -0.3478800058364868, -0.3740299940109253, 0.511210024356842, 0.28130999207496643, 0.2256300002336502, -0.4694400131702423, -0.2735399901866913, -0.31286001205444336, -0.11658000200986862, 0.2448599934577942, -0.029867000877857208, 0.09556099772453308, -0.4453299939632416, -0.03622300177812576, -0.510640025138855, -0.26058000326156616, -0.20794999599456787, 0.18126000463962555, -0.6441299915313721, 0.5199400186538696, 0.024543000385165215, -0.28360000252723694, -0.4315299987792969, 0.5210999846458435, -0.0011349000269547105, -0.14214999973773956, -0.07404299825429916, 0.2533800005912781, -0.566100001335144, 0.16301999986171722, 0.7503200173377991, 0.9272699952125549, 0.35067999362945557, -0.46358001232147217, 0.3755500018596649, -0.02619899995625019, 0.7538099884986877, 0.26875001192092896, -0.0915760025382042, 0.14865000545978546], u'screw': [-0.08532100170850754, -0.17313000559806824, 
0.13208000361919403, -0.40865999460220337, 0.17045000195503235, 0.21453000605106354, -0.11963000148534775, -0.21055999398231506, 0.22134999930858612, -0.7712299823760986, -0.3912000060081482, -0.043880999088287354, 0.655239999294281, -0.1740799993276596, -0.0323759987950325, 0.3990600109100342, 0.027619000524282455, -0.8083999752998352, 0.02536199986934662, 0.20949000120162964, -0.16192999482154846, 0.45493000745773315, 0.3366900086402893, 0.3649899959564209, 0.05402199923992157, 0.28547000885009766, -0.25492000579833984, 0.2488500028848648, 0.07158099859952927, -0.17308999598026276, -0.08537399768829346, 0.5732399821281433, 0.31396999955177307, 0.026341000571846962, -0.17955000698566437, 0.6135500073432922, -0.005823099985718727, 0.3763200044631958, 0.07024499773979187, 0.6201800107955933, -0.9006400108337402, -0.047805000096559525, 0.21154999732971191, -0.4960800111293793, -0.248539999127388, -0.16332000494003296, -0.16031000018119812, 0.03048500046133995, 0.17835000157356262, 0.13176999986171722, -0.2034199982881546, 0.5041599869728088, -0.19812999665737152, 0.07475200295448303, 0.12313999980688095, 0.3903299868106842, -0.13966000080108643, 0.03428800031542778, 0.30274999141693115, -0.033962998539209366, 0.3064199984073639, -0.20624999701976776, 0.17443999648094177, 0.47262999415397644, -0.00991430040448904, 0.6152600049972534, -0.13919000327587128, 0.48890000581741333, 0.4606800079345703, -0.09281200170516968, 0.09937699884176254, 0.0044952998869121075, 0.18446999788284302, 0.39188000559806824, -0.022526999935507774, 0.4991700053215027, -0.034028999507427216, -0.46678999066352844, -0.28780999779701233, -0.35554999113082886, -0.5616199970245361, 0.0686890035867691, 0.3593499958515167, -0.27046000957489014, 0.27480000257492065, 0.21141000092029572, 0.33755001425743103, 0.16641999781131744, -0.3184700012207031, -0.07735099643468857, 0.7769500017166138, 0.262470006942749, -0.020762000232934952, -0.009602700360119343, 0.0044502997770905495, 0.28870001435279846, -0.18498000502586365, 0.3993400037288666, -0.14077000319957733, -0.4333299994468689, 0.08098000288009644, 0.7751100063323975, -0.20420999825000763, -0.2648699879646301, -0.02367500029504299, -0.013505999930202961, -0.07551900297403336, -0.14568999409675598, -0.8630599975585938, 0.11191000044345856, -0.12621000409126282, 0.2792600095272064, 0.038061998784542084, -0.5074800252914429, -0.2764900028705597, 0.41978999972343445, -0.45210000872612, 0.424780011177063, -0.001080599962733686, 0.13968999683856964, 0.10852999985218048, -0.7115700244903564, 0.9259600043296814, -0.28648000955581665, 0.6923499703407288, 0.09286899864673615, -0.33340001106262207, 0.21368999779224396, 0.25398001074790955, 0.29469001293182373, 0.969290018081665, 0.4770300090312958, 0.3558399975299835, -0.06453700363636017, 0.16234000027179718, 0.6447299718856812, -0.4370500147342682, -0.06723900139331818, -0.352539986371994, -0.5302600264549255, -0.13367000222206116, -0.05380000174045563, -0.38468998670578003, 0.24327999353408813, 0.09773799777030945, 0.22443999350070953, 0.025527000427246094, -0.4278300106525421, 0.6360599994659424, 0.9454299807548523, -0.10875000059604645, -0.3968000113964081, 0.6834099888801575, -0.2037999927997589, 0.16843000054359436, 0.048186998814344406, 0.7140799760818481, -0.6337900161743164, 0.4330799877643585, 0.45965999364852905, -0.2953299880027771, 0.48572999238967896, -0.3881300091743469, -0.4926399886608124, 0.26881998777389526, -0.27772000432014465, -0.0874290019273758, 0.37125998735427856, 1.0872000455856323, 0.26475998759269714, 
-0.5177599787712097, 0.13681000471115112, 0.26723000407218933, -0.06384199857711792, 1.1562000513076782, -0.07908900082111359, -0.6523000001907349, 0.29267001152038574, -0.11275000125169754, -0.09577800333499908, 0.32168999314308167, 0.20100000500679016, 0.8861600160598755, -0.10079000145196915, -0.09026499837636948, -0.4625700116157532, 0.6815999746322632, 0.37003999948501587, 0.7557299733161926, 0.04760099947452545, -0.17966000735759735, 0.28591999411582947, 0.8453500270843506, 0.7414000034332275, 0.18734000623226166, 0.1730400025844574, -0.18959000706672668, -0.681190013885498, -0.015003999695181847, -0.08903300017118454, 0.23861999809741974, 0.09966699779033661, 1.1833000183105469, 0.31540000438690186, 0.7159299850463867, 0.14983999729156494, 0.3861300051212311, -0.0869859978556633, -0.36441999673843384, 0.11412999778985977, -0.05692800134420395, -0.2186499983072281, 0.4680599868297577, 0.38098999857902527, -0.24098999798297882, -0.5024499893188477, -0.2965500056743622, 0.33270999789237976, -0.30952998995780945, 0.059748001396656036, -0.9336900115013123, 0.4592599868774414, 0.3592900037765503, 0.40970999002456665, -0.4444600045681, -0.09991499781608582, 0.2420700043439865, -0.242249995470047, 0.507669985294342, 0.19735999405384064, 0.0862559974193573, 0.26875999569892883, -0.07133899629116058, -0.8178899884223938, 0.3208400011062622, -0.21525999903678894, -0.6124100089073181, -0.058736998587846756, 0.16142000257968903, -0.18380999565124512, -0.21602000296115875, -0.40553998947143555, -0.4500400125980377, 0.07718999683856964, -0.123539999127388, -0.6784800291061401, 0.10655000060796738, 0.8092799782752991, 0.22542999684810638, -0.6362699866294861, 0.10898000001907349, -0.294050008058548, 0.3878900110721588, -0.2826800048351288, 0.5380799770355225, -0.22460000216960907, -0.36882999539375305, 0.05384200066328049, -0.4382399916648865, -0.7999799847602844, 0.3880400061607361, -0.2694399952888489, -0.4027799963951111, 0.19080999493598938, 0.07033900171518326, -0.4865800142288208, -0.5644199848175049, 0.295960009098053, -0.13652999699115753, -0.000999080017209053, 0.5491499900817871, 0.32806000113487244, 0.1888899952173233, -0.012160000391304493, -0.1671299934387207, -0.11484000086784363, 0.22970999777317047, -0.27226001024246216, -0.18071000277996063, -0.11496999859809875, -0.217739999294281, 0.063789002597332, -0.3916899859905243, 0.4243200123310089, 0.22332000732421875, 0.010733000002801418, -0.4105899930000305, -0.6502500176429749, -0.0814879983663559, -0.2868100106716156, 0.08235400170087814, -0.7053999900817871, 0.18435999751091003, -0.11489000171422958, -0.5930899977684021, -0.0809980034828186, -0.20804999768733978, -0.47633999586105347, 0.02500700019299984, 0.4410800039768219], u'farm': [-0.4576199948787689, 0.5241600275039673, -0.5845999717712402, 0.05648300051689148, -0.030037999153137207, -0.05389299988746643, 0.034297000616788864, -0.13247999548912048, -0.4952099919319153, -0.5430799722671509, -0.5392500162124634, -0.3605400025844574, -0.0054039000533521175, 0.2623099982738495, -0.09107799828052521, 0.6911900043487549, -0.10141000151634216, -0.35842999815940857, 0.3569299876689911, 0.132750004529953, 0.2791900038719177, 0.4315899908542633, 0.15918000042438507, 0.3355399966239929, -0.22887000441551208, 0.226500004529953, -0.30838000774383545, -0.18643000721931458, -0.5339599847793579, 0.7016299962997437, -0.4939199984073639, 0.2147199958562851, -0.47332999110221863, 0.2721799910068512, 0.033472999930381775, 0.5497999787330627, 0.3468100130558014, -0.33730998635292053, 
-0.030880000442266464, -0.5413900017738342, 0.15063999593257904, -0.037470001727342606, 0.5265700221061707, 0.1640699952840805, -0.47777000069618225, -0.19211000204086304, -0.44508999586105347, 0.08123999834060669, 0.052618999034166336, -0.08512499928474426, -0.15230000019073486, 0.641219973564148, 0.38499000668525696, -0.023409999907016754, 0.03279000148177147, -0.5331100225448608, -0.1929900050163269, 0.05622100085020065, -0.3646000027656555, -0.487309992313385, 0.06436099857091904, -0.4817500114440918, 0.11004000157117844, -0.10208000242710114, -0.31711000204086304, -0.031449999660253525, -0.2925400137901306, -0.25356999039649963, -0.25192001461982727, -0.2276100069284439, 0.13130000233650208, 0.6401100158691406, -0.19523000717163086, -0.08381299674510956, -0.6721400022506714, -0.4219900071620941, -0.247529998421669, -0.6663399934768677, -0.18490999937057495, 0.044217001646757126, 0.1563899964094162, 0.11153999716043472, 0.05044800043106079, -0.19618000090122223, 0.28411000967025757, -0.3179199993610382, 0.11208000034093857, 0.02839599922299385, -0.3819099962711334, -0.5015599727630615, 0.7849400043487549, -0.4191400110721588, 0.2648099958896637, 0.17149999737739563, -0.31000998616218567, -0.4532800018787384, 0.3421199917793274, -0.2908399999141693, -0.6209400296211243, -0.09137099981307983, -0.30449000000953674, 0.24698999524116516, -0.2032500058412552, -0.036201998591423035, -0.38874000310897827, 0.16579000651836395, 0.12272000312805176, -0.32016998529434204, 0.0378590002655983, 0.150859996676445, -0.353520005941391, -0.6419900059700012, -0.5676599740982056, 0.546999990940094, 0.011222000233829021, -0.024399999529123306, 0.2552199959754944, 0.41405999660491943, 0.40314000844955444, 0.0833820030093193, 0.032930001616477966, 0.239329993724823, -0.0556350015103817, 0.06184900179505348, 0.4089199900627136, 0.4809400141239166, 0.06277599930763245, 0.38218000531196594, -0.055052999407052994, -0.049341000616550446, 0.2902899980545044, 0.7841200232505798, 0.2039099931716919, 0.1361899971961975, -0.4239499866962433, -0.25523000955581665, 0.38760998845100403, 0.13822999596595764, -0.26109999418258667, -0.3095499873161316, 0.18422000110149384, -0.5124599933624268, 0.06518399715423584, -0.5126000046730042, -0.32771000266075134, 0.26282998919487, 0.2816700041294098, -0.08879899978637695, -0.20250999927520752, -0.2621900141239166, -0.11958000063896179, 0.15328000485897064, -0.16638000309467316, -0.17122000455856323, 0.2111700028181076, 0.1552100032567978, -0.32534998655319214, 0.16301000118255615, 0.04535900056362152, -0.3996100127696991, 0.5005599856376648, 0.12681999802589417, -0.20587000250816345, -0.12997999787330627, 0.14246000349521637, -0.3395000100135803, 0.27469000220298767, -0.6989499926567078, -0.059960998594760895, 0.1813499927520752, -0.3766399919986725, -0.15602000057697296, -0.08989100158214569, 0.3101600110530853, -0.1058799996972084, -0.06435400247573853, 0.19705000519752502, -0.35370999574661255, 0.13097000122070312, 0.4556100070476532, 0.1480100005865097, 0.09228599816560745, -0.005650700069963932, -0.3372099995613098, 0.2453799992799759, -0.21770000457763672, -0.27285999059677124, -0.2813799977302551, 0.6596800088882446, 0.03743499889969826, -0.8409199714660645, 0.7973600029945374, -0.278219997882843, -0.35569998621940613, -0.21097999811172485, 0.2702000141143799, 0.048193998634815216, -0.5581200122833252, -0.10867000371217728, -0.20095999538898468, 0.7439699769020081, -0.0986350029706955, -0.292059987783432, 0.1352500021457672, 0.5256100296974182, 0.014506000094115734, 
0.5473799705505371, -0.18268999457359314, 0.29892000555992126, 0.251010000705719, -0.026940999552607536, -0.447270005941391, -0.2362000048160553, -0.40042999386787415, -0.24270999431610107, 0.14438000321388245, -0.13787999749183655, 0.5333899855613708, 0.4875600039958954, -0.09165400266647339, 0.1760600060224533, -0.6560400128364563, -0.18573999404907227, -0.40817001461982727, 0.2125599980354309, 0.1387999951839447, -0.23015999794006348, 0.34154999256134033, 0.14077000319957733, 0.13451999425888062, 0.11134999990463257, -0.24856999516487122, -0.023194000124931335, -0.10824999958276749, 0.6130800247192383, 0.29326000809669495, 0.1859000027179718, -0.007612199988216162, -0.5680199861526489, 0.3911899924278259, 0.0021742000244557858, 0.42219001054763794, 0.031932998448610306, -0.10694999992847443, -0.6982799768447876, -0.21877999603748322, 0.41190001368522644, 0.058299001306295395, 0.344760000705719, -0.7797899842262268, -0.30761998891830444, 0.3547399938106537, 0.1801699995994568, 0.40575000643730164, 1.2996000051498413, -0.24492000043392181, -0.13458000123500824, 0.09746699780225754, -0.14636999368667603, 0.004844000097364187, 0.08382900059223175, 0.1962900012731552, -0.08543899655342102, -0.4631800055503845, -0.30845001339912415, 0.17260000109672546, 0.06456399708986282, 0.19169999659061432, -0.21435999870300293, -0.23850999772548676, -0.5491600036621094, 0.5210899710655212, 0.4399699866771698, -0.13018999993801117, 0.35809001326560974, 0.11468999832868576, -2.109999895095825, 0.7894200086593628, 0.216389998793602, 0.11444000154733658, -0.8810999989509583, -0.12358999997377396, 0.08832799643278122, -0.2739199995994568, -0.12625999748706818, 0.7215099930763245, 0.6696299910545349, -0.4840799868106842, 0.06131099909543991, -0.31450000405311584, 0.0015796000370755792, -0.19562000036239624, 0.49312999844551086, -0.3431600034236908, -0.24924999475479126, 0.30052998661994934, -0.027921000495553017, -0.367900013923645, 0.3598000109195709, 0.622189998626709], u'eggs': [0.05490599945187569, 0.9803000092506409, -0.15599000453948975, 0.31665000319480896, -0.2812100052833557, -0.27535000443458557, 0.10318999737501144, 0.38350000977516174, 0.20046000182628632, -0.7509999871253967, -0.4731900095939636, -1.080399990081787, -0.385019987821579, -0.17712000012397766, -0.2197200059890747, -0.7276600003242493, -0.28832998871803284, 0.07782799750566483, -0.5377900004386902, 0.29264000058174133, -0.08819799870252609, -0.06273200362920761, 0.0456710010766983, -0.037859998643398285, -0.11377999931573868, -0.5727800130844116, -0.2563300132751465, 0.12185999751091003, -0.17847000062465668, -0.14767999947071075, -0.7020699977874756, 0.5396000146865845, -0.6222000122070312, -0.1320600062608719, 0.4040299952030182, 0.7212600111961365, -0.40356001257896423, 0.8909599781036377, -0.26282998919487, -0.13169999420642853, 0.259880006313324, 0.05407100170850754, -0.01653899997472763, -0.06410200148820877, -0.28018999099731445, 0.21521000564098358, 0.43536001443862915, 0.467960000038147, -0.3135800063610077, 0.6303899884223938, -0.21211999654769897, 0.12490999698638916, 0.032329000532627106, 0.2089499980211258, -0.8693199753761292, 0.5735099911689758, 0.224140003323555, 0.14088000357151031, -1.0264999866485596, 0.058956000953912735, -0.1168999969959259, -0.16791999340057373, 0.34112998843193054, -0.1856600046157837, 5.488700116984546e-05, -0.7149699926376343, 0.1695600003004074, 0.20148000121116638, -0.5309399962425232, -0.2116200029850006, 0.03826100006699562, 0.260560005903244, -0.47321999073028564, 0.23788000643253326, 
-0.8923500180244446, 1.0013999938964844, 0.10385999828577042, -0.06342799961566925, 0.606689989566803, 0.18827000260353088, -0.765250027179718, -0.007399599999189377, -0.5501700043678284, 0.1837099939584732, 0.3096500039100647, 0.020468000322580338, 0.35738998651504517, -0.10794000327587128, -0.2813299894332886, -0.3071799874305725, 0.4105300009250641, -0.07949700206518173, -0.7421600222587585, 0.5143300294876099, 0.33730998635292053, -0.11640000343322754, -0.38475000858306885, 0.7217100262641907, -0.1517300009727478, 0.0026551000773906708, 0.5192300081253052, -0.5909900069236755, 0.1280899941921234, -0.32071998715400696, 0.2777499854564667, -0.45243000984191895, 0.011471999809145927, -0.008081899955868721, -0.46592000126838684, 0.14313000440597534, 0.11057999730110168, 0.34303998947143555, -0.296889990568161, 0.5156499743461609, -0.2669300138950348, 0.040323998779058456, 0.014248000457882881, 0.23157000541687012, 0.46807000041007996, -0.6093400120735168, -0.4392099976539612, -0.2780199944972992, -0.04664500057697296, 0.9466599822044373, -0.31005001068115234, 0.22890999913215637, -0.14148999750614166, 0.44203999638557434, 0.20835000276565552, 0.34000998735427856, 0.12685999274253845, 0.3722899854183197, -0.2589400112628937, 1.0949000120162964, -0.22497999668121338, -0.4490399956703186, 0.6211000084877014, 0.2418700009584427, -0.1814499944448471, 0.4776099920272827, 0.3564099967479706, -0.2517299950122833, -1.2613999843597412, -0.4748600125312805, -0.35514000058174133, 0.01144499983638525, 0.17952999472618103, -0.37408000230789185, 0.04288399964570999, 0.22404000163078308, -0.9976900219917297, 0.5519999861717224, 0.05790700018405914, 0.31762999296188354, -0.02096400037407875, -0.23149000108242035, -0.35861000418663025, 0.17870000004768372, -0.16728000342845917, -0.39792001247406006, -0.41130000352859497, 0.23675000667572021, -0.2807199954986572, -0.0029810001142323017, 0.23319000005722046, 0.2722100019454956, -0.22742000222206116, -0.22070999443531036, 0.19304999709129333, -0.17940999567508698, 0.7892600297927856, 0.22176000475883484, 0.09730999916791916, -0.04658700153231621, 0.09852799773216248, -0.22251999378204346, 0.4322099983692169, -0.04169199988245964, 0.22059999406337738, -0.8378199934959412, 0.2872999906539917, -0.4668999910354614, -0.037262000143527985, -0.6506800055503845, -0.3073599934577942, -0.17764000594615936, 0.9971699714660645, -0.04721999913454056, 0.4648599922657013, -0.26190000772476196, 0.6996399760246277, 0.9332500100135803, -0.2136400043964386, -0.5242900252342224, 0.05423500016331673, 0.5254600048065186, 0.11836999654769897, -0.8357399702072144, 0.1291400045156479, -0.14630000293254852, 0.29583999514579773, -0.4089899957180023, -0.16791999340057373, 0.37070998549461365, -0.038013000041246414, -0.14957000315189362, 0.2615799903869629, -0.15839999914169312, 0.06519799679517746, 0.21410000324249268, 0.5774400234222412, -0.5637199878692627, 0.3791700005531311, -0.4679799973964691, -0.1796099990606308, 0.07817099988460541, 0.46184998750686646, -0.20453999936580658, 0.07829800248146057, -0.12409999966621399, 0.6456400156021118, 0.43810001015663147, -0.3111700117588043, -0.9365599751472473, -0.8392999768257141, -0.07588700205087662, 0.5149199962615967, 0.18352000415325165, 0.504360020160675, 0.34973999857902527, 0.123259998857975, -0.2298399955034256, 0.37022000551223755, 0.022776000201702118, 0.23697000741958618, 0.54694002866745, 0.009570799767971039, 0.5690000057220459, -0.6888499855995178, -0.6002699732780457, -0.5376999974250793, -0.3971099853515625, 
-0.15717999637126923, 0.29205000400543213, -1.3043999671936035, 0.1088000014424324, 0.6392199993133545, -0.23091000318527222, -0.5036799907684326, -1.1064000129699707, 0.01219400018453598, 0.03888799995183945, 0.5124800205230713, -0.2412700057029724, 0.6893900036811829, 0.12596000730991364, 0.2606399953365326, -0.11963000148534775, 0.38218000531196594, 0.14124000072479248, 0.13230000436306, -0.7109299898147583, -0.21528999507427216, -0.5879200100898743, 0.2797499895095825, 0.11146999895572662, -0.38332998752593994, -0.5685799717903137, 0.05305999889969826, 0.5043399930000305, -0.47172001004219055, 0.10694000124931335, 0.07782500237226486, 0.2194799929857254, -0.1933099925518036, 0.25892001390457153, -1.5009000301361084, -0.31068000197410583, -1.0773999691009521, -0.586929976940155, 0.15644000470638275, -0.21562999486923218, 0.09426599740982056, -0.00504940003156662, -1.2476999759674072, 0.8039199709892273, -0.031599998474121094, 0.11759000271558762, 0.4555000066757202, 0.0938280001282692, -0.21828000247478485, -0.4075999855995178, 0.8262100219726562, 0.044075001031160355, 0.09326200187206268, -0.5057899951934814, 0.14323000609874725, -0.48403000831604004, -0.1689700037240982, -0.1302500069141388], u'foam': [0.7214199900627136, -0.2443999946117401, 0.04974199831485748, -0.5932999849319458, -0.22668999433517456, -0.704990029335022, 0.21831999719142914, 0.09726899862289429, 0.29646000266075134, -0.6076899766921997, -0.021852999925613403, -0.10344000160694122, -0.35016000270843506, -0.4760499894618988, 0.5972200036048889, 0.9087700247764587, 0.39574000239372253, 0.5377500057220459, -0.26368001103401184, 0.6750100255012512, 0.05596499890089035, 0.15547999739646912, 0.416130006313324, -0.039733000099658966, -0.21674999594688416, 0.1652200073003769, -0.22607000172138214, 0.2728100121021271, -0.9508000016212463, -0.18501000106334686, 0.47536998987197876, -0.1098100021481514, 0.34231001138687134, -0.22742000222206116, 0.18052999675273895, 0.5798799991607666, -0.25468000769615173, 0.25922998785972595, 0.6082000136375427, 1.1759999990463257, -0.05896500125527382, -0.17524999380111694, 0.1604900062084198, -0.1739100068807602, -0.4879299998283386, 0.29262998700141907, 0.42054998874664307, -0.031248999759554863, 0.3413900136947632, 0.04391299933195114, 0.42572999000549316, 0.09741900116205215, -0.6074900031089783, -0.0982000008225441, -0.0027958001010119915, 0.2345300018787384, -0.5448200106620789, 0.11630000174045563, 0.46415001153945923, -0.2128099948167801, -0.09590300172567368, -0.08301900327205658, -0.3755800127983093, 0.04539300128817558, 0.3235900104045868, -0.4318599998950958, -0.41499000787734985, -0.09483200311660767, -0.1282300055027008, 0.6365799903869629, 0.4207000136375427, -0.4927299916744232, 0.31911998987197876, 0.3458999991416931, -0.10242000222206116, 0.32705000042915344, 0.884850025177002, -0.5110200047492981, -0.7739700078964233, -0.270440012216568, -0.6050999760627747, -0.10186000168323517, -0.3416599929332733, -0.2682099938392639, -0.47609999775886536, 0.26614001393318176, 0.4506399929523468, 0.3634899854660034, -0.2329300045967102, 0.033383000642061234, 0.3665199875831604, 0.2462099939584732, -0.1470700055360794, -0.610759973526001, 0.3241199851036072, 0.12319999933242798, -0.6886000037193298, -0.0018535000272095203, 0.17778000235557556, -0.3007499873638153, 0.31244000792503357, 0.7497199773788452, -0.30590999126434326, -0.4066599905490875, 0.18907999992370605, 0.4778900146484375, -0.27125000953674316, 0.1816300004720688, -0.5772200226783752, -0.21573999524116516, 
0.06967899948358536, 0.4798800051212311, -0.4530700147151947, -0.5758900046348572, -0.34624001383781433, 0.13905000686645508, 0.009238200262188911, 0.4594300091266632, -0.43748998641967773, -0.6170899868011475, 0.40689000487327576, -0.19419999420642853, 0.48871999979019165, 1.1512000560760498, -0.6563900113105774, 0.1154400035738945, 0.22368000447750092, 0.0863339975476265, 0.49292999505996704, 0.18161000311374664, 0.31672999262809753, 0.5425099730491638, 0.4263400137424469, 0.004296300001442432, 0.2646099925041199, -0.3305700123310089, -0.36809998750686646, 0.6451399922370911, 0.24702000617980957, 0.05126599967479706, 0.24944999814033508, -0.24573999643325806, 0.371069997549057, -0.8220800161361694, 0.36959001421928406, -0.17964999377727509, 0.3633100092411041, -0.25262001156806946, 0.08801999688148499, -0.28404998779296875, -0.2055400013923645, 0.5285999774932861, -0.2836099863052368, 0.3595699965953827, -0.1870799958705902, -0.5676100254058838, -0.14803999662399292, 0.26135000586509705, 0.2861799895763397, 0.1335500031709671, 0.058125998824834824, -0.22975000739097595, -0.4455699920654297, 0.06190500035881996, 0.9240099787712097, 0.030066000297665596, 0.0374240018427372, 0.8982899785041809, 0.43123000860214233, 0.24167999625205994, 0.18154999613761902, 0.30539000034332275, 0.6760900020599365, 0.39871999621391296, -0.8858699798583984, -0.9533399939537048, 0.22147999703884125, 0.2300100028514862, 0.6120399832725525, -0.9870100021362305, 0.38752999901771545, -0.6333900094032288, 0.48736000061035156, -0.4419099986553192, -0.29941999912261963, 0.05558300018310547, 1.2467999458312988, -0.1436000019311905, 0.5179499983787537, -0.03166399896144867, 0.5724300146102905, 0.7250400185585022, -0.5217900276184082, -0.1956299990415573, -0.15369999408721924, 0.16060000658035278, -0.29043999314308167, 0.5040000081062317, 0.4533799886703491, 0.027233999222517014, -0.2293200045824051, 0.3324100077152252, 0.02767300046980381, -0.03241100162267685, -0.02049499936401844, 0.41894999146461487, 0.2519800066947937, -0.2752000093460083, -1.0088000297546387, 0.43472999334335327, 0.26993000507354736, 0.5935800075531006, 0.07892400026321411, -0.3103199899196625, 0.6013100147247314, 0.3076399862766266, 0.22157999873161316, -0.1954600065946579, 0.147039994597435, -0.6533899903297424, 0.8890799880027771, 0.5343700051307678, 0.44293999671936035, 0.47694000601768494, 0.21289999783039093, -0.3172599971294403, -0.22755999863147736, -0.14571000635623932, -0.31516000628471375, -0.00026376001187600195, 0.3502900004386902, -0.04956600069999695, -0.025026999413967133, -0.2025199979543686, 0.20565000176429749, -0.21332000195980072, 0.21345999836921692, -0.40733999013900757, 0.08128999918699265, -0.1727599948644638, 0.607509970664978, -0.20518000423908234, -0.21144999563694, -0.5220199823379517, -0.7948600053787231, -0.1775200068950653, 0.45677000284194946, -0.21337999403476715, -0.06544700264930725, -0.29583999514579773, 0.3528299927711487, -0.2182299941778183, -0.21250000596046448, -0.9131399989128113, -0.13109000027179718, 0.509190022945404, -0.38881999254226685, -1.0810999870300293, 0.362529993057251, 0.6542099714279175, -0.5199699997901917, -0.21806000173091888, 0.04143400117754936, 0.20868000388145447, 0.41839998960494995, -0.03438999876379967, -0.5654199719429016, -0.456169992685318, 0.18848000466823578, 0.3486199975013733, 0.6294699907302856, 0.16368000209331512, -0.27129998803138733, 0.0658470019698143, -0.4302400052547455, 0.16516999900341034, -0.4478299915790558, -0.177729994058609, -1.1527999639511108, 
-0.2415499985218048, -0.3317900002002716, 0.3616600036621094, 0.07733099907636642, -0.1369200050830841, 0.14656999707221985, 0.2955000102519989, 0.20068000257015228, 0.19458000361919403, 0.2797999978065491, -0.015413999557495117, 0.0319180004298687, -0.5241699814796448, 0.4293999969959259, -0.12178000062704086, 0.3810499906539917, 0.3145500123500824, -0.06985100358724594, -0.3880699872970581, -0.022092999890446663, 0.3308599889278412], u'pear': [-0.4942300021648407, -0.18297000229358673, 0.44304001331329346, -0.4510299861431122, 0.056237999349832535, -0.27480000257492065, -0.2837899923324585, 0.37160998582839966, 0.3116599917411804, 0.2960900068283081, -0.3200500011444092, 0.07034599781036377, -0.1393900066614151, -0.18363000452518463, -0.11114999651908875, -0.23207999765872955, -0.2144699990749359, 0.31869998574256897, -0.12370000034570694, 0.2599700093269348, -0.023281000554561615, 0.3957599997520447, -0.015328999608755112, 0.3753499984741211, -0.41398999094963074, -0.5057700276374817, -0.3608799874782562, 0.23246000707149506, -0.3813999891281128, 0.0246799997985363, 0.22246000170707703, 0.06509800255298615, -0.3228900134563446, -0.005961600225418806, -0.014344999566674232, 0.6074699759483337, 0.6171500086784363, -0.3642300069332123, 0.10711999982595444, -0.3361299932003021, 0.2760699987411499, -0.04283199831843376, 0.37676000595092773, -0.02633100003004074, -0.39563998579978943, 0.16057999432086945, -0.12605999410152435, 0.4430299997329712, -0.5186899900436401, 0.41034001111984253, 0.1168999969959259, -0.40268999338150024, 0.7676500082015991, 0.28824999928474426, -0.23601000010967255, -0.9970300197601318, -0.41690000891685486, 0.2502099871635437, 0.46309998631477356, -0.3010700047016144, 0.6288300156593323, -0.4002099931240082, 0.3524099886417389, 0.36522001028060913, -0.0682860016822815, 0.061765000224113464, -0.3737899959087372, 0.6853500008583069, -0.06708899885416031, -0.612309992313385, -0.22901999950408936, 0.2697499990463257, -0.813040018081665, 0.017696000635623932, -0.9120200276374817, 0.38062000274658203, 0.7420200109481812, 0.060068000108003616, 0.30441001057624817, -0.17007000744342804, -0.1330299973487854, 0.4163599908351898, 0.9521899819374084, -0.4190399944782257, 0.07509300112724304, -0.4743100106716156, -0.5066900253295898, 0.4134199917316437, 0.45427998900413513, -0.6422100067138672, 0.029270999133586884, -0.3160499930381775, -0.524590015411377, -0.06267999857664108, -0.14365999400615692, 0.1805499941110611, 0.6010599732398987, -0.0149940000846982, -0.5056099891662598, 0.6816800236701965, 0.16746999323368073, 0.33594000339508057, 0.20750999450683594, -0.32194000482559204, -0.43794000148773193, 0.06920299679040909, -0.30542999505996704, 0.34040001034736633, -0.12319999933242798, 0.07418400049209595, 0.3598800003528595, 0.16394999623298645, 0.002821400063112378, -0.342739999294281, 0.2134000062942505, 0.1935500055551529, -0.5444300174713135, 0.2270900011062622, 0.49893999099731445, -0.21753999590873718, -0.42346999049186707, -0.8436999917030334, -0.05343199893832207, -0.16949999332427979, -0.2757599949836731, -0.4557499885559082, -0.57669997215271, 0.5781000256538391, -0.22497999668121338, 0.07276400178670883, -0.1596899926662445, 0.9448599815368652, 0.3358300030231476, 0.8906099796295166, 0.15390999615192413, 0.027379000559449196, -0.49917998909950256, -0.7196800112724304, -0.5129600167274475, -0.4399999976158142, 0.8108400106430054, 0.29177001118659973, -0.2499600052833557, -0.6150400042533875, -0.017573999240994453, 0.5605800151824951, 0.035100001841783524, 
-0.9208199977874756, 0.2001499980688095, 0.18154999613761902, -0.405349999666214, 0.41356998682022095, 0.33664000034332275, -0.13304999470710754, -0.6256399750709534, -0.13099999725818634, -0.3070000112056732, -0.35392001271247864, -0.47113001346588135, 0.42785000801086426, -0.2912600040435791, -0.15307000279426575, -0.182669997215271, 0.256850004196167, 0.04069799929857254, -0.49028998613357544, -0.1422799974679947, -0.10673999786376953, -0.16944000124931335, -0.3394100069999695, -0.39083001017570496, -0.476859986782074, -0.23522000014781952, -0.27553999423980713, 0.3284600079059601, 0.15981000661849976, 0.029387999325990677, 0.09920799732208252, -0.15389999747276306, -0.2089499980211258, -0.7142599821090698, -0.1765500009059906, 0.22119000554084778, -0.7136899828910828, -0.09018299728631973, -0.8030499815940857, 1.003100037574768, 0.04788700118660927, -0.04679400101304054, 0.020620999857783318, 0.04366699978709221, 0.8116000294685364, -0.1456799954175949, 0.01956300064921379, -0.080935999751091, 0.30469998717308044, 0.282370001077652, -0.15830999612808228, -0.42052000761032104, 0.5417500138282776, -0.1729699969291687, 0.20972999930381775, 0.2105800062417984, -0.4338800013065338, 0.046160001307725906, 1.080299973487854, -0.437389999628067, 0.2216300070285797, 0.1932699978351593, -0.21622000634670258, 0.2364799976348877, 0.050533000379800797, 0.08240299671888351, -0.10343000292778015, 0.16676999628543854, -0.15565000474452972, 0.4871099889278412, -0.15150000154972076, -0.3480600118637085, -0.12886999547481537, -0.058393001556396484, -0.1254899948835373, -0.3539400100708008, 0.5329499840736389, -1.26010000705719, 0.6527699828147888, 0.12967999279499054, 0.7590699791908264, -0.1661199927330017, -0.19734999537467957, -0.05967999994754791, 0.17542000114917755, 0.1047699972987175, -0.15745000541210175, 0.3541100025177002, 0.5691800117492676, 0.31894001364707947, -0.08565899729728699, -0.09971799701452255, -0.24369999766349792, 0.061482999473810196, 0.014658999629318714, -0.38324999809265137, -0.23794999718666077, -0.5333499908447266, 0.549310028553009, -0.1639699935913086, 0.5316200256347656, 0.5259000062942505, -0.5981199741363525, 0.7096700072288513, 0.5006399750709534, -0.08432400226593018, -0.29137998819351196, 0.17848999798297882, 0.15078000724315643, 0.3136900067329407, -0.0470150001347065, -0.05013500154018402, 0.5945500135421753, 0.06796900182962418, 0.6450899839401245, 0.076323002576828, 0.7082399725914001, 0.21030999720096588, -0.6288999915122986, 0.24111999571323395, -0.34994998574256897, 0.07104600220918655, 0.823199987411499, -0.1815900057554245, 0.4913899898529053, -0.07929900288581848, -0.2577899992465973, 0.2641499936580658, 0.31317999958992004, 0.25738999247550964, 0.3786199986934662, -0.8823000192642212, -0.4498099982738495, -0.4977000057697296, 0.17985999584197998, -0.46661999821662903, -0.4349299967288971, -0.13800999522209167, -0.06873500347137451, 0.5160800218582153, -0.4700700044631958, -0.13654999434947968, -0.1158600002527237, 0.5974900126457214, -0.44828000664711, 1.12090003490448, -0.04410700127482414, 0.40064001083374023, -0.6359300017356873, -0.0021726000122725964, -0.007809900213032961, 0.48822999000549316, 0.10563000291585922], u'ball': [-0.22694000601768494, 0.47336000204086304, -0.11235000193119049, 0.29225000739097595, 0.23659999668598175, -0.149509996175766, -0.03090899996459484, -0.22472000122070312, -0.31172001361846924, -1.0547000169754028, -0.04713499918580055, 0.12985000014305115, -0.1479099988937378, -0.4679200053215027, -0.415800005197525, 
0.3707900047302246, -0.6682900190353394, 0.014515000395476818, 0.24865999817848206, 0.5795400142669678, -0.0358240008354187, 0.29967001080513, -0.19175000488758087, -0.3318299949169159, 0.3621799945831299, -0.06555099785327911, -0.1975499987602234, 0.2396399974822998, 0.0010936999460682273, -0.013113000430166721, 1.0534000396728516, -0.03325200080871582, 0.37136998772621155, 0.40116000175476074, -1.6887999773025513, -0.14000000059604645, 0.40547001361846924, 0.30469000339508057, -0.54093998670578, 0.7211899757385254, -0.1894800066947937, -0.1465200036764145, -0.11462000012397766, -0.27823999524116516, 0.6705600023269653, 0.14906999468803406, 0.5081200003623962, -0.24398000538349152, 0.06781099736690521, 0.34784001111984253, -0.31314998865127563, -0.2504099905490875, -0.004103799816220999, -0.18942999839782715, -0.2728700041770935, -0.2240999937057495, -0.09808900207281113, 0.08944399654865265, 0.22169999778270721, -0.3169400095939636, 0.3335700035095215, 0.04289200156927109, -0.26728999614715576, -0.17506000399589539, -0.19156000018119812, -0.41124001145362854, 0.27788999676704407, -0.6042900085449219, 0.3136500120162964, -0.30689001083374023, 0.46007001399993896, 0.157260000705719, 0.18640999495983124, 0.2034599930047989, 0.2777099907398224, -0.045604001730680466, 0.7141600251197815, 0.34362998604774475, 0.37643998861312866, -0.3420799970626831, 0.33869001269340515, 0.1643799990415573, 0.41165998578071594, 0.07741200178861618, -0.16064999997615814, -0.10733000189065933, 0.16722999513149261, 0.018414000049233437, -0.05323199927806854, 0.030685000121593475, 1.0247999429702759, 0.42917001247406006, -0.5799800157546997, -0.2632499933242798, -0.2748900055885315, -0.4460799992084503, -0.2625400125980377, -0.15289999544620514, -0.07303199917078018, -0.6278799772262573, -0.020806999877095222, 0.03218099847435951, -0.10769999772310257, 0.011633999645709991, 0.15271000564098358, 0.2761499881744385, 0.7236499786376953, 0.1344899982213974, -0.29407998919487, 0.3362100124359131, 0.28369998931884766, 0.13504000008106232, -0.020772000774741173, -0.014592999592423439, 0.017537999898195267, 0.011862999759614468, -0.37380000948905945, 0.27379998564720154, -0.28672000765800476, -0.5355100035667419, -0.022957999259233475, -0.24303999543190002, 0.4429900050163269, -0.33149001002311707, -0.2528400123119354, 0.186599999666214, 0.36675000190734863, 0.2168000042438507, 0.16162000596523285, 0.05544599890708923, 0.20750999450683594, 0.3902899920940399, -0.34007999300956726, 0.08540699630975723, -0.10809999704360962, 0.032937001436948776, 0.23583999276161194, -0.40101000666618347, -0.46571001410484314, 0.028749000281095505, 0.18594999611377716, -0.26677998900413513, -0.35514000058174133, -0.319489985704422, -0.5093700289726257, 0.41631001234054565, -0.4754300117492676, -0.4712100028991699, 0.2603699862957001, -0.35752999782562256, 0.2430099993944168, 0.20629000663757324, -0.25722000002861023, -0.6624900102615356, 0.5052300095558167, -0.30757999420166016, 0.22881999611854553, -0.8461899757385254, 0.6144199967384338, -0.18172000348567963, 0.12021999806165695, -0.9714199900627136, -0.2582699954509735, -0.802839994430542, 0.3231000006198883, -0.29646000266075134, -0.324970006942749, 0.35420000553131104, 0.3064599931240082, 0.2376900017261505, -0.2680499851703644, -0.22266000509262085, -0.40748000144958496, 0.11900000274181366, 0.36173999309539795, 0.3320100009441376, -0.2350499927997589, 0.7038099765777588, 0.04772299900650978, -0.4294700026512146, 0.4907200038433075, -0.322050005197525, 0.7640600204467773, 
0.32631000876426697, -0.21776999533176422, -0.09609699994325638, 0.012614999897778034, -0.31876999139785767, 0.02342200092971325, 0.5128600001335144, 0.7004500031471252, 0.16214999556541443, 0.3309299945831299, -0.35242998600006104, 0.012403000146150589, -0.33055999875068665, 0.17845000326633453, -0.11743000149726868, 0.34373998641967773, -0.11055999994277954, 1.972000002861023, 0.452210009098053, 0.14377999305725098, -0.519760012626648, -0.02863599918782711, 0.20068000257015228, 0.2739500105381012, 0.016858000308275223, -0.5586699843406677, 0.20468999445438385, -0.22250999510288239, 0.602370023727417, -0.12678000330924988, -0.09579800069332123, 0.009269299916923046, -0.32319000363349915, 0.10096000134944916, -0.574150025844574, -0.08373899757862091, -0.28231000900268555, -0.008444299921393394, -0.04980099946260452, -0.2291799932718277, 0.033998001366853714, -0.49476000666618347, -0.15589000284671783, 0.7593100070953369, -0.14125999808311462, 0.2501699924468994, 0.12796999514102936, 0.14458000659942627, 0.08350399881601334, -0.23465999960899353, 0.39458999037742615, -0.523639976978302, 0.32978999614715576, -0.6041399836540222, 0.34244999289512634, 0.012726999819278717, 0.15222999453544617, 0.383760005235672, -0.18940000236034393, -0.4876999855041504, -0.19505999982357025, -0.5588700175285339, 0.10849999636411667, -0.10513000190258026, 0.5346800088882446, -0.050540000200271606, 0.2496899962425232, 0.23009000718593597, -0.3861199915409088, -0.4878300130367279, -0.7579799890518188, 0.7218700051307678, -0.14985999464988708, -0.2139499932527542, -1.2375999689102173, -0.4500100016593933, -0.38047000765800476, -0.15577000379562378, -0.5535899996757507, 0.04698500037193298, 0.02075199969112873, -0.4448300004005432, 0.22603000700473785, -0.6266899704933167, -0.09151700139045715, -0.08572299778461456, 0.3404099941253662, -0.01209999993443489, -0.15389999747276306, -0.10232999920845032, 0.15639999508857727, -0.003985600080341101, -0.649869978427887, -1.1216000318527222, 0.16380999982357025, -1.2101999521255493, 0.04356599971652031, -0.8860999941825867, 0.1885800063610077, 0.6552199721336365, 0.49022001028060913, -0.027480000630021095, -0.4846400022506714, 0.13868999481201172, -0.46636998653411865, -0.12024000287055969, 0.13718000054359436, 0.6313899755477905, -0.05383799970149994, 0.6296399831771851, 0.3690299987792969, 0.4050000011920929, -0.2569099962711334, 0.4270800054073334, -0.5936499834060669, 0.2768700122833252, -0.5273900032043457], u'town': [-0.5115799903869629, -0.2906799912452698, -0.09758699685335159, -0.1718900054693222, 0.19697999954223633, -0.008538099937140942, 0.07091200351715088, 0.010708999820053577, 0.15263999998569489, -0.7230499982833862, -0.5908499956130981, -0.3072499930858612, 0.01457200013101101, 0.8177199959754944, 0.446399986743927, 0.4260300099849701, 0.0422540009021759, -0.45596998929977417, 0.4872100055217743, -0.18836000561714172, 0.07263900339603424, 0.28532999753952026, 0.1582300066947937, 0.30434998869895935, -0.26840001344680786, -0.6444500088691711, -0.19012999534606934, 0.1243399977684021, -0.1734199970960617, 0.27379998564720154, 0.30838000774383545, -0.061177000403404236, -0.6400399804115295, 0.6166800260543823, -0.13424000144004822, -0.10903999954462051, 0.014208000153303146, 0.46136999130249023, -0.3600899875164032, -0.9478800296783447, 0.4529300034046173, -0.14531999826431274, 0.20297999680042267, 0.35951998829841614, 0.38398000597953796, 0.19452999532222748, 0.501039981842041, 0.020194999873638153, 0.38969001173973083, 0.5049099922180176, 
0.349700003862381, 0.10430999845266342, 0.34081000089645386, 0.409280002117157, 0.16854000091552734, 0.36048999428749084, -0.18161000311374664, 0.06279999762773514, 0.29826000332832336, 0.16210000216960907, -0.09097599983215332, -0.7797999978065491, -0.03670499846339226, -0.51214998960495, 0.6532899737358093, -0.21499000489711761, -0.0705069974064827, 0.13524000346660614, -0.035009000450372696, -0.26431000232696533, 0.3591200113296509, -0.6216899752616882, 0.00578150013461709, -0.35089999437332153, -0.41440001130104065, -0.17872999608516693, 0.36024001240730286, 0.2973800003528595, -0.18206000328063965, -0.2870199978351593, -0.02423899993300438, -0.3234499990940094, 0.11905000358819962, 0.29592999815940857, 0.1043500006198883, -0.30820000171661377, -0.42535001039505005, -0.2640100121498108, 0.5809500217437744, 0.4693000018596649, 0.06699399650096893, 0.006512000225484371, 0.5156400203704834, -0.1511400043964386, 0.06721899658441544, 0.23744000494480133, 0.29846999049186707, 0.20100000500679016, -0.1155799999833107, 0.23639999330043793, 0.0420369990170002, 0.44301000237464905, 0.0486299991607666, -0.14720000326633453, 0.14032000303268433, -0.04438500106334686, 0.9248800277709961, 0.3786900043487549, 0.052910998463630676, 0.10492999851703644, 0.19652999937534332, -0.5361800193786621, -0.3411499857902527, -0.4070099890232086, 0.5415899753570557, 0.42983999848365784, 0.4160600006580353, -0.11065000295639038, 0.1387999951839447, 0.033486999571323395, 0.00575320003554225, 0.04962199926376343, -0.03014500066637993, 0.25154000520706177, 0.11590000241994858, 0.28659000992774963, 0.21778999269008636, -0.01384699996560812, 0.15172000229358673, 0.06836000084877014, 0.2663399875164032, 0.08898500353097916, -0.1858700066804886, -0.12450999766588211, 0.4039500057697296, 0.1487399935722351, -0.16060000658035278, -0.126460000872612, 0.21053999662399292, -0.7915199995040894, -0.2870999872684479, -0.5124800205230713, -0.54653000831604, 0.47815999388694763, -0.6260700225830078, -0.011748000048100948, 0.3815400004386902, -0.09825299680233002, -0.7746700048446655, 0.11005999892950058, 0.5242699980735779, 0.0071137999184429646, -0.07517600059509277, 0.18825000524520874, 0.7540599703788757, 0.032092999666929245, -0.11387000232934952, 0.13278000056743622, 0.42181000113487244, 0.3855299949645996, 0.5717200040817261, -0.37189000844955444, -0.06653100252151489, -0.7781999707221985, -0.35427001118659973, -0.30546998977661133, -0.07822900265455246, -0.2668299973011017, 0.2067900002002716, -0.6684899926185608, -0.514769971370697, 0.017633000388741493, 0.15414999425411224, 0.4438300132751465, 0.14959000051021576, 0.8075699806213379, -0.3668999969959259, -0.4395900070667267, -0.016600999981164932, -0.031077999621629715, -0.45124998688697815, 0.4854399859905243, -0.021431000903248787, 0.3316799998283386, 1.288699984550476, -0.2736400067806244, -0.6982499957084656, -0.15373000502586365, 0.12291999906301498, 0.43900999426841736, 0.40856999158859253, 0.5166900157928467, -0.14639000594615936, 0.05272800102829933, 0.0789479985833168, 0.23622000217437744, -0.4024200141429901, -0.6523399949073792, -0.22491000592708588, 0.07651299983263016, 0.7652400135993958, -0.45120999217033386, -0.20510999858379364, 0.25676000118255615, 0.00036899998667649925, -0.6495100259780884, 0.04875300079584122, -0.2670300006866455, 0.4589099884033203, 0.19853000342845917, -0.5338699817657471, -0.07753700017929077, 0.08266299962997437, -0.4955799877643585, 0.5911700129508972, 0.157150000333786, 0.22657999396324158, -0.1718900054693222, 
-0.28161999583244324, 0.09941499680280685, 0.15734000504016876, -0.6344500184059143, 0.25986000895500183, -0.212459996342659, 0.03831399977207184, -0.5189599990844727, 0.336899995803833, -0.5344399809837341, 0.21030999720096588, -0.08621799945831299, 0.2935500144958496, 0.12088000029325485, -0.44578999280929565, -0.422109991312027, 0.9976900219917297, 0.05242500081658363, 0.17754000425338745, 0.05001499876379967, 0.06941799819469452, 0.37494000792503357, 0.22572000324726105, 0.247639998793602, 0.6995800137519836, 0.2457199990749359, -0.5183899998664856, 0.3007600009441376, -0.07727400213479996, -0.0095186997205019, 0.18251000344753265, -0.2235099971294403, 0.1062999963760376, -0.18190999329090118, 0.05587000027298927, 0.03135199844837189, 0.3036699891090393, 0.10067000240087509, 0.188510000705719, -0.4125800132751465, -0.1583700031042099, -0.247079998254776, 0.29249998927116394, 0.48006001114845276, 0.21987999975681305, 0.08473499864339828, -0.20258000493049622, -0.24410000443458557, 0.1401599943637848, -0.3324899971485138, 0.1551000028848648, -0.028358999639749527, -0.46219998598098755, -0.4673599898815155, -0.0482960008084774, -0.2769399881362915, -0.08476100116968155, 0.04896499961614609, -2.2223000526428223, -0.05324200168251991, 0.2739799916744232, 0.533240020275116, -0.6202700138092041, 0.41923999786376953, -0.3374199867248535, -0.3484399914741516, -0.41310998797416687, 0.8433300256729126, 0.21751999855041504, 0.06747200340032578, 0.31139999628067017, 0.3883500099182129, 0.1077599972486496, -0.08612299710512161, 0.027665000408887863, -0.25630998611450195, 0.05111899971961975, 0.8225299715995789, 0.1808999925851822, -0.16861000657081604, -0.08903399854898453, 0.8544999957084656], u'fruit': [0.16405999660491943, 0.36713001132011414, 0.07867199927568436, -0.2714900076389313, 0.1551000028848648, -0.04672899842262268, -0.32179999351501465, 0.6062700152397156, 0.6570000052452087, -0.7318400144577026, -0.12365999817848206, -0.5851100087165833, -0.7316399812698364, -0.1277099996805191, 0.19645999372005463, -0.3279300034046173, -0.2525399923324585, -0.5825499892234802, -0.38558998703956604, 0.2784099876880646, -0.40290001034736633, 0.3400300145149231, 0.08451300114393234, 0.285970002412796, -0.43101999163627625, 0.06333500146865845, -0.6871200203895569, -0.6372699737548828, -0.5897600054740906, 0.16761000454425812, -0.4077799916267395, 0.7024700045585632, -0.7773000001907349, -0.4758000075817108, -0.4433099925518036, 0.6542900204658508, 0.10802999883890152, -0.32229000329971313, -0.6540899872779846, -0.3893199861049652, -0.2182299941778183, 0.17868000268936157, 0.17704999446868896, -0.19966000318527222, -0.5047600269317627, -0.12627999484539032, -0.12590999901294708, 0.1847900003194809, 0.07705099880695343, 0.17885999381542206, -0.03378999978303909, 0.08905299752950668, 0.19916999340057373, -0.2812199890613556, -0.7895699739456177, 0.021606000140309334, -0.30820000171661377, -0.12535999715328217, 0.43213000893592834, -0.3300800025463104, 0.4326300024986267, -0.7554900050163269, 0.031055999919772148, 0.14196999371051788, -0.5579900145530701, -0.11799000203609467, -0.36625000834465027, -0.12116999924182892, -0.6381800174713135, 0.1578799933195114, 0.0689070001244545, 0.28951001167297363, 0.021896999329328537, -0.08917800337076187, -0.5329800248146057, 0.04472000151872635, 0.5724899768829346, -0.9434900283813477, 0.1042499989271164, 0.3094500005245209, -0.03232799842953682, 0.3149299919605255, -0.07264100015163422, 0.2524699866771698, 0.04372600093483925, -0.13885000348091125, 
-0.06179499998688698, -0.2028300017118454, 0.11867000162601471, -0.20690999925136566, 0.18477000296115875, -0.711929976940155, -0.33557000756263733, -0.29434001445770264, -0.19520999491214752, 0.15668000280857086, 0.1911800056695938, -0.4183799922466278, -0.08088299632072449, 0.2978000044822693, 0.008482299745082855, 0.3517799973487854, -0.08327600359916687, -0.19373999536037445, -0.2603699862957001, -0.06785299628973007, -0.16832999885082245, -0.09009300172328949, 0.01362099964171648, -0.1602499932050705, 0.15532000362873077, 0.44435998797416687, -0.001001400058157742, -0.35361000895500183, 0.4689500033855438, -0.11084999889135361, -0.89860999584198, 0.7298799753189087, 0.16836999356746674, 0.0283610001206398, -0.5502600073814392, -0.5378900170326233, 0.3646099865436554, 0.0738930031657219, -0.6675000190734863, 0.2565700113773346, 0.11710000038146973, 0.6432999968528748, 0.4017699956893921, 0.3999199867248535, -0.539900004863739, 0.7104099988937378, 0.4193899929523468, 0.5689399838447571, -0.6277599930763245, 0.027462000027298927, -0.3258399963378906, 0.08753000199794769, -0.33030998706817627, -0.23638999462127686, 0.4583199918270111, 0.31547001004219055, -0.14328999817371368, -0.45882999897003174, 0.002229300094768405, 0.26996999979019165, -0.04930200055241585, -0.49375998973846436, 0.3332499861717224, -0.4680800139904022, -0.7574999928474426, 0.3070000112056732, 0.10653000324964523, -0.4389300048351288, -0.06625600159168243, -0.4982199966907501, -0.28174999356269836, -0.5033800005912781, 0.007218900136649609, 0.2814599871635437, 0.1376499980688095, 0.18622000515460968, 0.004520299844443798, 0.1418599933385849, -0.06634899973869324, -0.6644300222396851, 0.16008999943733215, 0.1695600003004074, -0.04140999913215637, -0.024010999128222466, -0.29019999504089355, -0.389739990234375, -0.5104699730873108, 0.20050999522209167, 0.37116000056266785, 0.21889999508857727, 0.24435999989509583, -0.46358999609947205, 0.162540003657341, -0.758840024471283, -0.1296900063753128, -0.20532000064849854, -0.3197300136089325, -0.5595099925994873, 0.2107200026512146, -0.4583300054073334, 1.1483999490737915, 0.019953999668359756, -0.10170000046491623, -0.450219988822937, -0.29971998929977417, 0.848800003528595, 0.04554300010204315, -0.25887998938560486, -0.3163900077342987, -0.01661200076341629, 0.0647059977054596, 0.1156499981880188, -0.5217900276184082, 0.5768100023269653, 0.24571000039577484, -0.3228999972343445, 0.3512899875640869, 0.08592800050973892, 0.04811900109052658, 0.258650004863739, -0.058194998651742935, 0.28881001472473145, 0.4203700125217438, 0.025631999596953392, 0.1280200034379959, -0.6667400002479553, -0.28433001041412354, -0.025373000651597977, 0.3237900137901306, -0.3110699951648712, 0.8488799929618835, 0.1770399957895279, 0.13144999742507935, 0.16514000296592712, 0.8612499833106995, -0.1726599931716919, -0.6744800209999084, -0.4090900123119354, -1.07260000705719, 0.2983599901199341, 0.27983999252319336, 0.5184199810028076, 0.3769800066947937, 0.3175399899482727, 0.15625999867916107, 0.2018200010061264, 0.16311000287532806, 0.15595999360084534, 0.3181000053882599, 0.9851499795913696, -0.328139990568161, -0.5559099912643433, -0.8914399743080139, 0.007133699953556061, -0.008643499575555325, -0.2165900021791458, -0.4976600110530853, -0.49125999212265015, -1.0083999633789062, 0.1382399946451187, 0.5593100190162659, 0.2764500081539154, -0.4136900007724762, -0.3843199908733368, 0.7037000060081482, 0.217849999666214, -0.05464800074696541, 0.31610000133514404, 0.2903499901294708, 
0.14817999303340912, -0.3287700116634369, -0.08952300250530243, 0.44822999835014343, 0.16694000363349915, 0.28995999693870544, -0.08125700056552887, 0.24532000720500946, -0.12145999819040298, 0.19573000073432922, -0.46507999300956726, -0.20262999832630157, -0.2019300013780594, 0.3688899874687195, 0.5639500021934509, -0.6385999917984009, 0.6638299822807312, -0.17012999951839447, 0.22936999797821045, 0.023886999115347862, -0.17905999720096588, -1.4332000017166138, -0.4009299874305725, -0.6236199736595154, -0.4292199909687042, -0.2919299900531769, -0.022158000618219376, -0.16543999314308167, -0.3332200050354004, -0.48221999406814575, 0.6424499750137329, 0.5257599949836731, -0.11576999723911285, 0.5207499861717224, 0.0785129964351654, 0.24637000262737274, 0.027494000270962715, 0.6203600168228149, -0.16374999284744263, 0.16324999928474426, 0.22627000510692596, -0.20959000289440155, -0.6285899877548218, 0.12426000088453293, -0.4361000061035156], u'ground': [-0.19115999341011047, 0.45017001032829285, 0.36970001459121704, 0.0046165999956429005, -0.1648399978876114, -0.40514999628067017, 0.11912000179290771, 0.34220999479293823, 0.14990000426769257, -1.8885999917984009, 0.4752900004386902, 0.21807000041007996, 0.353410005569458, 0.039243001490831375, 0.1515599936246872, 0.41815000772476196, -0.5483999848365784, 0.2573300004005432, -0.21368999779224396, -0.07564499974250793, -0.6828500032424927, -0.06618999689817429, 0.7429100275039673, -0.4046800136566162, -0.13454000651836395, -0.27803999185562134, 0.053975000977516174, 0.33465999364852905, -0.6696699857711792, 0.5310400128364563, 0.27913999557495117, -0.29249000549316406, -0.25369998812675476, 0.2975600063800812, -0.42028000950813293, 0.3968600034713745, 0.058051999658346176, 0.3302299976348877, -0.018939999863505363, 0.18061000108718872, 0.08946699649095535, 0.04135600104928017, 0.5446699857711792, 0.07272899895906448, 0.6474400162696838, 0.004512900020927191, 0.2360599935054779, 0.37060999870300293, 0.20695999264717102, -0.054262999445199966, -0.039271000772714615, -0.19461999833583832, -0.37742000818252563, 0.033555999398231506, 0.048367999494075775, 0.22779999673366547, 0.08355499804019928, 0.033406998962163925, 0.29423999786376953, 0.19122999906539917, -0.1278499960899353, -0.20430000126361847, 0.45197999477386475, 0.10034999996423721, -0.20000000298023224, -0.1307400017976761, 0.26524001359939575, -0.4907799959182739, 0.2835499942302704, 0.18609000742435455, 0.08836500346660614, 0.13373999297618866, -0.6195200085639954, -0.06819699704647064, -0.2326499968767166, 0.23388999700546265, 0.04564100131392479, 0.6410199999809265, -0.08277399837970734, 0.0038950000889599323, -0.05189000070095062, 0.11279000341892242, -0.0988370031118393, -0.1356399953365326, -0.39792999625205994, -0.10452999919652939, -0.12174999713897705, -0.21130000054836273, -0.5674499869346619, -0.0003886700142174959, 0.6446499824523926, 0.05254200100898743, 0.14184999465942383, 0.2267400026321411, -0.3604300022125244, -0.3434000015258789, -0.734279990196228, 0.12421999871730804, 0.32791000604629517, 0.453000009059906, -0.14788000285625458, -0.0014675999991595745, -0.4002699851989746, -0.2651599943637848, -0.3016600012779236, -0.0716399997472763, 0.38343000411987305, 0.13017000257968903, 0.0649150013923645, -0.31832998991012573, -0.12818999588489532, -0.3544999957084656, -0.04979600012302399, 0.20689000189304352, -0.2474299967288971, -0.019096000120043755, -0.2287299931049347, 0.14778999984264374, -0.41117000579833984, -0.7099999785423279, 0.06207000091671944, 
-0.7070199847221375, 0.13952000439167023, 0.28870999813079834, -0.430869996547699, 0.1332699954509735, 0.04936600103974342, 0.014662000350654125, 0.36364999413490295, 0.07610200345516205, -0.46860000491142273, 1.142199993133545, -0.32491999864578247, 0.46083998680114746, 0.019920000806450844, 0.09644799679517746, 0.5010300278663635, 0.12307000160217285, -0.14159999787807465, 0.12256000190973282, 0.3054499924182892, 0.20422999560832977, -0.13179999589920044, 0.38927000761032104, -1.0823999643325806, -0.17994000017642975, 0.13151000440120697, -0.006710600107908249, -0.5242599844932556, -0.45511001348495483, -0.12318000197410583, 0.2685199975967407, -0.30562999844551086, 0.030420999974012375, 0.5920299887657166, -0.2775599956512451, 0.21505999565124512, -0.21994000673294067, 0.29201000928878784, 0.44148001074790955, 0.03419499844312668, -0.20013000071048737, -0.10440000146627426, 0.02197599969804287, 0.36869001388549805, -0.029881000518798828, -0.2617799937725067, 0.2146500051021576, -0.012621000409126282, 0.212909996509552, 0.23649999499320984, -0.17402000725269318, 0.5457299947738647, -0.1117200031876564, 0.052345000207424164, 0.5786899924278259, 0.16785000264644623, -0.05405300110578537, 0.1128700003027916, -0.228860005736351, 0.4194500148296356, 0.16628000140190125, 0.2660500109195709, 0.13401000201702118, -0.2639400064945221, 0.03671199828386307, 0.5415899753570557, 0.03330500051379204, 0.3374499976634979, -0.03053000010550022, 0.37380000948905945, 0.27911999821662903, 0.5270699858665466, -0.606249988079071, -0.1179800033569336, 0.3284600079059601, 0.23804999887943268, 0.35238000750541687, -0.012424999848008156, 0.06695500016212463, 0.7917900085449219, -0.1663299947977066, -0.17890000343322754, 0.1308099925518036, -0.04874400049448013, 0.4774700105190277, 0.5149400234222412, -0.5669900178909302, 0.04267499968409538, 0.2454800009727478, -0.07158300280570984, -0.2795499861240387, -0.26930999755859375, 0.2553600072860718, 0.22479000687599182, -0.32458001375198364, 0.2386700063943863, 0.08193899691104889, 0.2666099965572357, 0.12077999860048294, 0.5521900057792664, -0.035930000245571136, -0.05676399916410446, -0.21645000576972961, -0.4514699876308441, 0.10117000341415405, 0.020246999338269234, 0.21849000453948975, -0.41025999188423157, 0.24800999462604523, 0.19910000264644623, 0.06311599910259247, 0.13575999438762665, 0.3949100077152252, 0.1879899948835373, 0.320279985666275, -0.14205999672412872, 0.47641998529434204, -0.46055999398231506, 0.1586499959230423, -0.07634100317955017, 0.00500820018351078, 0.2888700067996979, -0.7573500275611877, -0.4823099970817566, -0.024343999102711678, -0.3987799882888794, -0.01685900054872036, 0.07350099831819534, -0.41686001420021057, 0.4571399986743927, 0.25328999757766724, -0.1153699979186058, -0.5708100199699402, 0.3887600004673004, 0.5635799765586853, 0.14119000732898712, -0.57778000831604, -0.02607000060379505, 0.18407000601291656, 0.17077000439167023, -0.12896999716758728, 0.39840999245643616, -0.15063999593257904, -0.058306001126766205, 0.03680900111794472, -0.9596499800682068, 0.032214999198913574, 0.39386001229286194, 0.021900000050663948, -0.33597999811172485, 0.2443999946117401, -0.33417001366615295, 0.08765599876642227, -0.09311500191688538, 0.13628000020980835, -2.047800064086914, -0.05931999906897545, 0.3284200131893158, 0.0022444999776780605, -0.6829500198364258, -0.16603000462055206, 0.48217999935150146, -0.15926000475883484, -0.05276099964976311, -0.14348000288009644, -0.29409000277519226, -0.35071998834609985, 0.09354300051927567, 
-0.05462000146508217, -0.055160000920295715, 0.012547999620437622, -0.07252100110054016, -0.1747100055217743, 0.1844799965620041, 0.19519999623298645, 0.045779999345541, -0.43007001280784607, 0.05516500025987625, 0.1984499990940094], u'log': [-0.35054001212120056, 0.030473999679088593, -0.14699000120162964, -0.17880000174045563, -0.4968000054359436, -0.30281999707221985, 0.2663800120353699, -0.07545100152492523, -0.30594000220298767, -0.4224900007247925, -0.7383700013160706, 0.2628999948501587, 0.1965399980545044, -0.29655998945236206, -0.24688999354839325, 0.32155999541282654, -0.022628000006079674, -0.04459499940276146, 0.1790499985218048, -0.06353799998760223, 0.638949990272522, -0.1155100017786026, 0.4199399948120117, 0.5199699997901917, -0.31591999530792236, -0.17091000080108643, 0.11595000326633453, -0.11819999665021896, 0.08025400340557098, 0.5454099774360657, 0.27489998936653137, 0.6505100131034851, -0.5155100226402283, -0.08388499915599823, -0.2725900113582611, 0.7322900295257568, -0.2928600013256073, -0.8154500126838684, -0.018021000549197197, -0.11682000011205673, -0.10527999699115753, 0.5510600209236145, -0.21446000039577484, 0.709309995174408, -0.20821000635623932, -0.13779999315738678, 0.3245300054550171, -0.2688100039958954, 0.20321999490261078, -0.17484000325202942, 0.6173700094223022, 0.34981998801231384, -0.15724000334739685, -0.39111000299453735, 0.2169400006532669, 0.1406800001859665, -0.676360011100769, -0.08813100308179855, -0.7843499779701233, 0.3449400067329407, 0.4566499888896942, 0.08973400294780731, 0.3453899919986725, 0.2853200137615204, -0.10802000015974045, -0.5637800097465515, -0.11495999991893768, 0.10916999727487564, 0.15264999866485596, -0.13766999542713165, -0.2878600060939789, 0.1859000027179718, -0.6782699823379517, 0.6376299858093262, -0.4194200038909912, 0.36654001474380493, 0.4517199993133545, -0.027607999742031097, -0.4586000144481659, -0.4779900014400482, -0.17590999603271484, 0.27233999967575073, 0.46303001046180725, -0.5884100198745728, 0.06582199782133102, -0.278219997882843, -0.7910199761390686, -0.006065600086003542, -0.20638999342918396, -0.4265899956226349, -0.03105499967932701, -0.22609999775886536, -0.055952999740839005, -0.047012001276016235, 0.45285001397132874, -0.4045700132846832, 0.05741100013256073, 0.1392199993133545, -0.0701730027794838, -0.4636099934577942, -0.515500009059906, 0.9738500118255615, 0.5202199816703796, -0.387580007314682, 0.13641999661922455, -0.06963200122117996, 0.018053000792860985, 0.12221000343561172, -0.33145999908447266, -0.44516998529434204, -0.4012199938297272, -0.23631000518798828, -0.18731999397277832, -0.013535999692976475, -0.622439980506897, -0.12933999300003052, 0.16258999705314636, 0.6708199977874756, -0.41005998849868774, 0.050234999507665634, 0.09965000301599503, -0.27772998809814453, 0.5425000190734863, 0.10910999774932861, 0.315310001373291, 0.7256399989128113, -0.4096499979496002, -0.6073899865150452, 0.06839500367641449, -0.3963499963283539, -0.15953999757766724, 1.1612000465393066, -0.31918999552726746, -0.1287900060415268, 1.0401999950408936, -0.0765800029039383, 0.09391500055789948, 0.07599999755620956, -0.49702998995780945, -0.05939900130033493, 0.26798000931739807, -0.26381000876426697, -0.4369199872016907, 0.24042999744415283, -0.015831999480724335, -0.1714800000190735, 0.1776600033044815, 0.08804900199174881, 0.13404999673366547, 0.08482500165700912, -0.039698000997304916, 0.3476400077342987, -0.10706999897956848, -0.20237000286579132, -0.11595000326633453, 0.25224000215530396, 
0.2651900053024292, 0.18770000338554382, -0.17985999584197998, -0.3850800096988678, -0.23898999392986298, -0.12732000648975372, -0.2731800079345703, -0.31185001134872437, 0.0030759000219404697, 0.6895899772644043, 0.1538199931383133, -0.038231998682022095, 0.11378999799489975, 0.012861999683082104, -0.2229200005531311, -0.13459999859333038, 0.026412999257445335, 0.40408000349998474, 0.4006099998950958, 0.13830000162124634, 0.027281999588012695, 0.039322998374700546, 0.3135800063610077, -0.48173001408576965, -0.12093999981880188, 0.13407999277114868, 0.2803199887275696, -0.3561300039291382, -0.4262999892234802, -0.8173800110816956, 0.1133200004696846, 0.2269199937582016, 0.6426100134849548, 0.7249000072479248, 0.38804998993873596, 0.6223400235176086, -0.01538699958473444, -0.3166100084781647, 0.32425999641418457, 0.007034999784082174, 0.03407900035381317, -0.24695000052452087, -0.15711000561714172, -0.12268999963998795, -0.12838000059127808, -0.33557000756263733, 0.012688999995589256, 0.4393100142478943, 0.5628399848937988, -0.36761999130249023, -0.3284299969673157, 0.3307499885559082, -0.36792999505996704, 0.04063500091433525, -0.47078999876976013, -0.6744300127029419, -0.15167999267578125, -0.04599599912762642, -0.6343200206756592, -0.17301000654697418, -0.3704400062561035, 0.4617899954319, -0.0030181999318301678, 0.49059000611305237, -0.5366899967193604, 0.07670199871063232, 0.3382300138473511, 0.058389000594615936, 0.13872000575065613, 0.03946699947118759, -0.16403000056743622, 0.1413699984550476, 0.042357999831438065, -0.019457999616861343, -0.49445998668670654, -0.0647680014371872, -0.3265500068664551, -0.34442999958992004, 0.435699999332428, -0.24925999343395233, -0.6348400115966797, -0.3393999934196472, -0.4677099883556366, 0.30375000834465027, 0.4238399863243103, 0.16774000227451324, -0.13999000191688538, -0.2067600041627884, -0.11118000000715256, -0.21839000284671783, 0.42991000413894653, 0.7929199934005737, -0.19900000095367432, 0.1231900006532669, -0.35324999690055847, 0.134660005569458, 0.068511001765728, -0.3340499997138977, 0.34014999866485596, 0.017650000751018524, 0.2389499992132187, -0.18352000415325165, 0.08856900036334991, 0.26805999875068665, 0.18720999360084534, -0.07743699848651886, -0.27327001094818115, -0.17333999276161194, 0.23295000195503235, -0.447299987077713, 0.33610999584198, -0.4400300085544586, 0.2901400029659271, 0.23330999910831451, -0.24511000514030457, -0.14594000577926636, 0.5781099796295166, -0.1181500032544136, 0.20253999531269073, -0.20566999912261963, -1.1888999938964844, 0.25540000200271606, -0.3355799913406372, -0.25999000668525696, -0.6014900207519531, -0.4341300129890442, -0.29750001430511475, 0.19133000075817108, 0.15369999408721924, -0.5070599913597107, 0.1636199951171875, 0.024429000914096832, -0.10512000322341919, 0.05390800163149834, 0.21323999762535095, 0.2844400107860565, -0.6095499992370605, -0.337909996509552, -0.15812000632286072, -0.2359199970960617, -0.05211599916219711, 0.17211000621318817, -0.05826300010085106, 0.2571299970149994], u'moss': [-0.525950014591217, 0.2858799993991852, 0.24693000316619873, 0.511139988899231, 0.7442499995231628, 0.08371300250291824, 0.13117000460624695, -0.44244998693466187, 0.17726999521255493, -0.041613999754190445, 0.07547999918460846, 0.09212099760770798, -0.16971999406814575, -0.20785999298095703, -0.016610000282526016, 0.04374400153756142, -0.44484999775886536, 0.09724000096321106, -0.09119199961423874, 0.3343600034713745, -0.20834000408649445, 0.11817999929189682, -0.19292999804019928, 
0.29600000381469727, -0.21753999590873718, -0.2712000012397766, -0.5455999970436096, -0.03466400131583214, -0.10853999853134155, -0.42723000049591064, 0.5207700133323669, -0.3462100028991699, -0.4151799976825714, -0.1621900051832199, -0.7271199822425842, -0.3892099857330322, -0.3959200084209442, 0.12925000488758087, 0.1372700035572052, -0.3822900056838989, -0.9676100015640259, 0.24922999739646912, 0.2663800120353699, -0.1910499930381775, 0.6576399803161621, 0.11486999690532684, 0.17151999473571777, 0.43459999561309814, -0.038780998438596725, 0.10261999815702438, -0.7405999898910522, -0.006571699865162373, -0.5835300087928772, 0.219310000538826, -0.2954399883747101, 0.3025299906730652, 0.0742650032043457, -1.0880999565124512, -0.27017998695373535, -0.5520099997520447, -0.04572900012135506, -1.0986000299453735, 0.05155299976468086, 0.35141000151634216, -0.009512100368738174, -0.2671099901199341, -0.009468300268054008, 0.04075099900364876, 0.6493399739265442, -0.42524999380111694, -0.1508300006389618, 0.2147900015115738, -0.43595001101493835, 0.25321999192237854, -1.0300999879837036, -0.10034999996423721, 0.25892001390457153, 0.04454199969768524, 0.24558000266551971, 0.27660998702049255, -0.4735499918460846, -0.3512299954891205, 0.03651700168848038, -0.013663000427186489, 0.12939000129699707, 0.6152899861335754, -0.039090000092983246, 0.3145599961280823, -0.4962100088596344, -0.3267500102519989, -0.1531900018453598, -0.14975999295711517, 0.23529000580310822, 0.06581699848175049, -0.06668999791145325, -0.3563700020313263, 0.48758000135421753, -0.41795000433921814, 0.1832199990749359, -0.4796600043773651, -0.1653899997472763, -0.4450699985027313, -0.4653800129890442, 0.17041000723838806, -0.28560999035835266, 0.27507999539375305, 0.724590003490448, 0.05752300098538399, -0.03414100036025047, -0.09761600196361542, -0.4671599864959717, -0.11647000163793564, 0.07222999632358551, -0.020785000175237656, 0.1327899992465973, -0.2974500060081482, -0.26875999569892883, 0.6788100004196167, -0.3414500057697296, -0.40046998858451843, -0.4160799980163574, -0.3384400010108948, 0.6343799829483032, 0.13957999646663666, -0.17188000679016113, -0.5758799910545349, 0.39614999294281006, -0.01913299970328808, 0.4889500141143799, -0.016036000102758408, 0.01106099970638752, 0.5743299722671509, -0.23427000641822815, -0.10633999854326248, 0.19746999442577362, 0.20332999527454376, 0.13850000500679016, -0.23475000262260437, -0.5697000026702881, -0.12377999722957611, 0.6324700117111206, 0.2198600023984909, 0.6901900172233582, -0.8081600069999695, -0.4499100148677826, 0.43661001324653625, 0.30180999636650085, -0.1751600056886673, 0.007674400229007006, -0.2744799852371216, -0.09363000094890594, -0.3759700059890747, -0.6867899894714355, -0.5226200222969055, -0.38391000032424927, -0.08288600295782089, 0.0881040021777153, -0.1790200024843216, 0.36146000027656555, 0.22304999828338623, -0.16902999579906464, -0.37856999039649963, 0.22750000655651093, 0.2996399998664856, -0.38561999797821045, 0.22887000441551208, -0.4414600133895874, 0.7746400237083435, -0.0011932000052183867, -0.27761000394821167, 0.33983999490737915, 0.07602400332689285, -0.04833900183439255, 0.032924000173807144, -0.2692599892616272, 0.4124999940395355, -0.16604000329971313, 0.052021000534296036, 0.4489400088787079, -0.5181499719619751, 0.20092999935150146, -0.07054000347852707, -0.34022998809814453, -0.11174999922513962, -0.23321999609470367, -0.6262099742889404, -0.6141499876976013, -0.5502300262451172, 0.2513299882411957, 0.15636999905109406, 
-0.07694400101900101, 0.020526999607682228, -0.3124299943447113, 0.07002799957990646, 0.4021799862384796, 0.3951199948787689, 0.3078800141811371, 0.31244999170303345, 0.27869999408721924, -0.13585999608039856, 0.6404100060462952, -0.18689000606536865, -0.1538199931383133, 0.5954200029373169, 0.45267000794410706, -0.268779993057251, 0.34505000710487366, 0.07141400128602982, -0.2537499964237213, -0.27742999792099, -0.1421000063419342, 0.26554998755455017, 0.47398000955581665, -0.027212999761104584, -0.2495799958705902, -0.04486199840903282, 0.16861000657081604, 0.05127599835395813, -0.4651300013065338, -0.4998599886894226, 0.1281599998474121, -0.33438000082969666, 0.0524899996817112, 0.32245999574661255, 0.17855000495910645, 0.8756300210952759, -0.0660260021686554, 0.09832999855279922, 0.30487000942230225, 0.70660001039505, 0.40939998626708984, 0.029089000076055527, 0.1669600009918213, -0.09781999886035919, -0.3360399901866913, -0.009839500300586224, 0.0247150007635355, 0.14696000516414642, -0.41124001145362854, -0.3081899881362915, -0.30557000637054443, -0.1936500072479248, 0.3157300055027008, -0.1652500033378601, -0.27232998609542847, 0.41672998666763306, 0.10350999981164932, 0.07419200241565704, -0.1370300054550171, 0.38137999176979065, -0.23819999396800995, -0.26657000184059143, -0.13402000069618225, -0.6896899938583374, 0.6873599886894226, 0.16347000002861023, 0.43821999430656433, 0.11110000312328339, 0.2058899998664856, -0.5908600091934204, 0.23948000371456146, -0.4554699957370758, -0.07270199805498123, -0.018842000514268875, -0.2741999924182892, 0.052675001323223114, 0.19505999982357025, 0.040123000741004944, 0.056398000568151474, -0.5389800071716309, 0.3528999984264374, -0.35412999987602234, -0.2662299871444702, 0.17156000435352325, -0.1463800072669983, 0.2889699935913086, -0.1440500020980835, -0.12520000338554382, -0.22121000289916992, 0.03844600170850754, -0.8358200192451477, 0.5471000075340271, 0.26603999733924866, 0.048705000430345535, -0.3198699951171875, -0.18806999921798706, 0.2073500007390976, 0.2202499955892563, 0.32684001326560974, -0.2430099993944168, 0.23468999564647675, 0.5195900201797485, -0.06545600295066833, 0.10698000341653824, 0.05096700042486191, 0.17628000676631927, -0.06644699722528458, 0.25123000144958496, -0.18055999279022217, 0.32905998826026917], u'dust': [-0.18177999556064606, 0.24175000190734863, -0.21145999431610107, -0.5236600041389465, -0.11461000144481659, -0.3428899943828583, -0.022167999297380447, 0.4259899854660034, 0.248539999127388, -0.8963299989700317, 0.34248000383377075, -0.5414699912071228, 0.23097999393939972, -0.2721399962902069, -0.2641899883747101, -0.37022000551223755, -0.4842599928379059, -0.13053999841213226, 0.10181000083684921, 0.7721199989318848, -0.45361998677253723, 0.09587600082159042, 0.47189000248908997, 0.5791500210762024, 0.00891919992864132, -0.2775900065898895, -0.034129999577999115, -0.098301000893116, -0.03568999841809273, -0.11379999667406082, 0.43132999539375305, 0.4182400107383728, -0.6451699733734131, -0.18019999563694, -0.3193899989128113, -0.05162699893116951, -0.7574599981307983, 0.5971300005912781, 0.5433700084686279, 0.7500600218772888, -0.08249899744987488, 0.22294999659061432, 0.5242900252342224, -0.06465300172567368, 0.6276599764823914, -0.01043500006198883, -0.08337000012397766, -0.04145200178027153, -0.35690000653266907, -0.802299976348877, 0.66007000207901, 0.31821998953819275, -0.4171200096607208, 0.3575800061225891, -0.0836779996752739, 0.09880899637937546, -0.15583999454975128, -0.7871299982070923, 
0.7629899978637695, -0.12779000401496887, -0.21645000576972961, 0.2353300005197525, 1.0591000318527222, -0.32701998949050903, 0.04791399836540222, -0.21976999938488007, -0.08176299929618835, 0.18770000338554382, -0.4271399974822998, -0.11285000294446945, 0.9386799931526184, -0.199180006980896, 0.0007437799940817058, -0.1958400011062622, -0.638450026512146, -0.11304999887943268, -0.5808799862861633, -0.6893100142478943, 0.7547799944877625, -0.13323000073432922, 0.04191099852323532, -0.26396000385284424, 0.054090000689029694, -0.02349199913442135, -0.32346999645233154, -0.12269999831914902, 0.36847999691963196, 0.2859100103378296, -0.12357000261545181, -0.1640399992465973, -0.012734999880194664, -0.05000799894332886, 0.08876000344753265, 0.21644000709056854, -0.7229499816894531, 0.165460005402565, 0.004600000102072954, 0.21517999470233917, 0.3355399966239929, 0.23040999472141266, 0.304749995470047, 0.3736000061035156, -0.28057000041007996, 0.22436000406742096, -0.37762001156806946, 0.2677899897098541, 0.40156999230384827, -0.1408900022506714, 0.004863800015300512, -0.20688000321388245, -0.1896899938583374, -0.35460999608039856, -0.26655998826026917, -0.070762999355793, -0.43700000643730164, 0.3037799894809723, -0.07516200095415115, 0.3278000056743622, 0.2731100022792816, -0.7725300192832947, 0.12483999878168106, -0.2602100074291229, -0.23512999713420868, 0.5702499747276306, -0.46160000562667847, 0.26879000663757324, -0.4405899941921234, 0.838450014591217, -0.30052000284194946, 0.17967000603675842, 0.5271599888801575, 0.005655100103467703, 0.2824400067329407, -0.041909001767635345, 0.6297600269317627, -0.2542400062084198, -0.5748400092124939, 0.35346001386642456, -0.026186000555753708, -0.2732900083065033, 0.3056600093841553, 0.3307400047779083, -0.4115999937057495, 0.30480000376701355, 0.0484049990773201, 0.12551000714302063, 0.4914799928665161, 0.6145300269126892, 0.5571699738502502, 0.01460999995470047, -0.09915400296449661, -0.23853999376296997, -0.5684900283813477, 0.16728000342845917, 0.5499399900436401, 0.1305599957704544, 0.35078999400138855, -0.3461099863052368, 0.7432000041007996, 0.18244999647140503, -0.13447000086307526, -0.46299999952316284, 0.32986000180244446, 0.18548999726772308, 0.9086700081825256, 0.0015249999705702066, 0.48666998744010925, 0.4631899893283844, 0.36114001274108887, 0.222120001912117, -0.14173999428749084, 0.3213900029659271, 0.5503600239753723, -0.34209999442100525, 0.12443999946117401, -0.41113001108169556, 0.01500099990516901, -0.07194600254297256, -0.23055000603199005, -0.6612200140953064, -0.3368000090122223, -0.44310998916625977, -0.07016599923372269, 0.4404900074005127, -0.5422499775886536, -0.558489978313446, 1.1038000583648682, -0.9845200181007385, -0.3029699921607971, -0.006089699920266867, 0.5500699877738953, 0.4054900109767914, -0.7218199968338013, 0.08728300034999847, 0.10266999900341034, -0.6489999890327454, 0.3637300133705139, 0.5249599814414978, -0.12266000360250473, 0.07921600341796875, 0.4554100036621094, 0.5435400009155273, 0.8916599750518799, 0.2525700032711029, -0.10103999823331833, 0.1730400025844574, 0.6115700006484985, -0.7153900265693665, -0.4122300148010254, 0.2814599871635437, -0.3911600112915039, -0.030559999868273735, 0.1770700067281723, -0.11727999895811081, -0.13297000527381897, -0.5235599875450134, 0.3118000030517578, -0.4685400128364563, -0.028846999630331993, -0.3995699882507324, 0.13354000449180603, 0.020099999383091927, -0.13830000162124634, -0.23173999786376953, -0.12771999835968018, -0.32973000407218933, 
0.16683000326156616, 0.3140200078487396, 0.11009000241756439, -0.12963999807834625, 0.21358999609947205, 0.057107001543045044, -0.49246999621391296, -0.3284299969673157, 0.025479000061750412, -0.15331000089645386, 0.6207699775695801, -0.22333000600337982, -0.2837800085544586, -0.5969499945640564, -0.10209000110626221, 0.12297999858856201, -0.6297900080680847, -0.3076600134372711, -0.8072900176048279, -0.16165000200271606, -0.24132999777793884, -0.06941699981689453, 0.13882000744342804, -0.19393999874591827, -0.15167999267578125, -0.8905699849128723, -0.3882400095462799, -0.538320004940033, 0.7725399732589722, 0.4175899922847748, -0.12326999753713608, -0.47056999802589417, -0.10074000060558319, 0.16561000049114227, 0.23625999689102173, -0.5073800086975098, 0.09101399779319763, 0.1573300063610077, 0.2968200147151947, -0.2878299951553345, 0.056825000792741776, -0.45726001262664795, -0.24928000569343567, -0.051309000700712204, -0.17299999296665192, 0.4522800147533417, -0.3042899966239929, 0.3613699972629547, -0.6921600103378296, -0.1722699999809265, -1.6146999597549438, -0.09869199991226196, -0.43233001232147217, -0.3065800070762634, -0.5416499972343445, 0.4830400049686432, -0.3561199903488159, -0.09474900364875793, 0.44780999422073364, 0.7172099947929382, 0.18615999817848206, 0.24250000715255737, -0.652400016784668, -0.6804400086402893, 0.08386100083589554, 0.7835299968719482, -0.04864000156521797, 0.308789998292923, 0.36746999621391296, -0.5294899940490723, 0.3953700065612793, 0.05963499844074249, 0.06657999753952026, 0.0339989997446537], u'velvet': [0.31233999133110046, 0.1009799987077713, 0.8325999975204468, 0.18105000257492065, 0.1676200032234192, -0.3221299946308136, -0.33862000703811646, -0.29447999596595764, 0.5387899875640869, 0.09625300019979477, 0.031244000419974327, 0.13041000068187714, -0.0849120020866394, 0.1560100018978119, 0.23326000571250916, -0.34257999062538147, -0.34902000427246094, 0.3462800085544586, -0.5817400217056274, 0.5473099946975708, 0.3243600130081177, 0.18504999577999115, 0.24044999480247498, 0.3393000066280365, -0.03764199838042259, -0.5703799724578857, 0.2893500030040741, 0.18376000225543976, -0.41850000619888306, 0.44655999541282654, 0.0012517999857664108, 0.07146800309419632, -0.26537999510765076, -0.05130400136113167, 0.12488000094890594, 0.33212000131607056, 0.011721000075340271, -0.19474999606609344, 0.13941000401973724, -0.0359949991106987, -0.48949000239372253, -0.30395999550819397, -0.11759000271558762, -0.07952400296926498, 0.26166999340057373, 0.18273000419139862, -0.07529299706220627, -0.2795400023460388, -0.6342899799346924, -0.0787699967622757, 0.41690000891685486, -0.2358199954032898, -0.0976099967956543, -0.6392300128936768, -0.037703000009059906, -0.16756999492645264, -0.24432000517845154, -0.6169000267982483, 0.12280000001192093, -0.14858999848365784, -0.13354000449180603, 0.10038000345230103, -0.3174999952316284, 0.14883999526500702, 0.27542001008987427, -0.5545600056648254, 0.5475599765777588, 0.36671000719070435, 0.19731999933719635, 0.07196199893951416, 0.11795999854803085, 0.3209199905395508, -0.3609600067138672, -0.0216279998421669, -0.1826000064611435, 0.2524400055408478, -0.361299991607666, 0.11739999800920486, 0.2799600064754486, -0.7076600193977356, -0.04467000067234039, 0.3205200135707855, 0.021900000050663948, -0.39566999673843384, 0.2210099995136261, 0.11229000240564346, 0.5931400060653687, 0.6030700206756592, -0.8299599885940552, -0.31446000933647156, 0.5097200274467468, -0.019226999953389168, 0.02321000024676323, 
0.05343100056052208, -0.05704300105571747, 0.29109999537467957, 0.6930999755859375, 0.12974999845027924, 0.07953599840402603, 0.3968000113964081, 0.19797000288963318, 0.34891998767852783, -0.060933999717235565, -0.28560999035835266, -0.10920000076293945, -0.33313000202178955, 0.10926999896764755, 0.3067600131034851, 0.009083899669349194, -0.3828299939632416, -0.1734900027513504, 0.5903199911117554, 0.6669999957084656, -0.08120299875736237, 0.6208900213241577, -0.2244199961423874, 0.027904000133275986, 0.4592899978160858, -0.10379000008106232, -0.7619400024414062, -0.6499199867248535, -0.19177000224590302, 0.39353999495506287, -0.3190099895000458, -0.03344999998807907, -0.4308600127696991, 0.09007800370454788, 1.1232000589370728, -0.8030999898910522, 0.4456300139427185, -0.006844299845397472, -0.07470600306987762, -0.4111500084400177, -0.40577998757362366, 0.08028099685907364, -0.3160099983215332, -0.8629400134086609, 0.6262699961662292, -0.36309000849723816, 0.05903699994087219, 0.10916999727487564, 0.34046998620033264, -0.6128900051116943, -0.42607998847961426, 0.5340999960899353, 0.10853999853134155, -0.1102600023150444, -0.24851000308990479, -0.13594000041484833, 0.32335999608039856, -0.1334799975156784, 0.31007999181747437, -0.4182099997997284, -0.02837900072336197, -0.04830799996852875, 0.6060900092124939, 0.7289299964904785, -0.24944999814033508, -0.004336699843406677, 0.21987999975681305, 0.0101539995521307, 0.07211799919605255, -0.3631500005722046, 0.38690000772476196, 0.14591999351978302, 0.15065999329090118, 0.03314900025725365, 0.3093000054359436, 0.6012799739837646, 0.19280000030994415, -0.6145300269126892, 0.5357699990272522, 0.3039900064468384, 0.013012000359594822, -0.3536899983882904, -0.913070023059845, 0.12894000113010406, 0.9831799864768982, -0.713890016078949, -0.4975999891757965, 0.8119300007820129, 0.1376200020313263, 0.7049800157546997, 0.17396999895572662, -0.03535899892449379, 0.007410800084471703, 0.42763999104499817, 0.30265000462532043, 0.399509996175766, -0.5060300230979919, 0.12559999525547028, 0.12689000368118286, -0.3422999978065491, -0.2672399878501892, -0.06600899994373322, -0.005778899881988764, -0.7215800285339355, 0.20024999976158142, -0.10664000362157822, 0.18996000289916992, 1.0976999998092651, -0.29082998633384705, 0.5604000091552734, 0.3854900002479553, 0.08657799661159515, -0.29168999195098877, 0.6341099739074707, 0.8571299910545349, -1.1292999982833862, -0.42326998710632324, 0.08048000186681747, -0.16513000428676605, -0.08578299731016159, 0.7190300226211548, 0.43533000349998474, 0.079134002327919, 0.7444999814033508, -0.582360029220581, -0.08389999717473984, -0.33472999930381775, -0.09669800102710724, 0.04587100073695183, -0.25562000274658203, -0.5600100159645081, -0.6388800144195557, -0.04848499968647957, -0.10136000066995621, -0.0963210016489029, -0.15431000292301178, -1.4567999839782715, 0.6636800169944763, -0.44554999470710754, -0.20884999632835388, 0.5569800138473511, 0.5542100071907043, -0.1985500007867813, 0.4276899993419647, -0.6704400181770325, 0.01218899991363287, -0.2615300118923187, -0.4410499930381775, 0.028088999912142754, -0.518339991569519, 0.5988100171089172, -0.4069800078868866, 0.5484700202941895, 0.3245700001716614, 0.2357099950313568, 0.3020400106906891, -0.2875699996948242, -0.4182499945163727, 0.07486599683761597, 0.32600998878479004, -0.42497000098228455, 0.7271699905395508, 0.04467400163412094, 0.41912999749183655, 0.05539799854159355, 0.48743000626564026, -0.18108999729156494, -0.5672100186347961, 0.028307000175118446, 
0.9892200231552124, -0.030869999900460243, -0.8264300227165222, 0.27987998723983765, -0.06316100060939789, 0.238429993391037, -0.07916700094938278, 0.3299799859523773, 0.21171000599861145, -0.7609000205993652, -0.36597999930381775, -0.43435001373291016, -0.445250004529953, -0.13720999658107758, -0.3573800027370453, -0.2928999960422516, -0.566789984703064, -0.16304999589920044, 0.08248800039291382, -0.2938700020313263, -0.3047800064086914, -0.5481200218200684, 0.03642600029706955, 0.004634699784219265, -0.5079900026321411, 0.1367499977350235, -0.6822599768638611, 0.1057400032877922, 0.4156799912452698, 0.053599998354911804, -0.02572299912571907, 0.9822499752044678, -0.3365499973297119, 0.44036999344825745, 0.5451899766921997, 0.5668200254440308, 0.6377999782562256, -0.02656099945306778], u'basement': [-0.15731999278068542, 0.4438300132751465, -0.563539981842041, 0.04054399952292442, 0.0650629997253418, 0.45559999346733093, 0.44144999980926514, -0.08718899637460709, 0.19707000255584717, -0.6233299970626831, -0.3542900085449219, -0.22213999927043915, 0.4560199975967407, 0.05326699838042259, -0.21971000730991364, -0.3698199987411499, -0.36695998907089233, -0.13359999656677246, 0.12145999819040298, 0.503030002117157, 0.001676499960012734, 0.12703000009059906, 0.17741000652313232, -0.2782599925994873, -0.06486500054597855, -0.07175900042057037, 0.4855700135231018, 0.16565999388694763, -0.27671998739242554, -0.18669000267982483, 0.054506998509168625, 0.11215999722480774, -0.19189999997615814, -0.002832300029695034, 0.14629000425338745, 0.5980799794197083, -0.46230998635292053, -0.014832000248134136, 0.30803999304771423, -0.15419000387191772, 0.012492000125348568, 0.3223400115966797, -0.5859799981117249, 0.5992699861526489, 0.2796599864959717, 0.5850600004196167, 0.5522000193595886, -0.22269999980926514, -0.21081000566482544, -0.6064900159835815, -0.05753299966454506, 0.12099999934434891, -0.43108999729156494, 0.12110999971628189, 0.44690999388694763, 0.17660999298095703, 0.07184600085020065, 0.2326200008392334, -0.15526999533176422, 0.068851999938488, 0.009138699620962143, 0.3951300084590912, 0.7429699897766113, 0.795960009098053, -0.4090000092983246, -0.5092099905014038, 0.3672400116920471, -0.1367799937725067, -0.3140299916267395, -0.1309799998998642, -0.26229000091552734, -0.2906799912452698, -0.3066999912261963, 0.0873280018568039, -0.5976200103759766, -0.003594599897041917, -0.45295000076293945, -0.24231000244617462, 0.1531600058078766, -0.38600000739097595, 0.0021440000273287296, 0.37272998690605164, 0.2506900131702423, -0.011381999589502811, -0.35776999592781067, -0.05232299864292145, -0.0066789002157747746, 0.5205000042915344, -0.0241480004042387, 0.04816799983382225, 0.535040020942688, 0.44071999192237854, 0.27970001101493835, 0.7767800092697144, 0.13200999796390533, 0.2316800057888031, -0.026892000809311867, -0.19979000091552734, 0.8008900284767151, -0.8350899815559387, -0.12501999735832214, -0.06213200092315674, -0.2914699912071228, -0.21452000737190247, -0.05993400141596794, -0.3860900104045868, 0.17983999848365784, 0.30910998582839966, 0.19191999733448029, 0.22718000411987305, -0.4874500036239624, 0.05564099922776222, -0.12616999447345734, 0.16841000318527222, -0.4966599941253662, -0.10920000076293945, 0.1200999990105629, -0.4721199870109558, -0.12759999930858612, -0.39671000838279724, 0.2819499969482422, -0.0012762000551447272, 0.24243000149726868, 0.24457000195980072, -0.05709100142121315, -0.25134000182151794, -0.14997999370098114, -0.5738999843597412, 0.5098999738693237, 
0.023555999621748924, 0.15489999949932098, 0.39236998558044434, 0.29172998666763306, -0.18895000219345093, 0.6109399795532227, -0.08742400258779526, -0.0560309998691082, 0.5193799734115601, -0.714900016784668, 0.2720800042152405, 0.02883400022983551, -0.2235500067472458, -0.15363000333309174, 0.2759700119495392, -0.018567999824881554, -0.09723799675703049, 0.03566800057888031, -0.04070800170302391, 0.30261000990867615, -0.21866999566555023, -0.3970800042152405, 0.5793099999427795, -0.19662000238895416, -0.4149399995803833, 0.12180999666452408, 0.6040400266647339, -0.3243499994277954, 0.29071998596191406, -0.11650999635457993, 0.38093000650405884, -0.13616999983787537, 0.03651700168848038, 0.29752999544143677, 0.10653000324964523, 0.3714900016784668, 0.4584600031375885, 0.14037999510765076, -0.3125999867916107, 0.33629000186920166, 0.17684000730514526, -0.18498000502586365, 0.23419000208377838, -0.06530299782752991, -0.0023753999266773462, -0.2961600124835968, 0.7463899850845337, -0.5733100175857544, 0.06298000365495682, -0.04596799984574318, -0.4727500081062317, 0.2904999852180481, -0.13942000269889832, -0.26399001479148865, -0.18805000185966492, -0.0538799986243248, 0.33430999517440796, 1.110200047492981, 0.455949991941452, 0.33204999566078186, 0.5350599884986877, 0.6809099912643433, -0.0032691999804228544, -0.27423998713493347, 0.15916000306606293, -0.06363999843597412, 0.06994199752807617, -0.6970400214195251, 0.2371699959039688, -0.652999997138977, 0.2954599857330322, 0.5508700013160706, -0.07173299789428711, -0.03967899829149246, -0.059436000883579254, 0.3015899956226349, -0.47534000873565674, -0.20633000135421753, -0.8526700139045715, -0.399399995803833, -0.3603900074958801, -0.27118000388145447, 0.6060000061988831, -0.4537599980831146, 0.14076000452041626, 0.010123999789357185, 0.3517000079154968, -0.17430000007152557, -0.3130899965763092, 0.2669599950313568, 0.15567000210285187, 0.6836000084877014, -0.3340800106525421, -0.40024998784065247, -0.15960000455379486, -0.07187599688768387, 0.0523810014128685, -0.6644899845123291, -0.10935000330209732, -0.35910001397132874, -0.2455500066280365, 0.3410100042819977, -0.3959699869155884, -0.29778000712394714, -0.29545000195503235, 0.48058000206947327, -0.22919000685214996, 0.300819993019104, 0.2636699974536896, -0.2620899975299835, 0.5101699829101562, 0.41376999020576477, -0.3972899913787842, 0.08642599731683731, 0.13550999760627747, -0.09301000088453293, -0.41753000020980835, 0.5750799775123596, 0.21119999885559082, -0.5599799752235413, 0.0718030035495758, 0.331930011510849, 0.2405800074338913, 0.010293000377714634, -0.11051999777555466, 0.06896200031042099, 0.621720016002655, 0.1733199954032898, -0.060079000890254974, 0.3073599934577942, 0.3063099980354309, -0.310479998588562, 0.02290000021457672, 0.12709000706672668, -0.34261998534202576, -0.01792600005865097, 0.14883999526500702, -0.14517000317573547, 0.2761099934577942, 0.011601000092923641, -0.3628399968147278, -0.5460699796676636, -0.06754399836063385, -0.26118001341819763, -0.29117000102996826, -0.34071001410484314, -0.1723800003528595, -1.0937000513076782, 0.15740999579429626, -0.4239499866962433, 0.20332999527454376, -0.654229998588562, 0.2647800147533417, -0.047533001750707626, -0.48660001158714294, 0.017084000632166862, 0.7955999970436096, -0.2184000015258789, 0.48993998765945435, 0.3036099970340729, -0.37608999013900757, -0.2632099986076355, -0.004202499985694885, -0.3295699954032898, 0.3471600115299225, 0.3434300124645233, -0.042426999658346176, 0.6971700191497803, 
-0.030119000002741814, -0.13784000277519226, 0.6459199786186218], u'coin': [0.05233500152826309, 0.2930299937725067, -0.4190100133419037, 0.1756100058555603, 0.0684949979186058, 0.7074000239372253, -0.09541399776935577, -0.02634900063276291, -0.019208999350667, -0.861739993095398, -0.027970999479293823, -0.06959400326013565, 0.016961000859737396, -0.2553099989891052, -0.25898000597953796, -0.2038699984550476, 0.41001999378204346, -0.3039200007915497, -0.37975001335144043, -0.3996500074863434, -0.30199000239372253, -0.07551500201225281, 0.3031499981880188, 0.01614600047469139, 0.9232000112533569, -0.3240100145339966, -0.24950000643730164, -0.03495800122618675, -0.03611600026488304, -0.5618500113487244, 0.8690199851989746, -0.04714300110936165, -0.05087599903345108, 0.36489999294281006, -0.8018800020217896, 0.4802600145339966, -0.24771000444889069, 0.3014200031757355, -1.0369000434875488, 0.21104000508785248, -0.22429999709129333, -0.25137001276016235, -0.020177999511361122, 0.41007000207901, 0.46887001395225525, -0.208979994058609, 0.9400200247764587, -0.6684799790382385, -0.49597999453544617, 0.7849799990653992, 0.06562600284814835, -0.2609800100326538, 0.09476400166749954, 0.18535000085830688, -0.043845001608133316, -0.22860999405384064, -0.5433599948883057, 0.8171799778938293, 0.51146000623703, -0.4864799976348877, 0.20452000200748444, 0.3108699917793274, -0.4955199956893921, -0.938979983329773, 0.22301000356674194, -0.19197000563144684, -0.30237001180648804, -0.03618999943137169, -0.43375998735427856, 0.1466899961233139, -0.11580999940633774, -0.08632099628448486, -0.10089000314474106, 0.3811799883842468, 0.03580600023269653, 0.5785199999809265, 0.019380999729037285, 0.06926299631595612, -0.11066000163555145, -0.922469973564148, -0.13790999352931976, -0.06128799915313721, 0.5506899952888489, 0.23984000086784363, 0.44027000665664673, -0.7619100213050842, -0.39980998635292053, 0.0801360011100769, -0.4970000088214874, 0.3874399960041046, 0.49116000533103943, -0.4557400047779083, -0.32291001081466675, -0.5441399812698364, 0.09813600033521652, 0.02202799916267395, -0.02194399945437908, -0.1515199989080429, -0.28450000286102295, -0.01370800007134676, 0.5547000169754028, 0.22175000607967377, 0.41422000527381897, -0.2787800133228302, 0.3775100111961365, 0.09510199725627899, -0.5479199886322021, -0.39160001277923584, 0.04139399901032448, 0.48848000168800354, -0.2680000066757202, 0.4714899957180023, -0.26701998710632324, -0.025279000401496887, 0.1039699986577034, 0.7849400043487549, -0.4805999994277954, -0.20187999308109283, 0.188060000538826, -0.1376499980688095, -0.11748000234365463, 0.4698199927806854, -0.4036099910736084, 0.022408999502658844, -0.6160500049591064, 0.34703001379966736, 0.6997100114822388, 0.6711999773979187, -0.149399995803833, 0.33041998744010925, -0.11826000362634659, -0.0975790023803711, 0.2316800057888031, 0.02112700045108795, -0.29297998547554016, 0.014543999917805195, 0.46768999099731445, -0.34415000677108765, 0.32361000776290894, -0.19039000570774078, 0.080485999584198, 0.09459000080823898, -0.14579999446868896, 0.20917999744415283, 0.2381100058555603, -0.46039000153541565, -0.40018001198768616, -0.07423199713230133, 0.6930999755859375, -0.2469100058078766, -0.05261800065636635, -0.13391000032424927, -0.40845000743865967, -1.0433000326156616, -0.44499000906944275, 0.022801000624895096, -0.2433300018310547, -0.49750998616218567, -0.07366199791431427, -0.34619998931884766, -0.3068400025367737, 0.17826999723911285, 0.6167399883270264, 0.7164199948310852, 1.061900019645691, 
0.7118600010871887, 0.2729499936103821, 0.1124500036239624, 0.195250004529953, -0.33406001329421997, 0.2214999943971634, 0.5614100098609924, 0.06881300359964371, -0.19378000497817993, 0.2146500051021576, -0.14776000380516052, -0.20268000662326813, 0.15449999272823334, 0.30063000321388245, -0.12144999951124191, 0.11258000135421753, 0.37196001410484314, -0.18602000176906586, 0.0961500033736229, 0.13732999563217163, -0.6758599877357483, 0.7010599970817566, 0.5879799723625183, 0.5564200282096863, -0.27358999848365784, 0.8998000025749207, 0.5359399914741516, 0.4803299903869629, 0.7102599740028381, -0.3486599922180176, 0.048225998878479004, -0.1875700056552887, 0.16500000655651093, -0.16617000102996826, 0.5400099754333496, 0.5497099757194519, 0.6321799755096436, -0.1517699956893921, -0.49202001094818115, 0.9592199921607971, -0.5360900163650513, 0.10373000055551529, 0.04831499978899956, -0.059466999024152756, 0.16719000041484833, 0.5658100247383118, 0.03369300067424774, 0.14566999673843384, 0.13278000056743622, 0.11113999783992767, -0.9581599831581116, -0.44231998920440674, -0.38047999143600464, -0.9267899990081787, 0.24913999438285828, -0.7183200120925903, 0.3118099868297577, -0.27028998732566833, 0.28624001145362854, -0.10270000249147415, 0.3435400128364563, 0.17521999776363373, -0.12258999794721603, 0.41519999504089355, 0.4901599884033203, -0.27366000413894653, 0.0999239981174469, -0.34575000405311584, 0.2700499892234802, 0.2016499936580658, 0.5307300090789795, -0.06046700105071068, -0.4804899990558624, 0.30476999282836914, -0.20319999754428864, -0.15485000610351562, 0.24940000474452972, 0.6559699773788452, -0.2778800129890442, -0.5947700142860413, -0.0606359988451004, -0.8130800127983093, -0.46222999691963196, 0.026763999834656715, 0.5425400137901306, 0.18063999712467194, 0.22205999493598938, 0.12623000144958496, -0.228970006108284, 0.3453800082206726, 0.2972399890422821, -0.06862200051546097, 0.10694000124931335, -0.1878100037574768, -0.4966599941253662, -0.701229989528656, 0.07943999767303467, 0.36267000436782837, 0.6860100030899048, 0.32892999053001404, 0.016891999170184135, -0.32576000690460205, 0.7916399836540222, 0.22258999943733215, 0.14395999908447266, 0.182559996843338, 0.024630000814795494, -0.025374000892043114, -0.1577499955892563, -0.7561699748039246, -0.17601999640464783, -0.3720400035381317, -0.47284001111984253, -0.06503699719905853, 0.2975099980831146, 0.04652399942278862, -0.003127099946141243, -0.8256700038909912, -0.2889699935913086, -0.3435800075531006, -0.3371799886226654, 0.26693999767303467, -0.5002999901771545, -0.17077000439167023, -0.006144700106233358, -0.1179099977016449, -0.07685299962759018, -0.5228300094604492, 0.8996099829673767, 0.6225900053977966, -0.40345999598503113, -0.028968999162316322, 0.0904259979724884, 0.4238699972629547, -0.47530999779701233], u'desert': [-0.5281699895858765, -0.17468999326229095, -0.5605499744415283, -0.18786999583244324, -0.011012000031769276, 0.1225999966263771, -0.32280999422073364, 0.7381799817085266, 0.5379199981689453, -0.5874500274658203, 0.4613899886608124, -0.6213099956512451, -0.13697999715805054, 0.25203999876976013, 0.302949994802475, -0.2770799994468689, 0.37692001461982727, -0.25916001200675964, 0.46178001165390015, 0.571690022945404, -0.30272001028060913, -0.041377000510692596, 0.4454900026321411, -0.09662999957799911, 0.12249000370502472, -0.8777999877929688, 0.8109800219535828, 0.10846000164747238, -0.1639000028371811, 0.21794000267982483, 0.23792000114917755, 0.15752999484539032, -1.0391000509262085, 
-0.47391998767852783, 0.24331000447273254, -0.598609983921051, 0.06224299967288971, -0.19878999888896942, 0.09467799961566925, -0.32971999049186707, 0.4316900074481964, -0.44159001111984253, 0.3785499930381775, -0.5251100063323975, 0.6622999906539917, -0.10153000056743622, -0.09706400334835052, 0.2937600016593933, 0.29846999049186707, -0.6099399924278259, -0.23286999762058258, -0.11337000131607056, -0.026978999376296997, 0.30257999897003174, 0.29120999574661255, 3.516300057526678e-05, -0.30184000730514526, -0.3550400137901306, 1.2059999704360962, 0.37196001410484314, -0.19283999502658844, -0.06129400059580803, 1.2803000211715698, -0.6204299926757812, -0.33368998765945435, 0.2065799981355667, 0.4750100076198578, 0.6792100071907043, 0.0515189990401268, -0.06273400038480759, 0.1278200000524521, 0.1898999959230423, 0.04076499864459038, 0.4156799912452698, -0.5856000185012817, 0.021152999252080917, -0.06923499703407288, -0.5913299918174744, 0.2545500099658966, 0.04533499851822853, -0.04218199849128723, 0.27265000343322754, -0.7005699872970581, 0.0016685000155121088, -0.23152999579906464, -0.0729300007224083, -0.1391099989414215, 0.4798800051212311, 0.5565400123596191, -0.15351000428199768, -0.2608399987220764, 0.42197999358177185, 0.6572499871253967, 0.44185999035835266, -0.06785299628973007, 0.24759000539779663, 0.4646199941635132, 0.2319599986076355, -0.11214999854564667, 0.21081000566482544, 0.5297200083732605, 0.9177600145339966, -0.16404999792575836, 0.5870500206947327, -0.6682500243186951, -0.36559000611305237, 0.09254399687051773, 0.7450600266456604, 0.3050999939441681, -0.15477000176906586, 0.24782000482082367, -0.34134000539779663, 0.3600800037384033, -0.3747499883174896, -0.26175999641418457, 0.13428999483585358, 0.3398500084877014, 0.37803998589515686, 0.4920800030231476, 0.6692399978637695, -0.1865299940109253, -0.10107000172138214, -0.9375, -0.13675999641418457, -0.04866800084710121, 0.1402300000190735, -0.24511000514030457, 0.33232998847961426, -0.4339599907398224, -0.3414199948310852, 0.20769000053405762, 0.008464000187814236, 0.20231999456882477, 0.4829599857330322, -0.5185800194740295, -0.5449000000953674, -0.06523700058460236, -0.1174900010228157, -0.15971000492572784, -0.31490999460220337, 0.4391700029373169, 0.2815600037574768, -0.8144000172615051, -0.061560001224279404, -1.0355000495910645, -0.1591300070285797, 0.18098999559879303, 0.016388000920414925, -0.25731998682022095, 0.39190998673439026, 0.4020099937915802, 0.11388000100851059, -0.4428899884223938, -0.04433999955654144, 0.6324700117111206, -0.4231500029563904, 0.2543700039386749, 0.36177000403404236, 0.6848300099372864, 0.7156800031661987, 0.11569000035524368, -0.8078799843788147, 0.12937000393867493, 0.508109986782074, 0.29280999302864075, -0.8622900247573853, 0.32844001054763794, -0.24985000491142273, -0.35995998978614807, 0.22945000231266022, 0.02305999957025051, -0.06582300364971161, 0.4545400142669678, -0.33390000462532043, -0.3846000134944916, 0.7711300253868103, 0.07890500128269196, 0.24636000394821167, 0.21373000741004944, 0.1263599991798401, -0.29708001017570496, 0.09574999660253525, 0.22442999482154846, 0.9467700123786926, 0.361519992351532, -0.06258100271224976, 0.7890400290489197, -0.1404300034046173, -0.3302899897098541, -0.5448700189590454, -0.349590003490448, 0.5453600287437439, 0.041627999395132065, -0.19160999357700348, 0.15174999833106995, -0.013395999558269978, 0.2698200047016144, -0.013716000132262707, 0.03695400059223175, 0.6690099835395813, 1.1806000471115112, 0.0361969992518425, 
0.029062999412417412, -0.07536099851131439, 0.41234999895095825, 0.2928900122642517, -0.02533400058746338, -0.40755000710487366, 0.39724001288414, -0.01643200032413006, -0.14767000079154968, -0.09751000255346298, -0.13068999350070953, -0.7134100198745728, 0.5640299916267395, -0.12524999678134918, 0.07993000000715256, 0.23986999690532684, 0.1403300017118454, -0.011641999706625938, 0.8376399874687195, 0.15776999294757843, 0.084648996591568, -0.09912800043821335, -0.19682000577449799, -0.33733001351356506, -0.09196999669075012, -0.2414100021123886, 0.1465200036764145, -0.3914699852466583, 0.43860000371932983, -0.07298800349235535, -0.34389999508857727, -0.3056800067424774, 0.22111999988555908, 0.4091300070285797, -0.6689199805259705, 0.12892000377178192, 0.3902199864387512, -0.34064000844955444, -0.1390800029039383, 0.6042500138282776, -0.10491999983787537, 0.2657400071620941, -0.2135699987411499, -0.3098300099372864, -0.5004199743270874, -0.2526099979877472, 0.1288899928331375, -0.36924999952316284, -0.36768999695777893, 0.2150699943304062, -0.044158000499010086, -0.36675000190734863, 0.49696001410484314, 0.22502000629901886, 0.1482899934053421, 0.15800000727176666, -0.2169100046157837, -0.159170001745224, 0.1305599957704544, -0.2897000014781952, 0.07875099778175354, -0.49994000792503357, -0.24873000383377075, -0.3123700022697449, 0.12857000529766083, -0.192890003323555, 0.4270299971103668, 0.07751300185918808, -0.28418999910354614, -0.12737999856472015, -0.06880900263786316, -0.022763999179005623, 0.030143000185489655, -0.6271799802780151, -1.783400058746338, 0.7424399852752686, -0.24366000294685364, 0.15460999310016632, 0.35697001218795776, -0.3941099941730499, -0.2200399935245514, 0.2781899869441986, -0.30803000926971436, 0.0469140000641346, 0.1308099925518036, 0.0451119989156723, 0.041919998824596405, 0.023778999224305153, 0.38220998644828796, 0.1453000009059906, -0.41054999828338623, 0.48614999651908875, 0.0341779999434948, 0.7187399864196777, 0.24913999438285828, 0.30410000681877136, 0.025572000071406364, -0.09706799685955048], u'pool': [0.18640999495983124, 0.6172900199890137, -0.2114199995994568, -0.32521000504493713, -0.10882999747991562, 1.0437999963760376, 0.7096899747848511, -0.09058299660682678, 0.3986000120639801, -0.6566399931907654, 0.3149600028991699, 0.06094000115990639, 0.04753600060939789, -0.14610999822616577, -0.24935999512672424, -0.3746100068092346, -0.17603999376296997, 0.2617799937725067, -0.09629800170660019, 0.2625499963760376, -0.4147300124168396, 0.35030999779701233, -0.41266000270843506, 0.06145099923014641, 0.5203999876976013, -0.2541300058364868, 0.35978999733924866, 0.663569986820221, -0.40891000628471375, 0.3589000105857849, -0.5511000156402588, -0.05497699975967407, 0.038731999695301056, 0.0885939970612526, -0.8573399782180786, 0.8044899702072144, 0.42142999172210693, 0.08293399959802628, -0.40540000796318054, 0.3318600058555603, 0.38960000872612, 0.2088800072669983, -0.04521799832582474, 0.58610999584198, 0.27059000730514526, -0.19312000274658203, 1.1367000341415405, 0.39239001274108887, 0.6819499731063843, -0.3709299862384796, -0.3365600109100342, 0.22877000272274017, -0.2269899994134903, -0.42309999465942383, 0.31126001477241516, 0.1870799958705902, -0.0030412001069635153, -0.0912339985370636, 0.3489600121974945, 0.44475001096725464, 0.37968000769615173, 0.2793000042438507, -0.5989199876785278, 0.2805599868297577, 0.016853999346494675, -0.13249999284744263, -0.2217700034379959, -0.31185999512672424, -0.7660599946975708, -0.13513000309467316, 
0.019412999972701073, 0.4309599995613098, 0.21507999300956726, 0.07804299890995026, -0.8866900205612183, -0.32429999113082886, -0.3729099929332733, -0.1923999935388565, 0.026704000309109688, -0.8661199808120728, 0.2926900088787079, 0.20640000700950623, -0.5352299809455872, -0.514240026473999, -0.008585300296545029, -0.38842999935150146, -0.12738999724388123, -0.2519400119781494, 0.014158999547362328, -0.4354200065135956, 0.31564000248908997, 0.3358500003814697, 0.08365099877119064, -0.37422001361846924, 0.305869996547699, 0.4720599949359894, 0.23345999419689178, -0.0872189998626709, 0.26370999217033386, -0.33913999795913696, -0.17881999909877777, 0.06359700113534927, -0.2723900079727173, -0.6827999949455261, 0.5769400000572205, 0.6863700151443481, 0.04939499869942665, -0.21818000078201294, -0.2877199947834015, 0.4108999967575073, -0.1422400027513504, 0.10592000186443329, -0.05585800111293793, -0.09203699976205826, -0.36035001277923584, 0.0644489973783493, -0.1538199931383133, 0.19102999567985535, -0.5938199758529663, 0.07199899852275848, 0.4331800043582916, 0.03805600106716156, -0.22928999364376068, 0.5050100088119507, 0.9085999727249146, 0.09711699932813644, 0.09747599810361862, -0.23026999831199646, -0.2948800027370453, -0.5386999845504761, -0.3828299939632416, -0.04616200178861618, 0.3974300026893616, 0.2128400057554245, 0.1678999960422516, -0.24527999758720398, 0.060947999358177185, -0.23628999292850494, -0.17509999871253967, -0.2985199987888336, 0.002187799895182252, -0.223580002784729, 0.47922998666763306, -0.20802000164985657, -0.3709700107574463, 0.02622699923813343, 0.07672099769115448, -0.1886499971151352, 0.4780299961566925, 0.152319997549057, -0.06351999938488007, -0.002901999978348613, 0.12872999906539917, -0.4284999966621399, 0.43004998564720154, 0.016512999311089516, -0.4353399872779846, 0.5358999967575073, -0.10659000277519226, 0.36399000883102417, 0.28415000438690186, -0.2102299928665161, 0.4065600037574768, 0.20725999772548676, 0.5242800116539001, -0.2580299973487854, -0.1031700000166893, -0.02191299945116043, 0.24186000227928162, -0.13467000424861908, 0.11973000317811966, 0.41047000885009766, 0.03403199836611748, -0.22439000010490417, -0.5804399847984314, -0.2136099934577942, -0.0901079997420311, 0.6874099969863892, 0.23917999863624573, 0.3110800087451935, -0.00015423000149894506, 0.24954000115394592, -0.146139994263649, -0.2288299947977066, -0.09023399651050568, 0.004531499929726124, 0.45100998878479004, 0.0019226999720558524, 0.6152799725532532, -0.16978000104427338, 0.8804200291633606, 0.6815800070762634, -0.35653001070022583, -0.5067200064659119, -0.15897999703884125, 0.27496999502182007, 0.3752700090408325, -0.16142000257968903, -0.46202999353408813, -0.08107099682092667, 0.7681599855422974, -0.5820299983024597, -0.45603999495506287, -0.6330500245094299, -0.08535300195217133, 0.06748799979686737, -0.5669000148773193, -0.4405199885368347, -0.16630999743938446, 0.12268999963998795, 0.029543999582529068, 0.2643899917602539, -0.8337200284004211, -0.2790200114250183, 0.25874999165534973, 0.5540000200271606, -0.11885000020265579, -0.36333999037742615, 0.1141899973154068, -0.4655100107192993, 0.3806400001049042, 0.08949899673461914, -0.19994999468326569, -0.21593999862670898, -0.1479800045490265, -0.39195001125335693, -0.26085999608039856, -0.0011180000146850944, -0.10458000004291534, 0.32486000657081604, 0.10451000183820724, -0.39122000336647034, 0.2855600118637085, 0.15154999494552612, 0.22080999612808228, 0.540910005569458, 0.01171599980443716, 0.0330829992890358, 
0.11663000285625458, 0.14899000525474548, 0.7984799742698669, 0.17971999943256378, 0.4588499963283539, 0.39239999651908875, -0.6104300022125244, -0.21552999317646027, 0.014487000182271004, -0.3420400023460388, -0.2834300100803375, 0.12370000034570694, 0.4414199888706207, -0.6638299822807312, 0.5583000183105469, -0.44457000494003296, 0.10493999719619751, -0.30469000339508057, 0.1173200011253357, 0.36941999197006226, 0.3199799954891205, 0.6676700115203857, 0.01903199963271618, -0.3987500071525574, 0.21187999844551086, -0.327239990234375, -0.07943200320005417, -0.22684000432491302, -0.11674000322818756, 0.12964999675750732, -0.4657900035381317, -0.402319997549057, -0.15178999304771423, -0.2942799925804138, 0.07088600099086761, -0.1326099932193756, 0.3241400122642517, 0.28753000497817993, -1.878000020980835, 0.6799799799919128, 0.004697899799793959, 0.21772000193595886, -0.2287999987602234, -0.05337600037455559, -0.4242199957370758, -0.46924999356269836, 0.2889299988746643, -0.13676999509334564, 0.082955002784729, 0.1375499963760376, -0.3191800117492676, 0.04478999972343445, -0.32444000244140625, 0.06964600086212158, -0.16518999636173248, 0.030933000147342682, 0.5291200280189514, -0.13008999824523926, 0.7054600119590759, 0.11206000298261642, -0.31957998871803284, -0.24300000071525574], u'cliff': [0.25944000482559204, 0.04687099903821945, -0.20096999406814575, 0.16719000041484833, -0.094309002161026, 0.4450500011444092, -0.021330000832676888, -0.5227699875831604, 0.40439000725746155, 0.08143399655818939, 0.16614000499248505, -0.6023600101470947, -0.14077000319957733, -0.06444200128316879, -0.02195500023663044, 0.33454999327659607, -0.03149599954485893, -0.27272000908851624, 0.3619000017642975, 0.16491000354290009, -0.0568929985165596, 0.1253499984741211, -0.02894200012087822, 0.30309000611305237, 0.26315000653266907, 0.12955999374389648, 0.06898599863052368, 0.018609000369906425, -0.34586000442504883, 0.6148300170898438, 0.09829100221395493, 0.15428000688552856, -0.43647998571395874, -0.7000499963760376, -0.4514000117778778, -0.008296700194478035, 0.11981000006198883, -0.5630900263786316, 0.09179099649190903, -0.4060400128364563, 0.07811199873685837, 0.047231998294591904, 0.22450999915599823, 0.45565998554229736, -0.06338399648666382, 0.5622400045394897, 0.35740000009536743, -0.034384001046419144, 0.19172999262809753, -0.5030400156974792, -0.5719799995422363, -0.2416599988937378, -0.5898000001907349, -0.036931999027729034, -0.0672919973731041, 0.4292699992656708, -0.5907800197601318, -0.16819000244140625, -0.304639995098114, 0.10732000321149826, 0.10853999853134155, 0.10531000047922134, 0.9686499834060669, 0.555109977722168, 0.5483400225639343, -0.5564000010490417, -0.2390899956226349, 0.6215900182723999, 0.3549099862575531, -0.40887999534606934, -0.24365000426769257, 0.4403400123119354, -0.6070299744606018, 0.4330500066280365, -0.16216999292373657, -0.1123799979686737, 0.06964199990034103, -0.04822099953889847, 0.3792400062084198, -0.5478699803352356, 0.1236800029873848, -0.002204699907451868, 0.002099399920552969, 0.31547001004219055, 0.010582000017166138, 0.4653100073337555, 0.5885400176048279, 0.5574899911880493, -0.1578799933195114, 0.1399800032377243, 0.31518998742103577, 0.6658300161361694, -0.14114999771118164, -0.26732999086380005, 0.41857001185417175, 0.6178399920463562, 0.6630600094795227, -0.4872399866580963, 0.22064000368118286, -0.03280699998140335, -0.3353999853134155, 0.46511998772621155, -0.11435999721288681, 0.03014500066637993, -0.15033000707626343, -0.11232999712228775, 
0.23332999646663666, -0.12238000333309174, 0.21457000076770782, -0.5379700064659119, -0.35089001059532166, -0.6172900199890137, 0.24779999256134033, 0.02022700011730194, 0.9021499752998352, 0.379830002784729, -0.37770000100135803, 0.19780999422073364, -0.19296999275684357, 0.23747999966144562, -0.27331000566482544, -0.30674999952316284, -0.4736500084400177, -0.11148999631404877, -0.30090999603271484, -0.7871699929237366, 0.1731400042772293, 0.046241000294685364, -0.005057299975305796, -0.459989994764328, -0.31411001086235046, 0.7174500226974487, -0.14448000490665436, -0.1890600025653839, -0.04614799842238426, 0.08124999701976776, -0.6754699945449829, 0.17023000121116638, 0.10948000103235245, -0.40887001156806946, 0.0581820011138916, -0.21094000339508057, -0.2619900107383728, -0.1379700005054474, -0.6711199879646301, -0.2602599859237671, 0.1301400065422058, -0.11568000167608261, -0.23579999804496765, -0.3948499858379364, 0.5488899946212769, -0.19952000677585602, -0.1449200063943863, -0.8683500289916992, 0.2910099923610687, 0.37470000982284546, 0.37290000915527344, 0.017297999933362007, -0.15192000567913055, 0.7276800274848938, 0.1010499969124794, -0.5246400237083435, -0.044468000531196594, -0.1003900021314621, -0.008392499759793282, 0.31147998571395874, -0.24943000078201294, 0.22787000238895416, -0.063059002161026, -0.10496000200510025, -0.21854999661445618, 0.16245000064373016, 0.6708199977874756, -0.47953000664711, -0.4088999927043915, 0.694320023059845, 0.044266000390052795, 0.5248100161552429, -0.06368900090456009, -0.35978999733924866, 0.01736699976027012, 0.30796000361442566, 0.2291100025177002, -0.18625999987125397, 0.055750999599695206, -0.5228400230407715, -0.49334999918937683, -0.011815999634563923, 0.016529999673366547, 0.7457299828529358, 0.0933689996600151, 0.10283999890089035, 0.2305999994277954, -0.3972199857234955, 0.25481998920440674, 0.20813000202178955, -0.09723500162363052, -0.4525499939918518, 0.7286499738693237, -0.030866000801324844, 1.09660005569458, -0.022515999153256416, 0.04947200044989586, 0.03651599958539009, 0.058368999511003494, 0.4500100016593933, 0.242249995470047, -0.03584799915552139, -0.3699299991130829, 0.31264999508857727, -0.22495999932289124, 0.07174500077962875, -0.17594000697135925, -0.3161799907684326, 0.3923799991607666, 0.6691200137138367, -0.07127399742603302, -0.03897299990057945, -0.067051000893116, -0.28376999497413635, 0.09142599999904633, 0.523360013961792, -0.1252399981021881, -0.44822999835014343, 0.17743000388145447, -0.06847699731588364, -0.19600999355316162, 0.42173999547958374, -0.06203500181436539, -0.034175001084804535, -0.05392000079154968, 0.09062200039625168, -0.2471799999475479, 0.09549500048160553, -0.06188200041651726, -0.011455999687314034, -0.014336000196635723, -0.2055400013923645, -0.03796200081706047, -0.3421199917793274, -0.07462000101804733, 0.228970006108284, 0.07642299681901932, 0.17312000691890717, -0.6195700168609619, 0.4055100083351135, -0.05753900110721588, -0.12274999916553497, 0.1312199980020523, 0.6519299745559692, -0.032533999532461166, 0.39730000495910645, 0.43244999647140503, -0.7477700114250183, 0.49606001377105713, 0.17629000544548035, 0.21052999794483185, 0.14837999641895294, -0.2967100143432617, 0.3598400056362152, -0.7614700198173523, 0.29673999547958374, 0.044325001537799835, -0.3763299882411957, -0.11314000189304352, -0.38857001066207886, 0.7611299753189087, 0.4206399917602539, -0.18659000098705292, -0.06413400173187256, -0.06275799870491028, -0.11956000328063965, -0.05178900063037872, 
-0.013768999837338924, -0.11151000112295151, 0.10758999735116959, -0.10420999675989151, -0.22033999860286713, -0.34911999106407166, 0.2126699984073639, -0.5107200145721436, 0.13059000670909882, -0.21121999621391296, -0.4149700105190277, -0.46136999130249023, -0.09348700195550919, 0.38944000005722046, 0.046626001596450806, 0.006577800028026104, -0.13565999269485474, -0.03243099898099899, 0.24211999773979187, -0.2960900068283081, -0.3712800145149231, -0.05021499842405319, 0.396699994802475, -0.019447000697255135, 0.7682099938392639, 0.4889200031757355, -0.1959500014781952], u'butter': [0.35740000009536743, 0.46897000074386597, -0.02180200070142746, 0.02373100072145462, -0.1675100028514862, -0.21252000331878662, -0.07389000058174133, -0.030308999121189117, 0.05408100038766861, -0.7501400113105774, 0.16482000052928925, -0.6409599781036377, -0.1956000030040741, 0.7117199897766113, -0.4731000065803528, 0.24522000551223755, -0.31832000613212585, 0.22025999426841736, -0.46698999404907227, 0.13127000629901886, -0.09982000291347504, 0.1588200032711029, 0.321370005607605, 0.7029899954795837, -0.1691100001335144, 0.1810300052165985, -0.21629999577999115, 0.024159999564290047, -0.416049987077713, -0.31852999329566956, -0.5285699963569641, 0.6372600197792053, 0.0061471001245081425, -0.3175100088119507, -0.48541998863220215, 0.8788300156593323, 0.11224000155925751, 0.7156000137329102, -0.06352800130844116, 0.09145499765872955, 0.2427700012922287, -0.3180299997329712, 0.3973599970340729, 0.04231499880552292, 0.20201000571250916, -0.18296000361442566, 0.18637999892234802, 0.2814199924468994, 0.1025800034403801, 0.5708400011062622, 0.34975001215934753, 0.19384999573230743, 0.14357000589370728, 0.06458000093698502, -0.17570999264717102, 0.009340699762105942, -0.5128999948501587, 0.43198999762535095, 0.5767099857330322, 0.306769996881485, 0.35047999024391174, 0.7135099768638611, -0.2195899933576584, 0.18314999341964722, 0.18901999294757843, -0.14925000071525574, 0.22412000596523285, 0.2796800136566162, -0.6507999897003174, -0.3127399981021881, 0.10141000151634216, -0.09000299870967865, -0.07212900370359421, 0.3130199909210205, -0.7253400087356567, 0.2934499979019165, 0.3774999976158142, -0.11840000003576279, -0.2931399941444397, -0.2669999897480011, -0.13597999513149261, 0.3796199858188629, -0.2980499863624573, -0.18963000178337097, 0.1311500072479248, -0.4946900010108948, -0.13926999270915985, -0.21144999563694, -0.7411400079727173, -0.61940997838974, -0.08010199666023254, -0.3505899906158447, -0.2510499954223633, -0.09322699904441833, -0.5129899978637695, -0.3139899969100952, 0.33055999875068665, 0.5395299792289734, -0.3281700015068054, 0.6172800064086914, -0.25095999240875244, 0.09546300023794174, 0.3918299973011017, -0.9353799819946289, -0.6474400162696838, -0.3268199861049652, -0.13433000445365906, 0.14000000059604645, -0.38304001092910767, 0.9352800250053406, 0.5453199744224548, -0.1055700033903122, -0.11540000140666962, 0.32214999198913574, -0.001052499981597066, -0.024337999522686005, -0.5584800243377686, 0.985230028629303, 0.4214800000190735, -0.10260000079870224, -0.6287599802017212, -0.6678299903869629, 0.47356000542640686, 0.03270699828863144, -0.41363999247550964, -0.2711400091648102, 0.08825100213289261, 0.23327000439167023, -0.7441400289535522, 0.8938900232315063, -0.2659600079059601, 0.7995399832725525, 0.17870000004768372, 1.1258000135421753, -0.04033900052309036, -0.17760999500751495, -0.17499999701976776, 0.5170199871063232, -0.17361000180244446, 0.5390899777412415, 0.6204500198364258, 
0.4358200132846832, -0.6741200089454651, -0.15871000289916992, -0.5170599818229675, 0.7493100166320801, -0.16410000622272491, -0.026355000212788582, 0.3677400052547455, -0.04604800045490265, -0.7644500136375427, 0.7526999711990356, 0.2496500015258789, 0.2943899929523468, -0.7751500010490417, -0.32857999205589294, -0.12256000190973282, -0.2770799994468689, -0.3753100037574768, -0.19242000579833984, 0.2977199852466583, 0.13327999413013458, 0.03483600169420242, -0.505079984664917, -0.05377500131726265, -0.04992099851369858, 0.27849000692367554, 0.23128999769687653, 0.13123999536037445, -0.5974500179290771, -0.2689099907875061, 0.32771000266075134, -0.3806999921798706, 0.07156399637460709, -0.3647400140762329, -0.5590500235557556, 0.6252099871635437, -0.9282199740409851, 0.4889799952507019, -0.5830299854278564, 0.2993299961090088, 0.43140000104904175, 0.1745299994945526, -0.8165900111198425, 0.050951000303030014, -0.6066799759864807, 1.3375999927520752, 0.3037000000476837, -0.13895000517368317, -0.16655999422073364, 0.3660599887371063, 1.1930999755859375, 0.1418599933385849, -0.037285998463630676, 0.3140299916267395, -0.46226999163627625, -0.4143100082874298, -0.30640000104904175, 0.425790011882782, -0.34255000948905945, 0.13875000178813934, -0.6711699962615967, 1.2869999408721924, 0.49970000982284546, 0.45138001441955566, -0.40165001153945923, 0.30149999260902405, 0.07049500197172165, -0.6126700043678284, 0.024829000234603882, 0.7330800294876099, -0.02307800017297268, -0.2862800061702728, 0.3506700098514557, -0.7284200191497803, 0.1590300053358078, 0.2803800106048584, -0.22710999846458435, 0.2635999917984009, 0.03542200103402138, 0.013605000451207161, 0.7978500127792358, -0.13266000151634216, -0.7555599808692932, -0.4848400056362152, -0.29673001170158386, 0.15865999460220337, 0.39056000113487244, 0.23955999314785004, -0.034717001020908356, 0.0824740007519722, -0.008359000086784363, 0.05174199864268303, -0.19241000711917877, 0.959559977054596, 0.07506600022315979, -0.08175300061702728, 0.5238100290298462, -1.0707000494003296, -0.7510600090026855, -0.44955000281333923, -0.2687000036239624, -0.4653699994087219, 0.013110999949276447, -1.0154999494552612, -0.17833000421524048, 0.2640799880027771, 0.42250001430511475, -0.2068600058555603, -0.8311200141906738, 0.12116000056266785, -0.36807000637054443, 0.5824400186538696, 0.5626099705696106, -0.23446999490261078, -0.08911799639463425, -0.08409799635410309, -0.06563899666070938, -0.05166799947619438, 0.4371899962425232, -0.23016999661922455, -0.5986999869346619, 0.14774000644683838, 0.0438309982419014, 0.35352998971939087, 0.047283999621868134, -0.507610023021698, -0.11313000321388245, 0.3269200026988983, 0.011033999733626842, -0.28200000524520874, -0.004097300115972757, -0.2708599865436554, 1.1622999906539917, 0.06730800122022629, 0.9239199757575989, -0.6824100017547607, -0.45340999960899353, -0.8950099945068359, -0.08192399889230728, 0.1657000035047531, 0.5961999893188477, -0.5571100115776062, -0.6196799874305725, 0.6453800201416016, 1.1044000387191772, 0.19634999334812164, -0.608299970626831, 0.05332399904727936, 0.7442100048065186, -0.2904700040817261, 0.047919001430273056, 0.5046600103378296, -0.10334999859333038, -0.041891999542713165, -0.5962399840354919, 0.06558900326490402, -0.18799999356269836, -0.05260000005364418, 0.5153399705886841], u'trail': [-0.3418999910354614, -0.2646999955177307, 0.3264800012111664, 0.06005999818444252, -0.3948400020599365, 0.1531199961900711, -0.20945000648498535, 0.2455199956893921, 0.5456299781799316, 
0.11061999946832657, -0.10406000167131424, 0.05450500175356865, -0.62704998254776, -0.07137200236320496, 0.4486500024795532, 0.3208700120449066, -0.320389986038208, 0.1470700055360794, 0.5647100210189819, 0.19678999483585358, -0.3820599913597107, -0.30188998579978943, 0.23352999985218048, 0.018338000401854515, 0.12086000293493271, -0.5157600045204163, 0.05538100004196167, 0.010966000147163868, -0.1712999939918518, 0.2984899878501892, 0.8929399847984314, 0.17133000493049622, -0.30820998549461365, 0.21863999962806702, -0.52920001745224, 0.4879100024700165, -0.473470002412796, 0.05956299975514412, 0.6571400165557861, -0.37077999114990234, -0.7883899807929993, 0.13862000405788422, 0.08782999962568283, -0.03851599991321564, 0.03341199830174446, 0.08253200352191925, 0.9928699731826782, 0.39956000447273254, -0.009004100225865841, -0.5591099858283997, -0.5972999930381775, 0.016334999352693558, -0.08935800194740295, -0.020005999132990837, 0.28457000851631165, 0.2007099986076355, -0.22362999618053436, -0.7044900059700012, -0.06094300001859665, 0.6574000120162964, 0.35589998960494995, 0.1937599927186966, 0.7612500190734863, -0.26194000244140625, 0.47380000352859497, -0.3694300055503845, -0.3227800130844116, -0.5525799989700317, -0.02560500055551529, -0.3243899941444397, 0.08922699838876724, 0.22537000477313995, 0.29054000973701477, 0.6877400279045105, -0.15129999816417694, -0.07783100008964539, 0.5409700274467468, 0.21501000225543976, -0.36256998777389526, -0.26096001267433167, -0.8073099851608276, 0.18945999443531036, 0.7846599817276001, -0.267659991979599, -0.3649600148200989, -0.8002300262451172, -0.08641599863767624, 0.5898200273513794, 0.2857300043106079, 0.4724999964237213, -0.11806999891996384, -0.175369992852211, 0.4750800132751465, -0.39844000339508057, -0.0950549989938736, -0.20422999560832977, 0.40092000365257263, 0.31178998947143555, 0.503030002117157, -0.21323999762535095, -0.5206199884414673, 0.5077499747276306, -0.14645999670028687, 0.46518999338150024, 0.017842000350356102, -0.3163299858570099, 0.2579500079154968, 0.1020599976181984, 0.02578200027346611, 0.090038001537323, 0.11558999866247177, -0.8945800065994263, -0.021463999524712563, -0.624809980392456, 0.21264000236988068, 0.1123099997639656, 0.23931999504566193, 0.5250399708747864, 0.3867200016975403, 0.4267300069332123, -0.03622400015592575, -0.19499999284744263, -0.2665199935436249, -0.22160999476909637, -0.2959100008010864, 0.18797999620437622, -0.763949990272522, 0.01905299909412861, -0.3767000138759613, 0.21446000039577484, -0.17093999683856964, -0.03375000134110451, -0.051274001598358154, 0.017069000750780106, -0.03128400072455406, -0.07577099651098251, 0.36357998847961426, 0.5729299783706665, 0.65802001953125, -0.2874999940395355, 0.5707200169563293, -0.7242799997329712, -0.1307000070810318, 0.4691300094127655, -0.5791299939155579, 0.05071699991822243, 0.4708999991416931, 0.02878499962389469, -0.039055999368429184, 0.09854300320148468, 0.26243001222610474, 0.7679299712181091, -0.6400700211524963, -0.0708250030875206, 0.551289975643158, -0.3626199960708618, 0.44530999660491943, -0.0792820006608963, -0.19568000733852386, 0.6527699828147888, 0.24132999777793884, -0.2712399959564209, 0.4868299961090088, -0.1932400017976761, 0.010707000270485878, -0.13964000344276428, 0.44881001114845276, -0.6265100240707397, -0.3996100127696991, 0.30278998613357544, -0.2070000022649765, -0.334850013256073, -0.1043500006198883, 0.42866000533103943, -0.0824199989438057, -0.3158800005912781, 0.1633400022983551, -0.11747000366449356, 
0.04811599850654602, -0.08820199966430664, 0.40132999420166016, 0.6401299834251404, 0.052372001111507416, 0.008061000145971775, -0.6818600296974182, -0.3370699882507324, -0.03752100095152855, 0.2170500010251999, -0.1458200067281723, 0.2090499997138977, -0.08005700260400772, 0.6924200057983398, -0.09491699934005737, -0.38523000478744507, -0.5958099961280823, -0.20216000080108643, 0.08428700268268585, -0.5273200273513794, 0.2490299940109253, 0.4426499903202057, 1.1233999729156494, 0.04354200139641762, 0.40933001041412354, -0.08285299688577652, -0.3758699893951416, -0.02575499936938286, -0.48416998982429504, 0.11485999822616577, -0.04367300122976303, 0.38892999291419983, -0.027403999119997025, -0.185479998588562, 0.3834800124168396, 0.023125000298023224, -0.3223100006580353, -0.011014999821782112, -0.345550000667572, 0.22387999296188354, -0.25863999128341675, -0.3881700038909912, 0.5174599885940552, -0.22811000049114227, -0.22995999455451965, -0.760640025138855, 0.43112999200820923, 0.12008000165224075, -0.05008599907159805, 0.10016000270843506, -0.7971199750900269, 0.23987999558448792, -0.452349990606308, -0.24657000601291656, -0.13085000216960907, -0.20555000007152557, -0.11817000061273575, 0.28095000982284546, -0.3013800084590912, -0.4018999934196472, 0.05500800162553787, -0.1526000052690506, 0.08674199879169464, 0.32561999559402466, -0.01459100004285574, -0.7882800102233887, -0.270550012588501, -0.5509399771690369, 0.5373299717903137, 0.48984000086784363, 0.5337499976158142, 0.03916399925947189, -0.1810699999332428, -0.19144999980926514, -0.15809999406337738, -0.4020099937915802, 0.35387998819351196, -0.40411999821662903, 0.14480000734329224, -0.04033299908041954, -0.552299976348877, 0.3535799980163574, -0.324180006980896, -0.20685000717639923, 0.06492699682712555, 0.2913599908351898, -0.30643999576568604, -0.15172000229358673, 0.1359100043773651, -0.17847999930381775, -0.014135999605059624, -0.24315999448299408, 0.05890899896621704, -0.07424700260162354, 0.0005774900200776756, 0.02453400008380413, 0.31755000352859497, 0.35767999291419983, -1.2640999555587769, -0.27109000086784363, 0.49390000104904175, 0.013621999882161617, 0.6866400241851807, -0.3796600103378296, 0.29635000228881836, -0.22758999466896057, -0.666920006275177, 0.2667199969291687, 0.28110000491142273, -0.14688999950885773, 0.05291999876499176, -0.33087000250816345, 0.15616999566555023, 0.16830000281333923, -1.1256999969482422, 1.0640000104904175, -0.6947000026702881, -0.2106499969959259, -0.25863999128341675, 0.5571200251579285, 0.3604600131511688, 0.22429999709129333], u'coat': [0.033257998526096344, -0.4309599995613098, -0.05786899849772453, -0.4050999879837036, -0.004820900037884712, -0.352620005607605, -0.3757599890232086, -0.024196000769734383, 0.271340012550354, -1.0440000295639038, 0.041620999574661255, -0.0805009976029396, -0.17118999361991882, 0.33866000175476074, 0.06759800016880035, 0.2361000031232834, 0.20720000565052032, 0.1174200028181076, 0.0279300007969141, -0.6486899852752686, -0.6463800072669983, -0.40874001383781433, 0.6183599829673767, 0.6526299715042114, 0.09168700128793716, -0.6506199836730957, 0.5062699913978577, 0.5832599997520447, -0.22703999280929565, -0.4445199966430664, 0.43415001034736633, -0.07450900226831436, -0.3217499852180481, 0.3469099998474121, 0.05670500174164772, 0.5999000072479248, 0.18929000198841095, 0.09392800182104111, -0.668940007686615, 0.5004799962043762, -0.28554001450538635, -0.9274799823760986, 0.07879400253295898, -0.042132001370191574, 0.7348899841308594, 
-0.20795999467372894, 0.12732000648975372, -0.14063000679016113, -0.1738000065088272, -0.0940679982304573, -0.4862099885940552, -0.11344999819993973, 0.5636000037193298, -0.05898100137710571, -0.21907000243663788, 0.17523999512195587, 0.004479699768126011, -0.4429900050163269, 0.3677600026130676, 0.14264999330043793, 0.3501499891281128, 0.1882999986410141, 0.002128100022673607, -0.008247699588537216, 0.3242200016975403, -0.5782999992370605, -0.39750999212265015, 0.21793000400066376, 0.27421998977661133, 0.02993999980390072, 0.3841499984264374, -0.14993000030517578, 0.18223999440670013, -0.43456000089645386, -0.3970299959182739, 0.2650800049304962, -0.024196000769734383, 0.23746000230312347, -0.1545500010251999, -0.4425800144672394, -0.02422500029206276, 0.35267001390457153, 0.07205499708652496, -0.3278000056743622, 0.3608500063419342, -0.10343000292778015, -0.13367000222206116, -0.1857299953699112, -0.6292700171470642, 0.05017999932169914, 0.06853900104761124, -0.24573999643325806, 0.13681000471115112, 0.07716500014066696, -0.6251400113105774, -0.000677440024446696, 0.13088999688625336, 0.7808399796485901, 0.22333000600337982, 0.6030300259590149, 0.3400900065898895, 0.2444400042295456, -0.1437000036239624, 0.10514000058174133, 0.09691599756479263, -0.10391999781131744, 0.1882600039243698, -0.0004733600071631372, -0.19232000410556793, 0.9813600182533264, 0.5807999968528748, 0.7936699986457825, -0.1430100053548813, -0.0755779966711998, -0.30110999941825867, 0.41787999868392944, -0.6520299911499023, 0.4701499938964844, 0.25540998578071594, -0.5964000225067139, -0.3902899920940399, 0.1063700020313263, 0.8771600127220154, 0.2708899974822998, -0.061702001839876175, -0.21243000030517578, 0.27584001421928406, 0.6590399742126465, 0.18556000292301178, 0.40602999925613403, 0.30904000997543335, 0.07037799805402756, -0.7021099925041199, 0.9166100025177002, 0.0737840011715889, -0.08189599961042404, -0.2619900107383728, 0.2898100018501282, 0.3153899908065796, 0.484250009059906, -0.043838001787662506, 0.18091000616550446, -0.39833998680114746, -0.05704699829220772, 0.20235000550746918, 0.3947800099849701, -0.06951499730348587, 0.11421000212430954, 0.3851499855518341, 0.27382001280784607, -0.07072500139474869, 0.602649986743927, -0.8229299783706665, -0.760860025882721, -0.051093000918626785, 0.08159100264310837, -0.2612900137901306, -0.33739998936653137, 0.1625799983739853, 0.5730100274085999, 0.18702000379562378, 0.049139998853206635, -0.48927998542785645, -0.12184999883174896, 0.11753000319004059, -0.19664999842643738, 0.15841999650001526, 0.42114999890327454, 0.522379994392395, -0.37150999903678894, -0.1143300011754036, -0.2755900025367737, -0.12233000248670578, 0.37310999631881714, 0.30515000224113464, 0.01919800043106079, 0.35389000177383423, -0.2431900054216385, -0.6099799871444702, -1.2091000080108643, 0.2665500044822693, 0.2721000015735626, 0.3849799931049347, -0.11009000241756439, 0.3780899941921234, -0.02607000060379505, 0.8576899766921997, 0.10324999690055847, -0.5167999863624573, -0.09375900030136108, 0.05406000092625618, 0.45770999789237976, -0.22436000406742096, -0.11529000103473663, -0.030690999701619148, -0.4290800094604492, -0.23431000113487244, 0.29291999340057373, -0.3290799856185913, -0.09949100017547607, 0.7940400242805481, -0.28185001015663147, 0.30518001317977905, 0.20050999522209167, 0.4802600145339966, -0.21886999905109406, 0.30483999848365784, 0.19990000128746033, -0.3294700086116791, -0.20558999478816986, 0.5339300036430359, -0.24368999898433685, -0.1861400008201599, 
0.11889000236988068, 0.3778400123119354, -0.45443999767303467, 0.7459400296211243, -0.4756599962711334, 0.22109000384807587, -0.5848299860954285, -0.048875998705625534, 0.08240099996328354, 0.28835999965667725, -0.23330999910831451, 0.14142000675201416, -0.21154999732971191, 0.18242000043392181, -0.31929999589920044, 0.27110999822616577, -0.2483700066804886, 0.6312299966812134, 0.1947699934244156, -0.4366300106048584, 0.43435999751091003, 0.1260399967432022, 0.08642499893903732, 0.4121899902820587, -0.4038600027561188, -0.38646000623703003, 0.6350799798965454, 0.5013700127601624, -0.150859996676445, -0.3377699851989746, 0.1501699984073639, -0.8496099710464478, 0.4941900074481964, -0.07898499816656113, -0.42761000990867615, 0.047857001423835754, -1.1916999816894531, -0.33149999380111694, 0.241689994931221, 0.2955799996852875, -0.6757599711418152, 0.7788900136947632, 0.1917800009250641, 0.04907499998807907, 0.22618000209331512, -0.22224999964237213, -0.1647000014781952, -0.260560005903244, -0.11767999827861786, 0.45974001288414, 0.3388200104236603, -0.3517000079154968, 0.22335000336170197, -0.27577000856399536, -0.17744000256061554, -0.3792499899864197, -0.25758999586105347, 0.04614400118589401, -0.5023099780082703, 0.17795999348163605, -0.3744199872016907, -0.4878999888896942, -0.05866200104355812, -0.7780399918556213, -0.43129000067710876, -0.6892499923706055, 0.004015100188553333, 0.4713200032711029, 0.4316900074481964, 0.07351700216531754, -0.1522900015115738, -0.8124300241470337, 0.7383999824523926, 0.14448000490665436, 0.3885200023651123, 0.3379000127315521, -0.20303000509738922, 0.28356000781059265, -0.13235999643802643, -0.25251999497413635, 0.5453500151634216, 0.19643999636173248, -0.3777500092983246, 0.3608500063419342, 0.46415001153945923, 0.7041900157928467, 0.5920199751853943], u'seafood': [0.26409000158309937, 0.5856800079345703, 0.2948800027370453, -0.03710399940609932, 0.022732000797986984, -0.047697000205516815, 0.23305000364780426, 0.4109100103378296, -0.27147001028060913, 0.13957999646663666, 0.46682998538017273, -0.5885699987411499, -0.39059001207351685, 0.37362000346183777, -0.24120000004768372, -0.617609977722168, -0.3024199903011322, 0.46465998888015747, -0.4075799882411957, 0.49366000294685364, -0.3262600004673004, 0.6464300155639648, 0.13211999833583832, -0.1025800034403801, -0.5072699785232544, 0.3215300142765045, -0.33636000752449036, -0.03396400064229965, -0.296999990940094, -0.40178999304771423, 0.14967000484466553, 0.4998300075531006, -0.49334999918937683, 0.20938999950885773, -0.22123000025749207, 0.4847800135612488, -0.15674999356269836, -0.599560022354126, -0.24775999784469604, -0.22673000395298004, -0.443230003118515, 0.36100998520851135, 0.32207998633384705, 0.4322899878025055, -0.6561999917030334, 0.07894600182771683, 0.6250100135803223, -0.2851000130176544, -0.35131001472473145, 0.8483999967575073, -0.505370020866394, -0.1645900011062622, 0.42882001399993896, -0.22142000496387482, -0.0027441000565886497, 0.20757000148296356, 0.2770000100135803, 0.444489985704422, -0.4585300087928772, -0.015745000913739204, 0.43887999653816223, -0.61735999584198, 0.6187400221824646, -0.3109099864959717, 0.03340600058436394, 0.09955199807882309, -0.6118299961090088, -0.5154200196266174, -0.3763200044631958, 0.5327000021934509, 0.17180000245571136, 0.0897269994020462, 0.4473400115966797, -0.6677200198173523, -0.3619999885559082, 0.15550999343395233, 0.7942000031471252, 0.2603900134563446, -0.49046000838279724, -0.2082200050354004, -0.28600001335144043, 0.22432999312877655, 
-0.07886499911546707, 0.1109199970960617, 0.5890200138092041, -0.5648400187492371, -0.04706500098109245, 0.08641599863767624, -0.29701998829841614, -0.7547000050544739, 0.10495000332593918, -0.2500399947166443, -0.027823999524116516, -0.0520780012011528, -0.06538300216197968, 0.5126699805259705, -0.093129001557827, -0.023746000602841377, -0.003072800114750862, 0.08685000240802765, 0.03925999999046326, 0.2918800115585327, 0.5734400153160095, -0.5186100006103516, -0.057705000042915344, -0.24626000225543976, 0.2528800070285797, 0.3441300094127655, -0.0509909987449646, 0.1698099970817566, 0.6293500065803528, -0.321289986371994, -0.5460699796676636, -0.41391000151634216, 0.5382500290870667, -0.5631899833679199, 0.19262999296188354, 0.027000999078154564, -0.2707200050354004, 0.6279100179672241, -0.41585999727249146, 0.09056399762630463, 0.18082000315189362, 0.4529699981212616, 0.4343400001525879, 0.22222000360488892, -0.21363000571727753, 0.20011000335216522, 0.20442000031471252, 0.7223899960517883, -0.005064200144261122, 0.10936000198125839, -0.002544600050896406, -0.5395600199699402, -0.022268999367952347, -0.10405000299215317, 0.3877600133419037, 0.0912420004606247, 0.03009900078177452, 0.6924499869346619, 0.283160001039505, 0.38569000363349915, 0.9010499715805054, -0.322270005941391, -0.0650079995393753, 0.20110000669956207, -0.2994900047779083, -0.20216000080108643, -0.23172999918460846, -0.26589998602867126, -0.5813599824905396, 0.040876999497413635, 0.4465799927711487, -0.32521000504493713, -0.5605999827384949, -0.5083799958229065, -0.35717999935150146, -0.1666799932718277, -0.24122999608516693, -0.4309999942779541, 0.7152000069618225, 0.6528400182723999, 0.0065612997859716415, 0.15518000721931458, 0.17792999744415283, -0.5145999789237976, 0.8118699789047241, -0.7874500155448914, -0.20204000174999237, 0.43939998745918274, -0.4571300148963928, -0.156700000166893, -0.3427099883556366, 0.23952999711036682, 0.12231999635696411, 0.05755599960684776, 0.3567099869251251, 0.011932999826967716, 0.18569999933242798, -0.7831400036811829, 0.009127099998295307, -0.31894999742507935, -0.10859999805688858, 0.10936000198125839, 0.08544400334358215, -0.2639000117778778, 0.7173399925231934, 0.43004000186920166, 0.25659000873565674, -0.06895100325345993, -0.575760006904602, 0.6182900071144104, -0.9414399862289429, -0.047210000455379486, 0.6252300143241882, -0.0017088999738916755, 0.03374199941754341, -0.42767998576164246, -0.4894599914550781, -0.10769999772310257, -0.02896299958229065, -0.46303001046180725, 0.06034599989652634, 0.1574700027704239, 0.07988200336694717, 1.1270999908447266, 0.6959800124168396, -0.25383999943733215, 0.17533999681472778, -0.038100000470876694, 0.002357000019401312, -0.47442999482154846, -0.1015700027346611, 0.4464600086212158, 0.09087599813938141, 0.19208000600337982, 0.6029099822044373, -0.4117699861526489, -0.2166299968957901, 0.042454998940229416, 1.0694999694824219, -0.20774999260902405, -0.5250899791717529, -0.4159899950027466, 0.20303000509738922, 0.11692000180482864, 0.1739100068807602, -0.26111000776290894, 0.022877000272274017, 0.5033699870109558, 0.034651998430490494, 0.2655999958515167, 0.13075000047683716, 0.06352800130844116, 0.8255100250244141, 0.3031800091266632, 0.3974500000476837, -0.3274399936199188, -0.2055100053548813, -0.09925699979066849, -0.7507500052452087, 0.16574999690055847, -0.24924999475479126, -0.04562700167298317, -0.6189299821853638, 0.2531200051307678, 0.17754000425338745, -0.0443900004029274, -0.661870002746582, -0.3958599865436554, 
0.6389899849891663, -0.12732000648975372, 0.263839989900589, 0.5766500234603882, 0.0887259989976883, 0.06765399873256683, -0.4159199893474579, 0.724049985408783, -0.2760300040245056, 0.3051300048828125, -0.1231599971652031, -0.4645499885082245, -0.2110300064086914, -0.6620100140571594, 0.25843000411987305, -0.26579999923706055, -0.11151000112295151, -0.053467001765966415, 0.4409799873828888, -0.5055800080299377, -0.4550800025463104, -0.021699000149965286, -0.1311500072479248, 0.4532099962234497, 0.1848900020122528, 0.10830999910831451, -0.9085999727249146, 0.23948000371456146, -1.1369999647140503, -0.12065000087022781, -0.1993200033903122, 0.20015999674797058, -0.10922999680042267, 0.21936999261379242, 0.08078700304031372, 0.25839000940322876, -0.033466000109910965, -0.15863999724388123, 0.48778998851776123, -0.0447239987552166, -0.0056687998585402966, 0.25527000427246094, 0.4964999854564667, -0.23943999409675598, -0.23197999596595764, -0.6543999910354614, 0.14709000289440155, -0.24120000004768372, -0.15755000710487366, -0.3597399890422821], u'clock': [0.08165399730205536, 0.14842000603675842, -0.05922599881887436, 0.13867999613285065, 0.0003969100071117282, -0.10129000246524811, 0.24782000482082367, -0.5255399942398071, 0.6901199817657471, -0.8259000182151794, 0.1460999995470047, -0.15440000593662262, 0.5631399750709534, -0.3790600001811981, 0.6994400024414062, 0.13213999569416046, 0.23107999563217163, -0.4994100034236908, -0.8679199814796448, -0.015327000059187412, 0.22867999970912933, -0.287200003862381, -0.04801899939775467, -0.144679993391037, 0.7656400203704834, 0.1993499994277954, -0.16506999731063843, -0.44951000809669495, -0.45006000995635986, -0.001766399946063757, 0.5909600257873535, 0.38618001341819763, -0.291810005903244, 0.5502200126647949, -0.7234200239181519, 0.11554999649524689, -0.6076599955558777, -0.3613399863243103, -0.47115999460220337, 0.41822001338005066, -0.1990099996328354, 0.49421998858451843, -0.3191100060939789, -0.195810005068779, -0.1688700020313263, 0.17317000031471252, 0.3047800064086914, -0.131850004196167, 0.07450100034475327, -0.3618600070476532, 0.09673400223255157, 0.27195000648498535, 0.06851399689912796, 0.05279399827122688, -0.41106000542640686, -0.18091000616550446, -0.1146399974822998, 0.21046000719070435, 0.06938300281763077, 0.4465000033378601, 0.2783200144767761, -0.19853000342845917, 0.35319000482559204, -0.024163000285625458, -0.4496999979019165, 0.11479000002145767, 0.3725599944591522, 0.21755999326705933, 0.33438000082969666, -0.5769500136375427, 0.37595999240875244, 0.5586599707603455, -0.08815199881792068, 0.05776600167155266, 0.2923800051212311, 0.8400800228118896, 0.06207599863409996, -0.5133799910545349, 0.27118000388145447, -0.15877999365329742, 0.11841999739408493, -0.28797999024391174, -0.21198999881744385, 0.5508000254631042, 0.11606000363826752, 0.04831099882721901, -0.16957999765872955, -0.09432999789714813, -0.12999999523162842, 0.263700008392334, 0.8748800158500671, -0.31255000829696655, -0.20558999478816986, -0.1430799961090088, -0.386570006608963, -0.11794000118970871, -0.23952999711036682, -0.5196899771690369, 0.447299987077713, -0.45412999391555786, 0.08691100031137466, 0.5075700283050537, 0.11423999816179276, -0.05184699967503548, 0.4692299962043762, -0.5387600064277649, 0.2467699944972992, -0.17372000217437744, -0.08164799958467484, 0.40981000661849976, -0.41725999116897583, 0.1143300011754036, -0.6557300090789795, 0.4583199918270111, -0.34099000692367554, -0.21397000551223755, -0.4089199900627136, 0.2729499936103821, 
-0.43922001123428345, -0.03754099830985069, -0.1442600041627884, -0.2782000005245209, 0.34321001172065735, -0.25679001212120056, -0.1368200033903122, -0.4577699899673462, 0.652459979057312, 0.6768900156021118, -0.4298500120639801, 0.07785899937152863, 0.2508600056171417, 0.29238998889923096, -0.09313700348138809, -0.1466899961233139, -0.33261001110076904, 0.12789000570774078, 0.489439994096756, -0.2559399902820587, 0.18164999783039093, 0.21664999425411224, -0.45548000931739807, 0.08848100155591965, -0.1290300041437149, 0.30577999353408813, -0.07122799754142761, 0.2559800148010254, -0.4763199985027313, -0.14112000167369843, 0.45625001192092896, -0.11270999908447266, -0.15805000066757202, 0.2828499972820282, 0.026009999215602875, -0.3222000002861023, 0.0768669992685318, -0.3001599907875061, -0.10341999679803848, -0.149849995970726, -0.026528000831604004, -0.17531999945640564, -0.08696500211954117, 0.028697000816464424, 0.07060699909925461, -1.0176000595092773, 0.21212999522686005, 0.6475600004196167, 0.023700999096035957, -0.19269999861717224, 0.35394999384880066, 0.3103399872779846, -0.2524600028991699, 0.5519000291824341, 0.2565000057220459, 0.16395999491214752, 0.3281700015068054, 0.1618099957704544, -0.628250002861023, 0.8094300031661987, -0.36302998661994934, -0.3320100009441376, -0.12684999406337738, -0.20232999324798584, -0.22134000062942505, -0.36021000146865845, -0.09336099773645401, -0.4661400020122528, 0.18445000052452087, -0.7567600011825562, 0.055112000554800034, -0.08090099692344666, 0.8075299859046936, 0.12685999274253845, -0.3427700102329254, 0.24413999915122986, 0.045605000108480453, -0.43751001358032227, -0.4554400146007538, 0.18129000067710876, 0.17739999294281006, -0.4378800094127655, 0.6987199783325195, 0.37821999192237854, 0.7755500078201294, 0.296999990940094, 0.34828001260757446, 0.045090001076459885, -0.8250600099563599, -0.9594100117683411, 0.12473999708890915, 0.3857699930667877, 0.41159000992774963, 0.44179999828338623, 0.04058599844574928, -0.7969099879264832, -0.268310010433197, 0.2631100118160248, -0.5670400261878967, -0.22811000049114227, 0.207179993391037, 0.22262999415397644, -0.5375400185585022, 0.0328189991414547, 0.13007999956607819, -0.3478499948978424, 0.24642999470233917, 0.31066998839378357, 0.13162000477313995, -0.09801100194454193, 0.1446399986743927, -0.23393000662326813, 0.12852999567985535, 0.06113100051879883, 0.4428899884223938, -0.2920899987220764, -0.7085199952125549, 0.40084001421928406, -0.15049000084400177, 0.12639999389648438, 0.4215799868106842, -0.02630000002682209, 0.23023000359535217, 0.2957499921321869, 0.1681399941444397, 0.01946300081908703, -0.09022799879312515, -0.060061998665332794, -0.05474400147795677, 0.1678600013256073, -0.328900009393692, -0.17332999408245087, -0.2908500134944916, -0.19865000247955322, -0.05100199952721596, 0.0331449992954731, 0.34968000650405884, 0.3080100119113922, -0.2595300078392029, 0.3218500018119812, -0.38905999064445496, -0.15491999685764313, -0.03514000028371811, -0.11635000258684158, 0.3546999990940094, 0.28387001156806946, -0.28325000405311584, 0.17177000641822815, 0.2381799966096878, -0.18685999512672424, 0.8946499824523926, -0.6076599955558777, 0.0522180013358593, 0.49441999197006226, 0.2594200074672699, -0.052678998559713364, 0.071602001786232, 0.42013001441955566, -2.0903000831604004, 0.44192999601364136, -0.3730500042438507, -0.13007999956607819, 0.1523900032043457, 0.15821999311447144, 0.30355000495910645, -0.33796000480651855, 0.3064500093460083, -0.08728200197219849, -0.2211499959230423, 
-0.5796599984169006, -0.3425000011920929, -0.31553998589515686, 0.14178000390529633, 0.16224999725818634, 0.07208500057458878, -0.46665000915527344, -0.1695300042629242, 0.06808499991893768, 0.4510500133037567, 0.06957300007343292, -0.11319000273942947, -0.07120399922132492], u'metal': [0.03929999843239784, 0.10657999664545059, -0.4749999940395355, -0.5335000157356262, 0.380950003862381, -0.4939799904823303, 0.3809700012207031, 0.13610999286174774, 0.048666998744010925, -1.1676000356674194, -0.6628999710083008, -0.32416000962257385, -0.07510299980640411, 0.14057999849319458, -0.07235399633646011, -0.7610999941825867, -0.9849500060081482, -0.007310300134122372, 0.06397700309753418, -0.43783000111579895, 0.27469998598098755, 0.18649999797344208, 0.13877999782562256, 0.581250011920929, 0.05127200111746788, -0.42535001039505005, 0.23382000625133514, 0.3990800082683563, -0.28751999139785767, 0.00817359983921051, 0.16753999888896942, 0.06505399942398071, -0.3374199867248535, 0.3002699911594391, -0.32738998532295227, -0.1635199934244156, -0.4068799912929535, -0.05522599816322327, 0.2368299961090088, 0.7452200055122375, -0.7515599727630615, -0.15148000419139862, -0.006415700074285269, -0.19878999888896942, -0.008562400005757809, 0.41356998682022095, -0.31657999753952026, -0.7115899920463562, -0.06972800195217133, 0.3523100018501282, 0.016527000814676285, 0.3511500060558319, 0.02843100018799305, 0.35662999749183655, 0.19165000319480896, 0.20529000461101532, -0.577049970626831, -0.15012000501155853, 0.7472000122070312, -0.37942999601364136, 0.4090000092983246, 0.6991299986839294, 0.49028000235557556, -0.10770999640226364, 0.5232999920845032, -0.04824100062251091, 0.12772999703884125, 0.4719800055027008, 0.05013599991798401, 0.7209600210189819, 0.24108000099658966, -0.39315998554229736, -0.1368899941444397, 0.31213998794555664, -0.4378100037574768, 0.3706499934196472, -0.21664999425411224, -0.2088800072669983, 0.02428700029850006, -0.6289299726486206, 0.10673999786376953, -0.23652000725269318, -0.002644999884068966, 0.025169000029563904, 0.3391900062561035, -0.1593800038099289, 0.1324400007724762, 0.4454599916934967, -0.6801999807357788, 0.006860199850052595, 0.7764999866485596, 0.06189500167965889, 0.09136000275611877, -0.6911600232124329, -0.5418999791145325, -0.2757599949836731, -0.35016998648643494, -0.3937099874019623, 0.2983100116252899, -0.4414199888706207, -0.45879998803138733, 0.38475000858306885, 0.15494999289512634, -0.7012500166893005, 0.5493900179862976, -0.5745700001716614, 0.2693899869918823, 0.14264999330043793, -0.5541200041770935, -0.6231899857521057, -0.3190000057220459, 0.16223999857902527, -0.4039199948310852, -0.719730019569397, 0.23789000511169434, 0.08004099875688553, 0.007889499887824059, 0.2029999941587448, 0.03205399960279465, -0.520609974861145, -0.06557399779558182, -0.8009999990463257, -0.26467999815940857, -0.21494999527931213, 0.13021999597549438, -0.11342000216245651, -0.16685999929904938, -0.1686599999666214, -0.08708000183105469, -0.0375249981880188, 0.12620000541210175, 0.8902300000190735, 0.42537999153137207, -0.0876929983496666, 0.22144000232219696, 0.04876000061631203, -0.19405999779701233, 0.4635300040245056, 0.1818699985742569, 0.24616999924182892, 0.01697699911892414, 0.13207000494003296, -0.03186500072479248, -0.593779981136322, 0.6589499711990356, 0.3734300136566162, 0.569350004196167, 0.08313100039958954, 0.04345399886369705, -0.8852800130844116, 0.45664000511169434, -0.36302000284194946, -0.5273500084877014, -0.7056599855422974, 0.6830499768257141, 
-0.1369599997997284, -0.22394999861717224, -0.14760999381542206, 0.19464999437332153, -0.2683500051498413, 0.3429499864578247, -0.36188000440597534, 0.08045300096273422, -0.16955000162124634, 0.6149500012397766, 0.564740002155304, 0.42838001251220703, 0.08579500019550323, 0.6259899735450745, -0.3341299891471863, 0.26381999254226685, 0.6202600002288818, 0.415120005607605, 0.03458800166845322, 0.49957001209259033, -0.708329975605011, -0.3636699914932251, 0.214819997549057, -0.007896100170910358, -0.7910900115966797, 0.3332599997520447, -0.25824999809265137, 0.2621999979019165, 0.184129998087883, -0.06174999848008156, -0.5399500131607056, 0.5993800163269043, 0.2094999998807907, 0.6355599761009216, 0.19479000568389893, 0.008060799911618233, 0.07853399962186813, 0.38888001441955566, 0.5147299766540527, -0.3526400029659271, -0.29618000984191895, -0.3904300034046173, 0.16836999356746674, -0.0891600027680397, 0.378710001707077, 0.6226699948310852, -0.10147000104188919, 0.1646299958229065, 0.03427400067448616, 0.1026500016450882, 0.651889979839325, 0.6255999803543091, -0.4518499970436096, -0.39340999722480774, 0.11225999891757965, 0.38975000381469727, 0.26815998554229736, 0.4417699873447418, 0.3202100098133087, 0.2662999927997589, 0.29802998900413513, 0.6851599812507629, -0.04345199838280678, -0.02397499978542328, 0.13595999777317047, -0.08873599767684937, -0.36048999428749084, 0.3628300130367279, -0.6686099767684937, -0.4606899917125702, 0.33807000517845154, -0.8481799960136414, -0.5418300032615662, -0.02141300030052662, 0.017791999503970146, -0.045747000724077225, -0.22434000670909882, -0.45524999499320984, 0.19719000160694122, 0.5910699963569641, 0.11303000152111053, 0.05845699831843376, -0.16041000187397003, -0.7378600239753723, 0.17951999604701996, 0.07756999880075455, -0.44609999656677246, -0.41304999589920044, -0.1514900028705597, -0.35295000672340393, -0.06640300154685974, 0.2697699964046478, -0.8554400205612183, -0.4611400067806244, 0.5976700186729431, -0.35183000564575195, 0.34505000710487366, -0.18271000683307648, -0.12615999579429626, 0.273140013217926, 0.5097500085830688, 0.09077499806880951, -0.5130599737167358, -0.2154099941253662, -0.04595800116658211, 0.14597000181674957, -0.20089000463485718, 0.017250999808311462, -0.16064999997615814, 0.22109000384807587, 0.5737400054931641, -0.8084400296211243, 0.4115599989891052, 0.04736199975013733, -0.30206000804901123, -0.31314998865127563, -0.04224200174212456, 0.5509600043296814, -0.45548000931739807, -1.0230000019073486, -0.1212799996137619, -1.6990000009536743, -0.32806000113487244, -0.7352200150489807, 0.15358999371528625, -0.306549996137619, -0.6009100079536438, -1.1157000064849854, 0.6564300060272217, 0.5379300117492676, 0.3469400107860565, -0.6991000175476074, 0.06864099949598312, 0.14591999351978302, -0.10491999983787537, 0.12201999872922897, -0.035422999411821365, -0.9118499755859375, 0.7920500040054321, 0.6507999897003174, 0.7191200256347656, -0.13841000199317932, -0.18648000061511993, -0.08376400172710419, 0.3715200126171112], u'dog': [-0.11043000221252441, 0.8121700286865234, 0.07366800308227539, 0.19022999703884125, -0.05288799852132797, 0.06146800145506859, 0.16076000034809113, 0.4130200147628784, -0.30199000239372253, -0.908270001411438, 0.27504000067710876, -0.03189000114798546, -0.2884199917316437, 0.23446999490261078, 0.47679001092910767, 0.5012400150299072, 0.29370999336242676, 0.27028998732566833, 0.05474499985575676, 0.09803800284862518, 0.5711600184440613, 0.36754998564720154, 0.040734000504016876, 0.3434700071811676, 
-0.182559996843338, -0.2893500030040741, 0.023825999349355698, -0.19401000440120697, 0.2444400042295456, 0.1340699940919876, -0.1649399995803833, -0.26982998847961426, -0.2623400092124939, -0.21778999269008636, -0.8752800226211548, 0.7382199764251709, -0.08793099969625473, -0.010875999927520752, -0.2653999924659729, 0.3466799855232239, -0.558139979839325, 0.17590999603271484, 0.16925999522209167, -0.15725000202655792, -0.5042999982833862, -0.20100000500679016, 0.6670100092887878, -0.03251799941062927, 0.04501200094819069, 0.06567499786615372, -0.16061000525951385, -0.7336300015449524, 0.2464199960231781, 0.34325000643730164, 0.2189899981021881, 0.048645999282598495, -0.5998700261116028, -0.058152999728918076, -0.05169399827718735, -0.5784599781036377, 0.30000001192092896, 0.35078001022338867, 0.4664599895477295, -0.0075309001840651035, 0.10454999655485153, -0.5101600289344788, -0.055987000465393066, -0.10294999927282333, -0.26475998759269714, -0.04123000055551529, -0.028371000662446022, 0.51978999376297, -0.34848999977111816, -0.47216999530792236, -0.3722899854183197, -0.03279000148177147, 0.13989000022411346, 0.3571600019931793, 0.19304999709129333, -0.2198600023984909, 0.24135999381542206, 0.4097599983215332, 0.37516000866889954, 0.1425500065088272, -0.03414300084114075, -0.7265300154685974, -0.10831999778747559, 0.6861600279808044, -0.2633500099182129, -0.4234499931335449, -0.2425300031900406, 0.1577800065279007, 0.14258000254631042, -0.3274900019168854, -0.3469899892807007, 0.16147999465465546, 0.19603000581264496, 0.41639000177383423, -0.2337000072002411, 0.07581599801778793, 0.15898999571800232, 0.00166229996830225, -0.04830100014805794, -0.10610999912023544, -0.19325999915599823, 0.14494000375270844, 0.015406000427901745, 0.10628999769687653, -0.036699000746011734, 0.6323000192642212, 0.12985999882221222, 0.4990200102329254, -1.1323000192642212, -0.1263599991798401, 0.06471800059080124, 0.12374000251293182, -0.4971199929714203, -0.01483600027859211, 0.10487999767065048, -0.49818000197410583, -0.28856000304222107, 0.389490008354187, -0.03182800114154816, -0.2862499952316284, -0.09875799715518951, -0.07699000090360641, -0.24233999848365784, 0.7579299807548523, 0.34834998846054077, -0.7103000283241272, 0.4531799852848053, -0.34417998790740967, -0.1945900022983551, 0.6147800087928772, -0.029009999707341194, -0.2786400020122528, 0.385560005903244, 0.10072000324726105, 0.12894999980926514, 0.01799199916422367, 0.3366999924182892, 0.20698000490665436, -0.3804900050163269, -0.006666100118309259, 0.11540000140666962, -0.08526799827814102, -0.14608000218868256, 0.44514000415802, -0.09367399662733078, 0.23638999462127686, -0.1144699975848198, 1.0947999954223633, -0.057822998613119125, -0.16294999420642853, 0.5587999820709229, -0.018988000229001045, -0.07137399911880493, 0.2131900042295456, 0.06127699837088585, 0.727590024471283, 0.6274700164794922, -0.19280000030994415, 0.13056999444961548, 0.1742600053548813, -0.10228999704122543, 0.152319997549057, 0.5249999761581421, -0.21919000148773193, -0.27184998989105225, -0.5418599843978882, 0.31751999258995056, 0.16374999284744263, -0.2903900146484375, 0.17073999345302582, -0.3181400001049042, -0.9642099738121033, -0.1160999983549118, -0.29951000213623047, 0.18685999512672424, -0.4598599970340729, 0.4163300096988678, -0.17583000659942627, -0.3458299934864044, -0.27243998646736145, -0.5021600127220154, 0.012852000072598457, 0.5983800292015076, -0.11236999928951263, 0.24696999788284302, -0.4904800057411194, -0.4418799877166748, -0.16255000233650208, 
-0.7331299781799316, -0.3767699897289276, -0.6892499923706055, 0.061174001544713974, -0.42100998759269714, -0.1315300017595291, -0.008359000086784363, -0.018360000103712082, 1.3686000108718872, 0.04616900160908699, 0.9462199807167053, -0.01512600015848875, -0.12477000057697296, 0.48754000663757324, 0.22383999824523926, -0.21819999814033508, -0.23388999700546265, 0.15207000076770782, -0.28718000650405884, -0.6390799880027771, -0.22382999956607819, -0.18014000356197357, -0.3354800045490265, 0.5358700156211853, -0.29366999864578247, 0.10865999758243561, 0.06341099739074707, -0.00934240035712719, -0.1588599979877472, 0.2260199934244156, 0.11924999952316284, -0.4144200086593628, -0.07806199789047241, -0.09785699844360352, 0.2793799936771393, -0.18347999453544617, -0.3458400070667267, 0.1848900020122528, 0.17402000725269318, -0.5219799876213074, -0.4330599904060364, 0.16256000101566315, 0.14032000303268433, 0.35124000906944275, -0.18279999494552612, -0.3598400056362152, -0.13008999824523926, 0.16303999722003937, 0.3173399865627289, 0.003771600080654025, -0.04549799859523773, -0.42065998911857605, -0.44418999552726746, -0.6898499727249146, -0.49358999729156494, 0.0702809989452362, -0.1437699943780899, 0.6250799894332886, -0.056311000138521194, 0.18850000202655792, -0.05678499862551689, 0.14052000641822815, 1.1972999572753906, 0.718940019607544, 0.5433200001716614, -0.12460999935865402, -0.11977999657392502, 0.3016299903392792, -0.16272999346256256, -0.04673999920487404, -0.25249001383781433, -0.03065899945795536, -0.3227100074291229, 0.32361000776290894, 0.33243998885154724, -0.02781900018453598, -0.3336699903011322, -0.023444000631570816, -0.5039399862289429, -0.20587000250816345, -0.13012999296188354, -0.3588399887084961, 0.0453840009868145, -0.11862999945878983, -1.7257000207901, 0.39441001415252686, -0.531790018081665, 0.5820900201797485, -0.6577100157737732, 0.3684900104999542, 0.23518000543117523, 0.10802000015974045, -0.8315899968147278, 0.6148599982261658, 0.25547000765800476, -0.452890008687973, 0.514460027217865, -0.17911000549793243, -0.1238899976015091, 0.18688000738620758, -0.4110200107097626, -0.7087699770927429, -0.37501001358032227, -0.6615200042724609, 0.677299976348877, 0.33935999870300293, 0.5799400210380554, 0.06814900040626526], u'ocean': [0.12654000520706177, -0.6427800059318542, -0.45298001170158386, -0.014820000156760216, -0.14441999793052673, 0.16745999455451965, -0.08539199829101562, -0.20653000473976135, 0.588670015335083, -1.455199956893921, 0.5084900259971619, 0.11858999729156494, 0.09472700208425522, -0.3571600019931793, 0.0023544998839497566, 0.24716000258922577, -0.0996439978480339, 0.18470999598503113, -0.2706499993801117, 0.9155700206756592, -0.537909984588623, 0.49689000844955444, -0.39528998732566833, 0.091109998524189, -0.3292199969291687, 0.06792700290679932, 0.3638400137424469, 0.6876000165939331, -0.4681600034236908, -0.41416001319885254, 0.7116199731826782, 0.3476499915122986, -0.81836998462677, -0.673520028591156, 0.44032999873161316, -0.053426001220941544, 0.27713000774383545, 0.022645000368356705, -0.03650299832224846, 0.45221999287605286, -0.5312600135803223, 0.07674799859523773, 0.21449999511241913, 0.15700000524520874, 0.061416998505592346, 0.304639995098114, 0.808430016040802, 0.13985000550746918, 0.38982000946998596, -0.1820099949836731, -0.0324070006608963, 0.10730999708175659, 0.5602800250053406, -0.6675900220870972, 0.018634000793099403, 0.4265500009059906, 0.25387999415397644, 0.15546999871730804, 0.01179600041359663, 0.28299999237060547, 
-0.2632499933242798, 0.3656899929046631, 0.9569500088691711, -0.08969700336456299, 0.43428000807762146, -0.263700008392334, -0.5257499814033508, 0.3726100027561188, -0.7939500212669373, 0.24417999386787415, 0.15395000576972961, 0.2728100121021271, -0.2651500105857849, 0.06441400200128555, -0.4291200041770935, -0.01501299999654293, 0.04826800152659416, -0.5198400020599365, -0.3018600046634674, -0.0025080000050365925, 0.13847999274730682, 0.23533999919891357, -0.6021599769592285, 0.3805299997329712, -0.40375998616218567, 0.021909000352025032, 0.3456900119781494, 0.32638999819755554, -0.054131001234054565, -1.0046000480651855, -0.1278499960899353, -0.3174799978733063, 0.042490001767873764, -0.44550999999046326, -0.5416200160980225, 0.4727399945259094, 0.2496899962425232, -0.09488499909639359, -0.10792999714612961, 0.06451699882745743, 0.13905000686645508, 0.6409000158309937, 0.36691999435424805, -0.032632000744342804, 0.08355999737977982, 0.36834999918937683, 0.4383400082588196, -0.2811500132083893, 0.6329500079154968, -0.00020828000560868531, -0.17643000185489655, -0.776199996471405, 0.2009900063276291, 0.22940999269485474, 0.08919200301170349, 0.04352099820971489, 0.2947399914264679, 0.17100000381469727, -0.1639000028371811, -0.05114499852061272, -0.1540600061416626, -0.48798999190330505, -0.48511001467704773, 0.41051000356674194, 0.1307699978351593, 0.1631300002336502, 0.35168999433517456, -0.04875199869275093, 0.04478999972343445, -0.02075600065290928, -0.37748000025749207, 0.4407599866390228, 0.4628300070762634, -0.04529999941587448, 0.08427400141954422, 0.0732560008764267, -0.008033299818634987, 0.024746999144554138, -0.35315001010894775, 0.21003000438213348, 0.2339099943637848, -0.016808999702334404, -0.09867200255393982, -0.11367999762296677, -0.6612399816513062, 0.09304100275039673, 0.2556599974632263, 0.3771199882030487, 0.05912400037050247, -0.030083000659942627, 0.5082499980926514, 0.0094352001324296, 0.4018799960613251, -0.6677899956703186, 0.9150099754333496, -0.08252999931573868, -0.23312999308109283, -0.09046000242233276, -0.12217999994754791, -0.0016804999904707074, -0.07913299649953842, -0.7178199887275696, 0.34775999188423157, -0.045976001769304276, -0.008702999912202358, -0.5279499888420105, 0.5168499946594238, 0.2872900068759918, -0.0336339995265007, 1.1660000085830688, 0.0867220014333725, -0.16253000497817993, 0.03250100091099739, -0.010359999723732471, -0.22213999927043915, 0.0008179500000551343, -0.06085899844765663, 0.20062999427318573, -0.1965000033378601, -0.5821200013160706, -0.382779985666275, -0.022348999977111816, 0.4361500144004822, 0.4979400038719177, 0.5651199817657471, -0.83788001537323, 0.5024999976158142, 0.5317000150680542, -0.5008800029754639, -0.3248099982738495, 0.1875, 0.3520599901676178, 0.04674199968576431, -0.5987399816513062, 0.24626000225543976, 0.3931800127029419, 0.1611499935388565, -0.6369100213050842, -0.5322499871253967, 0.006361499894410372, 1.0814000368118286, -0.2942500114440918, -0.2672500014305115, -0.38580000400543213, 0.2964499890804291, 0.15522000193595886, 0.20024000108242035, 0.08181600272655487, 0.2884100079536438, 0.038256000727415085, 0.01133699994534254, 0.18523000180721283, 0.04163699969649315, -0.2879199981689453, -0.029788000509142876, -0.004181200172752142, 0.09420599788427353, -0.2794800102710724, 0.14653000235557556, -0.31512001156806946, 0.9657400250434875, 0.1123799979686737, 0.10090000182390213, -0.2408899962902069, -0.030712999403476715, -0.01771700009703636, -0.17702999711036682, -0.7877699732780457, 
-0.4310399889945984, 0.08333200216293335, 0.6578699946403503, -0.031237000599503517, -0.3350600004196167, 0.018314000219106674, 0.488970011472702, -0.24356000125408173, -0.03322000056505203, 0.024560999125242233, 0.25753000378608704, -0.11743000149726868, -0.7709100246429443, 0.3412399888038635, 0.03381500020623207, 0.22195999324321747, -0.5238400101661682, 0.2884100079536438, 0.10313999652862549, -0.05518599972128868, 0.14131000638008118, 0.2423200011253357, 0.039170000702142715, 0.009897599928081036, 0.23929999768733978, 0.28714001178741455, -0.38286998867988586, 0.018883999437093735, 0.3157599866390228, -0.025748999789357185, 0.1273999959230423, 0.34132999181747437, -0.03135399892926216, -0.3979800045490265, -0.618340015411377, -0.2168000042438507, 0.24065999686717987, 0.00036378001095727086, 0.11813999712467194, 0.04818100109696388, 0.22829000651836395, 0.012994999997317791, 0.5010200142860413, 0.047874998301267624, -0.11015000194311142, 0.16911999881267548, 0.7429199814796448, 0.3601999878883362, -1.423200011253357, 0.7088199853897095, 0.13767999410629272, -0.05976099893450737, -0.3101300001144409, 0.7022899985313416, -0.301829993724823, -0.4130600094795227, -0.3098500072956085, -0.396340012550354, -0.26930001378059387, -0.3761399984359741, 0.2767600119113922, -0.3553900122642517, -0.39280998706817627, 0.32589998841285706, -0.4641599953174591, -0.029652999714016914, 0.03869299963116646, 0.3600200116634369, 0.0997219979763031, 0.0037376999389380217, 0.34064000844955444, -0.18738999962806702], u'jacket': [-0.41640999913215637, -0.11527000367641449, -0.14388999342918396, -0.0057080998085439205, -0.34505999088287354, -0.043296001851558685, -0.5354800224304199, -0.27595001459121704, 0.2583500146865845, -0.4122599959373474, 0.41835999488830566, 0.06921800225973129, 0.02575800009071827, 0.389849990606308, -0.6327499747276306, 0.13826000690460205, -0.36083999276161194, 0.14417000114917755, -0.43296998739242554, 0.2382200062274933, -0.046539001166820526, -0.11857999861240387, 0.25839000940322876, 0.0437919981777668, -0.6357100009918213, -0.8392199873924255, 0.5652899742126465, 0.2772200107574463, 0.18801000714302063, -0.026624999940395355, 0.16690999269485474, -0.46531999111175537, -0.07015199959278107, 0.0071875001303851604, -0.47110000252723694, 0.30000001192092896, -0.41885000467300415, -0.43105000257492065, 0.05114800110459328, 0.7229700088500977, -0.32260000705718994, -0.3630000054836273, 0.06979300081729889, -0.2691099941730499, 0.3147599995136261, 0.16006000339984894, 0.5001999735832214, -0.3080799877643585, -0.6634699702262878, -0.1366100013256073, -0.4074400067329407, -0.4339900016784668, 0.16731999814510345, 0.01589200086891651, -0.16037000715732574, 0.0921119973063469, -0.17001000046730042, -0.787060022354126, 0.6532400250434875, -0.07504899799823761, -0.07744999974966049, -0.08987800031900406, -0.017909999936819077, -0.016179999336600304, 0.04344499856233597, -0.23904000222682953, -0.4575999975204468, -0.010478000156581402, 0.1925099939107895, -0.055337000638246536, 0.5933499932289124, -0.24776999652385712, 0.4123300015926361, -0.017121000215411186, -0.14413000643253326, 0.15533000230789185, -0.4816400110721588, 0.0667250007390976, -0.09908399730920792, -0.46700000762939453, 0.13315999507904053, 0.7555699944496155, -0.23851999640464783, 0.03482099995017052, -0.05125200003385544, -0.06274200230836868, -0.01433899998664856, 0.31005001068115234, -0.5140299797058105, -0.32719001173973083, 0.09058599919080734, 0.15358999371528625, -0.530489981174469, 0.062001001089811325, 
0.1234700009226799, 0.4000900089740753, 0.289110004901886, 0.19283999502658844, 0.06271299719810486, 0.09934700280427933, -0.029458999633789062, 0.8936399817466736, -0.00555279990658164, 0.7673400044441223, -0.011423000134527683, -0.08625800162553787, 0.5141299962997437, 0.39831000566482544, -0.33945998549461365, -0.21899999678134918, -0.5083199739456177, 0.9390400052070618, -0.18005000054836273, -0.2842699885368347, -0.40806999802589417, 0.21552999317646027, 0.10488999634981155, 0.49046000838279724, 0.5720999836921692, -1.2028000354766846, -0.21536000072956085, 0.30493998527526855, 0.5613899827003479, 0.02674099989235401, -0.07436800003051758, -0.05475199967622757, 0.16574999690055847, 0.14386999607086182, -0.05087500065565109, -0.09249400347471237, -0.0026533000636845827, -0.04913200065493584, -0.2190299928188324, -0.40665000677108765, -0.22672000527381897, -0.08876500278711319, -0.34536001086235046, 0.20757000148296356, 0.25249001383781433, 0.38659000396728516, 0.03874899819493294, 0.01828099973499775, 0.246629998087883, -0.28723999857902527, -0.0953890010714531, 0.21894000470638275, -0.08050200343132019, 0.1904900074005127, 0.7200499773025513, 0.28707000613212585, 0.046512000262737274, 0.006919899955391884, -0.06542400270700455, -0.8470399975776672, 0.32989001274108887, -0.44231998920440674, -0.06570500135421753, -0.22302000224590302, 0.5057899951934814, 0.8565599918365479, 0.0764010027050972, -0.4107399880886078, -0.2837600111961365, -0.014835000038146973, 0.208639994263649, -0.4320400059223175, -0.056060999631881714, 0.6276400089263916, -0.13401000201702118, -0.16471000015735626, 0.0018318999791517854, -0.1103999987244606, -0.4789699912071228, -0.23201000690460205, -0.1706400066614151, -0.8842599987983704, 0.30594000220298767, 0.5965099930763245, -0.06009500101208687, -0.4643999934196472, 0.28095000982284546, 0.31560999155044556, -0.046751998364925385, -0.1064700037240982, 0.09245099872350693, -0.20280000567436218, 0.6684200167655945, 0.4161500036716461, -0.07772199809551239, 0.3517799973487854, 0.040084000676870346, 0.3943699896335602, -0.2752099931240082, 0.03143699839711189, -0.26475000381469727, -0.13416999578475952, -0.8635200262069702, 0.2838999927043915, -0.23070000112056732, -0.02397800050675869, 0.8505399823188782, -0.20503999292850494, 0.6572499871253967, 0.27327999472618103, 0.6736199855804443, -0.22084000706672668, 0.44826000928878784, 0.21190999448299408, -0.5833699703216553, -0.12049999833106995, 0.2645699977874756, -0.44387000799179077, -0.1925099939107895, 0.26895999908447266, 0.5837000012397766, -0.32548999786376953, 0.5604599714279175, -0.5748299956321716, 0.2910900115966797, -0.5371699929237366, 0.6232200264930725, 0.42712000012397766, 0.22405999898910522, 0.5233700275421143, 0.028829999268054962, -0.3068400025367737, -0.12230999767780304, -0.18348999321460724, 0.08045700192451477, -0.7476999759674072, 0.5559300184249878, -0.3294700086116791, -0.3570399880409241, 0.4125100076198578, 0.40448999404907227, -0.5592899918556213, 0.7547699809074402, -0.3999600112438202, -0.5319499969482422, 0.8367599844932556, 0.810699999332428, -0.02933100052177906, -0.6965699791908264, 0.45407000184059143, -0.20645999908447266, 0.44284000992774963, -0.3416599929332733, -0.7918599843978882, -0.02957100048661232, -0.7885900139808655, -0.44859999418258667, -0.006865500006824732, 0.11406999826431274, -0.708549976348877, 0.3451699912548065, 0.08451099693775177, 0.1993200033903122, 0.45208001136779785, -0.021974999457597733, -0.6841199994087219, 0.04133699834346771, -0.128930002450943, 
0.4888499975204468, 0.4694899916648865, -0.8944600224494934, 0.4573200047016144, -0.2933900058269501, -0.025803999975323677, -0.6167799830436707, -0.049157001078128815, -0.11606000363826752, -0.23794999718666077, 0.2434699982404709, -0.04278700053691864, -0.6696799993515015, -0.12605999410152435, -0.6177200078964233, 0.06189600005745888, -0.3716999888420105, 0.18357999622821808, 0.19960999488830566, -0.1445000022649765, 0.195360004901886, -0.3116399943828583, -0.5309399962425232, 0.36924999952316284, -0.5327000021934509, 0.3809199929237366, -0.25558000802993774, -0.5059400200843811, 0.10813000053167343, 0.0005023899720981717, -0.24958999454975128, 0.5939099788665771, -0.5466099977493286, -0.24998000264167786, 0.0800200030207634, 0.7117499709129333, 0.7100099921226501, -0.02318199910223484], u'coal': [-0.3382599949836731, 0.24300000071525574, 0.4116100072860718, -0.4868200123310089, -0.3840300142765045, -0.7438300251960754, 0.36917999386787415, 0.2561199963092804, -0.19760000705718994, -0.7095100283622742, -0.7595099806785583, -0.31367000937461853, 0.12298999726772308, 0.3853600025177002, 0.021981000900268555, -0.39164999127388, -0.3365499973297119, -0.20943999290466309, -0.4373599886894226, 0.5863699913024902, -0.13415999710559845, -0.1524599939584732, 0.5235000252723694, 0.6919800043106079, -0.2706800103187561, -0.08800199627876282, -0.058371998369693756, 0.6901599764823914, -0.3737100064754486, 0.5214800238609314, 0.8986200094223022, 0.9759699702262878, 0.1871899962425232, 0.05462700128555298, 0.06647799909114838, 0.06841400265693665, 0.14711999893188477, 0.10758999735116959, 0.5515300035476685, 0.04520900174975395, -0.44749999046325684, 0.25016000866889954, 0.6897799968719482, -0.18522000312805176, 0.13502000272274017, -0.38901999592781067, 0.07982700318098068, -0.36844000220298767, 0.045329999178647995, -0.20959000289440155, 0.3978999853134155, 0.6311399936676025, -0.7910000085830688, 0.049837999045848846, 0.4047499895095825, 0.5917999744415283, 0.3039799928665161, -0.44179001450538635, -0.5138199925422668, -0.6342599987983704, -0.16148999333381653, -0.1634799987077713, 0.3889999985694885, -0.9025200009346008, -0.29892998933792114, 0.012037999927997589, -0.6084399819374084, 0.18233999609947205, -0.9065700173377991, 0.7356699705123901, 0.3813300132751465, -0.37553998827934265, -0.13266000151634216, -0.5566800236701965, -0.14687000215053558, -0.11569000035524368, -0.5588099956512451, 0.07096800208091736, 0.009596100077033043, 0.20695999264717102, -0.1248600035905838, -1.0616999864578247, 0.3847399950027466, -0.014891000464558601, 0.87117999792099, -0.17081999778747559, 0.14970000088214874, 0.08819399774074554, -0.010591999627649784, 0.18546999990940094, 0.6865100264549255, 0.5532900094985962, -0.07605200260877609, 0.3754900097846985, -0.5777199864387512, -0.2114199995994568, -0.3695499897003174, 0.46897000074386597, 0.3268299996852875, -0.09420499950647354, -0.14983999729156494, 0.05593299865722656, -0.44857001304626465, 0.2097799926996231, -0.17618000507354736, 1.128100037574768, 0.17342999577522278, 0.3308500051498413, 0.015269000083208084, 0.24714000523090363, 0.03476500138640404, -0.9177600145339966, -0.33765000104904175, -0.19801999628543854, -0.07688300311565399, -0.16047999262809753, 0.8420799970626831, -0.005625800229609013, 0.8528100252151489, 0.1369599997997284, -0.517300009727478, 0.4417800009250641, -0.5154600143432617, -0.19156000018119812, -0.3150100111961365, 0.4892500042915344, -0.5033400058746338, 0.21433000266551971, -0.229980006814003, -0.1531900018453598, 
0.5551999807357788, 0.912630021572113, -0.33129000663757324, -0.4629899859428406, -0.17562000453472137, 0.7339699864387512, -1.301800012588501, 0.10022000223398209, 0.25398001074790955, 0.24998000264167786, 0.37049001455307007, -0.1487099975347519, 0.5485000014305115, -0.479310005903244, -0.2612000107765198, 0.08470500260591507, 0.5277799963951111, 0.45232000946998596, 0.4023300111293793, -0.14997999370098114, 0.32774001359939575, -0.10886000096797943, -0.3917500078678131, -0.8991100192070007, 0.6806600093841553, 0.1278800070285797, 0.13610999286174774, -0.10614000260829926, -0.07801999896764755, -0.9248899817466736, -0.18407000601291656, -0.011114999651908875, 0.4843299984931946, 0.25819000601768494, 0.3782700002193451, -0.36177000403404236, -0.08279400318861008, -0.3823400139808655, -0.09643600136041641, -0.5613899827003479, -0.3257099986076355, 1.187999963760376, 0.10113999992609024, -0.06490500271320343, -0.018804000690579414, 0.2939299941062927, -0.8329899907112122, -0.25275999307632446, -0.20151999592781067, -0.09931199997663498, -0.14535999298095703, 0.07302600145339966, 0.4893600046634674, -0.49417001008987427, 0.18431000411510468, 0.3650999963283539, 0.6821600198745728, 0.36002999544143677, 0.19966000318527222, -0.18624000251293182, -0.04157799854874611, 0.6642500162124634, -0.10864999890327454, -0.18073999881744385, 0.2027300000190735, -0.6127099990844727, -0.6036999821662903, -1.0211999416351318, 0.04251600056886673, 0.4138700067996979, 0.10819999873638153, 0.631060004234314, -0.15017999708652496, -0.8779299855232239, 0.32528001070022583, 0.34064000844955444, 0.11236000061035156, -0.34575000405311584, 0.12504999339580536, -0.6148399710655212, -0.42941999435424805, -0.6198099851608276, -0.07238899916410446, -0.05818299949169159, 0.3900600075721741, 0.4681699872016907, 0.05122600123286247, 0.4070900082588196, -0.23612000048160553, -0.49004000425338745, 0.38161998987197876, 0.08207400143146515, 0.2179500013589859, -0.45941999554634094, 0.1076899990439415, -0.012091999873518944, -0.5877900123596191, -0.24421000480651855, -0.24355000257492065, 0.42412999272346497, -0.25525999069213867, -0.1428699940443039, -0.5258899927139282, 0.555429995059967, 0.2636600136756897, -0.3440000116825104, 0.4860900044441223, 0.07481800019741058, -0.5402899980545044, -0.40174999833106995, -0.2798599898815155, 0.14892999827861786, -0.06474599987268448, -0.1320900022983551, -0.2994700074195862, 0.0873349979519844, 0.37024998664855957, -0.35019999742507935, 0.011092999950051308, -0.2644999921321869, -0.43775999546051025, -0.5005499720573425, -0.20155000686645508, -0.4345499873161316, 0.8119999766349792, -0.194350004196167, -0.6199300289154053, 0.1254200041294098, -0.3242200016975403, -0.030628999695181847, -0.14659999310970306, 0.3899199962615967, 0.03228599950671196, 0.1146399974822998, 0.8673999905586243, 0.30063000321388245, 0.678629994392395, -0.4410499930381775, 0.4508199989795685, -0.005881300196051598, -0.2778100073337555, -0.46549999713897705, -0.080594003200531, 0.1588599979877472, -0.3020699918270111, -0.44936999678611755, -1.3532999753952026, -0.2594499886035919, 0.16986000537872314, 0.5988500118255615, -1.157099962234497, 0.20930999517440796, -0.7812600135803223, 0.12695999443531036, 0.4693099856376648, 0.15644000470638275, 0.27215999364852905, -0.47881001234054565, 0.5059000253677368, -0.15563000738620758, -0.5765299797058105, 0.30114999413490295, 0.5033699870109558, -0.10577999800443649, 0.09130699932575226, 1.2151999473571777, 0.32491999864578247, -1.0648000240325928, -0.0425879992544651, 
0.8526999950408936], u'shore': [-0.05873600021004677, -0.23064999282360077, -0.5665599703788757, 0.12752999365329742, -0.3499000072479248, 0.00012145000073360279, 0.34477999806404114, -0.07562199980020523, 0.2987099885940552, -0.8599900007247925, -0.09949400275945663, 0.30535000562667847, 0.520550012588501, -0.2146800011396408, 0.10247000306844711, 0.28060999512672424, -0.2562499940395355, -0.1310500055551529, -0.20545999705791473, -0.10209999978542328, -0.2492399960756302, -0.21692000329494476, -0.4233900010585785, -0.03277700021862984, 7.050400017760694e-05, 0.10216999799013138, 0.1400499939918518, 0.1581999957561493, -0.3571600019931793, 0.4880400002002716, 0.17213000357151031, -0.4983600080013275, 0.3458299934864044, 0.26434001326560974, -0.18449999392032623, 0.24675999581813812, 0.13311000168323517, 0.3530600070953369, 0.43015000224113464, -0.05961399897933006, -0.5261800289154053, 0.1602800041437149, 0.27717000246047974, 0.1505800038576126, 0.1378999948501587, 0.8479999899864197, 0.7968299984931946, -0.0253090001642704, 0.10136000066995621, 0.49265000224113464, -0.34518998861312866, -0.08382800221443176, 0.5490800142288208, -0.35238000750541687, 0.191880002617836, -0.05883200094103813, 0.08626899868249893, 0.4825400114059448, -0.3894599974155426, 0.2365799993276596, 0.48041999340057373, 0.14309999346733093, 0.7807700037956238, 0.11569999903440475, 0.516759991645813, 0.3557800054550171, -0.7202699780464172, -0.07194799929857254, 0.15573999285697937, 0.23976999521255493, -0.039055000990629196, 0.05336499959230423, 0.18925000727176666, -0.04907200112938881, -0.07866500318050385, -0.36333999037742615, 0.30726999044418335, 0.286980003118515, 0.5867699980735779, -0.38203999400138855, -0.10774999856948853, 0.1515199989080429, -0.11006999760866165, 0.24049000442028046, 0.1041100025177002, 0.011621000245213509, -0.3856000006198883, 0.05889900028705597, -0.2808600068092346, -0.2996000051498413, 0.5457900166511536, 0.38644999265670776, 0.08273699879646301, -0.42871999740600586, -0.07580699771642685, -0.06569399684667587, 0.07307899743318558, -0.3001500070095062, -0.1130400002002716, -0.26010000705718994, -0.1417199969291687, 0.4233599901199341, -0.02595599927008152, 0.12601999938488007, 0.026499999687075615, 0.18557000160217285, 0.1001800000667572, -0.22495999932289124, 0.5236700177192688, -0.144679993391037, -0.19523000717163086, -0.8107399940490723, -0.1729000061750412, -0.27922001481056213, 0.11123000085353851, -0.10807999968528748, 0.45473000407218933, -0.09006199985742569, 0.18925000727176666, 0.2252800017595291, 0.07742299884557724, -0.4471299946308136, -0.8047599792480469, 0.14722000062465668, 0.5309100151062012, 0.5741000175476074, 0.3426400125026703, -0.12967999279499054, 0.2556299865245819, -0.156810000538826, -0.14624999463558197, 0.07040700316429138, 0.6132400035858154, -0.219310000538826, 0.4020099937915802, 0.09229300171136856, 0.5158100128173828, 0.03800100088119507, -0.06763800233602524, 0.25582998991012573, 0.10129000246524811, -0.10716000199317932, 0.05173400044441223, 0.4405600130558014, -0.6279199719429016, -0.14930999279022217, 0.16473999619483948, -0.2922399938106537, -0.45306000113487244, 0.03649400174617767, 0.6126499772071838, 0.10614000260829926, 0.3972499966621399, -0.36000001430511475, 0.8220499753952026, -0.015657000243663788, 0.38752999901771545, -0.23965999484062195, -0.10232999920845032, 0.45925000309944153, 0.21593999862670898, -0.3960599899291992, 0.2762199938297272, 0.12545999884605408, -0.36792999505996704, -0.02637000009417534, 0.056074999272823334, 
-0.2647300064563751, -0.03797199949622154, 0.4910700023174286, -0.19325999915599823, 0.01465499959886074, -0.0014406000263988972, 0.24958999454975128, 0.2747099995613098, 0.02181199938058853, 0.03826199844479561, 0.041071001440286636, -0.2305299937725067, -0.09392199665307999, 0.3959200084209442, -0.10429999977350235, 0.21702000498771667, -0.051197998225688934, 0.06991899758577347, 0.34033000469207764, 0.03688500076532364, -0.4058000147342682, 0.15275999903678894, -0.29139000177383423, 0.17295999825000763, 0.6168500185012817, -0.02891799993813038, -0.8063799738883972, 0.22538000345230103, 0.20273999869823456, 0.3072200119495392, -0.8626700043678284, 0.3060399889945984, -0.07914900034666061, 0.9899600148200989, 0.3072899878025055, -0.5299800038337708, 0.05049100145697594, -0.23263999819755554, 0.08747799694538116, 0.4291499853134155, -0.13437999784946442, 0.274260014295578, -0.010673999786376953, -0.19964000582695007, 0.4037899971008301, -0.008108199574053288, -0.3483099937438965, 0.19213999807834625, 0.1666100025177002, 0.2909899950027466, -0.1046999990940094, -0.13099999725818634, -0.24592000246047974, 1.3177000284194946, 0.17362000048160553, 0.14256000518798828, -0.3374199867248535, 0.37832000851631165, 0.2347099930047989, 0.01899999938905239, -0.7170100212097168, -0.03739200159907341, -0.46963000297546387, 0.42800000309944153, 0.32100000977516174, -0.15226000547409058, 0.2862200140953064, 0.17396000027656555, -0.1944500058889389, -0.33939000964164734, 0.19625000655651093, -0.053435999900102615, -0.24045999348163605, -0.4127500057220459, 0.0740090012550354, 0.5749099850654602, 0.1965000033378601, -0.8340799808502197, -0.08469700068235397, 0.032990001142024994, 0.14350000023841858, -0.3420400023460388, -0.1820099949836731, 0.0736669972538948, 0.31867000460624695, 0.7638499736785889, 0.36730000376701355, 0.5972399711608887, -0.399399995803833, -0.0317469984292984, 0.4146000146865845, -0.2067900002002716, 0.37178999185562134, 0.1820099949836731, 0.1958799958229065, -0.4639900028705597, -0.4108799993991852, -0.04815800115466118, -0.4660100042819977, 0.3178899884223938, -0.10924000293016434, -0.10169000178575516, -0.3442800045013428, -0.2546199858188629, -0.22190000116825104, -0.07013999670743942, 0.2615799903869629, 0.16975000500679016, -0.1089399978518486, -0.9269899725914001, 0.6779900193214417, 0.39340999722480774, -0.06478899717330933, 0.14386999607086182, 0.26298999786376953, -0.361160010099411, -0.6725299954414368, -0.1766500025987625, -0.17993000149726868, 0.0795620009303093, 0.16529999673366547, 0.5976399779319763, -0.08242099732160568, -0.6226500272750854, -0.28477001190185547, -0.03023800067603588, -0.4518299996852875, -0.5125899910926819, -0.06400299817323685, 0.05340699851512909, 0.21003000438213348, -0.031252000480890274, -0.15162000060081482], u'truck': [0.08016200363636017, 0.16016000509262085, -0.015111000277101994, -0.2904900014400482, -0.537339985370636, 0.25310999155044556, -0.15410999953746796, 0.36730000376701355, -0.5319200158119202, -0.6336399912834167, -0.16292999684810638, -0.26666998863220215, -0.08169800043106079, 0.40369001030921936, -0.1124500036239624, 0.07016400247812271, 0.08252599835395813, -0.3675299882888794, -0.2535800039768219, -0.7337999939918518, 0.2611500024795532, -0.13550999760627747, 0.5461400151252747, 0.19516000151634216, -0.4179399907588959, -0.2860899865627289, 0.0512549988925457, 0.2839300036430359, 0.15775999426841736, -0.30340999364852905, -0.5521600246429443, 0.5750799775123596, 0.22575999796390533, 0.05397599935531616, 0.2930299937725067, 
0.42357999086380005, -0.467629998922348, -0.48824000358581543, -0.06089799851179123, 0.3347199857234955, 0.027543000876903534, 0.27529001235961914, 0.29249998927116394, -0.34314000606536865, -0.018314000219106674, 0.06931199878454208, 0.32537999749183655, -0.29455000162124634, 0.08484099805355072, -0.06963799893856049, 0.1050100028514862, 0.3352400064468384, -0.28505998849868774, -0.08513200283050537, 0.15723000466823578, 0.35569998621940613, -0.028212999925017357, -0.1902099996805191, -0.24053999781608582, 0.23226000368595123, -0.3980399966239929, 0.3584499955177307, 0.2925199866294861, -0.18198999762535095, -0.6972299814224243, 0.1506499946117401, -0.8162599802017212, -0.07285100221633911, 0.07393199950456619, 0.05346300080418587, -0.1526300013065338, 0.2526699900627136, 0.12775999307632446, 0.6078199744224548, -0.2763899862766266, 0.018811000511050224, 0.1298699975013733, -0.48214998841285706, 0.1129399985074997, -0.2816399931907654, 0.41912001371383667, 0.07552599906921387, 0.2725900113582611, 0.4209800064563751, -0.23127000033855438, -0.494049996137619, 0.20200000703334808, 0.1138399988412857, 0.05796299874782562, 0.4492500126361847, 1.042099952697754, 0.152879998087883, 0.04455700144171715, -0.5401099920272827, 0.21190999448299408, 0.0016892000567167997, -0.2717199921607971, -0.4917199909687042, -0.21873000264167786, -0.4047900140285492, 0.14815999567508698, 0.8606200218200684, 0.5746999979019165, -0.37419000267982483, -0.28108999133110046, -0.3995699882507324, 0.8719599843025208, 0.3967599868774414, -0.04114700108766556, -0.14733000099658966, -0.3264999985694885, 0.21321000158786774, 0.012098999693989754, -0.3893899917602539, 0.03039800003170967, 0.6804100275039673, 0.15039999783039093, 0.34163999557495117, 0.15001000463962555, 0.20196999609470367, -0.031120000407099724, -0.28325000405311584, 0.48824000358581543, -0.29440999031066895, -0.47161999344825745, 0.08711399883031845, 0.14648999273777008, 0.023800000548362732, 0.4848000109195709, -0.17848999798297882, 0.6604200005531311, 0.6238499879837036, 0.27744001150131226, 0.5306800007820129, 0.18483999371528625, 0.0737370029091835, -0.16196000576019287, -0.37376001477241516, 0.36162999272346497, 0.4850499927997589, 0.0212009996175766, 0.031057000160217285, 0.35558998584747314, -0.3044399917125702, -0.5790200233459473, 0.23568999767303467, 0.008905299939215183, -0.24334000051021576, -0.4247100055217743, 0.09717699885368347, 0.7503700256347656, 0.6003000140190125, 0.3397800028324127, -0.22573000192642212, 0.758430004119873, -0.22683000564575195, 0.25099000334739685, 0.05473100021481514, 0.4146600067615509, -0.06730800122022629, 0.44999000430107117, -0.37303000688552856, -0.23006999492645264, 0.18369999527931213, 0.012485000304877758, -0.06898900121450424, -0.09187199920415878, 0.15865999460220337, -0.03652799874544144, -0.16163000464439392, 0.14372999966144562, 0.12308000028133392, 0.5264999866485596, -0.03652099892497063, -0.15153999626636505, 0.017351999878883362, -0.06842699646949768, -0.38201001286506653, 0.4981200098991394, -0.16564999520778656, 0.5586000084877014, -0.3160400092601776, 0.2842499911785126, 0.12303999811410904, 0.20928999781608582, -0.39430001378059387, 0.25626999139785767, 0.5080599784851074, 0.703220009803772, -0.19360999763011932, 0.11395999789237976, -0.31442999839782715, -0.3196899890899658, 0.06319600343704224, -0.028754999861121178, -0.36959001421928406, -0.30577000975608826, 0.26513999700546265, -0.012159000150859356, 0.053307998925447464, 0.8064500093460083, -0.5933099985122681, 0.45739999413490295, 
-0.0919020026922226, 0.1722099930047989, -0.30309000611305237, 0.29903000593185425, -0.22269999980926514, 0.17574000358581543, -0.12561999261379242, -0.8839200139045715, 0.23771999776363373, -0.10665000230073929, -0.16469000279903412, -0.006241300143301487, 0.0634709969162941, -0.00583630008623004, 0.47821998596191406, -0.1443600058555603, 0.21021999418735504, 0.8071600198745728, 0.08687900006771088, 0.24594999849796295, -0.20572000741958618, 0.6102399826049805, 0.1307699978351593, 0.2381100058555603, -0.478630006313324, 0.41578999161720276, -0.39772000908851624, -0.052969999611377716, -0.28165000677108765, 0.08907599747180939, -0.3307200074195862, 1.0200999975204468, 0.5743299722671509, 0.4064199924468994, 0.24503999948501587, -0.7931299805641174, -0.5261499881744385, 0.44464001059532166, 0.09538500010967255, 0.06503699719905853, -0.509190022945404, -0.16801999509334564, -0.7484599947929382, 0.6328099966049194, -0.3864400088787079, 0.15049999952316284, 0.5539799928665161, -0.14744000136852264, -0.14601999521255493, -0.46417000889778137, 0.20038999617099762, 0.8418999910354614, -0.5874999761581421, -0.6757599711418152, 0.3796499967575073, -0.006280899979174137, 0.06451500207185745, 0.13856999576091766, -0.004196700174361467, 0.07932800054550171, 0.44887998700141907, 0.1623300015926361, -0.3393000066280365, 0.5852100253105164, -0.33204999566078186, -0.8856300115585327, -0.6817700266838074, -0.24484999477863312, 0.09282699972391129, -0.5020999908447266, 0.5272300243377686, 0.5960000157356262, -0.30737000703811646, -1.5394999980926514, 0.07428699731826782, -0.1263599991798401, 0.2029999941587448, -0.4886299967765808, -0.06815999746322632, 0.7914599776268005, 0.03485700115561485, 0.0982849970459938, 0.8664299845695496, 0.1949000060558319, -0.4800100028514862, 0.23473000526428223, 0.2867499887943268, 0.6470500230789185, -0.42506998777389526, -0.10395999997854233, -0.4636699855327606, 0.5103899836540222, 1.0218000411987305, 0.10865999758243561, -0.0028737999964505434, 0.08037599921226501, 0.6467700004577637], u'jungle': [-0.5042999982833862, -0.016039999201893806, -0.12424000352621078, 0.4694499969482422, -0.3919700086116791, 0.11166000366210938, 0.1540900021791458, 0.7483900189399719, 0.35010001063346863, -0.2956799864768982, -0.3179599940776825, -0.4201900064945221, -0.597760021686554, 0.08559300005435944, -0.03942599892616272, 0.01728300005197525, -0.12161000072956085, 0.04075299948453903, 0.3556100130081177, 0.5560100078582764, 0.1230200007557869, 0.20242999494075775, -0.006008800119161606, 0.0811149999499321, -0.4789600074291229, -0.5612199902534485, 0.9652199745178223, -0.6204100251197815, 0.1911199986934662, 0.8530399799346924, 0.1715099960565567, -0.41971999406814575, -0.31084999442100525, -0.7933300137519836, 0.6936200261116028, 0.055810000747442245, -0.12101999670267105, -0.1383100003004074, -0.10272999852895737, 0.006768300198018551, 0.20035000145435333, -0.7402200102806091, -0.42260000109672546, -0.01592399924993515, -0.10860999673604965, 0.021185999736189842, 0.5366899967193604, 0.2712000012397766, 0.1760299950838089, -0.5145999789237976, -0.03384200111031532, 0.019744999706745148, 0.21649999916553497, 0.20247000455856323, 0.2633199989795685, -0.13460999727249146, -0.2363000065088272, -0.44374001026153564, 0.4511600136756897, 0.06347700208425522, -0.43393000960350037, -0.07038599997758865, -0.001715000020340085, 0.18807999789714813, -0.14518000185489655, 0.27055999636650085, 0.5827699899673462, -0.19965000450611115, 0.3672100007534027, -0.13258999586105347, -0.358379989862442, 
0.6972200274467468, -0.09467100352048874, -0.05028200149536133, -0.31130000948905945, -0.1836100071668625, 0.7418000102043152, -0.3930099904537201, 0.5791199803352356, -0.7811999917030334, 0.6351799964904785, -0.19391000270843506, 0.12150999903678894, -0.222120001912117, -0.4289799928665161, -0.6792700290679932, 0.5343800187110901, -0.2814599871635437, 0.1995999962091446, -0.5685700178146362, -0.3143100142478943, -0.27024999260902405, 0.7001699805259705, -0.035280000418424606, 0.11939000338315964, 0.12372999638319016, -0.013741999864578247, 0.07346300035715103, 0.08292800188064575, -0.20397000014781952, 0.5512499809265137, 0.5720099806785583, -0.005556399933993816, 0.10503999888896942, -0.4535500109195709, -0.2664400041103363, 0.1165900006890297, 0.24320000410079956, 0.34106001257896423, 0.23670999705791473, 0.04908199980854988, -0.6962800025939941, -0.10322999954223633, 0.17351000010967255, 0.36858999729156494, -0.15041999518871307, 0.3366599977016449, 0.2672399878501892, -0.18750999867916107, 0.1147100031375885, -0.6721699833869934, -0.670490026473999, -0.5337700247764587, -0.11929000169038773, -0.2420700043439865, 0.06953100115060806, -0.03222399950027466, 0.27790001034736633, -0.5587800145149231, -0.2051900029182434, -0.10896000266075134, 0.8418499827384949, 0.030347000807523727, 0.04533199965953827, 0.2390899956226349, -0.4652499854564667, -0.29774999618530273, 0.39587000012397766, -0.2049800008535385, -0.1268399953842163, 0.15605999529361725, -0.3347899913787842, -0.9106600284576416, 0.011269000358879566, -0.8267899751663208, 0.08541200309991837, 0.4661099910736084, 0.41258999705314636, -0.590149998664856, -0.2052599936723709, -0.24626000225543976, 0.2469799965620041, -0.050880998373031616, 0.4277600049972534, 0.8996599912643433, -0.3389900028705597, 0.004690499976277351, -0.40573999285697937, 0.4507899880409241, 0.6657999753952026, 0.8137099742889404, -0.2373500019311905, 0.6369500160217285, 0.021577000617980957, -0.534089982509613, -0.5707600116729736, 0.6393899917602539, 0.5013599991798401, -0.46566998958587646, -0.23868000507354736, -0.5681399703025818, -0.297650009393692, -0.07003200054168701, 0.48767000436782837, -0.2592200040817261, 0.13131999969482422, 0.635200023651123, 0.53302001953125, -0.11185000091791153, -0.08792299777269363, -0.08503600209951401, 0.47165000438690186, 0.20044000446796417, 0.3741700053215027, -0.20773999392986298, 0.23619000613689423, 0.3655099868774414, 0.8651000261306763, 0.4573099911212921, -0.2713199853897095, -0.10272999852895737, 0.5057500004768372, -0.008782699704170227, -0.2818700075149536, 0.23157000541687012, -0.10475999861955643, -0.2680099904537201, 0.7440999746322632, -0.4274600148200989, 0.5989199876785278, 0.8200899958610535, 0.1968899965286255, -0.28725001215934753, -0.020927000790834427, 0.14504000544548035, -0.03790299966931343, -0.39983999729156494, 0.22032999992370605, -0.2142699956893921, -0.394540011882782, -0.7062600255012512, -0.4321100115776062, -0.5634400248527527, -0.4461599886417389, 0.31033000349998474, -0.04093499854207039, -0.18055999279022217, -0.1281999945640564, 0.20702999830245972, 0.2745400071144104, 0.6463000178337097, -0.6485999822616577, 0.2747899889945984, 0.0703129991889, -0.07844799757003784, 0.0747470036149025, 0.16562999784946442, 0.2468000054359436, -0.12081000208854675, -0.8858799934387207, 0.3513700067996979, 0.32120001316070557, -0.5452100038528442, 0.3731899857521057, 0.18434999883174896, -0.043584998697042465, 0.39013999700546265, -0.152319997549057, -0.49660998582839966, 0.04231100156903267, 
0.38117000460624695, -0.030886000022292137, 0.009642800316214561, 0.15004000067710876, -0.47569000720977783, -0.13235999643802643, 0.3401699960231781, -0.013508999720215797, -0.13383999466896057, 0.3738600015640259, -0.6905199885368347, 0.12434999644756317, -0.2962700128555298, -0.16761000454425812, 0.7302200198173523, 0.9589599967002869, 0.462119996547699, -0.11139000207185745, 0.01496300008147955, 0.08605899661779404, 0.07128199934959412, -0.3170599937438965, -0.01996999979019165, -0.4153200089931488, 0.06014600023627281, 0.8296200037002563, -0.838450014591217, -0.0690699964761734, 0.15125000476837158, 0.18051999807357788, -0.16410000622272491, -0.5219699740409851, -0.021762000396847725, 0.0507659986615181, -0.012378999963402748, 0.5449100136756897, -0.813040018081665, 0.24255000054836273, -0.5999000072479248, -0.322050005197525, -0.10266000032424927, -0.594980001449585, -0.6003100275993347, 0.021654000505805016, -1.0382000207901, -0.5500100255012512, -0.21842999756336212, 0.47235000133514404, -0.2155900001525879, -0.046241000294685364, 0.07404500246047974, 0.215829998254776, -0.5065600275993347, -0.04536300152540207, -0.5028700232505798, 1.1744999885559082, -0.30300000309944153, 0.5092800259590149, -0.2436700016260147, 0.39941999316215515], u'bottle': [-0.3610300123691559, 0.5361199975013733, -0.31926000118255615, -0.49465999007225037, 0.22530999779701233, 0.6828399896621704, -0.15399999916553497, -0.4189000129699707, 0.6448699831962585, -0.6434500217437744, -0.1040399968624115, -0.46821001172065735, -0.22308999300003052, -0.245169997215271, 0.052643001079559326, 0.25325000286102295, -0.4700799882411957, 0.33952999114990234, -0.2798300087451935, 0.07064700126647949, 0.2141299992799759, 0.2398499995470047, -0.19890999794006348, 0.3767299950122833, -0.24772000312805176, -0.6148999929428101, -0.630050003528595, 0.2541300058364868, 0.005111100152134895, -0.7643700242042542, -0.4929800033569336, -0.15191000699996948, -0.01027199998497963, 0.282370001077652, -0.6285099983215332, 0.5401800274848938, -0.1590999960899353, 0.29896000027656555, -0.08327200263738632, 0.0804930031299591, -0.25582000613212585, -0.3812899887561798, 0.1528100073337555, -0.12251000106334686, -0.6812599897384644, -0.09692200273275375, 0.12655000388622284, 0.07866799831390381, -0.15645000338554382, 0.28224998712539673, -0.011874999850988388, -0.49900001287460327, -0.32754001021385193, 0.18251000344753265, -0.29954999685287476, 0.5733199715614319, 0.07651499658823013, 0.32552000880241394, 0.6518800258636475, -0.2333800047636032, 0.009960499592125416, -0.33375000953674316, 0.30219000577926636, 0.2185799926519394, -0.4243600070476532, -0.7569500207901001, -1.0580999851226807, -0.363429993391037, 0.18339000642299652, -0.13694000244140625, -0.07669100165367126, -0.46601998805999756, -0.0460829995572567, 0.5620999932289124, 0.19505000114440918, -0.6432899832725525, 0.18193000555038452, -0.7480900287628174, -0.41207998991012573, 0.05658800154924393, -0.5677499771118164, 0.29276999831199646, 0.24487000703811646, 0.4462999999523163, 0.5470600128173828, -0.2506600022315979, 0.3105100095272064, -0.0016616999637335539, 0.15568000078201294, -0.6166399717330933, 0.8137699961662292, 0.3442400097846985, 0.04034800082445145, 0.11687000095844269, 0.5377799868583679, 0.31926000118255615, 0.4082599878311157, -0.08444999903440475, 0.030563000589609146, -0.24794000387191772, 0.12943999469280243, 0.353520005941391, 0.14090000092983246, -0.16102999448776245, 0.21254000067710876, -0.2803100049495697, 0.2063799947500229, -0.02776000089943409, 
-0.624750018119812, 0.23151999711990356, -0.16342000663280487, 0.37353000044822693, 0.33647000789642334, -0.35100001096725464, -0.03700299933552742, -0.07862400263547897, -0.6939200162887573, 0.576259970664978, 0.20521000027656555, -0.8165599703788757, -0.1391499936580658, 0.22415000200271606, 0.03572700172662735, 0.2235500067472458, -0.38352999091148376, -0.1808300018310547, -0.016795000061392784, 0.02686999924480915, 0.016743000596761703, 0.17337000370025635, 0.7710800170898438, 0.08291099965572357, 0.122529998421669, 0.24823999404907227, -0.2275799959897995, 0.3121599853038788, -0.007053100038319826, 0.2564600110054016, -0.1552799940109253, -0.3457599878311157, 0.35679998993873596, -0.22773000597953796, -0.47185999155044556, -0.5204799771308899, -0.4726000130176544, -0.4108400046825409, -0.09059000015258789, -0.04865799844264984, 0.49494999647140503, 0.6461799740791321, -0.05571199953556061, 0.4508500099182129, 0.17494000494480133, -0.8577799797058105, -0.20826999843120575, -0.034276001155376434, -0.006099900230765343, -0.8529199957847595, 0.06827399879693985, 0.31349998712539673, 0.23274999856948853, -0.2910099923610687, -0.2057799994945526, 0.30344000458717346, 0.3846299946308136, -0.1647700071334839, -0.34158000349998474, 0.4260300099849701, -0.18794000148773193, -0.3469499945640564, -0.10248000174760818, 0.22346000373363495, 0.31126001477241516, 0.07427600026130676, 0.10322000086307526, -0.8667299747467041, 0.0038447000551968813, 0.42541998624801636, 0.21067999303340912, -0.7235400080680847, -0.3295600116252899, -0.2882100045681, -0.0014915999490767717, -0.06720399856567383, 0.8095999956130981, -0.009765800088644028, 1.2654000520706177, 0.4182099997997284, 0.31213998794555664, -0.06938199698925018, 0.9092000126838684, 0.480540007352829, -0.36434999108314514, 0.16344000399112701, -0.48363998532295227, -0.17746999859809875, -0.6950299739837646, 0.23340000212192535, -0.5917900204658508, 0.47012001276016235, 0.5045999884605408, 0.7274600267410278, 0.5925400257110596, 0.185139998793602, 0.045497000217437744, -0.23048999905586243, -0.36410999298095703, 0.05691299960017204, -0.42528998851776123, 0.15928000211715698, -0.09476400166749954, 0.09433600306510925, 0.08147300034761429, 0.017620999366044998, 0.22744999825954437, -0.4907799959182739, 0.18233999609947205, -0.34619998931884766, -0.5069400072097778, 0.11336000263690948, -0.062240999191999435, 0.6964100003242493, -0.026528000831604004, 0.419979989528656, -0.3759399950504303, 0.15158000588417053, 0.04094000160694122, -0.6161400079727173, 0.5885900259017944, -0.36030998826026917, 0.15358999371528625, 0.19518999755382538, -0.42326998710632324, -0.13940000534057617, 0.5550500154495239, 0.28485000133514404, -0.22988000512123108, -0.1697700023651123, -0.3424299955368042, 0.21107999980449677, -0.6600000262260437, -0.1849599927663803, -0.29023000597953796, 0.023806000128388405, -0.22833000123500824, 0.20572000741958618, 0.5539399981498718, -0.03620700165629387, -0.5250300168991089, -0.3732999861240387, 0.7691400051116943, 0.07639999687671661, 0.3423199951648712, -0.1779399961233139, 0.024664999917149544, 0.26069000363349915, -0.7438799738883972, -0.6499599814414978, 0.39719998836517334, -0.24654999375343323, 0.056453999131917953, 0.15282000601291656, -0.33524999022483826, 0.06697399914264679, -0.2347699999809265, -0.26910001039505005, -0.06579600274562836, 0.05023999884724617, 0.05291000008583069, 0.7733399868011475, 0.25033000111579895, -0.5011799931526184, 0.16788999736309052, -0.37762999534606934, 0.09507600218057632, 0.028567999601364136, 
-1.304900050163269, -0.009620199911296368, -0.7328000068664551, -0.5750300288200378, -0.32822999358177185, 0.5611299872398376, 0.11687000095844269, 0.02760699950158596, 0.05102099850773811, 0.5229300260543823, -0.11607000231742859, -0.9536799788475037, 0.26826998591423035, 0.491129994392395, -0.2543700039386749, -0.05342099815607071, 0.42440998554229736, 0.3891200125217438, 0.13725000619888306, -0.35359999537467957, 0.14524999260902405, -0.25297999382019043, 0.14108000695705414, 0.21762000024318695], u'basket': [-0.23319000005722046, 0.3644999861717224, 0.06190500035881996, -0.1956299990415573, 0.31233999133110046, 0.1678999960422516, -0.17741000652313232, -0.23033000528812408, 0.49928000569343567, -0.4961499869823456, -0.29409998655319214, -0.016207000240683556, -0.06786800175905228, -0.059866998344659805, 0.367000013589859, -0.7615900039672852, -0.3012300133705139, -0.15068000555038452, 0.2019599974155426, -0.15310999751091003, 0.21287000179290771, -0.23914000391960144, 0.591759979724884, -0.2443300038576126, 0.4134399890899658, -0.21762000024318695, -0.7989299893379211, -0.15836000442504883, 0.35100001096725464, -0.41907998919487, -0.050783999264240265, 0.34821999073028564, 0.3318899869918823, 0.42594999074935913, -1.174299955368042, 0.1970600038766861, 0.1415500044822693, 0.29912999272346497, -0.23386000096797943, 0.2756099998950958, -0.24619999527931213, -0.6729599833488464, 0.34790998697280884, -0.4418100118637085, 0.03860199823975563, 0.457179993391037, 0.45509999990463257, 0.15900999307632446, -0.4473100006580353, 0.617330014705658, 0.21412000060081482, 0.48982998728752136, -0.2930000126361847, -0.3971799910068512, -0.5355799794197083, -0.27553999423980713, -0.893280029296875, 0.32058000564575195, 0.30722999572753906, -0.1046300008893013, 0.09073200076818466, 0.08556199818849564, -0.22801999747753143, -0.1467600017786026, -0.2913300096988678, -0.0321430005133152, -0.3449999988079071, -0.3614400029182434, -0.10899999737739563, -0.3020299971103668, 0.13923999667167664, 0.48146000504493713, 0.04334700107574463, -0.0826139971613884, 0.18196000158786774, 0.5728899836540222, 1.0715999603271484, -0.4819500148296356, 0.36520999670028687, -0.8697299957275391, 0.14566999673843384, -0.39114999771118164, 0.30441999435424805, 0.2722199857234955, 0.2615000009536743, -0.005306399893015623, 0.2161400020122528, 0.31575000286102295, -0.4233100116252899, -0.12261000275611877, 1.1224000453948975, -0.37380000948905945, -0.02472599968314171, -0.706570029258728, -0.12387999892234802, -0.29256999492645264, -0.0021005000453442335, 0.09638500213623047, -0.1979600042104721, 0.16927999258041382, 0.23538999259471893, 0.4662199914455414, 0.10819999873638153, 0.3396100103855133, 0.19147999584674835, -0.4738500118255615, 0.23431000113487244, -0.01507600024342537, 0.04025999829173088, 0.350600004196167, -0.09946800023317337, 0.1271599978208542, -0.2955299913883209, 0.13649000227451324, 0.5385299921035767, 0.1072700023651123, 0.3518899977207184, 0.45357000827789307, 0.4943999946117401, 0.5199300050735474, -0.06485600024461746, -0.6038900017738342, 0.6903799772262573, -0.1149199977517128, -0.09346000105142593, -0.3083600103855133, -0.2145400047302246, 0.09750600159168243, 0.4288899898529053, 0.784089982509613, 0.24741999804973602, 0.09503000229597092, -0.39041998982429504, 0.2694999873638153, -0.3124600052833557, 0.4714199900627136, 0.3644999861717224, 0.13907000422477722, -0.4954499900341034, -0.023490000516176224, 0.03596299886703491, 0.49114999175071716, -0.05567000061273575, -0.5500500202178955, 
-0.6826000213623047, 0.08677399903535843, -0.6152899861335754, -0.5116900205612183, -0.3790299892425537, -0.2599000036716461, -0.4977400004863739, 0.209989994764328, -0.035930998623371124, -0.4588800072669983, -0.17364999651908875, -0.07766000181436539, -0.1275700032711029, -0.24647000432014465, -0.015131999738514423, -0.10705000162124634, -0.37029001116752625, -0.11409000307321548, -0.11806000024080276, -0.030553000047802925, 0.19442999362945557, -0.5227599740028381, -0.4516099989414215, 0.3196899890899658, 0.2417600005865097, 0.3333300054073334, -0.5102300047874451, -0.13794000446796417, -0.19439999759197235, 0.5670599937438965, 0.0850519984960556, 0.11963000148534775, -0.1911199986934662, 0.5056800246238708, -0.07801499962806702, -0.4321399927139282, 0.6886600255966187, -0.664870023727417, 0.09422799944877625, 0.10931000113487244, -0.6047999858856201, -0.16202999651432037, 0.3872700035572052, 0.05380000174045563, -0.28512001037597656, 0.18344999849796295, 0.3347199857234955, 0.6818900108337402, -0.3015100061893463, -0.532800018787384, -0.46665000915527344, -0.3615399897098541, -0.36065998673439026, -0.3575499951839447, -0.07362200319766998, 0.31744998693466187, 0.4533799886703491, -0.6144300103187561, 0.12519000470638275, 0.24981999397277832, 0.5254999995231628, -0.04369499906897545, -0.41315001249313354, -0.038040000945329666, -0.43518000841140747, 0.8578299880027771, -0.38273999094963074, 0.06279800087213516, 0.14194999635219574, 0.016426999121904373, 0.38089999556541443, 0.22663000226020813, 0.1722400039434433, 0.009104900062084198, -0.6221500039100647, -0.182109996676445, 0.16266000270843506, 0.4703100025653839, -0.2962999939918518, -0.3771499991416931, -0.05748400092124939, -0.0311450008302927, -0.11246000230312347, 0.39261001348495483, 0.39858999848365784, 0.022655000910162926, 0.23574000597000122, 0.08651500195264816, 0.0015937000280246139, 0.21428999304771423, -0.30309998989105225, 0.6685100197792053, -0.323529988527298, 0.046108998358249664, 0.25440001487731934, 0.5531700253486633, 0.04922199994325638, -0.06660600006580353, -0.27737998962402344, 0.29243001341819763, -0.29756999015808105, -0.0791890025138855, -0.1520099937915802, 0.8580300211906433, -0.10909000039100647, 0.04276200011372566, -0.2342199981212616, 0.050579000264406204, 0.4431400001049042, -0.5211300253868103, 0.6438000202178955, -0.08339700102806091, -0.4917199909687042, 0.2969000041484833, -0.24278999865055084, 0.38565000891685486, -0.4128600060939789, -0.45781001448631287, 0.01921200007200241, -0.28718000650405884, -0.06148099899291992, -0.210889995098114, -0.0847729966044426, 0.2059600055217743, -0.13278000056743622, -0.7864199876785278, 0.0656059980392456, -0.1902800053358078, -0.04347899928689003, 0.13784000277519226, 0.18515999615192413, -0.6658599972724915, -0.8191499710083008, -0.331030011177063, -1.254699945449829, 0.20362000167369843, 0.10986000299453735, -0.0258799996227026, -0.4790700078010559, -0.1419599950313568, 0.1096699982881546, -0.10638999938964844, 0.5659599900245667, -0.6207399964332581, 0.09126000106334686, 0.12122999876737595, 0.5218600034713745, -0.14199000597000122, 0.30695998668670654, 0.4975700080394745, 0.5276299715042114, 0.18151000142097473, 0.1554100066423416, -0.825689971446991, -0.5333099961280823, 0.057461999356746674], u'meat': [0.6860899925231934, 0.8784099817276001, 0.21856999397277832, -0.24244999885559082, 0.02304299920797348, -0.278219997882843, -0.021206000819802284, 0.509119987487793, -0.06491000205278397, -1.1539000272750854, -0.31053000688552856, -0.9087799787521362, 
-0.11744000017642975, 0.6361299753189087, -0.45587998628616333, -0.48228999972343445, -0.1555899977684021, 0.22688999772071838, -0.5585899949073792, 0.2199299931526184, -0.23543000221252441, 0.4142099916934967, 0.54093998670578, 0.4537000060081482, -0.1752299964427948, 0.5097799897193909, -0.11253000050783157, -0.47067999839782715, -0.33035001158714294, -0.3001500070095062, -0.6845200061798096, 0.5115100145339966, -0.6940299868583679, -0.23077000677585602, -0.3295400142669678, 0.25249001383781433, 0.5362799763679504, 0.0934320017695427, -0.727429986000061, -0.04456400126218796, -0.28881001472473145, -0.3491800129413605, -0.046824000775814056, -0.49911001324653625, -0.23442000150680542, -0.054625000804662704, -0.17145000398159027, -0.5008999705314636, 0.03360600024461746, 0.24814000725746155, -0.08959099650382996, 0.5189099907875061, 0.05889600142836571, 0.044753000140190125, 0.07438399642705917, -0.15682999789714813, -0.24945999681949615, 0.10371000319719315, -0.8604400157928467, -0.17050999402999878, -0.02834700047969818, 0.14395000040531158, 0.4688200056552887, -0.37907999753952026, -0.06812900304794312, -0.5115600228309631, -0.4355599880218506, -0.19964000582695007, 0.15577000379562378, 0.4408400058746338, 0.3860599994659424, 0.35572001338005066, 0.14970999956130981, -0.32082000374794006, -0.5089100003242493, 0.08866400271654129, 0.5770400166511536, 0.9609100222587585, -0.5152300000190735, -0.027936000376939774, 0.19776000082492828, 0.12483999878168106, -0.05680600181221962, -0.07978899776935577, 0.0949459969997406, -0.7791299819946289, -0.22694000601768494, 0.1882999986410141, -0.7169700264930725, -0.20670999586582184, -0.21751999855041504, -0.42489999532699585, -0.04628700017929077, 0.19631999731063843, -0.2542099952697754, 0.04829400032758713, -0.6030799746513367, 0.3686699867248535, -0.6325899958610535, 0.20311999320983887, 0.02374899946153164, -0.13163000345230103, 0.19945000112056732, -1.117900013923645, -0.5007100105285645, -0.1089399978518486, -0.00011511000047903508, 0.4818600118160248, -0.44161999225616455, 0.785539984703064, 0.3764899969100952, 0.5692600011825562, -0.7693300247192383, -0.15520000457763672, 0.021733999252319336, -0.1969199925661087, -0.482340008020401, 0.17505000531673431, 0.24681000411510468, -0.43088001012802124, -0.7469800114631653, -0.10869999974966049, 0.21155999600887299, 0.5220999717712402, -0.16593000292778015, 0.4955900013446808, -0.024656999856233597, 0.09052799642086029, -0.010738000273704529, 0.1522500067949295, 0.23409999907016754, 0.4275999963283539, 0.6043300032615662, 0.15588000416755676, -0.10068999975919724, 0.07334399968385696, -0.01607700064778328, 0.03280799835920334, 0.5216400027275085, 0.6400399804115295, 0.4108000099658966, 0.30445000529289246, 0.08586599677801132, -0.7769799828529358, -0.3247799873352051, 0.08576299995183945, -0.027249999344348907, -0.2846600115299225, -0.057760000228881836, -0.6127200126647949, -0.9843500256538391, 0.5213000178337097, 0.025318000465631485, -0.007254200056195259, 0.1408499926328659, -0.21041999757289886, -0.45688000321388245, -0.3562299907207489, -0.2962599992752075, -0.1513500064611435, 0.3886600136756897, 0.8494600057601929, -0.037119001150131226, -0.24307000637054443, 0.35596001148223877, -0.06206100061535835, 0.4564000070095062, -0.646619975566864, 0.35934001207351685, -0.4111100137233734, -0.302480012178421, -0.09369499981403351, -0.21809999644756317, 0.5496900081634521, -0.0738689973950386, 0.18585999310016632, 0.35447999835014343, -0.3743399977684021, 0.41385000944137573, -0.5539299845695496, 
0.7452600002288818, 0.2853800058364868, -0.5266000032424927, -0.7012100219726562, -0.1404699981212616, -0.15373000502586365, 0.6902499794960022, -0.0500670000910759, 0.20948000252246857, -0.2964800000190735, -0.7303000092506409, 0.9150599837303162, -0.3732700049877167, -0.009397399611771107, 0.027664000168442726, 0.07503499835729599, -0.42212000489234924, -0.19439999759197235, -0.31567999720573425, 0.4657500088214874, 0.5788999795913696, -0.48201000690460205, 0.4226199984550476, 0.37803998589515686, -0.18626999855041504, 0.6388300061225891, 0.30441001057624817, -0.18437999486923218, -0.10227999836206436, -0.3062700033187866, 0.021869000047445297, -0.331030011177063, -0.610539972782135, 0.7650499939918518, -0.07931499928236008, -0.15764999389648438, 0.7704499959945679, -0.561240017414093, -0.3568100035190582, 0.03931700065732002, 0.229420006275177, 0.09109299629926682, -0.4038600027561188, -1.2968000173568726, 0.1012599989771843, -0.09756799787282944, -0.09299600124359131, 0.01637299917638302, 0.1802300065755844, 0.4465799927711487, 0.6050400137901306, -0.011320999823510647, -0.18118999898433685, 0.11625000089406967, 0.7036899924278259, 0.21112999320030212, 0.4863699972629547, -0.05805300176143646, -0.6853600144386292, -0.2525399923324585, -0.563979983329773, -0.16718000173568726, -0.12892000377178192, -0.9205399751663208, -0.9547299742698669, 0.2258400022983551, 0.6581100225448608, -0.22431999444961548, -0.14630000293254852, -0.7128900289535522, 0.041099999099969864, -0.21499000489711761, 0.21028000116348267, 0.6729300022125244, 0.4965200126171112, 0.5016599893569946, -0.2814199924468994, 0.0004822300106752664, -0.2742300033569336, 0.14146000146865845, -0.021176999434828758, -0.5199999809265137, -0.04952000081539154, -0.5696799755096436, 0.360370010137558, 0.2655999958515167, -0.1887200027704239, 0.07529500126838684, 0.026360999792814255, -0.32747000455856323, -0.9068400263786316, 0.006909800227731466, 0.505370020866394, 0.30625998973846436, 0.09980099648237228, 0.16235999763011932, -1.542799949645996, 0.08118700236082077, -0.6461399793624878, -0.5009599924087524, -0.21182000637054443, -0.18367999792099, -0.006750000175088644, 0.4024899899959564, -0.4086900055408478, 0.6792600154876709, 0.16904999315738678, -0.4995400011539459, 0.6108400225639343, 0.2944900095462799, 0.040004000067710876, 0.12358000129461288, 0.2290399968624115, -0.010297000408172607, -0.3762499988079071, -0.7408499717712402, 0.14955000579357147, -0.3991999924182892, 0.25964000821113586, -0.4620800018310547], u'tube': [0.35514000058174133, 0.11073999851942062, -0.03684600070118904, -0.7962499856948853, 0.27702000737190247, 0.1748799979686737, -0.09302400052547455, -0.4146899878978729, -0.008322600275278091, -1.056399941444397, -0.4170899987220764, -0.5529000163078308, 0.28255999088287354, -0.009892200119793415, 0.16523000597953796, 0.14657999575138092, -0.666450023651123, -0.6463900208473206, 0.40443000197410583, -0.21821999549865723, -0.06445199996232986, -0.773360013961792, 0.216729998588562, 0.6217300295829773, -0.44457000494003296, -0.20909999310970306, -0.1422400027513504, 0.6057699918746948, 0.5343000292778015, 0.6948800086975098, 0.13795000314712524, 0.5172600150108337, 0.2830899953842163, 0.2396399974822998, 0.051451001316308975, 0.5484399795532227, -0.02989399991929531, 0.05385600030422211, -0.22603000700473785, 0.9330499768257141, -0.4263800084590912, 0.4284699857234955, 0.1764499992132187, 0.29131999611854553, -0.7235100269317627, -0.06479699909687042, -0.29982998967170715, -0.09823799878358841, 
0.25275999307632446, 0.3930499851703644, 0.09419099986553192, 0.4430600106716156, -0.26256999373435974, 0.12032999843358994, 0.07671400159597397, -0.10683999955654144, 0.6233100295066833, -0.1766899973154068, 0.425819993019104, 0.3328799903392792, 0.19282999634742737, -0.1898300051689148, 0.7360600233078003, 0.0724480003118515, 0.5954599976539612, 0.03253199905157089, 0.006850500125437975, -0.03177100047469139, -0.13291999697685242, 0.6633399724960327, 0.5410500168800354, -0.008778300136327744, -0.17542000114917755, 0.1853799968957901, 0.7453399896621704, 0.1913899928331375, -0.11765000224113464, -0.13447000086307526, -0.7384300231933594, -0.44975998997688293, -0.2561900019645691, -0.10507000237703323, -0.004997699987143278, 0.07489000260829926, -0.36994001269340515, 0.27360999584198, 0.9546200037002563, -0.5663300156593323, 0.072782002389431, 0.14855000376701355, 0.5157099962234497, -0.057930998504161835, -0.614109992980957, -0.04989999905228615, -0.11699000000953674, 0.23152999579906464, -0.25892001390457153, 0.44839999079704285, 0.6635400056838989, -0.7609000205993652, 0.21428999304771423, 0.6410700082778931, 0.07731799781322479, -0.5258499979972839, 0.5209699869155884, 0.37874001264572144, 0.42642998695373535, -0.19978000223636627, -0.2523899972438812, 0.5577999949455261, -0.3680900037288666, 0.7149500250816345, 0.8692600131034851, 0.06910300254821777, -0.3913399875164032, -0.3327299952507019, -0.606939971446991, 0.8718500137329102, -0.23749999701976776, -0.07628700137138367, -0.10340999811887741, -0.8738800287246704, 0.48458001017570496, -1.0565999746322632, -0.18419000506401062, -0.39546999335289, -0.5028300285339355, -0.2504799962043762, -0.4287700057029724, 0.5242800116539001, -0.022842999547719955, -0.0031471000984311104, 0.2578999996185303, 0.47308000922203064, 0.054715000092983246, 0.44784998893737793, 0.010010000318288803, -0.18764999508857727, 0.16810999810695648, 0.34880998730659485, 0.39114999771118164, -0.1512400060892105, -0.33904001116752625, -0.35054999589920044, -0.33649998903274536, 0.0092351995408535, 0.04425499960780144, -0.3900499939918518, 0.11078000068664551, -0.2677200138568878, -0.03903200104832649, 0.20442000031471252, 0.4085899889469147, 0.25369998812675476, 0.6281300187110901, 0.12122999876737595, -0.10632000118494034, -0.6107900142669678, -0.007347399834543467, 0.8986200094223022, -0.2387000024318695, -0.323529988527298, 0.029969999566674232, -0.30098000168800354, 0.4633199870586395, 0.0048190997913479805, 0.22366000711917877, -0.04053699970245361, 0.4098399877548218, -0.010773000307381153, 0.37501001358032227, 0.8230100274085999, 0.673259973526001, 0.03198400139808655, -0.38517001271247864, 0.146139994263649, -0.46988001465797424, 0.8216500282287598, 0.04424599930644035, -1.1863000392913818, 0.009719699621200562, 0.20909999310970306, 0.28584998846054077, 0.9666799902915955, -0.046167001128196716, -0.17215999960899353, 1.0073000192642212, 0.4359399974346161, 0.5698099732398987, -0.930869996547699, 0.22387999296188354, 0.7450900077819824, 0.209539994597435, 0.5748400092124939, -0.3471300005912781, 0.2466599941253662, 0.014201000332832336, -0.6352300047874451, -0.15884000062942505, -0.08506199717521667, 0.383650004863739, 0.4320099949836731, 0.27046000957489014, 0.24963000416755676, 0.16878999769687653, 0.4888100028038025, -0.4621799886226654, -0.33939000964164734, -0.2965799868106842, 0.10552000254392624, -0.1861799955368042, -0.7034000158309937, 0.03844999894499779, -0.23163999617099762, 0.27487000823020935, -0.01579900085926056, 0.09022500365972519, 
-0.6993200182914734, -0.013814999721944332, -0.33858999609947205, -0.040449000895023346, -0.026978999376296997, -0.05474599823355675, -0.5011100172996521, 0.5317100286483765, 0.07237400114536285, -0.24194000661373138, -0.5819299817085266, 0.6878700256347656, 0.2951500117778778, 0.20427000522613525, 0.21188999712467194, 0.15613999962806702, -0.4989500045776367, 0.2411700040102005, -0.4129300117492676, 0.48447999358177185, 0.018164999783039093, -0.1638599932193756, -0.4822100102901459, 0.3158099949359894, -0.6360499858856201, 0.060245998203754425, 0.7367100119590759, -1.1217000484466553, -0.6613699793815613, 0.43042001128196716, -0.3269999921321869, 0.034898001700639725, -0.32117000222206116, -0.19115999341011047, -0.03013800084590912, 0.2391200065612793, -0.24097999930381775, 0.039069000631570816, 0.3261300027370453, -0.022799000144004822, -0.1182899996638298, 0.5251799821853638, 0.026576999574899673, -0.1745000034570694, -0.28325000405311584, 0.026466000825166702, 0.3618699908256531, -0.07221599668264389, 0.29100000858306885, -0.23799000680446625, 0.3743700087070465, 0.5636100172996521, 0.17249999940395355, 0.17896999418735504, -0.33643999695777893, 0.09761899709701538, -0.0730689987540245, -0.3316200077533722, -0.25971999764442444, -0.9501500129699707, -0.22708000242710114, -0.8911499977111816, 0.4354200065135956, -0.07168100029230118, -0.4808200001716614, -0.5611100196838379, -0.10277000069618225, -0.19081999361515045, 0.00829629972577095, -0.00530839990824461, 0.10457000136375427, -0.3653799891471863, -0.24254000186920166, -0.04173799976706505, -0.30584999918937683, 0.4124400019645691, -0.15383000671863556, 0.799560010433197, -0.25220999121665955, 0.07129199802875519, -0.24270999431610107, 0.23030999302864075, 0.25617000460624695]} diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/glove_UT.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/glove_UT.py new file mode 100644 index 0000000000000000000000000000000000000000..dd0deb7f5e6be2f836a7d69dbb2af99f7285ae3b --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/aux_data/glove_UT.py @@ -0,0 +1,29 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
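(Editorial aside, not part of the diff itself.) The new glove_UT.py module stores pretrained GloVe word vectors inline as a plain Python dict (attrs_dict, keyed by attribute name such as u'Synthetic' or u'Canvas'), so no GloVe download or parsing step is needed at preprocessing time. A minimal, hypothetical sketch of how such a dict can be consumed, assuming the module is importable from the SYMNET_ID1292_for_ACL model root; build_embedding_matrix is an illustrative helper and is not part of this change:

import numpy as np
from utils.aux_data.glove_UT import attrs_dict  # attribute name -> GloVe vector (list of floats)

def build_embedding_matrix(names, emb_dict):
    # Stack the stored vectors, in the requested order, into a float32 matrix;
    # a KeyError here means a requested label has no embedding in the dict.
    return np.asarray([emb_dict[n] for n in names], dtype=np.float32)

emb = build_embedding_matrix([u'Synthetic', u'Canvas'], attrs_dict)
print(emb.shape)  # (2, D), where D is the GloVe vector dimensionality

Because the vectors are fixed data, a consumer would typically build this matrix once at startup and feed it to the model as a constant, rather than re-reading the dict per sample.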
+attrs_dict = {u'Synthetic': [-0.04577599838376045, 0.3800700008869171, 0.06601399928331375, 0.30629000067710876, -0.3903000056743622, -0.4559600055217743, -0.24815000593662262, -0.15424999594688416, 0.251910001039505, -1.542199969291687, 0.296889990568161, 0.04115299880504608, -0.10824000090360641, -0.5511000156402588, 0.15072999894618988, -0.6427199840545654, -0.6002699732780457, 0.6632300019264221, 0.01593099907040596, 0.25126999616622925, -0.7840200066566467, -0.06940600275993347, -0.7046599984169006, 1.0583000183105469, -0.7482500076293945, -0.047265999019145966, 0.12145999819040298, -0.19754000008106232, -0.39076000452041626, 0.8264899849891663, -0.23531000316143036, 0.12178999930620193, -0.4373599886894226, -0.03483700007200241, 0.1463399976491928, 0.8266000151634216, 0.1419299989938736, -0.015936000272631645, 0.47641000151634216, 0.5698800086975098, -0.6881800293922424, 0.3273400068283081, 0.11575999855995178, -0.3928599953651428, 0.859969973564148, -0.17398999631404877, -0.13364000618457794, 0.36761000752449036, 0.40156999230384827, 0.18532000482082367, -0.7027999758720398, 0.3431699872016907, -0.20604999363422394, 0.25824999809265137, -0.1853400021791458, 0.05159199982881546, 0.4432399868965149, -0.8833500146865845, -0.15508000552654266, 0.03284300118684769, 0.03600899875164032, 0.010456999763846397, 0.12272000312805176, -0.3211599886417389, 0.22999000549316406, 0.22707000374794006, -0.4835200011730194, 0.5113099813461304, 0.19235999882221222, 0.20065000653266907, -0.05050100013613701, 0.8626999855041504, 0.3550199866294861, 0.239779993891716, 0.20059999823570251, 0.01259199995547533, 0.18863999843597412, -0.6061000227928162, 0.4338200092315674, 0.37742000818252563, -0.012063000351190567, -0.21557000279426575, 0.13241000473499298, -0.038047000765800476, -0.4945000112056732, -0.06906399875879288, 0.5912600159645081, 0.5262799859046936, -0.07176300138235092, 0.29857000708580017, 0.11816000193357468, 0.12838999927043915, 0.3648200035095215, -0.14882999658584595, -0.12362000346183777, 0.018970999866724014, -0.47512000799179077, -0.28130999207496643, 0.3004699945449829, -0.4404599964618683, 0.02690899930894375, 0.46779000759124756, -0.2299399971961975, 0.11753000319004059, -0.24092000722885132, 0.33908000588417053, -0.028602000325918198, -0.1736699938774109, -0.6475899815559387, 0.3613699972629547, -0.3054400086402893, -0.08866699784994125, 0.4997299909591675, 0.005345200188457966, 0.19354000687599182, 0.683899998664856, 0.611519992351532, 0.7105500102043152, 0.06873200088739395, -0.08810999989509583, 0.06660199910402298, -0.15602000057697296, 0.17026999592781067, 0.42340999841690063, -0.03636600077152252, -0.077333003282547, 0.1926400065422058, 0.2630699872970581, 0.017246000468730927, 0.6931399703025818, -0.13223999738693237, 0.09873899817466736, -0.31341999769210815, 0.8980799913406372, -0.6257399916648865, -0.23291000723838806, -0.04795699939131737, 0.1792600005865097, -0.2240000069141388, -0.2957000136375427, 0.11181999742984772, -0.027431000024080276, 0.5121300220489502, -0.8829900026321411, 1.2210999727249146, 0.33972999453544617, -0.008983800187706947, -0.4056299924850464, -0.04244299978017807, 0.32635000348091125, -0.3197700083255768, -0.46476998925209045, -0.03531600162386894, -0.20038999617099762, -0.26475000381469727, -0.255950003862381, -0.549239993095398, 0.21930000185966492, 0.48537999391555786, -0.5171599984169006, 0.3018999993801117, -0.4078100025653839, -0.5220999717712402, 0.3406600058078766, 0.10730999708175659, 0.20045000314712524, 0.3650200068950653, 
0.1910800039768219, 0.24275000393390656, -0.01835400052368641, 0.16229000687599182, -0.6160100102424622, 0.3590500056743622, 0.398499995470047, -0.22748999297618866, -0.19943000376224518, 0.4292699992656708, 0.11563999950885773, -0.7495499849319458, -0.7363700270652771, 0.30741000175476074, -0.10733000189065933, -0.11004000157117844, 0.07846099883317947, 0.003531999886035919, -0.11417999863624573, 0.7155299782752991, 0.5024799704551697, 0.13433000445365906, -0.3752099871635437, -0.059477001428604126, 0.6579700112342834, 0.3008899986743927, -0.8413400053977966, -0.2658799886703491, -0.08957000076770782, -0.23789000511169434, -0.27052998542785645, 0.12833000719547272, -0.10885000228881836, -0.6639400124549866, -0.13018999993801117, 0.14413000643253326, -0.5527499914169312, 0.4489800035953522, 0.832260012626648, 0.07375799864530563, 0.34272998571395874, -1.1331000328063965, 0.44043999910354614, 0.3302299976348877, 0.3827599883079529, -0.16054999828338623, -0.08064600080251694, -0.42458999156951904, 0.2711299955844879, 0.03840000182390213, -0.079694002866745, 0.18782000243663788, -0.4207499921321869, -0.029160000383853912, 0.16200000047683716, -0.32196998596191406, -0.018453000113368034, 0.16693000495433807, -0.0026314000133424997, -0.07119999825954437, 0.6017299890518188, -0.5726500153541565, 0.14930999279022217, 0.9554700255393982, 0.537060022354126, -0.20654000341892242, 0.4418799877166748, 0.349839985370636, 0.35016000270843506, 0.3416300117969513, 0.09083600342273712, -0.17700999975204468, 0.39215001463890076, 0.1829800009727478, -0.3836899995803833, -0.5289999842643738, -0.0954509973526001, -0.4580099880695343, 0.1346299946308136, 0.16503000259399414, -0.2955099940299988, -0.16820000112056732, -0.35262998938560486, 0.17072999477386475, -0.39601001143455505, -0.03160100057721138, -0.1031700000166893, -0.30037999153137207, -0.057117998600006104, 0.19399000704288483, -0.18832999467849731, 0.04792400076985359, -0.30101001262664795, -0.19999000430107117, -0.3028999865055084, -0.0185759998857975, -0.4203599989414215, 0.5943999886512756, 0.2754899859428406, -0.3252300024032593, 0.10197000205516815, 0.2249400019645691, 0.2983900010585785, 0.8947399854660034, 0.1452600061893463, -0.305400013923645, -0.38666000962257385, -0.8218700289726257, 0.030956000089645386, -0.6825100183486938, -0.05696500092744827, -0.3364599943161011, 0.08358299732208252, -0.0675320029258728, -0.8689500093460083, -0.3306800127029419, 1.2415000200271606, 0.3635900020599365, -0.09112299978733063, -0.3589499890804291, 0.09703599661588669, 0.22290000319480896, -0.06057000160217285, -0.3908900022506714, -0.16450999677181244, 0.039538998156785965, 0.3409000039100647, 0.06477600336074829, -0.1576399952173233, 1.0355000495910645, -0.5768899917602539, 0.12389999628067017, 0.782289981842041], u'Canvas': [0.11776000261306763, -0.25749000906944275, -0.30239999294281006, -0.5817800164222717, -0.06854899972677231, 0.3100300133228302, -0.642989993095398, -0.6093299984931946, -0.43342000246047974, -0.4262099862098694, -0.02540300041437149, 0.3224300146102905, -0.17177000641822815, -0.043133001774549484, -0.2373500019311905, -0.11386000365018845, -0.09922300279140472, -0.03717400133609772, 0.1990099996328354, -0.20895999670028687, -0.009801800362765789, -0.06243100017309189, 0.009280400350689888, -0.12469000369310379, 0.3485200107097626, -0.07759299874305725, 0.017927000299096107, -0.019030999392271042, 0.04417100176215172, 0.7634699940681458, 0.3228999972343445, 0.19257999956607819, -0.2895500063896179, -0.000591500021982938, 
-0.016530999913811684, 0.38651999831199646, -0.36820998787879944, -0.8469399809837341, 0.0815730020403862, 0.6695700287818909, 0.032986000180244446, -0.2884399890899658, 0.0663990005850792, -0.2928299903869629, 0.14805999398231506, 0.5450199842453003, 0.4271700084209442, -0.25095999240875244, -0.21952000260353088, 0.18694999814033508, 0.04422299936413765, 0.3316799998283386, 0.6321600079536438, -0.33445000648498535, 0.07240799814462662, -0.06504499912261963, -0.17684000730514526, -0.0449879989027977, 0.3676399886608124, -0.05189799889922142, -0.42243000864982605, -0.2804499864578247, -0.19923000037670135, -0.250789999961853, 0.4525099992752075, -0.24196000397205353, -0.42034000158309937, -1.1134999990463257, 0.10976999998092651, -0.3188000023365021, 0.06318099796772003, -0.20806999504566193, -0.45719999074935913, -0.18796999752521515, 0.2138800024986267, -0.0381230004131794, 0.04224200174212456, 0.03913699835538864, 0.3130800127983093, -0.4183500111103058, -0.11474999785423279, -0.0947749987244606, -0.8221799731254578, -0.40553998947143555, -0.43636998534202576, 0.30774998664855957, 0.5503699779510498, 0.15410999953746796, -0.30733001232147217, 0.28119999170303345, 0.16054999828338623, -0.3449000120162964, 0.31376999616622925, -0.23950999975204468, -0.12928999960422516, -0.5474500060081482, -0.25543999671936035, -0.06344100087881088, 0.027545999735593796, -0.6276599764823914, 0.1116499975323677, 0.34123000502586365, -0.33528000116348267, -0.3143100142478943, -0.30028998851776123, -0.1010499969124794, -0.0004915000172331929, 0.012880999594926834, -0.38718000054359436, -0.5114099979400635, -0.34068000316619873, 0.07612399756908417, -0.016567999497056007, 0.2955799996852875, -0.3841499984264374, 0.22146999835968018, 0.19300000369548798, 0.7715299725532532, 0.0022952998988330364, -0.3125300109386444, -0.03221900016069412, -0.20573000609874725, 0.1723800003528595, 0.8471800088882446, 0.46404001116752625, -0.02310599945485592, -0.21786999702453613, 0.33739998936653137, -0.15835000574588776, 0.2512800097465515, 0.13154000043869019, 0.06436800211668015, -0.19404999911785126, 0.5265499949455261, -0.4429599940776825, -0.7163800001144409, -0.28134000301361084, 0.622730016708374, -0.18230000138282776, 0.25328999757766724, 0.1538199931383133, 0.7798699736595154, -0.3813300132751465, -0.19426999986171722, -0.07922899723052979, 0.3818100094795227, 0.07904499769210815, 0.23547999560832977, -0.6780200004577637, 0.10694999992847443, -0.2649100124835968, 0.46751999855041504, -0.16223999857902527, -0.7611100077629089, -0.41705000400543213, 0.32840999960899353, 0.27952998876571655, -0.2235500067472458, 0.27695000171661377, 0.24327999353408813, -0.2825999855995178, -0.5859699845314026, 0.03816699981689453, 0.32297998666763306, 0.4759399890899658, -0.18929000198841095, 0.14735999703407288, 0.7404199838638306, 0.15734000504016876, 0.3575499951839447, 0.12690000236034393, -0.16362999379634857, -0.23765000700950623, -0.49559998512268066, 0.20613999664783478, -0.2484000027179718, 0.040773000568151474, 0.573140025138855, -0.15222999453544617, -0.9987199902534485, 0.3710300028324127, 0.3084000051021576, 0.36309000849723816, -0.011490999720990658, -0.6328499913215637, -0.4516099989414215, 0.12325000017881393, 0.4568699896335602, 0.45032998919487, 0.3812899887561798, 0.6545100212097168, 0.7950599789619446, 0.38065001368522644, -0.2796199917793274, 0.06893300265073776, 0.1373099982738495, -0.5483599901199341, 0.8762000203132629, 0.046755000948905945, -0.03585600107908249, 0.300680011510849, -0.5569999814033508, 
0.3929100036621094, -0.0010371999815106392, 0.38666999340057373, -0.26041001081466675, 0.09444200247526169, 0.5407800078392029, -0.6853700280189514, -0.1417900025844574, -0.07552400231361389, 0.001829099957831204, -0.1435299962759018, 0.3785899877548218, 0.12942999601364136, 0.39809998869895935, 0.1427299976348877, -0.35420000553131104, -0.2549999952316284, 0.16561000049114227, 0.49184998869895935, 0.2903900146484375, 0.1287200003862381, -0.4709300100803375, 0.32346999645233154, -0.14449000358581543, 0.05095599964261055, -0.10757999867200851, -0.09872300177812576, -0.6980599761009216, 0.3866199851036072, 0.20272000133991241, -0.3596700131893158, 0.24196000397205353, 0.7747200131416321, 0.24935999512672424, -0.47418999671936035, -0.0646120011806488, -0.4107300043106079, 0.02492300048470497, -0.1951099932193756, 0.09061700105667114, -0.516290009021759, 0.15539999306201935, -0.6659700274467468, -0.4631899893283844, -0.366890013217926, -0.2941800057888031, -0.1591300070285797, -0.02559800073504448, -0.4878599941730499, 0.2529900074005127, 0.610260009765625, -0.5174000263214111, 0.5217999815940857, 0.378930002450943, -0.3196699917316437, -0.06980200111865997, -0.04400400072336197, 0.23667000234127045, 0.13467000424861908, -0.13804000616073608, 0.2660900056362152, -0.0903640016913414, -0.3041900098323822, -0.2410299926996231, -0.3864000141620636, 0.028049999848008156, -0.37310001254081726, 0.05808800086379051, -0.04430200159549713, 0.06411699950695038, 0.6045500040054321, -0.12318000197410583, -0.7910400032997131, 0.040950000286102295, -0.512910008430481, -0.20782999694347382, -0.5692999958992004, 0.2870999872684479, 0.34586000442504883, -0.3864000141620636, -0.03888599947094917, 0.09629099816083908, -0.03134100139141083, 0.0400019995868206, -0.14797000586986542, 0.2633799910545349, -0.3892599940299988, 0.06704100221395493, -0.17337000370025635, -0.00737369991838932, -0.018343999981880188, -0.21529999375343323, 0.41238999366760254, 0.2899700105190277, 0.5396999716758728, -0.008594200015068054, 0.1139800027012825, -0.2518500089645386], u'Nylon': [0.11185000091791153, 0.07597299665212631, 0.0334319993853569, -0.553629994392395, -0.19370999932289124, -0.14632999897003174, -0.12931999564170837, -0.35137999057769775, 0.017340999096632004, -0.35521000623703003, 0.20934000611305237, 0.1306300014257431, 0.4045499861240387, -0.2553600072860718, -0.3493900001049042, 0.08583199977874756, -0.91007000207901, 0.24146999418735504, -0.1688999980688095, 0.10040000081062317, -0.06762699782848358, -0.695169985294342, -0.3354400098323822, 0.4762600064277649, -0.3094399869441986, 0.07708299905061722, 0.03751400113105774, -0.2983799874782562, 0.06275299936532974, 0.518310010433197, -0.19212999939918518, -0.19693000614643097, -0.04613399878144264, 0.04805000126361847, 0.10497000068426132, 0.0552389994263649, -0.43963998556137085, -0.2215300053358078, 0.796280026435852, 0.9047099947929382, -0.24350999295711517, -0.3179599940776825, -0.10233999788761139, -0.27889999747276306, 0.16075000166893005, 0.4916999936103821, 0.5916000008583069, -0.10057999938726425, -0.20826999843120575, 0.43459999561309814, -0.1123099997639656, 0.2292100042104721, -0.22951999306678772, -0.1282700002193451, 0.17722000181674957, 0.11722999811172485, -0.04680199921131134, -0.8517699837684631, -0.023684000596404076, 0.04681200161576271, -0.09302599728107452, 0.2140900045633316, 0.11023999750614166, -0.06899700313806534, 0.7007099986076355, -0.030577000230550766, -0.3765900135040283, 0.19298000633716583, -0.23872999846935272, 0.20469999313354492, 
-0.23030999302864075, -0.006815900094807148, -0.2473600059747696, 0.278219997882843, 0.18756000697612762, 0.3222599923610687, -0.17193999886512756, -0.08721199631690979, -0.0252470001578331, -0.42754998803138733, -0.2667100131511688, -0.5668500065803528, -0.05734499916434288, -0.3485499918460846, -0.4683299958705902, 0.4192200005054474, 0.08441600203514099, 0.3806999921798706, -0.44523000717163086, -0.3521200120449066, 0.15988999605178833, -0.40483999252319336, 0.5400099754333496, 0.20589999854564667, -0.22201000154018402, 0.2746399939060211, -0.2768299877643585, 0.8968799710273743, 0.0027819001115858555, -0.2516300082206726, 0.25262001156806946, 0.8426499962806702, -0.04746700078248978, -0.3431200087070465, 0.03420000150799751, -0.6859099864959717, 0.5352200269699097, 0.17367999255657196, -0.6289600133895874, -0.14201000332832336, -0.32280001044273376, 0.5119699835777283, 0.022648999467492104, -0.18362000584602356, 0.2614699900150299, 0.02338399924337864, 0.3296099901199341, 0.4769600033760071, 0.3546200096607208, -0.12014000117778778, 0.318450003862381, -0.6374599933624268, 1.055899977684021, 0.2526000142097473, 0.4629800021648407, 0.2390899956226349, -0.02097800001502037, 0.5501899719238281, -0.13207000494003296, 0.4731599986553192, -0.14496000111103058, 0.1823900043964386, -0.2922399938106537, -0.16283999383449554, -0.5876500010490417, -0.27781999111175537, -0.6289700269699097, 0.34033000469207764, -0.2083600014448166, -0.1418199986219406, -0.26864001154899597, 0.02301499992609024, 0.24703000485897064, -0.8041399717330933, 0.6745399832725525, 0.6245999932289124, -0.2800599932670593, -0.8553400039672852, 0.06310500204563141, 0.33476999402046204, -0.504289984703064, -0.07733500003814697, 0.08305700123310089, -0.7077000141143799, 0.02244899980723858, -0.5616199970245361, 0.01268799975514412, -0.24467000365257263, 0.2752099931240082, 0.36757001280784607, 0.23628999292850494, -0.6511399745941162, -0.14036999642848969, 0.2668200135231018, -0.0854249969124794, -0.4431900084018707, -0.3380500078201294, 0.5380399823188782, -0.16687999665737152, -0.21431000530719757, 0.04020199924707413, 0.3752700090408325, -0.15523000061511993, 0.3809100091457367, -0.4061700105667114, -0.5024200081825256, -0.10542000085115433, 0.6520299911499023, 0.0917539969086647, -0.5285199880599976, 0.791450023651123, -0.2831000089645386, 0.20483000576496124, 0.15769000351428986, -0.1545100063085556, 0.0020568999461829662, 0.2532300055027008, 0.7230600118637085, -0.05225599929690361, -0.09195099771022797, 0.10193999856710434, 0.5239400267601013, 0.09703399986028671, -0.6240400075912476, -0.05181900039315224, -0.04481099918484688, -0.5406100153923035, -0.07397399842739105, 0.5082799792289734, -0.010582000017166138, -0.6354100108146667, 0.2179500013589859, 0.7803999781608582, 0.10554999858140945, 0.20600000023841858, 0.6419199705123901, 0.384660005569458, 0.5880399942398071, -0.8062999844551086, -0.08561400324106216, 0.07718099653720856, 0.34984999895095825, -0.02546899951994419, 0.2978000044822693, 0.11529000103473663, -0.1095300018787384, 0.0419670008122921, -0.737779974937439, -0.2789900004863739, -0.518310010433197, 0.44940000772476196, -0.08599600195884705, 0.1265300065279007, -0.40509000420570374, 0.3979699909687042, 0.06059600040316582, -0.15384000539779663, 0.11869999766349792, -0.5024399757385254, -0.6779699921607971, 0.49487999081611633, -0.2006700038909912, 0.002907400019466877, 0.48069000244140625, 0.7811899781227112, 0.09555800259113312, 0.24741999804973602, -0.19599999487400055, -0.4437499940395355, 
0.5778800249099731, 0.2955999970436096, -0.2619900107383728, -0.36917999386787415, 0.07397100329399109, -0.4016900062561035, 0.3529999852180481, 0.5410799980163574, -0.6986799836158752, -0.19729000329971313, -0.5341299772262573, -0.21533000469207764, -0.058504000306129456, 0.5822399854660034, -0.38515999913215637, 0.35238999128341675, 0.39285001158714294, 0.10468000173568726, 0.16423000395298004, 0.13644999265670776, -0.6394299864768982, 0.4702700078487396, -0.32662999629974365, -0.3316600024700165, 0.0013141999952495098, 0.5202500224113464, 0.27619001269340515, -0.4040600061416626, -0.006999200209975243, -0.2252800017595291, 0.6585800051689148, 0.5225899815559387, 0.008446499705314636, -0.04720199853181839, 0.24337999522686005, -0.2659800052642822, 0.3933599889278412, 0.276529997587204, 0.046066999435424805, -0.9360299706459045, 0.6937100291252136, 0.34150001406669617, -0.030573999509215355, -0.1682800054550171, 0.26510000228881836, 0.1204100027680397, 0.4214000105857849, -0.12286999821662903, 0.27272000908851624, -0.399509996175766, -0.005057699978351593, 0.004785500001162291, -0.3501499891281128, 0.10459999740123749, 0.419050008058548, -0.3171899914741516, 0.15647999942302704, 0.4870299994945526, -0.13741999864578247, 0.5196899771690369, 0.03798900172114372], u'Patent.Leather': [0.320279985666275, 0.1373399943113327, 0.20201000571250916, 0.023415999487042427, -0.23228999972343445, -0.06644000113010406, 0.05316900089383125, 0.07862400263547897, -0.013139000162482262, -0.9607700109481812, -0.21727000176906586, -0.08145499974489212, 0.10281000286340714, 0.24017000198364258, 0.03472499921917915, -0.20090000331401825, -0.2791999876499176, 0.6718500256538391, -0.18369999527931213, 0.11964999884366989, -0.3772299885749817, 0.17750999331474304, 0.2807300090789795, 0.14963999390602112, -0.9964900016784668, -0.024354999884963036, 0.31227999925613403, -0.14323000609874725, -0.2888700067996979, 0.7826200127601624, -0.14722999930381775, 0.11243999749422073, -0.814300000667572, -0.027765000239014626, -0.6208299994468689, 0.5914599895477295, -0.08576899766921997, -0.1636199951171875, 0.28033000230789185, 0.37178999185562134, -0.69718998670578, -0.6338499784469604, -0.07243800163269043, -0.6990299820899963, 0.5445200204849243, -0.3038800060749054, 0.15636000037193298, -0.2471799999475479, -0.4692400097846985, 0.29109999537467957, -0.3986800014972687, 0.01960100047290325, 0.15570999681949615, 0.02155900001525879, 0.27219000458717346, -0.16113999485969543, -0.05614500120282173, -0.6595600247383118, 0.07558900117874146, 0.09509199857711792, -0.009356300346553326, -0.05873600021004677, -0.40542998909950256, -0.4925200045108795, 0.16739000380039215, -0.3635300099849701, -0.6791800260543823, -0.20646999776363373, 0.19829000532627106, 0.19067999720573425, 0.2824299931526184, -0.055215999484062195, 0.4657599925994873, -0.5020899772644043, 0.2580200135707855, -0.23984000086784363, -0.16124999523162842, -0.16965000331401825, -0.3687100112438202, -0.5354999899864197, 0.2418700009584427, 0.36816999316215515, -0.30820000171661377, -0.48938998579978943, -0.10033000260591507, -0.4301599860191345, -0.24375000596046448, 0.3955399990081787, -0.8563200235366821, 0.08522699773311615, 0.12536999583244324, 0.24233999848365784, -0.055006999522447586, -0.08333499729633331, 0.04233599826693535, 0.5936400294303894, -0.11635000258684158, -0.0815730020403862, -0.33258000016212463, -0.1943099945783615, -0.31000998616218567, 0.8799999952316284, -0.6165099740028381, -0.714139997959137, -0.2503400146961212, -0.5200899839401245, 
0.6614599823951721, 0.6125100255012512, -0.17357000708580017, -0.7688699960708618, -0.12917999923229218, 0.03411199897527695, -0.19575999677181244, -0.5171499848365784, -0.024707000702619553, -0.23646000027656555, 0.15014000236988068, 0.6591299772262573, 0.31084999442100525, -0.6385300159454346, -0.22391000390052795, -0.11964000016450882, 0.37900999188423157, -0.12244000285863876, -0.1665000021457672, 0.18322999775409698, 0.167820006608963, 0.5069699883460999, 0.21445000171661377, -0.1690399944782257, -0.008311999961733818, -0.08574599772691727, -0.16388000547885895, 0.34650999307632446, -0.6297500133514404, -0.4809400141239166, -0.4261400103569031, 0.2230599969625473, -0.22889000177383423, 0.36719998717308044, 0.5606499910354614, 0.4404599964618683, 0.9167500138282776, -0.6277099847793579, 0.7337700128555298, 0.05577800050377846, -0.07277899980545044, -0.3273699879646301, 0.2655400037765503, 0.13027000427246094, 0.09305299818515778, 0.23232999444007874, -0.4279400110244751, -0.8535400032997131, -0.1460600048303604, -0.372189998626709, -0.4641599953174591, -0.4708400070667267, 0.2945699989795685, 0.19336000084877014, 0.7178099751472473, 0.10603000223636627, -0.16719000041484833, 0.23145000636577606, 0.7326499819755554, -0.4119200110435486, 0.5793899893760681, 0.4956600069999695, 0.22970999777317047, 0.2870999872684479, -0.7600399851799011, 0.3722200095653534, -0.47773000597953796, 0.5303699970245361, -0.7299699783325195, -0.8354600071907043, -0.2558799982070923, 0.49994999170303345, -0.240789994597435, -0.6984599828720093, 0.5922600030899048, 0.26368001103401184, 0.05631599947810173, 0.14757999777793884, 0.8230299949645996, -0.7596399784088135, 0.9071900248527527, 0.5719900131225586, 0.49101001024246216, -0.4599800109863281, -0.028178999200463295, 0.4863100051879883, -0.11738000065088272, 0.1105400025844574, 0.37571999430656433, -0.10271000117063522, -0.4672600030899048, 0.23286999762058258, -0.3844299912452698, 0.3286300003528595, 0.6381400227546692, -0.34033000469207764, 0.5185800194740295, 0.13745999336242676, 0.8479400277137756, -0.04707400128245354, 0.6611400246620178, 0.4970400035381317, -1.0228999853134155, -0.263480007648468, 0.6801699995994568, -0.0806960016489029, -0.2588199973106384, 0.5239400267601013, 0.38166001439094543, -0.47262001037597656, 0.6296799778938293, -0.42080000042915344, -0.5334699749946594, -0.6371999979019165, 0.1580599993467331, -0.0010679999832063913, 0.2142699956893921, -0.12668000161647797, -0.07343199849128723, 0.6901900172233582, -0.2778399884700775, 0.4765999913215637, -0.21198000013828278, -0.4487299919128418, 0.35850000381469727, -0.6176400184631348, -0.26298999786376953, 0.4318999946117401, 0.9145200252532959, 0.10837999731302261, 0.6436700224876404, -0.23469999432563782, -0.7064700126647949, 0.4562300145626068, 0.4773400127887726, 0.20976999402046204, -0.8035799860954285, 0.5206900238990784, 0.015116999857127666, 0.9608299732208252, -0.03911399841308594, -0.34738001227378845, 0.12470000237226486, -0.24270999431610107, -0.7930300235748291, -0.19282999634742737, 0.5221199989318848, -0.20204000174999237, 0.4980199933052063, 0.3032900094985962, -0.2975800037384033, 0.26172998547554016, -0.16524000465869904, -0.5250399708747864, -0.4694899916648865, -0.8069300055503845, 0.8540999889373779, 0.44475001096725464, -0.4258500039577484, -0.0385189987719059, -0.17191000282764435, -0.03782200068235397, -0.28393998742103577, 0.08512900024652481, -0.3799099922180176, -0.35912999510765076, -0.002711700042709708, -0.09387800097465515, -0.7360799908638, 
0.0026227999478578568, -0.5934200286865234, 0.9315699934959412, -0.9944999814033508, 0.185029998421669, -0.010018999688327312, -0.4411199986934662, -0.10202000290155411, -0.3109000027179718, -0.08896899968385696, 0.9317799806594849, -0.18008999526500702, 0.2724199891090393, -0.23865999281406403, -0.49386999011039734, 0.23709000647068024, 0.003082399955019355, -0.251800000667572, 0.4736500084400177, -0.5670199990272522, 0.49171000719070435, -0.11277999728918076, 0.2933500111103058, -0.11552999913692474, 0.24938000738620758], u'Leather': [0.320279985666275, 0.1373399943113327, 0.20201000571250916, 0.023415999487042427, -0.23228999972343445, -0.06644000113010406, 0.05316900089383125, 0.07862400263547897, -0.013139000162482262, -0.9607700109481812, -0.21727000176906586, -0.08145499974489212, 0.10281000286340714, 0.24017000198364258, 0.03472499921917915, -0.20090000331401825, -0.2791999876499176, 0.6718500256538391, -0.18369999527931213, 0.11964999884366989, -0.3772299885749817, 0.17750999331474304, 0.2807300090789795, 0.14963999390602112, -0.9964900016784668, -0.024354999884963036, 0.31227999925613403, -0.14323000609874725, -0.2888700067996979, 0.7826200127601624, -0.14722999930381775, 0.11243999749422073, -0.814300000667572, -0.027765000239014626, -0.6208299994468689, 0.5914599895477295, -0.08576899766921997, -0.1636199951171875, 0.28033000230789185, 0.37178999185562134, -0.69718998670578, -0.6338499784469604, -0.07243800163269043, -0.6990299820899963, 0.5445200204849243, -0.3038800060749054, 0.15636000037193298, -0.2471799999475479, -0.4692400097846985, 0.29109999537467957, -0.3986800014972687, 0.01960100047290325, 0.15570999681949615, 0.02155900001525879, 0.27219000458717346, -0.16113999485969543, -0.05614500120282173, -0.6595600247383118, 0.07558900117874146, 0.09509199857711792, -0.009356300346553326, -0.05873600021004677, -0.40542998909950256, -0.4925200045108795, 0.16739000380039215, -0.3635300099849701, -0.6791800260543823, -0.20646999776363373, 0.19829000532627106, 0.19067999720573425, 0.2824299931526184, -0.055215999484062195, 0.4657599925994873, -0.5020899772644043, 0.2580200135707855, -0.23984000086784363, -0.16124999523162842, -0.16965000331401825, -0.3687100112438202, -0.5354999899864197, 0.2418700009584427, 0.36816999316215515, -0.30820000171661377, -0.48938998579978943, -0.10033000260591507, -0.4301599860191345, -0.24375000596046448, 0.3955399990081787, -0.8563200235366821, 0.08522699773311615, 0.12536999583244324, 0.24233999848365784, -0.055006999522447586, -0.08333499729633331, 0.04233599826693535, 0.5936400294303894, -0.11635000258684158, -0.0815730020403862, -0.33258000016212463, -0.1943099945783615, -0.31000998616218567, 0.8799999952316284, -0.6165099740028381, -0.714139997959137, -0.2503400146961212, -0.5200899839401245, 0.6614599823951721, 0.6125100255012512, -0.17357000708580017, -0.7688699960708618, -0.12917999923229218, 0.03411199897527695, -0.19575999677181244, -0.5171499848365784, -0.024707000702619553, -0.23646000027656555, 0.15014000236988068, 0.6591299772262573, 0.31084999442100525, -0.6385300159454346, -0.22391000390052795, -0.11964000016450882, 0.37900999188423157, -0.12244000285863876, -0.1665000021457672, 0.18322999775409698, 0.167820006608963, 0.5069699883460999, 0.21445000171661377, -0.1690399944782257, -0.008311999961733818, -0.08574599772691727, -0.16388000547885895, 0.34650999307632446, -0.6297500133514404, -0.4809400141239166, -0.4261400103569031, 0.2230599969625473, -0.22889000177383423, 0.36719998717308044, 0.5606499910354614, 0.4404599964618683, 
0.9167500138282776, -0.6277099847793579, 0.7337700128555298, 0.05577800050377846, -0.07277899980545044, -0.3273699879646301, 0.2655400037765503, 0.13027000427246094, 0.09305299818515778, 0.23232999444007874, -0.4279400110244751, -0.8535400032997131, -0.1460600048303604, -0.372189998626709, -0.4641599953174591, -0.4708400070667267, 0.2945699989795685, 0.19336000084877014, 0.7178099751472473, 0.10603000223636627, -0.16719000041484833, 0.23145000636577606, 0.7326499819755554, -0.4119200110435486, 0.5793899893760681, 0.4956600069999695, 0.22970999777317047, 0.2870999872684479, -0.7600399851799011, 0.3722200095653534, -0.47773000597953796, 0.5303699970245361, -0.7299699783325195, -0.8354600071907043, -0.2558799982070923, 0.49994999170303345, -0.240789994597435, -0.6984599828720093, 0.5922600030899048, 0.26368001103401184, 0.05631599947810173, 0.14757999777793884, 0.8230299949645996, -0.7596399784088135, 0.9071900248527527, 0.5719900131225586, 0.49101001024246216, -0.4599800109863281, -0.028178999200463295, 0.4863100051879883, -0.11738000065088272, 0.1105400025844574, 0.37571999430656433, -0.10271000117063522, -0.4672600030899048, 0.23286999762058258, -0.3844299912452698, 0.3286300003528595, 0.6381400227546692, -0.34033000469207764, 0.5185800194740295, 0.13745999336242676, 0.8479400277137756, -0.04707400128245354, 0.6611400246620178, 0.4970400035381317, -1.0228999853134155, -0.263480007648468, 0.6801699995994568, -0.0806960016489029, -0.2588199973106384, 0.5239400267601013, 0.38166001439094543, -0.47262001037597656, 0.6296799778938293, -0.42080000042915344, -0.5334699749946594, -0.6371999979019165, 0.1580599993467331, -0.0010679999832063913, 0.2142699956893921, -0.12668000161647797, -0.07343199849128723, 0.6901900172233582, -0.2778399884700775, 0.4765999913215637, -0.21198000013828278, -0.4487299919128418, 0.35850000381469727, -0.6176400184631348, -0.26298999786376953, 0.4318999946117401, 0.9145200252532959, 0.10837999731302261, 0.6436700224876404, -0.23469999432563782, -0.7064700126647949, 0.4562300145626068, 0.4773400127887726, 0.20976999402046204, -0.8035799860954285, 0.5206900238990784, 0.015116999857127666, 0.9608299732208252, -0.03911399841308594, -0.34738001227378845, 0.12470000237226486, -0.24270999431610107, -0.7930300235748291, -0.19282999634742737, 0.5221199989318848, -0.20204000174999237, 0.4980199933052063, 0.3032900094985962, -0.2975800037384033, 0.26172998547554016, -0.16524000465869904, -0.5250399708747864, -0.4694899916648865, -0.8069300055503845, 0.8540999889373779, 0.44475001096725464, -0.4258500039577484, -0.0385189987719059, -0.17191000282764435, -0.03782200068235397, -0.28393998742103577, 0.08512900024652481, -0.3799099922180176, -0.35912999510765076, -0.002711700042709708, -0.09387800097465515, -0.7360799908638, 0.0026227999478578568, -0.5934200286865234, 0.9315699934959412, -0.9944999814033508, 0.185029998421669, -0.010018999688327312, -0.4411199986934662, -0.10202000290155411, -0.3109000027179718, -0.08896899968385696, 0.9317799806594849, -0.18008999526500702, 0.2724199891090393, -0.23865999281406403, -0.49386999011039734, 0.23709000647068024, 0.003082399955019355, -0.251800000667572, 0.4736500084400177, -0.5670199990272522, 0.49171000719070435, -0.11277999728918076, 0.2933500111103058, -0.11552999913692474, 0.24938000738620758], u'Satin': [-0.197610005736351, -0.24403999745845795, -0.12399999797344208, 0.3229599893093109, -0.5281999707221985, -0.05816800147294998, 0.012153999879956245, -0.7126700282096863, 0.17282000184059143, 0.13332000374794006, -0.23106999695301056, 
0.24980999529361725, -0.4471299946308136, 0.6337599754333496, -0.08679600059986115, -0.044321998953819275, -0.11328999698162079, 0.44339999556541443, -0.12442000210285187, -0.08335600048303604, -0.1703300029039383, -0.028098000213503838, 0.036657001823186874, 0.37762999534606934, 0.014769000001251698, -0.3226499855518341, 0.7011100053787231, -0.023057999089360237, -0.0865359976887703, -0.1767899990081787, 0.11827000230550766, -0.4357599914073944, -0.03354300186038017, 0.22397999465465546, -0.3085300028324127, 0.384550005197525, -0.016913000494241714, -0.26952001452445984, 0.16827000677585602, 0.09297099709510803, -0.6539099812507629, -0.28937000036239624, -0.13777999579906464, -0.4848499894142151, 0.058285001665353775, -0.09163100272417068, 0.22812999784946442, 0.030388999730348587, -0.4112600088119507, 0.3778499960899353, -0.05515100061893463, -0.025067999958992004, 0.5290200114250183, -0.7571200132369995, -0.5366899967193604, -0.34755000472068787, -0.28707998991012573, -0.3547300100326538, 0.4560900032520294, -0.09517200291156769, 0.1964700073003769, -0.18556000292301178, 0.0051115998066961765, 0.043535999953746796, 0.6535199880599976, -0.12398000061511993, 0.4344100058078766, 0.08564899861812592, 0.27465999126434326, 0.025188999250531197, 0.1779100000858307, -0.10730999708175659, -0.3391200006008148, -0.8235599994659424, 0.1965699940919876, 0.5229799747467041, -0.05955599993467331, 0.20110000669956207, -0.2262600064277649, -0.6744199991226196, 0.025350000709295273, 0.34678998589515686, -0.15059000253677368, -0.3381600081920624, 0.3563700020313263, 0.4930900037288666, 0.5108100175857544, -0.06915199756622314, 0.09475599974393845, 0.23905999958515167, 0.12489999830722809, 0.04644700139760971, 0.11738000065088272, -0.3598400056362152, -0.34790000319480896, 0.29357999563217163, 0.0993029996752739, -0.03524800017476082, 0.24800999462604523, 0.3912299871444702, 0.34391000866889954, 0.2565299868583679, 0.1817300021648407, -0.006895000115036964, 0.20996999740600586, -0.23598000407218933, 0.4146600067615509, 0.11247000098228455, -0.27206000685691833, -0.06303700059652328, -0.4022499918937683, 0.5847600102424622, 0.09646400064229965, -0.4366599917411804, 0.12234000116586685, 0.05423099920153618, 0.22798000276088715, 1.1568000316619873, 0.2908099889755249, -0.5517699718475342, 0.17236000299453735, -0.19083000719547272, 0.22317999601364136, -0.360289990901947, 0.08518800139427185, -0.26903998851776123, 0.1149199977517128, 0.6331200003623962, -0.24751999974250793, 0.15749000012874603, -0.21910999715328217, -0.07188600301742554, -0.6615800261497498, 0.5601500272750854, -0.44753000140190125, -0.2837899923324585, -0.8572800159454346, 0.5834400057792664, -0.07319100201129913, 0.28202998638153076, 0.17655999958515167, 0.06477200239896774, 0.0008790100109763443, -0.6726599931716919, 0.2936199903488159, -0.029172999784350395, -0.2574700117111206, -0.8428099751472473, 0.15211999416351318, 0.30020999908447266, -0.25986000895500183, 0.2671700119972229, -0.046525999903678894, -0.5841799974441528, -0.09004300087690353, -0.08682499825954437, 0.1219400018453598, -0.4825499951839447, -0.19029000401496887, 0.4986500144004822, -0.22735999524593353, -0.579230010509491, -0.05925999954342842, 0.016189999878406525, -0.058511000126600266, -0.1385899931192398, -0.47551000118255615, 0.6205999851226807, 0.3500100076198578, 0.06688400357961655, -0.6089000105857849, -0.08866599947214127, 0.1685899943113327, 0.040316998958587646, -0.23601000010967255, -0.8530200123786926, -0.3666900098323822, 1.0396000146865845, 
-0.8686599731445312, -0.5602800250053406, 0.1062999963760376, 0.1920499950647354, 0.7289299964904785, 0.3058199882507324, 0.7003399729728699, -0.2940399944782257, 0.7104799747467041, 0.3876599967479706, 0.11206000298261642, -0.03545999899506569, 0.4897400140762329, 0.4223499894142151, -0.22891999781131744, -0.04611000046133995, 0.18535000085830688, 0.15327000617980957, -0.4197700023651123, 0.58228999376297, -0.11287999898195267, -0.35324999690055847, 0.08799199759960175, -0.36899998784065247, 0.6673600077629089, 0.23902000486850739, 0.4507000148296356, 0.3032799959182739, -0.05208300054073334, 0.3372200131416321, -0.820110023021698, -0.12935000658035278, -0.019298000261187553, 0.06604799628257751, 0.15904000401496887, 0.6204699873924255, 0.35600998997688293, -0.0002059700054815039, 0.8068100214004517, -1.0723999738693237, -0.06260699778795242, -0.5611699819564819, 0.298880010843277, -0.05324200168251991, 0.4420900046825409, -0.14730000495910645, -0.32910001277923584, -0.15102000534534454, -0.07690799981355667, -0.08342500030994415, -0.4773299992084503, -1.0120999813079834, 0.3291899859905243, -0.4510999917984009, -0.27904999256134033, 0.13846999406814575, 0.6043000221252441, 0.014561999589204788, 0.3755500018596649, -0.5783699750900269, -0.14821000397205353, -0.10496000200510025, 0.11332999914884567, -0.017938999459147453, -0.3692600131034851, 0.8411499857902527, -0.5013599991798401, 0.6949800252914429, 0.11816000193357468, -0.32280999422073364, 0.4478699862957001, -0.3470599949359894, -0.5090000033378601, 0.38019001483917236, 0.009673500433564186, 0.00078444997780025, 0.3802500069141388, -0.5419999957084656, 0.04602799937129021, 0.43856000900268555, -0.11371999979019165, -0.2016499936580658, -0.11698000133037567, 0.23050999641418457, 0.22442999482154846, 0.31863999366760254, -0.7429500222206116, 0.38853999972343445, -0.24251000583171844, -0.09753300249576569, -0.10197000205516815, 0.4185999929904938, 0.2387399971485138, -0.4792500138282776, -0.0633459985256195, -0.38429999351501465, -0.6972699761390686, 0.2828800082206726, 0.23142999410629272, -0.27605000138282776, -1.0936000347137451, 0.07041200250387192, 0.3959200084209442, 0.36256998777389526, -0.1029599979519844, 0.02188899926841259, -0.08692800253629684, -0.01598300039768219, -0.24682000279426575, 0.7046999931335449, -0.5762699842453003, -0.6213300228118896, -0.4783799946308136, -0.1461700052022934, 0.0011355000315234065, 0.8987299799919128, -0.33340999484062195, -0.40964001417160034, 0.6242700219154358, 0.11153999716043472, 0.39177000522613525, -0.03908900171518326], u'Hair.Calf': [-0.17782999575138092, -0.12342999875545502, -0.2675899863243103, -0.11969000101089478, -0.1327199935913086, -0.06735599786043167, -0.3056899905204773, 0.1921599954366684, 0.7000899910926819, -1.4428000450134277, 0.014871999621391296, 0.3147200047969818, -0.03840100020170212, 0.4485799968242645, 0.049525000154972076, -0.12518000602722168, 0.6982300281524658, 0.016652999445796013, -0.6890100240707397, -0.1205499991774559, -0.7501099705696106, 0.24653999507427216, -0.11738000065088272, 0.7127000093460083, -0.3893899917602539, 0.14329999685287476, 0.03225899860262871, -0.6736999750137329, 0.14575999975204468, 0.48416998982429504, -0.0262449998408556, 0.023087000474333763, -0.5595399737358093, -0.05503999814391136, -0.8291400074958801, 0.3745099902153015, -0.047784000635147095, -0.22519999742507935, -0.3313100039958954, -0.04357200115919113, -0.28676000237464905, -0.5304399728775024, -0.13200999796390533, -0.7903800010681152, 0.49406999349594116, 
-0.4215799868106842, -0.3527199923992157, -0.37116000056266785, 0.3951599895954132, -0.5865799784660339, -0.3316900134086609, -0.17931999266147614, 0.08724100142717361, 0.0878250002861023, 0.03681999817490578, -0.11270999908447266, -0.5851899981498718, -0.336870014667511, -0.05160500109195709, -0.5704799890518188, 0.32350000739097595, -0.1788800060749054, 0.5801699757575989, 0.2698099911212921, 0.24785999953746796, -0.752810001373291, -0.41923001408576965, 0.4724699854850769, 0.651889979839325, -0.08144500106573105, 0.24108999967575073, -0.5160099864006042, 0.4155600070953369, 0.17288999259471893, 0.292959988117218, -0.3279699981212616, -0.02672399953007698, -0.3174299895763397, -0.15324999392032623, -0.2391200065612793, -0.5508599877357483, 0.6527000069618225, 0.27241000533103943, 0.19846999645233154, 0.0015228999545797706, 0.5952699780464172, 0.34516000747680664, 0.029062999412417412, -0.22859999537467957, 0.16856999695301056, -0.3674600124359131, 0.023799000307917595, -0.09345799684524536, 0.14294999837875366, -0.23903000354766846, 0.11072000116109848, -0.31633999943733215, -0.22206999361515045, 1.0628999471664429, -0.17598000168800354, 0.6144099831581116, -0.659030020236969, -0.07663899660110474, -0.3370800018310547, 0.01077600009739399, 0.1916700005531311, 0.5451499819755554, 0.2896699905395508, -0.9051399827003479, 0.049633998423814774, -0.011307000182569027, 0.6455900073051453, 0.0732479989528656, -0.21077999472618103, 0.3359000086784363, 0.2718299925327301, -0.5671200156211853, 0.7849599719047546, -0.3176000118255615, -0.4463599920272827, -0.3017599880695343, 0.06731300055980682, 0.1394300013780594, 0.2772899866104126, 0.043845001608133316, 0.15068000555038452, -0.6125100255012512, 0.39761999249458313, 0.07394500076770782, -0.5436300039291382, 0.5733399987220764, -0.010963000357151031, -0.39923998713493347, -0.014976000413298607, -0.2892000079154968, 0.2285500019788742, -0.24974000453948975, -0.12495999783277512, 0.06300699710845947, 0.025986000895500183, 0.3969300091266632, 0.04687200114130974, 0.10211999714374542, -0.35260000824928284, 0.2113099992275238, 0.3607400059700012, 0.1545799970626831, 0.4747300148010254, 0.19922000169754028, -0.20462000370025635, 0.24379999935626984, 0.3608100116252899, -0.5717800259590149, -0.6045500040054321, -0.01536799967288971, -0.7539399862289429, 0.37637001276016235, -0.8854399919509888, 0.5166699886322021, 0.9347500205039978, 0.23959000408649445, -0.27028998732566833, -0.5587499737739563, 0.11913999915122986, -0.3502900004386902, 0.2997399866580963, 0.06508299708366394, 0.5756499767303467, 0.39535999298095703, -0.2939800024032593, 0.24834999442100525, 0.5941399931907654, -0.12336999922990799, -0.4292199909687042, 0.2739199995994568, -0.20352999866008759, 0.06909599900245667, 0.38863998651504517, 0.20816999673843384, -0.6449699997901917, 0.010711999610066414, -0.4095900058746338, 0.18975000083446503, 0.21127000451087952, -0.372979998588562, 0.3053700029850006, 1.0959999561309814, -0.4526500105857849, 0.0054652998223900795, 0.01907699927687645, -0.31042999029159546, 0.2431199997663498, -0.41523998975753784, 0.5000500082969666, -0.10569000244140625, -0.3886600136756897, -0.3539400100708008, -0.30386000871658325, -0.11682000011205673, 0.1567399948835373, 1.2711999416351318, 0.0013837999431416392, 1.037500023841858, 0.5199900269508362, -0.06636600196361542, 0.35293999314308167, -0.016858000308275223, 0.5503299832344055, -0.6370199918746948, -0.3161900043487549, 0.298550009727478, 0.11217000335454941, -0.2571600079536438, 0.35798999667167664, 
-0.03947800025343895, 0.414110004901886, 0.7214999794960022, -0.6320300102233887, 0.24267999827861786, -0.6170399785041809, -0.055890001356601715, -0.32471001148223877, 0.2700299918651581, -0.14246000349521637, 0.2721500098705292, -0.11535000056028366, -0.18201999366283417, 0.21040000021457672, -0.3197999894618988, 0.3721199929714203, 0.9535700082778931, 0.05803399905562401, 0.2707599997520447, -0.13123999536037445, 0.008258800022304058, -0.5071499943733215, 0.08798299729824066, 0.18313999474048615, -0.5934900045394897, -0.14482000470161438, 0.8331500291824341, 0.018438000231981277, -0.23951999843120575, 0.6507200002670288, -0.8234000205993652, 0.08004400134086609, -0.03560600057244301, -0.19704000651836395, 0.06499899923801422, -0.07267700135707855, 0.19603000581264496, -0.028963999822735786, 0.6169700026512146, -0.9715399742126465, 0.5585899949073792, 0.1774899959564209, 0.2588300108909607, 0.573140025138855, -0.35390999913215637, -0.14013999700546265, 0.45860999822616577, -0.7767300009727478, -0.010076000355184078, 0.08136399835348129, -0.2616100013256073, 0.06291799992322922, -0.20689000189304352, 0.320499986410141, -0.5160800218582153, 0.5317599773406982, -0.2517699897289276, -0.34088999032974243, -0.5313599705696106, -0.2775300145149231, -0.4233199954032898, 0.34529998898506165, -1.0324000120162964, -0.23690000176429749, -0.7773399949073792, -0.17887000739574432, -0.3516699969768524, 0.7312800288200378, 0.35583001375198364, 0.4602699875831604, -0.37749001383781433, 0.856249988079071, 0.15181000530719757, 0.055528998374938965, -0.350600004196167, 0.8498799800872803, 0.17199000716209412, -0.09520400315523148, -0.6055600047111511, 0.41600000858306885, -0.012474999763071537, -0.4451099932193756, 0.5662800073623657, 0.24911999702453613, 0.29613998532295227, 0.3441399931907654], u'Full.grain.leather': [0.320279985666275, 0.1373399943113327, 0.20201000571250916, 0.023415999487042427, -0.23228999972343445, -0.06644000113010406, 0.05316900089383125, 0.07862400263547897, -0.013139000162482262, -0.9607700109481812, -0.21727000176906586, -0.08145499974489212, 0.10281000286340714, 0.24017000198364258, 0.03472499921917915, -0.20090000331401825, -0.2791999876499176, 0.6718500256538391, -0.18369999527931213, 0.11964999884366989, -0.3772299885749817, 0.17750999331474304, 0.2807300090789795, 0.14963999390602112, -0.9964900016784668, -0.024354999884963036, 0.31227999925613403, -0.14323000609874725, -0.2888700067996979, 0.7826200127601624, -0.14722999930381775, 0.11243999749422073, -0.814300000667572, -0.027765000239014626, -0.6208299994468689, 0.5914599895477295, -0.08576899766921997, -0.1636199951171875, 0.28033000230789185, 0.37178999185562134, -0.69718998670578, -0.6338499784469604, -0.07243800163269043, -0.6990299820899963, 0.5445200204849243, -0.3038800060749054, 0.15636000037193298, -0.2471799999475479, -0.4692400097846985, 0.29109999537467957, -0.3986800014972687, 0.01960100047290325, 0.15570999681949615, 0.02155900001525879, 0.27219000458717346, -0.16113999485969543, -0.05614500120282173, -0.6595600247383118, 0.07558900117874146, 0.09509199857711792, -0.009356300346553326, -0.05873600021004677, -0.40542998909950256, -0.4925200045108795, 0.16739000380039215, -0.3635300099849701, -0.6791800260543823, -0.20646999776363373, 0.19829000532627106, 0.19067999720573425, 0.2824299931526184, -0.055215999484062195, 0.4657599925994873, -0.5020899772644043, 0.2580200135707855, -0.23984000086784363, -0.16124999523162842, -0.16965000331401825, -0.3687100112438202, -0.5354999899864197, 0.2418700009584427, 
0.36816999316215515, -0.30820000171661377, -0.48938998579978943, -0.10033000260591507, -0.4301599860191345, -0.24375000596046448, 0.3955399990081787, -0.8563200235366821, 0.08522699773311615, 0.12536999583244324, 0.24233999848365784, -0.055006999522447586, -0.08333499729633331, 0.04233599826693535, 0.5936400294303894, -0.11635000258684158, -0.0815730020403862, -0.33258000016212463, -0.1943099945783615, -0.31000998616218567, 0.8799999952316284, -0.6165099740028381, -0.714139997959137, -0.2503400146961212, -0.5200899839401245, 0.6614599823951721, 0.6125100255012512, -0.17357000708580017, -0.7688699960708618, -0.12917999923229218, 0.03411199897527695, -0.19575999677181244, -0.5171499848365784, -0.024707000702619553, -0.23646000027656555, 0.15014000236988068, 0.6591299772262573, 0.31084999442100525, -0.6385300159454346, -0.22391000390052795, -0.11964000016450882, 0.37900999188423157, -0.12244000285863876, -0.1665000021457672, 0.18322999775409698, 0.167820006608963, 0.5069699883460999, 0.21445000171661377, -0.1690399944782257, -0.008311999961733818, -0.08574599772691727, -0.16388000547885895, 0.34650999307632446, -0.6297500133514404, -0.4809400141239166, -0.4261400103569031, 0.2230599969625473, -0.22889000177383423, 0.36719998717308044, 0.5606499910354614, 0.4404599964618683, 0.9167500138282776, -0.6277099847793579, 0.7337700128555298, 0.05577800050377846, -0.07277899980545044, -0.3273699879646301, 0.2655400037765503, 0.13027000427246094, 0.09305299818515778, 0.23232999444007874, -0.4279400110244751, -0.8535400032997131, -0.1460600048303604, -0.372189998626709, -0.4641599953174591, -0.4708400070667267, 0.2945699989795685, 0.19336000084877014, 0.7178099751472473, 0.10603000223636627, -0.16719000041484833, 0.23145000636577606, 0.7326499819755554, -0.4119200110435486, 0.5793899893760681, 0.4956600069999695, 0.22970999777317047, 0.2870999872684479, -0.7600399851799011, 0.3722200095653534, -0.47773000597953796, 0.5303699970245361, -0.7299699783325195, -0.8354600071907043, -0.2558799982070923, 0.49994999170303345, -0.240789994597435, -0.6984599828720093, 0.5922600030899048, 0.26368001103401184, 0.05631599947810173, 0.14757999777793884, 0.8230299949645996, -0.7596399784088135, 0.9071900248527527, 0.5719900131225586, 0.49101001024246216, -0.4599800109863281, -0.028178999200463295, 0.4863100051879883, -0.11738000065088272, 0.1105400025844574, 0.37571999430656433, -0.10271000117063522, -0.4672600030899048, 0.23286999762058258, -0.3844299912452698, 0.3286300003528595, 0.6381400227546692, -0.34033000469207764, 0.5185800194740295, 0.13745999336242676, 0.8479400277137756, -0.04707400128245354, 0.6611400246620178, 0.4970400035381317, -1.0228999853134155, -0.263480007648468, 0.6801699995994568, -0.0806960016489029, -0.2588199973106384, 0.5239400267601013, 0.38166001439094543, -0.47262001037597656, 0.6296799778938293, -0.42080000042915344, -0.5334699749946594, -0.6371999979019165, 0.1580599993467331, -0.0010679999832063913, 0.2142699956893921, -0.12668000161647797, -0.07343199849128723, 0.6901900172233582, -0.2778399884700775, 0.4765999913215637, -0.21198000013828278, -0.4487299919128418, 0.35850000381469727, -0.6176400184631348, -0.26298999786376953, 0.4318999946117401, 0.9145200252532959, 0.10837999731302261, 0.6436700224876404, -0.23469999432563782, -0.7064700126647949, 0.4562300145626068, 0.4773400127887726, 0.20976999402046204, -0.8035799860954285, 0.5206900238990784, 0.015116999857127666, 0.9608299732208252, -0.03911399841308594, -0.34738001227378845, 0.12470000237226486, -0.24270999431610107, 
-0.7930300235748291, -0.19282999634742737, 0.5221199989318848, -0.20204000174999237, 0.4980199933052063, 0.3032900094985962, -0.2975800037384033, 0.26172998547554016, -0.16524000465869904, -0.5250399708747864, -0.4694899916648865, -0.8069300055503845, 0.8540999889373779, 0.44475001096725464, -0.4258500039577484, -0.0385189987719059, -0.17191000282764435, -0.03782200068235397, -0.28393998742103577, 0.08512900024652481, -0.3799099922180176, -0.35912999510765076, -0.002711700042709708, -0.09387800097465515, -0.7360799908638, 0.0026227999478578568, -0.5934200286865234, 0.9315699934959412, -0.9944999814033508, 0.185029998421669, -0.010018999688327312, -0.4411199986934662, -0.10202000290155411, -0.3109000027179718, -0.08896899968385696, 0.9317799806594849, -0.18008999526500702, 0.2724199891090393, -0.23865999281406403, -0.49386999011039734, 0.23709000647068024, 0.003082399955019355, -0.251800000667572, 0.4736500084400177, -0.5670199990272522, 0.49171000719070435, -0.11277999728918076, 0.2933500111103058, -0.11552999913692474, 0.24938000738620758], u'Rubber': [0.2986299991607666, 0.06507299840450287, -0.11800999939441681, -0.013868999667465687, -0.33441999554634094, -0.6196799874305725, 0.0966470018029213, 0.688510000705719, -0.012130999937653542, -0.5205399990081787, -0.0633540004491806, -0.284280002117157, -0.3378799855709076, -0.5685399770736694, 0.2430499941110611, -0.3166300058364868, -0.19812999665737152, 0.7768800258636475, 0.06390400230884552, 0.38328999280929565, -0.3698500096797943, 0.002942899940535426, 0.2739099860191345, 0.4095200002193451, -0.7394000291824341, 0.13287000358104706, -0.2543799877166748, 0.1751600056886673, -0.34529998898506165, 0.7320299744606018, 0.35207998752593994, -0.40283000469207764, -0.22826999425888062, 0.33076998591423035, 0.03148899972438812, 0.4479300081729889, 0.14959000051021576, -0.34338998794555664, 0.5040799975395203, 0.6566600203514099, -0.13862000405788422, -0.3896700143814087, -0.10209999978542328, -0.003396800020709634, -0.3211899995803833, -0.21886000037193298, -0.29502999782562256, -0.4569700062274933, 0.014921000227332115, 1.4467999935150146, 0.192780002951622, 0.43167999386787415, -0.11180999875068665, 0.4858799874782562, 0.455159991979599, 0.1584099978208542, -0.07071900367736816, 0.1256999969482422, 0.02778399921953678, -0.7682499885559082, 0.13549000024795532, -0.3788999915122986, -0.8564000129699707, -0.48135998845100403, 0.7329999804496765, -0.05452600121498108, -0.5305899977684021, -0.20156000554561615, -0.4888800084590912, 0.4542999863624573, -0.330020010471344, 0.30017998814582825, -0.24556000530719757, 0.6433699727058411, 0.05084700137376785, 0.31984999775886536, 0.3308899998664856, -0.11275999993085861, 0.1404999941587448, -0.5774099826812744, 0.2191700041294098, 0.3236300051212311, 0.001630699960514903, 0.38207998871803284, -0.4682599902153015, -0.03551200032234192, 0.04054199904203415, -0.1838800013065338, -0.5474900007247925, 0.06058499962091446, 0.2213200032711029, -0.39937999844551086, -0.2192399948835373, -0.31856000423431396, 0.43827998638153076, 0.20409999787807465, -0.6905800104141235, 0.12713000178337097, -0.3513199985027313, -0.7056099772453308, 0.05018499866127968, 0.9441800117492676, -0.46459999680519104, -0.8096699714660645, 0.2837499976158142, 0.31582000851631165, -0.45987001061439514, -0.22417999804019928, -0.3726600110530853, -0.046720001846551895, 0.6299300193786621, -0.0013876999728381634, -0.19347000122070312, -0.2539600133895874, 0.51146000623703, 0.19089999794960022, 0.28334999084472656, 0.6500300168991089, 
0.1527000069618225, -0.24301999807357788, -0.29218998551368713, -0.20714999735355377, 0.07373400032520294, -0.3077999949455261, -0.5839499831199646, 0.5425500273704529, 0.3295600116252899, -0.18283000588417053, 0.5159100294113159, 0.052848998457193375, 0.10576999932527542, 1.0450999736785889, 0.08685000240802765, 0.7823299765586853, -0.41321998834609985, -0.27790001034736633, 0.09461499750614166, 0.16659000515937805, 0.7350800037384033, 0.5509099960327148, 0.29973000288009644, 0.47947001457214355, 0.15665000677108765, -0.31314000487327576, -0.02555599994957447, 0.7722399830818176, -0.14316000044345856, -0.2017199993133545, -0.01362099964171648, -0.3018600046634674, -0.12732000648975372, 0.14295999705791473, 0.17744000256061554, -0.27441999316215515, 0.6162099838256836, -0.20860999822616577, 0.0681539997458458, -0.6604499816894531, 0.36476001143455505, -0.2655400037765503, -0.14114999771118164, -0.1935800015926361, 0.015495999716222286, -0.07668200135231018, 0.6678000092506409, -0.3740699887275696, 0.03956000134348869, 1.0391000509262085, 0.24718999862670898, 0.03426099941134453, -0.5722299814224243, 0.4587399959564209, 0.37490999698638916, 0.32493001222610474, 0.1035899966955185, -0.4659099876880646, 0.0006781899719499052, 0.5013999938964844, 0.039684999734163284, -0.09893699735403061, 0.26076000928878784, 0.07077699899673462, -0.029262999072670937, -0.10665000230073929, -0.09894700348377228, -0.3204900026321411, 0.751010000705719, 0.7310400009155273, 0.15775999426841736, -0.03548799827694893, 0.43623000383377075, 0.8976899981498718, -0.3375000059604645, -0.041127998381853104, 0.18987999856472015, 0.6549800038337708, 0.3146199882030487, -0.17563000321388245, 0.3952699899673462, 0.30741000175476074, 0.16116000711917877, 0.5968599915504456, 0.1319900006055832, 0.010394999757409096, -0.25290998816490173, 0.35238000750541687, 0.6152399778366089, -0.25044000148773193, -1.347499966621399, -0.526960015296936, -0.13305999338626862, 0.3326599895954132, 0.11784999817609787, -0.46347999572753906, 0.6791800260543823, -0.10200999677181244, 0.510699987411499, -0.25613000988960266, 0.03677799925208092, -0.5928999781608582, 0.4637100100517273, -0.6692600250244141, 0.4978500008583069, 0.20206999778747559, -0.1068200021982193, 0.3999499976634979, -0.5158600211143494, 0.08559499680995941, -0.32339999079704285, 0.2120400071144104, 0.39921000599861145, -0.14725999534130096, -0.2765200138092041, -0.06712699681520462, 0.8468300104141235, 0.3279399871826172, 0.05340899899601936, -0.656470000743866, -0.265749990940094, 0.36757999658584595, 0.23101000487804413, -0.08473599702119827, -0.7307900190353394, 0.2536099851131439, -0.2222599983215332, 0.16269999742507935, 0.1887200027704239, -0.0710889995098114, 0.04977300018072128, -0.47350001335144043, 0.38672998547554016, -0.9177500009536743, 0.609000027179718, -0.694890022277832, 0.5725600123405457, -0.048909999430179596, -0.7856900095939636, 0.28022998571395874, 0.22181999683380127, -0.33755001425743103, -0.40898001194000244, -0.17364999651908875, -0.15484000742435455, -0.3169499933719635, 0.14271999895572662, 0.9213399887084961, -0.9323400259017944, -0.17607000470161438, -0.06615299731492996, 0.1668899953365326, 0.07637500017881393, -0.18207000195980072, 0.7608100175857544, -0.36103999614715576, -1.0786999464035034, -0.13590000569820404, -1.2842999696731567, -0.12902000546455383, -0.7518600225448608, 0.7635599970817566, -0.23419000208377838, -1.184000015258789, -0.6232399940490723, 0.43474000692367554, -0.12745000422000885, 0.11439000070095062, 0.5758500099182129, 
0.17409999668598175, -0.46950000524520874, -0.085037000477314, -0.4584600031375885, -0.3844900131225586, 0.43893998861312866, 0.5967900156974792, 0.3405100107192993, 0.911620020866394, -0.430620014667511, -0.46198999881744385, -0.1901800036430359, 0.3259100019931793], u'Cotton': [-0.4855400025844574, -0.11411000043153763, 0.045823998749256134, -0.32666000723838806, -0.18908999860286713, -0.21142999827861786, 0.17017999291419983, -0.26513001322746277, 0.1303199976682663, -0.5010499954223633, -0.24133999645709991, -0.7208700180053711, 0.14618000388145447, 0.08438900113105774, 0.09275899827480316, -0.006888499949127436, -0.15324999392032623, -0.3196200132369995, -0.4148699939250946, -0.26739001274108887, -0.5365300178527832, -0.5952500104904175, 0.15410999953746796, 0.32923001050949097, 0.0034668000880628824, 0.15602000057697296, -0.3842799961566925, -0.5342400074005127, -0.7240300178527832, 0.13455000519752502, -0.37338998913764954, 0.35613998770713806, -0.7850599884986877, 0.030786000192165375, -0.7758299708366394, 0.7107499837875366, 0.5915200114250183, -0.2745800018310547, 0.22495999932289124, -0.07376500219106674, -0.48627999424934387, -0.6427900195121765, -0.14503000676631927, 0.20397000014781952, 0.23124000430107117, -0.28937000036239624, -0.10552000254392624, -0.23397000133991241, 0.07769100368022919, -0.17403000593185425, 0.8070099949836731, 0.5702999830245972, -0.3167699873447418, -0.30235999822616577, -0.30469000339508057, -0.34318000078201294, -0.33410000801086426, -0.43689998984336853, 0.3951199948787689, -0.8269000053405762, -0.4124799966812134, -0.8205699920654297, -0.49733999371528625, 0.1007699966430664, 0.21347999572753906, 0.12196999788284302, 0.06375200301408768, -0.5659800171852112, -0.4268999993801117, 0.0028291998896747828, 0.6122400164604187, 0.22891999781131744, -0.2737799882888794, -0.13967999815940857, -0.17177000641822815, 0.04463899880647659, 0.035057999193668365, -0.42930999398231506, -0.22032999992370605, 0.2334199994802475, 0.2787199914455414, -0.09884999692440033, -0.7591800093650818, 0.023218000307679176, 0.1363700032234192, 0.17847999930381775, -0.19894999265670776, 0.11539000272750854, 0.4516800045967102, -0.3623400032520294, 0.48315998911857605, -0.10379000008106232, 0.2775700092315674, 0.12947000563144684, -0.2814500033855438, 0.4572800099849701, 0.30994001030921936, 0.36462000012397766, -0.2651199996471405, 0.3347100019454956, 0.10780999809503555, 0.8299099802970886, -0.5795400142669678, -0.1703599989414215, -0.6583700180053711, 0.2259799987077713, -0.06634899973869324, 0.16624000668525696, -0.6624400019645691, 0.23859000205993652, -0.15967999398708344, -0.09475299715995789, -0.26686999201774597, 0.1301099956035614, 0.19442999362945557, 0.22536000609397888, 0.6286399960517883, 1.0521999597549438, 0.44765999913215637, -0.12161999940872192, -0.40128999948501587, 0.12320999801158905, 0.7168400287628174, -0.0148930000141263, 0.10965999960899353, 0.5205600261688232, 0.03360699862241745, 0.4652000069618225, 0.5386599898338318, 0.2632800042629242, -0.03514999896287918, 0.7741100192070007, -0.43303999304771423, -0.3841499984264374, -0.5683900117874146, 0.022863000631332397, -0.20923000574111938, 0.6434999704360962, -0.4309700131416321, -0.23236000537872314, -0.20410999655723572, 0.012570999562740326, 0.2374899983406067, -0.7210400104522705, -0.31431999802589417, 1.0090999603271484, -0.07662300020456314, -0.8828799724578857, 0.3297800123691559, -0.3161900043487549, -0.46502000093460083, -0.06824900209903717, -0.00914829969406128, -0.9161400198936462, 
0.075764000415802, 0.10509999841451645, -0.4009999930858612, -0.15745000541210175, 0.728879988193512, 0.2402999997138977, 0.05519099906086922, -0.11202000081539154, -0.4175899922847748, -0.12303999811410904, -0.11715000122785568, -0.6187999844551086, 0.006577900145202875, -0.11455000191926956, 0.07845199853181839, 0.3882899880409241, -0.46428999304771423, 0.34360000491142273, -0.7879199981689453, 0.23592999577522278, 0.8235999941825867, -0.032896000891923904, 0.2930299937725067, 0.5084199905395508, -0.36539000272750854, -0.03266099840402603, 0.021655000746250153, 0.49292999505996704, -0.4200200140476227, -0.2716499865055084, 0.016815999522805214, -0.43529000878334045, 0.08568400144577026, 0.28240999579429626, 0.021177999675273895, 0.35137999057769775, -0.22491000592708588, 1.1813000440597534, -0.16779999434947968, -0.14451999962329865, -0.3361000120639801, -0.012010999955236912, 0.22123999893665314, -0.4632500112056732, -0.1005999967455864, -0.37081000208854675, 0.06120600178837776, -0.13173000514507294, 1.0228999853134155, 0.08201699703931808, 0.7480400204658508, -0.3377000093460083, 0.5619099736213684, 0.5848699808120728, -0.29785001277923584, -0.18203000724315643, -0.11230000108480453, -0.29725998640060425, -0.45824000239372253, 0.41157999634742737, 0.22112999856472015, 0.024855000898241997, -0.03136000037193298, -0.13488000631332397, 0.12303999811410904, -0.6951299905776978, 0.1538199931383133, -0.8869900107383728, -0.20995000004768372, 0.001999499974772334, -0.10659000277519226, 0.0900299996137619, 0.1970600038766861, 0.539650022983551, -0.006381500046700239, 0.16357000172138214, 0.668470025062561, 0.19258999824523926, -0.22033999860286713, 0.5473999977111816, 0.5388100147247314, -0.07762199640274048, 0.6043699979782104, -0.539929986000061, -0.4928399920463562, 0.04162700101733208, -0.41266000270843506, 0.0008732699789106846, -0.4851199984550476, 0.5968400239944458, -0.7574999928474426, 0.026757000014185905, -0.12982000410556793, -0.6014900207519531, -0.16142000257968903, -0.38106998801231384, -0.10547000169754028, 0.25148001313209534, -0.08052700012922287, -0.345770001411438, 1.1341999769210815, 0.12383999675512314, -0.20691999793052673, 0.2670600116252899, 0.1441899985074997, -0.1682399958372116, -0.031022999435663223, -0.0888649970293045, 0.07929600030183792, -0.08092299848794937, -0.22544999420642853, -0.051600001752376556, -0.22554999589920044, -0.20680999755859375, -0.5492100119590759, 0.7500799894332886, -0.04782399907708168, 0.17794999480247498, -0.08101499825716019, 0.2015099972486496, -0.6751599907875061, -0.22316999733448029, -0.560259997844696, -0.5600299835205078, -0.39875999093055725, 0.5852699875831604, -0.7232800126075745, -0.5482500195503235, 0.15508000552654266, -0.05444199964404106, 0.20017999410629272, 0.06644300371408463, 0.45837000012397766, -0.456169992685318, -0.4622400104999542, -0.4316500127315521, -0.04379900172352791, -0.11607000231742859, -0.009190299548208714, 0.3391599953174591, -0.1487099975347519, 0.7196800112724304, -0.03236699849367142, -0.8023899793624878, 0.16742999851703644, 0.7063199877738953], u'Suede': [0.166020005941391, -0.32677000761032104, 0.3769899904727936, 0.7254199981689453, -0.27678999304771423, 0.044270001351833344, 0.3000200092792511, -0.5001599788665771, 0.42166998982429504, 0.5700299739837646, 0.15365999937057495, -0.03306499868631363, -0.2580299973487854, 0.3367300033569336, 0.10322999954223633, -0.2599300146102905, -0.005070100072771311, 0.3408600091934204, 0.02050900086760521, 0.08401600271463394, -0.21376000344753265, 
0.4914900064468384, -0.016506999731063843, 0.013526000082492828, -0.27682000398635864, -0.3670099973678589, 0.43733999133110046, 0.40667998790740967, -0.1813800036907196, 0.716189980506897, -0.25099998712539673, -0.06078999862074852, -0.3277199864387512, -0.0009612700087018311, 0.07983999699354172, 0.5428699851036072, -0.22563999891281128, 0.2598400115966797, 0.28922000527381897, 0.7115499973297119, -0.6244800090789795, -0.29624998569488525, 0.11016000062227249, -0.2491600066423416, 0.7373499870300293, 0.13957999646663666, 0.34915998578071594, -0.04178199917078018, -0.43876001238822937, 0.25846999883651733, -0.5821499824523926, -0.36691999435424805, -0.15296000242233276, -0.023933999240398407, -0.12005999684333801, 0.06443499773740768, -0.22648000717163086, -0.25516000390052795, 0.11719000339508057, 0.4630900025367737, -0.41909000277519226, -0.16599999368190765, -0.7708399891853333, 0.019411999732255936, 0.512220025062561, 0.05300600081682205, 0.5902299880981445, -0.26809000968933105, 0.09786500036716461, 0.44554001092910767, 0.8129199743270874, 0.053011998534202576, 0.11114999651908875, -0.17183999717235565, 0.40977001190185547, 0.00040995999006554484, -0.07085800170898438, 0.11400999873876572, -0.2556999921798706, -0.387580007314682, 0.5270199775695801, 0.750220000743866, -0.16800999641418457, -0.40874001383781433, 0.19088000059127808, -0.02814299985766411, 0.5556300282478333, 0.015186999924480915, -0.4707300066947937, -0.22098000347614288, 0.09800499677658081, 0.746999979019165, 0.30243000388145447, -0.2892000079154968, -0.21907000243663788, 0.4413500130176544, 0.485289990901947, 0.3165600001811981, -0.33138999342918396, 0.4167799949645996, -0.0911789983510971, 0.3595300018787384, -0.5338500142097473, -0.1709199994802475, -0.8583300113677979, -0.3415699899196625, 0.5355700254440308, 0.1320600062608719, -0.015567000024020672, -0.786050021648407, 0.01312199980020523, 0.011207000352442265, 0.2274399995803833, -0.4022899866104126, 0.1712300032377243, -0.5273299813270569, 0.10802999883890152, 0.2697699964046478, 0.5715799927711487, -0.966480016708374, 0.04529400169849396, -0.12841999530792236, 0.5288699865341187, 0.06982900202274323, 0.07669500261545181, -0.44663000106811523, -0.09431599825620651, 0.9212300181388855, -0.12268999963998795, -0.12997999787330627, -0.445389986038208, -0.28898999094963074, -0.35266000032424927, 0.17542999982833862, -0.28883999586105347, -0.9993600249290466, -0.4388200044631958, 0.24618999660015106, 0.13699999451637268, 0.3082900047302246, 0.6485999822616577, 0.3031400144100189, 0.40261998772621155, -0.4514400064945221, 0.20151999592781067, -0.022019999101758003, -0.35822999477386475, 0.0037855999544262886, 0.27487999200820923, 0.42162999510765076, -0.06888099759817123, -0.07999599725008011, -0.7178699970245361, -0.7225099802017212, -0.5008900165557861, -0.3650200068950653, 0.21358999609947205, 0.005239299964159727, 0.49129000306129456, 0.55690997838974, -0.4188399910926819, -0.7163500189781189, -0.4571099877357483, 0.12851999700069427, 0.13710999488830566, 0.15296000242233276, 0.126910001039505, 0.5693100094795227, 0.9750000238418579, 0.09419000148773193, 0.036058999598026276, 0.33320000767707825, -0.13476000726222992, 0.017993999645113945, -0.13898000121116638, -0.7620199918746948, 0.3695699870586395, 0.27663999795913696, -0.6767699718475342, -0.7266600131988525, 0.24851000308990479, 0.5972899794578552, 0.05000999942421913, -0.16588999330997467, -0.06672599911689758, -0.37240999937057495, 0.7573300004005432, 0.12985999882221222, 0.21541999280452728, 
-0.18118999898433685, -0.3545700013637543, -0.03829199820756912, -0.05188300088047981, -0.08797299861907959, -0.1090800017118454, 0.5716300010681152, -0.29857999086380005, 0.4719499945640564, -0.23668000102043152, -0.16912999749183655, 0.3928000032901764, -0.3991299867630005, 0.4306600093841553, 0.03750700131058693, -0.014991000294685364, -0.049389999359846115, 0.22922000288963318, 0.42375001311302185, -1.0182000398635864, -0.2669200003147125, 0.34665000438690186, 0.03431500121951103, 0.27285999059677124, 1.0681999921798706, 0.10498999804258347, 0.5091500282287598, 0.6499500274658203, -0.4050700068473816, 0.07415799796581268, -0.7428600192070007, 0.36434000730514526, 0.0235190000385046, 0.1703599989414215, 0.31644999980926514, 0.08727400004863739, 0.010478000156581402, -0.6438300013542175, 0.3233500123023987, -0.07667499780654907, -0.6068199872970581, 0.35879001021385193, -0.06579600274562836, -0.08348099887371063, -0.09742999821901321, 0.47800999879837036, -0.020885000005364418, 0.5357099771499634, 0.09191299974918365, -0.5396900177001953, -0.15591999888420105, 0.28240999579429626, -0.43435001373291016, -0.6450799703598022, 0.4888400137424469, 0.14580999314785004, 0.26568999886512756, 0.020718000829219818, -0.8827800154685974, 0.13800999522209167, -0.19812999665737152, -0.34077998995780945, 0.21809999644756317, 0.08566399663686752, -0.23697000741958618, -0.2984899878501892, 0.11715999990701675, 0.06770899891853333, 0.44944998621940613, 0.43740999698638916, -0.5017600059509277, 0.07991299778223038, -0.15542000532150269, 0.5068699717521667, -0.0068280999548733234, -0.4185299873352051, 0.0813170000910759, -0.4345400035381317, -0.03292899951338768, -0.27147001028060913, 0.06848400086164474, 0.19732999801635742, -0.3453899919986725, -0.08543500304222107, -0.5040000081062317, -0.9288700222969055, 0.2053299993276596, 0.4140700101852417, 0.05811300128698349, -0.6167200207710266, 0.6600900292396545, 0.4182499945163727, 0.08973199874162674, 0.022797999903559685, 0.012761999852955341, -0.32141000032424927, 0.4393500089645386, -0.0829169973731041, 0.33313000202178955, -0.5004000067710876, -0.9474300146102905, 0.1359100043773651, -0.04068100079894066, 0.030559999868273735, 0.3903299868106842, -0.4298500120639801, -0.10865999758243561, -0.039058998227119446, 0.5217900276184082, 0.13747000694274902, 0.17486000061035156], u'Wool': [-0.14305000007152557, -0.1031700000166893, -0.00836700014770031, -0.45399001240730286, 0.19032999873161316, -0.6324099898338318, -0.26642000675201416, 0.16666999459266663, -0.04538799822330475, -0.7112399935722351, 0.30647000670433044, -1.0413999557495117, 0.2306700050830841, 0.6582499742507935, 0.06593199819326401, -0.2180899977684021, -0.08231700211763382, -0.3385399878025055, -0.5003499984741211, 0.39372000098228455, -0.3156999945640564, -0.8389599919319153, 0.3412899971008301, 0.6111299991607666, -0.32387998700141907, -0.3589499890804291, 0.2498999983072281, -0.24637000262737274, -0.16899000108242035, 0.4431999921798706, -0.3062700033187866, 0.17552000284194946, -0.7307800054550171, -0.29982998967170715, -0.47925999760627747, 0.4534600079059601, 0.4155299961566925, 0.1252399981021881, 0.052545998245477676, 0.17714999616146088, -0.6453400254249573, -0.3243499994277954, 0.30265000462532043, -0.6115800142288208, 0.6375200152397156, -0.010604999959468842, -0.2653299868106842, -0.18432000279426575, -0.2835400104522705, -0.2879599928855896, 0.05666700005531311, -0.02175999991595745, -0.3169099986553192, 0.0057760002091526985, -0.08931700140237808, 0.10044000297784805, 
-0.6008899807929993, -0.4053399860858917, -0.44648000597953796, -0.31327998638153076, -0.11131999641656876, -0.4922100007534027, 0.23704999685287476, 0.19068999588489532, 0.15926000475883484, 0.09582500159740448, -0.21727000176906586, -0.1363999992609024, -0.23684999346733093, 0.20062999427318573, 0.3718299865722656, 0.031877998262643814, -0.12951000034809113, -0.4064300060272217, 0.10891000181436539, 0.148499995470047, 0.048601001501083374, -0.10913000255823135, -0.24053999781608582, -0.07919599860906601, -0.25117000937461853, 0.04990699887275696, -0.4094099998474121, -0.3641299903392792, -0.0015807000454515219, 0.2292499989271164, 0.3968200087547302, 0.0001828700042096898, -0.2995699942111969, 0.0244120005518198, 0.38411998748779297, -0.0994350016117096, -0.18411000072956085, 0.22970999777317047, -0.4173400104045868, 0.3351399898529053, 0.157260000705719, 1.0091999769210815, -0.15750999748706818, 1.2148000001907349, 0.31150001287460327, 0.5750399827957153, -0.6193699836730957, 0.2587699890136719, -0.39136001467704773, -0.2950400114059448, -0.19740000367164612, 0.051552001386880875, -0.46105000376701355, 0.5947099924087524, 0.175369992852211, 0.23725999891757965, -0.8650500178337097, -0.03474799916148186, -0.0040616001933813095, 0.32892000675201416, -0.09969300031661987, 0.7408400177955627, 0.24073000252246857, -0.6715800166130066, 0.05670100077986717, 0.21086999773979187, 0.8250399827957153, 0.42671000957489014, 0.4331800043582916, 0.22753000259399414, 0.051639001816511154, 0.2767600119113922, -0.19660000503063202, -0.4520699977874756, -0.02708899974822998, -0.038297999650239944, -0.6512399911880493, -0.2126999944448471, -0.09266600012779236, 0.5627999901771545, -0.6859700083732605, 0.44387000799179077, 0.5389800071716309, 0.2670300006866455, 0.050106000155210495, 0.6374199986457825, 0.2594499886035919, -0.7214000225067139, 0.13036000728607178, 0.3398900032043457, 0.3370000123977661, -0.6962800025939941, -0.036215998232364655, -0.27237001061439514, -0.06401500105857849, 0.14270000159740448, -0.11620999872684479, -0.9869400262832642, -0.0021869998890906572, -0.14904999732971191, -0.6129800081253052, -0.5414900183677673, 0.6324599981307983, -0.0550680011510849, -0.009775600396096706, 0.056752000004053116, -0.37483999133110046, 0.019007999449968338, 0.28817999362945557, -0.4242100119590759, 0.3003300130367279, -0.06247600167989731, 0.7048900127410889, 0.47286999225616455, -0.43641000986099243, -0.1770399957895279, -0.16810999810695648, 0.46573999524116516, 0.20759999752044678, -0.09361399710178375, 0.12464000284671783, 0.457040011882782, -0.2999500036239624, -0.2073500007390976, 0.368149995803833, 0.09950599819421768, -0.29300999641418457, -0.3487299978733063, 0.6368100047111511, 0.08954799920320511, 0.8809000253677368, -0.10234999656677246, 0.12310999631881714, 0.5613800287246704, -0.15880000591278076, 0.718500018119812, 0.021624000743031502, -0.17169000208377838, -0.04642900079488754, 0.24404999613761902, -0.47822999954223633, -0.1735599935054779, 0.14024999737739563, -0.1837099939584732, 0.0020751000847667456, -0.2439499944448471, 0.7670999765396118, 0.21671999990940094, 1.1442999839782715, 0.44223999977111816, 0.5102499723434448, 0.5731199979782104, -0.5725100040435791, -0.42489001154899597, 0.07318899780511856, 0.154339998960495, -0.06763099879026413, 0.2852199971675873, 0.32161998748779297, 0.27904000878334045, -0.00907289981842041, -0.6517000198364258, 0.22152000665664673, -0.5297799706459045, 0.2744100093841553, -0.5460799932479858, -0.028550999239087105, -0.39193999767303467, 
0.2463500052690506, 0.04070800170302391, -0.07644300162792206, -0.06331200152635574, -0.05159299820661545, 0.21713000535964966, 0.7168200016021729, -0.03386399894952774, -0.1444299966096878, 0.37448999285697937, 1.027899980545044, -0.3184199929237366, 0.8250799775123596, -0.21698999404907227, -0.5768300294876099, 0.21265999972820282, -0.4348500072956085, -0.11913999915122986, -1.024399995803833, 0.1763100028038025, -0.9293799996376038, 0.892009973526001, -0.08829399943351746, -0.31275999546051025, 0.07679399847984314, -0.6633999943733215, -0.3430899977684021, 0.1264200061559677, 0.4913400113582611, -0.5802199840545654, 0.48333999514579773, 0.35776999592781067, 0.030619999393820763, 0.36987999081611633, -0.1018500030040741, -0.02835099957883358, 0.18609000742435455, -0.06207900121808052, -0.03551600128412247, 0.509880006313324, -0.1149199977517128, 0.15730999410152435, 0.15514999628067017, -0.11040999740362167, -0.18769000470638275, -0.0158890001475811, -0.3264699876308441, -0.09814699739217758, 0.10791999846696854, -0.07166200131177902, -0.671750009059906, 0.14661000669002533, -0.21533000469207764, -0.017635999247431755, -0.6210500001907349, 0.41596999764442444, -0.31589001417160034, -0.08134199678897858, -0.03477700054645538, 0.5273699760437012, -0.032965999096632004, 0.2595599889755249, -0.0995120033621788, -0.17789000272750854, -0.014289000071585178, -0.29012998938560486, 0.0782570019364357, 0.5430200099945068, 0.14121000468730927, 0.4592899978160858, -0.2909800112247467, 0.2367199957370758, 0.27507999539375305, 0.12551000714302063, 0.7321299910545349, 0.5205399990081787], u'Nubuck': [0.320279985666275, 0.1373399943113327, 0.20201000571250916, 0.023415999487042427, -0.23228999972343445, -0.06644000113010406, 0.05316900089383125, 0.07862400263547897, -0.013139000162482262, -0.9607700109481812, -0.21727000176906586, -0.08145499974489212, 0.10281000286340714, 0.24017000198364258, 0.03472499921917915, -0.20090000331401825, -0.2791999876499176, 0.6718500256538391, -0.18369999527931213, 0.11964999884366989, -0.3772299885749817, 0.17750999331474304, 0.2807300090789795, 0.14963999390602112, -0.9964900016784668, -0.024354999884963036, 0.31227999925613403, -0.14323000609874725, -0.2888700067996979, 0.7826200127601624, -0.14722999930381775, 0.11243999749422073, -0.814300000667572, -0.027765000239014626, -0.6208299994468689, 0.5914599895477295, -0.08576899766921997, -0.1636199951171875, 0.28033000230789185, 0.37178999185562134, -0.69718998670578, -0.6338499784469604, -0.07243800163269043, -0.6990299820899963, 0.5445200204849243, -0.3038800060749054, 0.15636000037193298, -0.2471799999475479, -0.4692400097846985, 0.29109999537467957, -0.3986800014972687, 0.01960100047290325, 0.15570999681949615, 0.02155900001525879, 0.27219000458717346, -0.16113999485969543, -0.05614500120282173, -0.6595600247383118, 0.07558900117874146, 0.09509199857711792, -0.009356300346553326, -0.05873600021004677, -0.40542998909950256, -0.4925200045108795, 0.16739000380039215, -0.3635300099849701, -0.6791800260543823, -0.20646999776363373, 0.19829000532627106, 0.19067999720573425, 0.2824299931526184, -0.055215999484062195, 0.4657599925994873, -0.5020899772644043, 0.2580200135707855, -0.23984000086784363, -0.16124999523162842, -0.16965000331401825, -0.3687100112438202, -0.5354999899864197, 0.2418700009584427, 0.36816999316215515, -0.30820000171661377, -0.48938998579978943, -0.10033000260591507, -0.4301599860191345, -0.24375000596046448, 0.3955399990081787, -0.8563200235366821, 0.08522699773311615, 0.12536999583244324, 
0.24233999848365784, -0.055006999522447586, -0.08333499729633331, 0.04233599826693535, 0.5936400294303894, -0.11635000258684158, -0.0815730020403862, -0.33258000016212463, -0.1943099945783615, -0.31000998616218567, 0.8799999952316284, -0.6165099740028381, -0.714139997959137, -0.2503400146961212, -0.5200899839401245, 0.6614599823951721, 0.6125100255012512, -0.17357000708580017, -0.7688699960708618, -0.12917999923229218, 0.03411199897527695, -0.19575999677181244, -0.5171499848365784, -0.024707000702619553, -0.23646000027656555, 0.15014000236988068, 0.6591299772262573, 0.31084999442100525, -0.6385300159454346, -0.22391000390052795, -0.11964000016450882, 0.37900999188423157, -0.12244000285863876, -0.1665000021457672, 0.18322999775409698, 0.167820006608963, 0.5069699883460999, 0.21445000171661377, -0.1690399944782257, -0.008311999961733818, -0.08574599772691727, -0.16388000547885895, 0.34650999307632446, -0.6297500133514404, -0.4809400141239166, -0.4261400103569031, 0.2230599969625473, -0.22889000177383423, 0.36719998717308044, 0.5606499910354614, 0.4404599964618683, 0.9167500138282776, -0.6277099847793579, 0.7337700128555298, 0.05577800050377846, -0.07277899980545044, -0.3273699879646301, 0.2655400037765503, 0.13027000427246094, 0.09305299818515778, 0.23232999444007874, -0.4279400110244751, -0.8535400032997131, -0.1460600048303604, -0.372189998626709, -0.4641599953174591, -0.4708400070667267, 0.2945699989795685, 0.19336000084877014, 0.7178099751472473, 0.10603000223636627, -0.16719000041484833, 0.23145000636577606, 0.7326499819755554, -0.4119200110435486, 0.5793899893760681, 0.4956600069999695, 0.22970999777317047, 0.2870999872684479, -0.7600399851799011, 0.3722200095653534, -0.47773000597953796, 0.5303699970245361, -0.7299699783325195, -0.8354600071907043, -0.2558799982070923, 0.49994999170303345, -0.240789994597435, -0.6984599828720093, 0.5922600030899048, 0.26368001103401184, 0.05631599947810173, 0.14757999777793884, 0.8230299949645996, -0.7596399784088135, 0.9071900248527527, 0.5719900131225586, 0.49101001024246216, -0.4599800109863281, -0.028178999200463295, 0.4863100051879883, -0.11738000065088272, 0.1105400025844574, 0.37571999430656433, -0.10271000117063522, -0.4672600030899048, 0.23286999762058258, -0.3844299912452698, 0.3286300003528595, 0.6381400227546692, -0.34033000469207764, 0.5185800194740295, 0.13745999336242676, 0.8479400277137756, -0.04707400128245354, 0.6611400246620178, 0.4970400035381317, -1.0228999853134155, -0.263480007648468, 0.6801699995994568, -0.0806960016489029, -0.2588199973106384, 0.5239400267601013, 0.38166001439094543, -0.47262001037597656, 0.6296799778938293, -0.42080000042915344, -0.5334699749946594, -0.6371999979019165, 0.1580599993467331, -0.0010679999832063913, 0.2142699956893921, -0.12668000161647797, -0.07343199849128723, 0.6901900172233582, -0.2778399884700775, 0.4765999913215637, -0.21198000013828278, -0.4487299919128418, 0.35850000381469727, -0.6176400184631348, -0.26298999786376953, 0.4318999946117401, 0.9145200252532959, 0.10837999731302261, 0.6436700224876404, -0.23469999432563782, -0.7064700126647949, 0.4562300145626068, 0.4773400127887726, 0.20976999402046204, -0.8035799860954285, 0.5206900238990784, 0.015116999857127666, 0.9608299732208252, -0.03911399841308594, -0.34738001227378845, 0.12470000237226486, -0.24270999431610107, -0.7930300235748291, -0.19282999634742737, 0.5221199989318848, -0.20204000174999237, 0.4980199933052063, 0.3032900094985962, -0.2975800037384033, 0.26172998547554016, -0.16524000465869904, -0.5250399708747864, 
-0.4694899916648865, -0.8069300055503845, 0.8540999889373779, 0.44475001096725464, -0.4258500039577484, -0.0385189987719059, -0.17191000282764435, -0.03782200068235397, -0.28393998742103577, 0.08512900024652481, -0.3799099922180176, -0.35912999510765076, -0.002711700042709708, -0.09387800097465515, -0.7360799908638, 0.0026227999478578568, -0.5934200286865234, 0.9315699934959412, -0.9944999814033508, 0.185029998421669, -0.010018999688327312, -0.4411199986934662, -0.10202000290155411, -0.3109000027179718, -0.08896899968385696, 0.9317799806594849, -0.18008999526500702, 0.2724199891090393, -0.23865999281406403, -0.49386999011039734, 0.23709000647068024, 0.003082399955019355, -0.251800000667572, 0.4736500084400177, -0.5670199990272522, 0.49171000719070435, -0.11277999728918076, 0.2933500111103058, -0.11552999913692474, 0.24938000738620758], u'Faux.Leather': [0.320279985666275, 0.1373399943113327, 0.20201000571250916, 0.023415999487042427, -0.23228999972343445, -0.06644000113010406, 0.05316900089383125, 0.07862400263547897, -0.013139000162482262, -0.9607700109481812, -0.21727000176906586, -0.08145499974489212, 0.10281000286340714, 0.24017000198364258, 0.03472499921917915, -0.20090000331401825, -0.2791999876499176, 0.6718500256538391, -0.18369999527931213, 0.11964999884366989, -0.3772299885749817, 0.17750999331474304, 0.2807300090789795, 0.14963999390602112, -0.9964900016784668, -0.024354999884963036, 0.31227999925613403, -0.14323000609874725, -0.2888700067996979, 0.7826200127601624, -0.14722999930381775, 0.11243999749422073, -0.814300000667572, -0.027765000239014626, -0.6208299994468689, 0.5914599895477295, -0.08576899766921997, -0.1636199951171875, 0.28033000230789185, 0.37178999185562134, -0.69718998670578, -0.6338499784469604, -0.07243800163269043, -0.6990299820899963, 0.5445200204849243, -0.3038800060749054, 0.15636000037193298, -0.2471799999475479, -0.4692400097846985, 0.29109999537467957, -0.3986800014972687, 0.01960100047290325, 0.15570999681949615, 0.02155900001525879, 0.27219000458717346, -0.16113999485969543, -0.05614500120282173, -0.6595600247383118, 0.07558900117874146, 0.09509199857711792, -0.009356300346553326, -0.05873600021004677, -0.40542998909950256, -0.4925200045108795, 0.16739000380039215, -0.3635300099849701, -0.6791800260543823, -0.20646999776363373, 0.19829000532627106, 0.19067999720573425, 0.2824299931526184, -0.055215999484062195, 0.4657599925994873, -0.5020899772644043, 0.2580200135707855, -0.23984000086784363, -0.16124999523162842, -0.16965000331401825, -0.3687100112438202, -0.5354999899864197, 0.2418700009584427, 0.36816999316215515, -0.30820000171661377, -0.48938998579978943, -0.10033000260591507, -0.4301599860191345, -0.24375000596046448, 0.3955399990081787, -0.8563200235366821, 0.08522699773311615, 0.12536999583244324, 0.24233999848365784, -0.055006999522447586, -0.08333499729633331, 0.04233599826693535, 0.5936400294303894, -0.11635000258684158, -0.0815730020403862, -0.33258000016212463, -0.1943099945783615, -0.31000998616218567, 0.8799999952316284, -0.6165099740028381, -0.714139997959137, -0.2503400146961212, -0.5200899839401245, 0.6614599823951721, 0.6125100255012512, -0.17357000708580017, -0.7688699960708618, -0.12917999923229218, 0.03411199897527695, -0.19575999677181244, -0.5171499848365784, -0.024707000702619553, -0.23646000027656555, 0.15014000236988068, 0.6591299772262573, 0.31084999442100525, -0.6385300159454346, -0.22391000390052795, -0.11964000016450882, 0.37900999188423157, -0.12244000285863876, -0.1665000021457672, 0.18322999775409698, 
0.167820006608963, 0.5069699883460999, 0.21445000171661377, -0.1690399944782257, -0.008311999961733818, -0.08574599772691727, -0.16388000547885895, 0.34650999307632446, -0.6297500133514404, -0.4809400141239166, -0.4261400103569031, 0.2230599969625473, -0.22889000177383423, 0.36719998717308044, 0.5606499910354614, 0.4404599964618683, 0.9167500138282776, -0.6277099847793579, 0.7337700128555298, 0.05577800050377846, -0.07277899980545044, -0.3273699879646301, 0.2655400037765503, 0.13027000427246094, 0.09305299818515778, 0.23232999444007874, -0.4279400110244751, -0.8535400032997131, -0.1460600048303604, -0.372189998626709, -0.4641599953174591, -0.4708400070667267, 0.2945699989795685, 0.19336000084877014, 0.7178099751472473, 0.10603000223636627, -0.16719000041484833, 0.23145000636577606, 0.7326499819755554, -0.4119200110435486, 0.5793899893760681, 0.4956600069999695, 0.22970999777317047, 0.2870999872684479, -0.7600399851799011, 0.3722200095653534, -0.47773000597953796, 0.5303699970245361, -0.7299699783325195, -0.8354600071907043, -0.2558799982070923, 0.49994999170303345, -0.240789994597435, -0.6984599828720093, 0.5922600030899048, 0.26368001103401184, 0.05631599947810173, 0.14757999777793884, 0.8230299949645996, -0.7596399784088135, 0.9071900248527527, 0.5719900131225586, 0.49101001024246216, -0.4599800109863281, -0.028178999200463295, 0.4863100051879883, -0.11738000065088272, 0.1105400025844574, 0.37571999430656433, -0.10271000117063522, -0.4672600030899048, 0.23286999762058258, -0.3844299912452698, 0.3286300003528595, 0.6381400227546692, -0.34033000469207764, 0.5185800194740295, 0.13745999336242676, 0.8479400277137756, -0.04707400128245354, 0.6611400246620178, 0.4970400035381317, -1.0228999853134155, -0.263480007648468, 0.6801699995994568, -0.0806960016489029, -0.2588199973106384, 0.5239400267601013, 0.38166001439094543, -0.47262001037597656, 0.6296799778938293, -0.42080000042915344, -0.5334699749946594, -0.6371999979019165, 0.1580599993467331, -0.0010679999832063913, 0.2142699956893921, -0.12668000161647797, -0.07343199849128723, 0.6901900172233582, -0.2778399884700775, 0.4765999913215637, -0.21198000013828278, -0.4487299919128418, 0.35850000381469727, -0.6176400184631348, -0.26298999786376953, 0.4318999946117401, 0.9145200252532959, 0.10837999731302261, 0.6436700224876404, -0.23469999432563782, -0.7064700126647949, 0.4562300145626068, 0.4773400127887726, 0.20976999402046204, -0.8035799860954285, 0.5206900238990784, 0.015116999857127666, 0.9608299732208252, -0.03911399841308594, -0.34738001227378845, 0.12470000237226486, -0.24270999431610107, -0.7930300235748291, -0.19282999634742737, 0.5221199989318848, -0.20204000174999237, 0.4980199933052063, 0.3032900094985962, -0.2975800037384033, 0.26172998547554016, -0.16524000465869904, -0.5250399708747864, -0.4694899916648865, -0.8069300055503845, 0.8540999889373779, 0.44475001096725464, -0.4258500039577484, -0.0385189987719059, -0.17191000282764435, -0.03782200068235397, -0.28393998742103577, 0.08512900024652481, -0.3799099922180176, -0.35912999510765076, -0.002711700042709708, -0.09387800097465515, -0.7360799908638, 0.0026227999478578568, -0.5934200286865234, 0.9315699934959412, -0.9944999814033508, 0.185029998421669, -0.010018999688327312, -0.4411199986934662, -0.10202000290155411, -0.3109000027179718, -0.08896899968385696, 0.9317799806594849, -0.18008999526500702, 0.2724199891090393, -0.23865999281406403, -0.49386999011039734, 0.23709000647068024, 0.003082399955019355, -0.251800000667572, 0.4736500084400177, -0.5670199990272522, 
0.49171000719070435, -0.11277999728918076, 0.2933500111103058, -0.11552999913692474, 0.24938000738620758], u'Faux.Fur': [0.15368999540805817, 0.03651399910449982, -0.3862699866294861, -0.29183998703956604, -0.14330999553203583, -0.24120000004768372, -0.4438599944114685, 0.6119999885559082, -0.2903499901294708, -0.7015799880027771, 0.08596699684858322, -0.20850999653339386, -0.03649099916219711, 0.46865999698638916, -0.0012923999456688762, -0.10017000138759613, 0.40185999870300293, 0.6122199892997742, -0.19506999850273132, 0.6898800134658813, -0.3505899906158447, -0.4056999981403351, 0.6861799955368042, 0.23691000044345856, -0.5981199741363525, -0.5986199975013733, 1.131100058555603, -0.4171000123023987, 0.06561499834060669, 0.8596100211143494, 0.3426100015640259, -0.04284299910068512, -0.7576900124549866, -0.2772800028324127, 0.2634600102901459, -0.07900600135326385, 0.7805399894714355, 0.15796999633312225, 0.20305000245571136, -0.17496000230312347, -1.2480000257492065, -0.2059600055217743, 0.38339999318122864, -0.4189999997615814, 0.08767099678516388, 0.10390999913215637, -0.018496999517083168, -0.3912700116634369, 0.04193799942731857, 0.3031199872493744, 0.02206300012767315, -0.053419001400470734, 0.4654099941253662, -0.05860399827361107, 0.2365500032901764, 0.016217000782489777, -0.22481000423431396, -0.8526300191879272, -0.3924899995326996, -0.1453000009059906, 0.19520999491214752, -0.5100100040435791, 0.14564000070095062, -1.1619999408721924, 0.0790800005197525, -0.29387998580932617, -0.04225099831819534, -0.28648999333381653, 0.5206300020217896, 0.1901399940252304, 0.24338999390602112, -0.30847999453544617, -0.17835000157356262, 0.120169997215271, -0.13716000318527222, -0.5896300077438354, 0.08578000217676163, 0.21998000144958496, -0.25665000081062317, -0.15992000699043274, 0.002155299996957183, 0.7206199765205383, 0.19473999738693237, -0.23411999642848969, 0.0018173999851569533, -0.1171099990606308, 0.2931700050830841, -0.4577600061893463, -0.5559599995613098, -0.1937599927186966, -0.4709799885749817, -0.8032199740409851, 0.31953999400138855, 0.7156999707221985, -0.551609992980957, 0.5989099740982056, -0.8667399883270264, 0.38326001167297363, -0.5611100196838379, 0.31341999769210815, 0.48805001378059387, 0.4932900071144104, 0.09980499744415283, 0.40064001083374023, -0.2324099987745285, 0.216839998960495, 0.33594000339508057, 0.16495999693870544, 0.32864001393318176, 0.6502900123596191, -0.21119000017642975, 0.15727999806404114, -0.598829984664917, -0.04361400008201599, 0.26921001076698303, 0.21965999901294708, 0.12092000246047974, 1.1380000114440918, 0.37560999393463135, -0.522819995880127, -0.07638700306415558, -0.15025000274181366, 0.6977499723434448, 0.22066999971866608, -0.2300100028514862, 0.22620999813079834, -0.14970000088214874, 0.7495099902153015, 0.40623000264167786, -0.5443900227546692, -0.16967999935150146, -0.3116399943828583, -0.692359983921051, -0.6058700084686279, -0.3438499867916107, 0.0028880999889224768, 0.28499001264572144, 0.26625001430511475, 0.22314999997615814, -0.09827599674463272, 0.27035999298095703, 0.13840000331401825, 0.35872000455856323, -0.48124000430107117, 0.3793100118637085, 0.1836100071668625, -0.08990299701690674, 0.4156799912452698, -0.36733001470565796, 0.06409899890422821, 0.2659200131893158, 0.4630100131034851, -0.6077899932861328, -0.6329600214958191, 0.06759099662303925, -0.4940299987792969, -0.3679099977016449, -0.32552000880241394, 0.32809001207351685, 0.29951998591423035, 0.3281799852848053, 0.3156999945640564, 0.16558000445365906, 
0.3723199963569641, -0.06031600013375282, -0.29416000843048096, 0.5344799757003784, -0.04083700105547905, 0.46136000752449036, 0.2039099931716919, -0.7131699919700623, -0.5747500061988831, -0.007786999922245741, 0.3156000077724457, 0.6858400106430054, -0.5205699801445007, 0.07801400125026703, -0.15660999715328217, -0.8816900253295898, 0.011756000109016895, 0.6903700232505798, 0.19578999280929565, -0.3009899854660034, -0.08127299696207047, 0.6843799948692322, -0.23524999618530273, 0.7016800045967102, -0.20472000539302826, -0.12758000195026398, -0.21671999990940094, 0.57805997133255, 0.20430999994277954, -0.3364099860191345, -0.350629985332489, 0.01331000030040741, -0.016165999695658684, -0.0028458999004215, 0.3418099880218506, 0.15474000573158264, -0.17547999322414398, 0.6614699959754944, -0.15884000062942505, 0.16134999692440033, 0.3001999855041504, 0.5223900079727173, 0.31558001041412354, -0.26488998532295227, 0.5242300033569336, -0.5243399739265442, -0.2565999925136566, 0.6205999851226807, -0.5370799899101257, 0.4722999930381775, 0.7242299914360046, 0.13324999809265137, -0.19017000496387482, 1.035599946975708, 0.3169200122356415, -0.10025999695062637, -0.5618600249290466, 0.3755899965763092, -0.5095900297164917, 0.12746000289916992, -0.02768700011074543, 0.24278999865055084, 0.6538699865341187, 0.21379999816417694, 0.294979989528656, 0.146589994430542, -0.17211000621318817, 1.0276000499725342, 0.24467000365257263, -0.04176799952983856, 0.4462999999523163, 0.30487000942230225, -0.38135001063346863, -0.20149999856948853, -0.4785600006580353, -0.33851000666618347, 0.16165000200271606, -0.0010287000332027674, 0.0611799992620945, -0.617929995059967, 0.34237000346183777, -1.3385000228881836, 0.32475000619888306, -0.012639000080525875, 0.0745529979467392, 0.7570499777793884, -0.4094499945640564, -0.1910099983215332, 0.2687000036239624, 0.2478400021791458, 0.2503800094127655, 1.1455999612808228, 0.5687500238418579, -0.1495800018310547, -0.1242000013589859, -0.5608100295066833, -0.4661499857902527, 0.26488998532295227, -0.12159000337123871, 0.08044400066137314, 0.5627400279045105, -0.3425000011920929, 0.4735099971294403, -0.07466799765825272, -0.656059980392456, 0.02264999970793724, 0.00973649974912405, 0.022074000909924507, -0.06576099991798401, 0.018288999795913696, -0.19641999900341034, -0.521049976348877, -0.1695300042629242, -0.40178999304771423, 0.4461199939250946, -1.1550999879837036, 0.08447200059890747, -0.2640100121498108, 0.13325999677181244, -0.10986000299453735, -0.06195300072431564, -0.8210099935531616, 0.4606899917125702, 0.609250009059906, 0.03255699947476387, 0.5089899897575378, 0.27810999751091003, 0.15998999774456024, -0.48864999413490295, -0.4666000008583069, -0.12952999770641327, -0.5705000162124634, -0.5006499886512756, 0.24997000396251678, 0.9506700038909912, 0.44273999333381653, 0.649150013923645], u'Sheepskin': [0.36041000485420227, 0.19268999993801117, 0.058559998869895935, -0.7066299915313721, -0.09381400048732758, -0.0528549998998642, -0.024111999198794365, -0.02098100073635578, -0.24714000523090363, 0.4708099961280823, 0.27856001257896423, 0.1399800032377243, 0.2564699947834015, -0.04771599918603897, -0.16056999564170837, -0.6208099722862244, 0.36559998989105225, 0.3806999921798706, 0.07505299896001816, 0.6007999777793884, -0.3216699957847595, -0.6354600191116333, 0.3519600033760071, 0.4719099998474121, -0.4133099913597107, -0.007081400137394667, 0.08201400190591812, 0.10874000191688538, 0.40867000818252563, 0.533519983291626, 0.029378000646829605, 
-0.5830199718475342, -0.47183001041412354, -0.5044100284576416, 0.3273699879646301, -0.04618300125002861, 0.013704000040888786, -0.1873600035905838, 0.07115299999713898, 0.002532700076699257, -0.35811999440193176, -0.09082700312137604, 0.20521999895572662, -0.38600999116897583, 0.5237200260162354, 0.21270999312400818, 0.286980003118515, -0.39706000685691833, -0.15546000003814697, 0.36294999718666077, -0.30893000960350037, -0.26989999413490295, -0.0017722999909892678, 0.27360999584198, -0.021537000313401222, 0.13954000174999237, 0.10151000320911407, -0.44150999188423157, -0.3503499925136566, 0.08188100159168243, 0.3835099935531616, -0.11035999655723572, -0.42225998640060425, -0.33583998680114746, 0.5377500057220459, 0.06535399705171585, -0.4088200032711029, 0.1718199998140335, 0.02741999924182892, 0.8992599844932556, 0.06227799877524376, 0.5628499984741211, 0.058538999408483505, -0.005794099997729063, -0.1661199927330017, -0.14229999482631683, -0.3444499969482422, 0.3176400065422058, 0.06847000122070312, -0.5749899744987488, 0.3490599989891052, 0.1307000070810318, 0.592710018157959, -0.6612200140953064, -0.08981200307607651, 0.11448000371456146, 0.06651800125837326, -0.13819999992847443, -0.12570999562740326, -0.21096999943256378, -0.15376000106334686, 0.05226000025868416, -0.20915000140666962, 0.2708599865436554, 0.0785600021481514, 0.17045000195503235, -0.1823199987411499, 1.0234999656677246, -0.5710700154304504, 0.943809986114502, 0.6242799758911133, 1.1806000471115112, 0.2618600130081177, 0.25929999351501465, 0.053665000945329666, -0.2162500023841858, 0.05632900074124336, -0.15509000420570374, 0.30757999420166016, -0.07434900104999542, 0.4631800055503845, 0.6461300253868103, -0.6225900053977966, -0.21879999339580536, -0.2234400063753128, -0.4045499861240387, -0.44718998670578003, -0.09804300218820572, 0.07405299693346024, -0.13545000553131104, 0.04438500106334686, -0.007046999875456095, 0.3868800103664398, 0.20719000697135925, 0.6084499955177307, 0.43636998534202576, 0.24714000523090363, 0.11716999858617783, -0.5014500021934509, -0.2652899920940399, 0.020688999444246292, -0.22789999842643738, -0.5358200073242188, -0.008115200325846672, -0.44356000423431396, -0.3323799967765808, -0.5297300219535828, 0.07210899889469147, 0.04704799875617027, -0.4022200107574463, -0.1535400003194809, 0.37494999170303345, 0.013269999995827675, -0.14719000458717346, 0.16067999601364136, 0.5463200211524963, -0.11844000220298767, -0.3327299952507019, 0.7205299735069275, -0.3996100127696991, -0.31439998745918274, 0.45267000794410706, -1.0362000465393066, -1.1366000175476074, -0.40101000666618347, 0.009547400288283825, -0.265390008687973, -0.4198499917984009, 0.6840900182723999, -0.3149699866771698, 0.37849000096321106, -0.3317900002002716, 0.4722500145435333, -0.24568000435829163, 0.17409999668598175, -0.6273000240325928, 0.37887999415397644, 0.0031103999353945255, 0.1984899938106537, -0.7129999995231628, -0.5983499884605408, -0.10305999964475632, 0.08227500319480896, -0.050491999834775925, -0.4287700057029724, -0.2240000069141388, 0.6816499829292297, -0.12399999797344208, -0.47262001037597656, -0.5525400042533875, 0.16875000298023224, 0.15304000675678253, 0.5808600187301636, 0.2918500006198883, 0.507420003414154, -0.11524000018835068, 0.9572299718856812, 0.38082998991012573, 0.08080799877643585, -0.23829999566078186, -0.04421599954366684, 0.33528000116348267, -0.014336000196635723, -0.7021899819374084, 0.2007800042629242, 0.16173000633716583, -0.19820000231266022, 0.804390013217926, 0.16130000352859497, 
-0.1481499969959259, -0.6145700216293335, -0.006801399867981672, -0.5115600228309631, 0.08893100172281265, 0.15504999458789825, -0.3168500065803528, -0.2567700147628784, 0.20993000268936157, -0.6725299954414368, -0.04281599819660187, 0.22527000308036804, 0.4749099910259247, -0.5073599815368652, 0.46977999806404114, 0.14065000414848328, -0.1034500002861023, -0.3220599889755249, -0.18366999924182892, -0.7693600058555603, 0.10550999641418457, 0.05981000140309334, 0.29927000403404236, 0.6418399810791016, -0.28874000906944275, 0.8745499849319458, 0.07754600048065186, -0.571120023727417, 0.14508000016212463, 0.3442099988460541, 0.29089000821113586, 0.32062000036239624, -0.06890899688005447, 0.24483999609947205, 0.7895100116729736, 0.18682000041007996, -0.03146800026297569, 0.14045000076293945, -0.1696999967098236, -0.31150001287460327, 0.3452700078487396, 0.8848299980163574, -0.20633000135421753, -0.8305799961090088, 0.12077999860048294, -0.2939999997615814, 0.5918099880218506, 0.05962099879980087, 0.09275499731302261, 0.3214699923992157, -0.5097100138664246, -0.5356299877166748, -0.2977299988269806, 0.6915799975395203, -0.629610002040863, 0.1452600061893463, 0.214369997382164, -0.4402799904346466, 0.9263200163841248, -0.041057001799345016, -0.47554999589920044, -0.3703700006008148, 0.3710800111293793, 0.4012700021266937, -0.06276600062847137, -0.376010000705719, 0.17732000350952148, 0.320609986782074, -1.292799949645996, -0.0956370010972023, -0.28543999791145325, 0.1619199961423874, -0.19064000248908997, -0.17509999871253967, -0.32684001326560974, -0.5794100165367126, -0.3289400041103363, 0.06562300026416779, -0.01832200028002262, -0.6093999743461609, 0.28446999192237854, -0.10018999874591827, -0.9327999949455261, -0.03415200114250183, 0.5499399900436401, -0.11890999972820282, 0.23921999335289001, -0.4974200129508972, 1.1176999807357788, 0.05611500144004822, 0.1358799934387207, 0.22464999556541443, -0.2593100070953369, 0.9101200103759766, -0.2562299966812134, 0.20096999406814575, -0.9978500008583069, 0.4080600142478943, 0.08402899652719498, -0.017829999327659607, 0.7226099967956543]} +objs_dict = {u'Shoes.Clogs.and.Mules': [0.06574799865484238, -0.5269299745559692, -0.09666399657726288, 0.10811000317335129, 0.21544000506401062, -0.14688000082969666, 0.6641700267791748, 0.38040998578071594, 0.08992700278759003, -0.07052300125360489, -0.6724799871444702, 0.4205299913883209, -0.46309998631477356, -0.0592229999601841, 0.007628200110048056, -0.14106999337673187, 0.10766000300645828, -0.26273998618125916, 0.34953999519348145, 0.23962999880313873, -0.18929000198841095, -0.03394699841737747, 0.11874999850988388, -0.2917900085449219, -0.4701499938964844, -0.018845999613404274, 0.038155000656843185, -0.06934600323438644, 0.0577160008251667, 0.6264600157737732, -0.13642999529838562, 0.21155999600887299, -0.2788499891757965, 0.5247600078582764, 0.33792001008987427, 0.18786999583244324, -0.243709996342659, -0.2578299939632416, 0.30489999055862427, 0.29273998737335205, -0.3669799864292145, -0.1372399926185608, -0.09233599901199341, -0.13749000430107117, -0.46832001209259033, -0.2613700032234192, 0.4553999900817871, 0.3691299855709076, -0.04078900068998337, 0.032003000378608704, -0.6733400225639343, 0.057326000183820724, 0.4188399910926819, -0.6457399725914001, -0.03884800150990486, -0.12887999415397644, 0.12514999508857727, -0.20574000477790833, -0.4312700033187866, 0.3626900017261505, -0.0633149966597557, -0.017750000581145287, -0.48364999890327454, 0.5211399793624878, -0.182559996843338, 0.648140013217926, 
-0.14180999994277954, 0.6367599964141846, -0.47360000014305115, 0.09185100346803665, 0.27546000480651855, 0.27783000469207764, -0.32798999547958374, 0.24390000104904175, -0.157260000705719, -0.29813000559806824, 0.21404999494552612, -0.19943000376224518, -0.004251199774444103, -0.8014900088310242, -0.03658000007271767, 0.14441999793052673, 0.7035099864006042, -0.05623200163245201, 0.21505999565124512, -0.18150000274181366, 0.14292000234127045, -0.3386400043964386, -0.07907400280237198, 0.2940399944782257, 0.05626700073480606, 0.15399999916553497, 0.20915000140666962, -0.4653100073337555, -0.2156900018453598, -0.15106000006198883, 0.4584299921989441, -0.019473999738693237, -0.020653000101447105, 0.23136000335216522, 0.2896899878978729, 0.35888999700546265, -0.1814199984073639, -0.6267399787902832, -0.0983780026435852, 0.14428000152111053, -0.34150999784469604, -0.04022299870848656, 0.026165999472141266, -0.7295299768447876, -0.5232999920845032, -0.03386100009083748, -0.1724099963903427, -0.32451000809669495, -0.21353000402450562, 0.10704000294208527, 0.15324999392032623, 0.3656400144100189, -0.13009999692440033, -0.7033600211143494, 0.09413900226354599, -0.7925000190734863, 0.25971001386642456, -0.2905600070953369, -0.03201200067996979, -0.015073999762535095, -0.4954400062561035, 0.08019600063562393, -0.20032000541687012, 0.2165600061416626, 0.4309000074863434, -0.2172199934720993, -0.1359100043773651, -0.3036699891090393, -0.08710899949073792, -0.8338299989700317, 0.38370001316070557, 0.336870014667511, 0.3325499892234802, 0.18258999288082123, 0.06161699816584587, 0.22425000369548798, 0.2112399935722351, -0.06541500240564346, 0.19163000583648682, 0.056821998208761215, -0.20252999663352966, 0.1539900004863739, 0.2424200028181076, 0.17520000040531158, -0.49428001046180725, -0.22283999621868134, 0.10864000022411346, -0.3294300138950348, -0.2183299958705902, 0.5613499879837036, 0.06099100038409233, 0.14256000518798828, 0.11394000053405762, 0.10591000318527222, -0.045524999499320984, 0.3985399901866913, 0.4660800099372864, -0.31738999485969543, 0.33386000990867615, 0.2204499989748001, 0.2159299999475479, -0.04788700118660927, 0.5064399838447571, -0.5151600241661072, -0.5395500063896179, 0.7009099721908569, -0.03455200046300888, 0.275409996509552, -0.14226000010967255, -0.8464400172233582, -0.13106000423431396, -0.6036199927330017, 0.0757410004734993, -0.6320099830627441, -0.3922500014305115, -0.26495999097824097, 0.7036899924278259, 0.14921000599861145, -0.007014099974185228, -0.40893998742103577, 0.417279988527298, 0.4306899905204773, -0.28641000390052795, -0.7629799842834473, -0.15925000607967377, 0.12154000252485275, 0.048670001327991486, 0.3715899884700775, 0.290120005607605, -0.21527999639511108, -0.390529990196228, 0.5062100291252136, -0.4466400146484375, -0.3246900141239166, -0.5084599852561951, 0.1710900068283081, 0.45146000385284424, 0.5174000263214111, 0.23387999832630157, -0.06580500304698944, 0.1443600058555603, -0.3510499894618988, -0.483240008354187, 0.48774001002311707, -0.28922000527381897, 0.41343000531196594, 0.18982000648975372, -0.12358999997377396, 0.1310500055551529, -0.016899000853300095, 0.14090000092983246, -0.2395000010728836, -0.31499001383781433, -0.26837998628616333, 0.32938000559806824, -0.16804000735282898, -0.036368001252412796, 0.14225000143051147, 0.14271999895572662, -0.43004998564720154, -1.1833000183105469, -0.18565000593662262, 0.30072999000549316, 0.22543999552726746, 0.15741999447345734, 0.23853999376296997, -0.04706700146198273, 0.0191079992800951, 
-0.2960500121116638, 0.042075999081134796, 0.07653500139713287, -0.08614200353622437, 0.1468600034713745, -0.3324500024318695, 0.3126299977302551, -0.11364000290632248, -0.5540300011634827, -0.08410099893808365, -0.2063400000333786, 0.05023900046944618, 0.35133999586105347, 0.28565001487731934, -0.04802799969911575, -0.6585400104522705, 0.46931999921798706, -0.4128899872303009, 0.17448000609874725, -0.29218000173568726, 0.17077000439167023, -0.6325399875640869, 0.09711900353431702, 0.3661699891090393, 0.5886899828910828, -0.300570011138916, 0.0460360012948513, -0.3334200084209442, -0.11452999711036682, -0.4544999897480011, 0.6152200102806091, 0.007750099990516901, -0.03386100009083748, -0.5902100205421448, -0.5202800035476685, 0.06624200195074081, 0.23770000040531158, 0.6643499732017517, -0.07084900140762329, -0.6079099774360657, -0.6549299955368042, -0.5168799757957458, 0.6690700054168701, -0.7844200134277344, -1.1165000200271606, 0.5631200075149536, 0.4414699971675873, -0.3013800084590912, -0.19512000679969788, 0.030455999076366425, -0.1525699943304062, 0.29704999923706055, 0.17182999849319458, 0.2869099974632263, -0.16666999459266663, -0.546720027923584, 0.008154500275850296, -0.12159000337123871, 0.1103999987244606, 0.4053199887275696, -0.58992999792099, -0.9019799828529358, -0.5094900131225586, -0.10732000321149826, 0.025460999459028244, -0.46525999903678894], u'Shoes.Heels': [0.219650000333786, 0.07948999851942062, 0.015030999667942524, 0.15986000001430511, -0.22746999561786652, -0.04331300035119057, -0.08965999633073807, -0.44488000869750977, 0.3429900109767914, -0.3225899934768677, -0.2826800048351288, 0.153779998421669, 0.04661000147461891, 0.14395999908447266, -0.16495999693870544, 0.09883899986743927, 0.22699999809265137, -0.2552500069141388, 0.0998769998550415, 0.051382001489400864, -0.11751999706029892, -0.0035757001023739576, 0.5185400247573853, -0.06651599705219269, 0.06932900100946426, 0.05786500126123428, 0.34815001487731934, -0.4721600115299225, 0.32559001445770264, 0.21971000730991364, -0.23346999287605286, -0.2226099967956543, -0.4161500036716461, -0.11049000173807144, -0.9663199782371521, 0.3610900044441223, -0.48739999532699585, 0.5645300149917603, 0.6009600162506104, 0.3288699984550476, -0.08153499662876129, -0.3250199854373932, -0.2400200068950653, 0.1969199925661087, -0.28999000787734985, 0.04651099815964699, 0.1930599957704544, -0.16347000002861023, -0.22336000204086304, 0.2795799970626831, -0.4006899893283844, 0.07509800046682358, -0.16009999811649323, -0.5303800106048584, -0.006113800220191479, 0.08971700072288513, -0.22982999682426453, -0.17181000113487244, -0.008038599975407124, 0.24104000627994537, -0.11415000259876251, -0.3141300082206726, 0.32686999440193176, 0.45072999596595764, 0.11226999759674072, -0.4300299882888794, 0.5717800259590149, 0.023375999182462692, 0.40400999784469604, -0.23413999378681183, -0.1149199977517128, 0.4088200032711029, -0.34797999262809753, -0.3856799900531769, 0.22224999964237213, -0.21887999773025513, 0.28696000576019287, -0.19835999608039856, -0.11714000254869461, -0.3246999979019165, -0.31292998790740967, -0.14815999567508698, 0.05299200117588043, -0.15131999552249908, 0.21554000675678253, 0.03888799995183945, -0.29462000727653503, -0.0506879985332489, 0.3025699853897095, -0.033771999180316925, 0.11627999693155289, 0.5623199939727783, -0.14030000567436218, 0.1036200001835823, -0.2107899934053421, -0.2853100001811981, -0.5933099985122681, 0.19518999755382538, 0.14539000391960144, -0.3981499969959259, -0.11072000116109848, 
0.4479700028896332, -0.20348000526428223, -0.24400000274181366, -0.24247999489307404, -1.0113999843597412, 0.1697400063276291, 0.07141199707984924, 0.1365099996328354, -0.6870700120925903, 0.3138200044631958, 0.11153999716043472, -0.40696001052856445, -0.42403000593185425, 0.049265000969171524, -0.04419099912047386, 0.28334999084472656, -0.07404299825429916, 0.46053001284599304, -0.7308499813079834, -0.17609000205993652, -0.4245299994945526, 0.44672998785972595, -0.19212999939918518, -0.5629100203514099, 0.405239999294281, -0.05854799970984459, 0.42949000000953674, -0.1738699972629547, -0.02217100001871586, 0.015848999843001366, 0.36399000883102417, -0.7437499761581421, -0.14952999353408813, -0.300029993057251, -0.021240999922156334, -0.5087599754333496, 0.3066200017929077, -0.14661000669002533, 0.24714000523090363, 0.23648999631404877, -0.16202999651432037, 0.04464900121092796, 0.17151999473571777, -0.2845900058746338, 0.2562899887561798, -0.40450000762939453, 0.258899986743927, 0.15352000296115875, -0.05618000030517578, 0.33689001202583313, 0.17937999963760376, -0.46518999338150024, 0.1430799961090088, 0.15467000007629395, -0.03985600173473358, 0.30792999267578125, -0.34035998582839966, 0.09083099663257599, 0.5265799760818481, -0.11562000215053558, 0.029388999566435814, -0.26475998759269714, -0.10328000038862228, 0.3757599890232086, 0.0354279987514019, 0.3566800057888031, 0.4225800037384033, 0.09006699919700623, -0.27292999625205994, -0.31911998987197876, 0.5286700129508972, -0.6569899916648865, -0.3863399922847748, -0.20472000539302826, -0.10245999693870544, -0.4050399959087372, 0.3815299868583679, 0.18967999517917633, 0.0045727998949587345, 0.44648998975753784, -0.4134500026702881, 0.6801699995994568, 0.10922999680042267, 0.06825599819421768, -0.39969000220298767, 0.22582000494003296, 0.07699800282716751, 0.06102599948644638, -0.446370005607605, -0.12728999555110931, -1.0543999671936035, -0.4207099974155426, -0.14244000613689423, -0.31018999218940735, 0.33083000779151917, -0.09524299949407578, -0.4303799867630005, 0.692330002784729, 0.06453599780797958, 0.9007200002670288, -0.0541130006313324, 0.10639999806880951, 0.6057900190353394, -0.23329000174999237, 0.05968200042843819, -0.10576000064611435, -0.018223000690340996, -0.6658899784088135, 0.30188000202178955, -0.10390999913215637, 0.911620020866394, 0.06057300046086311, 0.27226001024246216, 0.2729800045490265, -0.24496999382972717, 0.33726000785827637, -0.5259400010108948, 0.00024717001360841095, -0.9076700210571289, 0.5846199989318848, 0.1810699999332428, 0.35343000292778015, 0.7089200019836426, 0.026228999719023705, 0.09261400252580643, -0.3439599871635437, 0.1277099996805191, -0.2867400050163269, 0.1982100009918213, 0.33406999707221985, -0.001749600050970912, 0.34446999430656433, -0.08833900094032288, -0.07843700051307678, -0.613070011138916, -0.020968999713659286, 0.04188600182533264, 0.33441999554634094, 0.2072799950838089, 0.27900999784469604, -0.024901000782847404, -0.6086199879646301, -0.13300999999046326, -0.3081499934196472, 0.16463999450206757, 0.052232999354600906, 0.00615020003169775, 0.34147000312805176, -0.6908400058746338, -0.06774699687957764, 0.15012000501155853, 0.2294899970293045, -0.10010000318288803, -0.2515299916267395, -0.495059996843338, 0.05534699931740761, 0.7286400198936462, -0.7355300188064575, -0.06754600256681442, -0.6669300198554993, 0.07938399910926819, 0.22822000086307526, -0.20204000174999237, -0.15379999577999115, -0.1302099972963333, -0.749459981918335, -0.16547000408172607, -0.3032299876213074, 
-0.31334999203681946, -0.016189999878406525, -0.30006998777389526, -0.6694200038909912, -0.2405499964952469, -0.7322999835014343, -0.1333799958229065, -0.6287800073623657, 0.07925599813461304, -0.3816800117492676, 0.722790002822876, 0.22925999760627747, 0.162990003824234, 0.21724000573158264, -0.08160000294446945, -0.5528600215911865, -0.03137499839067459, -0.0901150032877922, 0.03868899866938591, -0.027279000729322433, -0.5084199905395508, 0.21889999508857727, -0.00023015000624582171, 0.2795400023460388, 0.3716199994087219, -0.09158699959516525, 0.12726999819278717, 0.1662999987602234, -0.1618500053882599, 0.4067299962043762, 0.05266299843788147], u'Boots.Mid-Calf': [-0.15091000497341156, 0.1041100025177002, -1.0577000379562378, 0.5563899874687195, -0.0029670000076293945, 0.11469999700784683, 0.09877300262451172, -0.29368001222610474, -0.028706999495625496, 0.7798600196838379, -0.4042699933052063, 1.076200008392334, 0.6060500144958496, 0.8275799751281738, -0.6356099843978882, -0.5946699976921082, 0.014066999778151512, 0.5651199817657471, -0.3120099902153015, 0.44701001048088074, -0.6526399850845337, -0.645039975643158, 0.26565998792648315, -0.29829999804496765, -0.5902299880981445, -0.25183001160621643, -0.19022999703884125, 0.1245800033211708, 0.19957000017166138, 0.6190099716186523, 0.31011998653411865, -0.2732200026512146, 0.5270100235939026, -0.8452600240707397, 0.828000009059906, 0.5938000082969666, -0.1259399950504303, 0.7526999711990356, -0.26447001099586487, -0.7587800025939941, -0.5090699791908264, 0.5793499946594238, -0.07673700153827667, -0.5339000225067139, 0.34779998660087585, 0.06233900040388107, -0.33035001158714294, -0.14932000637054443, -0.4392000138759613, -0.2662299871444702, 0.07704299688339233, -0.48739999532699585, 0.5917400121688843, -0.3346099853515625, 0.28876999020576477, -0.26041001081466675, -0.24503999948501587, -0.003819999983534217, -0.04586099833250046, 0.3893600106239319, 0.2192399948835373, -0.09421899914741516, -0.3543199896812439, 0.0034384001046419144, 0.04207000136375427, -0.22310000658035278, 0.06123699992895126, 0.24231000244617462, 0.23017999529838562, 0.4792200028896332, 0.5904399752616882, 0.28749001026153564, -0.664139986038208, 0.4914500117301941, 0.6396399736404419, -0.6467700004577637, 0.09189199656248093, 0.21191999316215515, -0.43474000692367554, -0.25053998827934265, 0.22316999733448029, 0.2337300032377243, 0.579010009765625, -0.5306800007820129, -0.05079200118780136, 0.5776900053024292, 0.37136998772621155, -0.4067099988460541, -0.16051000356674194, -0.2079000025987625, 0.9269599914550781, 0.5050699710845947, -0.10097000002861023, 0.28979000449180603, 0.2923699915409088, 0.06273899972438812, 0.057774998247623444, -0.13892999291419983, -0.21694999933242798, 0.2338699996471405, 0.06513699889183044, 0.32427000999450684, 0.7290400266647339, 0.4440999925136566, -0.8859900236129761, -0.15591000020503998, -0.745989978313446, 0.18894000351428986, 0.21458999812602997, -0.11686000227928162, 0.09226799756288528, -0.15448999404907227, -0.33726999163627625, -0.2400899976491928, 0.027574999257922173, -0.4582799971103668, 0.19415000081062317, 0.3716599941253662, 0.9386399984359741, 0.40738001465797424, 0.3562999963760376, -0.18692000210285187, 0.22909000515937805, 0.16527999937534332, 0.01675499975681305, 0.14666999876499176, 0.048555001616477966, -0.1406400054693222, 0.1644199937582016, 0.12118999660015106, -0.25892001390457153, -0.6444000005722046, -0.4402100145816803, 0.03374600037932396, 0.2341099977493286, -0.02705400064587593, 
-0.005032800137996674, -0.3025299906730652, 0.6392300128936768, 0.4041599929332733, -0.23104000091552734, -0.28022998571395874, -0.6957600116729736, -0.8560699820518494, 0.4010300040245056, -0.3135499954223633, -0.4470599889755249, 0.29853999614715576, 0.6404399871826172, 0.20782999694347382, -0.43202999234199524, -0.7000399827957153, -0.3558099865913391, 0.010843000374734402, -0.021855000406503677, -0.08210600167512894, 0.12585000693798065, -0.05709400027990341, -0.10798999667167664, 0.2846899926662445, 0.4211300015449524, -0.5254200100898743, -0.7664700150489807, -0.006008299998939037, 0.15940000116825104, -0.026910999789834023, -0.24445000290870667, 0.129940003156662, 0.02689100056886673, 0.4575900137424469, -0.06364999711513519, 0.15214000642299652, 0.4568899869918823, -0.10814999788999557, 0.1179800033569336, -0.47023001313209534, -0.2271299958229065, 0.1369200050830841, 0.5059300065040588, -0.43494999408721924, 0.4014100134372711, 0.28321999311447144, -0.0032740000169724226, -0.4474300146102905, 0.5781099796295166, 0.0815809965133667, 0.4601399898529053, 0.3865399956703186, 0.3000200092792511, -0.32150998711586, -0.25051000714302063, -0.4665299952030182, -0.11072999984025955, -0.06614399701356888, 0.3256399929523468, 0.25949999690055847, -0.1056400015950203, 0.19596999883651733, -0.23937000334262848, 0.021240999922156334, -0.3758000135421753, -0.16937999427318573, -0.3039099872112274, 0.1287900060415268, -0.5653499960899353, 0.3874000012874603, -0.6866400241851807, 0.5703200101852417, -0.5923200249671936, -0.43509000539779663, -0.20486000180244446, -0.6616100072860718, 0.21061000227928162, 1.1857999563217163, -0.47663000226020813, -0.8400999903678894, -0.2958599925041199, -0.2985900044441223, 0.0650549978017807, -0.15866999328136444, -0.1378600001335144, 0.3503200113773346, 0.8845900297164917, 0.2983199954032898, -0.32736000418663025, -0.028862999752163887, -0.8470199704170227, 0.48781999945640564, -0.5895900130271912, -0.20613999664783478, 0.2000499963760376, -0.11243999749422073, -0.4210500121116638, 0.06162799894809723, 0.2112800031900406, -0.3974800109863281, -0.19726000726222992, 0.3082599937915802, 0.262800008058548, -0.08939400315284729, 0.2813900113105774, -0.024203000590205193, -0.297109991312027, 0.04312799870967865, 0.5784599781036377, -0.22577999532222748, 0.04958700016140938, 0.08567000180482864, -0.47367000579833984, 0.3401600122451782, -0.35201001167297363, 0.10110999643802643, 0.338019996881485, -1.0252000093460083, -0.9286699891090393, -0.520359992980957, -0.4027499854564667, 0.7970399856567383, -0.18374000489711761, -0.23194000124931335, 0.14862999320030212, 0.08475899696350098, -0.39563998579978943, -0.05711499974131584, -0.17515000700950623, 0.20573000609874725, 0.2428700029850006, 0.6331700086593628, 0.009373899549245834, -0.04499699920415878, -0.16224999725818634, -0.788569986820221, -0.8942700028419495, 0.0743900015950203, -0.24371999502182007, -0.3416000008583069, 0.6010100245475769, -0.5028200149536133, -0.5857899785041809, -0.5220100283622742, 0.2559100091457367, 0.4483500123023987, 0.5286300182342529, -0.21112999320030212, -0.410290002822876, -0.30270999670028687, 0.4726499915122986, -0.11808999627828598, 0.09046199917793274, -0.6214200258255005, 0.6830199956893921, 0.47889000177383423, -0.6695799827575684, 0.1385200023651123, -0.39902999997138977, -1.11899995803833, 0.2026599943637848, -0.28881001472473145, 0.2994300127029419, -0.19812999665737152], u'Shoes.Flats': [-0.20374999940395355, 0.3542500138282776, -0.5919100046157837, 0.03648199886083603, 
[... several thousand floating-point embedding values elided for readability. This span of the added data file continues a Python dict that maps UT-Zappos50K category names (u'Boots.Knee.High', u'Shoes.Sneakers.and.Athletic.Shoes', u'Shoes.Boat.Shoes', u'Shoes.Oxfords', u'Boots.Ankle', u'Sandals', u'Slippers', u'Shoes.Loafers') to dense word-embedding vectors, one fixed-length list of floats per category. The dict closes as follows ...]
..., 0.3562000095844269, 0.13203999400138855]}
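For orientation, the sketch below shows how a name-to-vector table of this shape can be consumed once loaded. The variable name obj2vec is hypothetical, and each vector is truncated to its first three components exactly as they appear above.

import numpy as np

# Hypothetical excerpt of the table above (vectors truncated to 3 components).
obj2vec = {
    u'Sandals':  np.array([0.3744400143623352, -0.29023000597953796, -0.3387500047683716], dtype=np.float32),
    u'Slippers': np.array([-0.32280999422073364, -0.14837999641895294, -0.5921000242233276], dtype=np.float32),
}

def cosine(a, b):
    # cosine similarity between two embedding vectors
    return float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b)))

print(cosine(obj2vec[u'Sandals'], obj2vec[u'Slippers']))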
diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/config.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3211f4d14fe71cac8397081692f5b8d4a685bc3
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/config.py
@@ -0,0 +1,63 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging, os
+
+logging.basicConfig(format='[%(asctime)s] %(name)s: %(message)s', level=logging.INFO)
+
+
+RANDOM_SEED = None
+
+SUMMARY_INTERVAL = 'auto'  # int (number of iterations) or 'auto'
+IMAGE_SUMMARY_INTERVAL = 'auto'  # int (number of iterations) or 'auto'
+
+
+ROOT_DIR = "."  # change this to the project folder
+
+
+WEIGHT_ROOT_DIR = ROOT_DIR + "/SymNet_NPU/weights/"
+LOG_ROOT_DIR = ROOT_DIR + "/SymNet_NPU/output_log/"
+DATA_ROOT_DIR = ROOT_DIR + "/data"
+
+
+CZSL_DS_ROOT = {
+    'MIT': DATA_ROOT_DIR + '/mit-states-original',
+    'UT': 'ut-zap50k-original',  # bare name: resolved relative to train_url by the dataloader
+}
+
+GCZSL_DS_ROOT = {
+    'MIT': DATA_ROOT_DIR + '/mit-states-natural',
+    'UT': DATA_ROOT_DIR + '/ut-zap50k-natural',
+}
+
+GRADIENT_CLIPPING = 5
+
+
+# if not os.path.exists(WEIGHT_ROOT_DIR):
+#     os.makedirs(WEIGHT_ROOT_DIR)
+# if not os.path.exists(LOG_ROOT_DIR):
+#     os.makedirs(LOG_ROOT_DIR)
\ No newline at end of file
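CZSL_DS_ROOT deliberately mixes styles: 'MIT' is anchored under DATA_ROOT_DIR, while 'UT' is a bare directory name that the dataloader (utils/dataset/__init__.py, later in this diff) joins onto train_url. A minimal sketch of that resolution, with train_url as an assumed example input directory matching the comment in CZSL_dataset.py:

import os.path as osp

# Values mirrored from config.py; train_url is an assumed example path.
CZSL_DS_ROOT = {
    'MIT': './data/mit-states-original',
    'UT': 'ut-zap50k-original',
}
train_url = '/home/ma-user/modelarts/inputs/data_url_0'

print(osp.join(train_url, CZSL_DS_ROOT['UT']))
# /home/ma-user/modelarts/inputs/data_url_0/ut-zap50k-original
print(osp.join(train_url, CZSL_DS_ROOT['MIT']))
# /home/ma-user/modelarts/inputs/data_url_0/./data/mit-states-original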
diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/dataset/CZSL_dataset.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/dataset/CZSL_dataset.py
new file mode 100644
index 0000000000000000000000000000000000000000..904ac1fd0e071f46d0374f44ba5b53f900620839
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/dataset/CZSL_dataset.py
@@ -0,0 +1,248 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Modified from attributes-as-operators"""
+import numpy as np
+import torch, torchvision
+import os, pickle, json
+import tqdm
+
+
+try:
+    from . import data_utils
+    from .. import config as cfg
+except (ValueError, ImportError):
+    import data_utils  # fallback when run as a plain script rather than a package
+
+
+class CompositionDatasetActivations(torch.utils.data.Dataset):
+
+    def __init__(self, train_url, name, root, phase, feat_file, split='compositional-split', with_image=False, obj_pred=None, transform_type='normal'):
+        # root: /home/ma-user/modelarts/inputs/data_url_0/ut-zap50k-original
+        self.train_url = train_url
+        self.root = root
+        self.phase = phase
+        self.split = split
+        self.with_image = with_image
+
+        self.feat_dim = None
+        self.transform = data_utils.imagenet_transform(phase, transform_type)
+        self.loader = data_utils.ImageLoader(self.root + '/images/')
+
+        feat_file = os.path.join(self.root, feat_file)
+        activation_data = torch.load(feat_file)
+
+        self.activation_dict = dict(zip(activation_data['files'], activation_data['features']))
+        self.feat_dim = activation_data['features'].size(1)
+        # pair = (attr, obj)
+        self.attrs, self.objs, self.pairs, self.train_pairs, self.test_pairs = self.parse_split()
+        self.attr2idx = {attr: idx for idx, attr in enumerate(self.attrs)}
+        self.obj2idx = {obj: idx for idx, obj in enumerate(self.objs)}
+        self.pair2idx = {pair: idx for idx, pair in enumerate(self.pairs)}
+
+        self.train_data, self.test_data = self.get_split_info()
+
+        self.data = self.train_data if self.phase == 'train' else self.test_data  # list of [img_name, attr, obj, attr_id, obj_id, feat]
+
+        # obj_affordance_mask[obj_id][attr_id] == 1 iff the attribute occurs with that object
+        self.obj_affordance_mask = []
+        for _obj in self.objs:
+            candidates = [attr for (_, attr, obj, _, _, _) in self.train_data + self.test_data if obj == _obj]
+            affordance = set(candidates)
+            mask = [1 if x in affordance else 0 for x in self.attrs]
+            self.obj_affordance_mask.append(mask)
+
+        # negative image pool: group training samples by object id
+        samples_grouped_by_obj = [[] for _ in range(len(self.objs))]
+        for i, x in enumerate(self.train_data):
+            samples_grouped_by_obj[x[4]].append(i)
+
+        self.neg_pool = []  # [obj_id][attr_id] => list of sample ids
+        for obj_id in range(len(self.objs)):
+            self.neg_pool.append([])
+            for attr_id in range(len(self.attrs)):
+                self.neg_pool[obj_id].append(
+                    [i for i in samples_grouped_by_obj[obj_id] if
+                        self.train_data[i][3] != attr_id]
+                )
+
+        aux_data_root = './utils/aux_data'
+        print("aux_data_root", aux_data_root)
+        gamma = json.load(open(aux_data_root + "/%s_gamma.json" % name))
+        gamma = {k: np.array(v, dtype=np.float32) for k, v in gamma.items()}
+        self.comp_gamma = {'a': gamma['comp_a'], 'b': gamma['comp_b']}
+        self.attr_gamma = {'a': gamma['attr_a'], 'b': gamma['attr_b']}
+
+        if obj_pred is None:
+            self.obj_pred = None
+        else:
+            obj_pred_path = os.path.join(self.train_url, 'obj_scores', obj_pred)
+            print("Loading object prediction from %s" % obj_pred_path)
+            with open(obj_pred_path, 'rb') as fp:
+                self.obj_pred = np.array(pickle.load(fp), dtype=np.float32)
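To make the negative-pool indexing concrete, here is a toy rendering of the loop above; the records are assumed values in the same [img_name, attr, obj, attr_id, obj_id, feat] layout:

# Toy data (assumed values), two attribute classes and two object classes.
train_data = [
    ['a.jpg', 'Shiny', 'Boots.Ankle', 0, 1, None],
    ['b.jpg', 'Worn',  'Boots.Ankle', 1, 1, None],
    ['c.jpg', 'Shiny', 'Sandals',     0, 0, None],
]

samples_grouped_by_obj = [[] for _ in range(2)]
for i, x in enumerate(train_data):
    samples_grouped_by_obj[x[4]].append(i)

neg_pool = []  # neg_pool[obj_id][attr_id] -> samples of the same object with a different attribute
for obj_id in range(2):
    neg_pool.append([])
    for attr_id in range(2):
        neg_pool[obj_id].append(
            [i for i in samples_grouped_by_obj[obj_id] if train_data[i][3] != attr_id])

print(neg_pool[1][0])  # [1]: 'b.jpg' shares the object but not the attribute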
+
+
+    def get_split_info(self):
+        data = torch.load(self.root + '/metadata.t7')
+        train_pair_set = set(self.train_pairs)
+        test_pair_set = set(self.test_pairs)
+        train_data, test_data = [], []
+
+        for instance in data:
+
+            image, attr, obj = instance['image'], instance['attr'], instance['obj']
+
+            if attr == 'NA' or (attr, obj) not in self.pairs:
+                # ignore instances with unlabeled attributes
+                # ignore instances that are not in the current split
+                continue
+
+            data_i = [image, attr, obj, self.attr2idx[attr], self.obj2idx[obj], self.activation_dict[image]]
+            if (attr, obj) in train_pair_set:
+                train_data.append(data_i)
+            else:
+                test_data.append(data_i)
+
+        return train_data, test_data
+
+    def parse_split(self):
+
+        def parse_pairs(pair_list):
+            with open(pair_list, 'r') as f:
+                pairs = f.read().strip().split('\n')
+                pairs = [t.split() for t in pairs]
+                pairs = list(map(tuple, pairs))
+            attrs, objs = zip(*pairs)
+            return attrs, objs, pairs
+
+        tr_attrs, tr_objs, tr_pairs = parse_pairs('%s/%s/train_pairs.txt' % (self.root, self.split))
+        ts_attrs, ts_objs, ts_pairs = parse_pairs('%s/%s/test_pairs.txt' % (self.root, self.split))
+
+        all_attrs, all_objs = sorted(list(set(tr_attrs + ts_attrs))), sorted(list(set(tr_objs + ts_objs)))
+        all_pairs = sorted(list(set(tr_pairs + ts_pairs)))
+
+        return all_attrs, all_objs, all_pairs, tr_pairs, ts_pairs
+
+    def sample_negative(self, attr_id, obj_id):
+        return np.random.choice(self.neg_pool[obj_id][attr_id])
+
+    def __getitem__(self, index):
+        def get_sample(i):
+            image, attr, obj, attr_id, obj_id, feat = self.data[i]
+            if self.with_image:
+                img = self.loader(image)
+                img = self.transform(img)
+            else:
+                img = None
+
+            return [img, attr_id, obj_id, self.pair2idx[(attr, obj)], feat]
+
+        pos = get_sample(index)
+
+        mask = np.array(self.obj_affordance_mask[pos[2]], dtype=np.float32)
+
+        if self.phase == 'train':
+            negid = self.sample_negative(pos[1], pos[2])  # negative example
+            neg = get_sample(negid)
+            data = pos + neg + [mask]
+        else:
+            data = pos + [mask]
+
+        # train: [img, attr_id, obj_id, pair_id, img_feature, img, attr_id, obj_id, pair_id, img_feature, aff_mask]
+        # test:  [img, attr_id, obj_id, pair_id, img_feature, aff_mask]
+
+        if self.obj_pred is not None:
+            data.append(self.obj_pred[index, :])
+
+        return data
+
+    def __len__(self):
+        return len(self.data)
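The phase-dependent layout that __getitem__ documents in its comments can be seen with plain lists (assumed placeholder values, no real images or features involved):

# Positive and negative samples follow [img, attr_id, obj_id, pair_id, feat].
pos  = [None, 3, 7, 12, 'feat_pos']
neg  = [None, 5, 7, 20, 'feat_neg']
mask = [0.0, 1.0, 1.0]  # affordance mask over all attributes

train_sample = pos + neg + [mask]  # 11 fields, returned when phase == 'train'
test_sample  = pos + [mask]        # 6 fields, returned otherwise
print(len(train_sample), len(test_sample))  # 11 6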
feat_extractor is None: + feat_extractor = torchvision.models.resnet18(pretrained=True) + feat_extractor.fc = torch.nn.Sequential() + feat_extractor.eval().cuda() + + image_feats = [] + image_files = [] + for chunk in tqdm.tqdm(data_utils.chunks(data, 512), total=len(data)//512): + files, attrs, objs = zip(*chunk) + imgs = list(map(self.loader, files)) + imgs = list(map(transform, imgs)) + feats = feat_extractor(torch.stack(imgs, 0).cuda()) + image_feats.append(feats.data.cpu()) + image_files += files + image_feats = torch.cat(image_feats, 0) + print ('features for %d images generated'%(len(image_files))) + + torch.save({'features': image_feats, 'files': image_files}, out_file) + + + + + +if __name__=='__main__': + """example code for generating new features for MIT states and UT Zappos + CompositionDatasetActivationsGenerator( + root = 'data-dir', + feat_file = 'filename-to-save', + feat_extractor = torchvision.models.resnet18(pretrained=True), + ) + """ + CompositionDatasetActivationsGenerator( + root = 'data/attributes-as-operators/data/mit-states', + feat_file = 'data/attributes-as-operators/data/mit-states/features.t7', + ) \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/dataset/__init__.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/dataset/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..82c27872446334b07185cbf8795dc194f085d915 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/dataset/__init__.py @@ -0,0 +1,57 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from .. 
import config as cfg +from utils.dataset import CZSL_dataset +from torch.utils.data import DataLoader +import numpy as np +import os.path as osp +import os +cwd = os.getcwd() +def get_dataloader(train_url, dataset_name, phase, feature_file="features.t7", batchsize=1, num_workers=1, shuffle=None, **kwargs): + dt_path = osp.join(train_url, cfg.CZSL_DS_ROOT[dataset_name]) + + dataset = CZSL_dataset.CompositionDatasetActivations( + train_url = train_url, + name = dataset_name, + root = dt_path, #data/mit-states-original/features.t7 + phase = phase, + feat_file = feature_file, + **kwargs) + + + if shuffle is None: + shuffle = (phase=='train') + print(shuffle) + + return DataLoader(dataset, batchsize, shuffle, num_workers=num_workers, + collate_fn = lambda data: [np.stack(d, axis=0) for d in zip(*data)] + ) + + + + diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/dataset/data_utils.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/dataset/data_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..bdd29df853e288b4eedadda71c82907ad20e7115 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/dataset/data_utils.py @@ -0,0 +1,90 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
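+# Image loading (ImageLoader, with optional bbox cropping) and the
+# ImageNet-style train/test transforms shared by the CZSL dataset loaders.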
+import torchvision.transforms as transforms +from PIL import Image + + + +class ImageLoader: + def __init__(self, root): + self.img_dir = root + + def __call__(self, img): + str_types = [str] + try: + str_types.append(unicode) + except NameError: + pass + + if type(img) in str_types: + f = '%s/%s'%(self.img_dir, img) + img = Image.open(f).convert('RGB') + elif type(img) in [list, tuple]: + f = '%s/%s'%(self.img_dir, img[0]) + x,y,w,h = img[1:] # bbox + img = Image.open(f).convert('RGB') + img = img.crop((x, y, x+w, y+h)) + else: + raise NotImplementedError(str(type(img))) + return img + + +def imagenet_transform(phase, transform_type): + mean, std = [0.485, 0.456, 0.406], [0.229, 0.224, 0.225] + + if transform_type == 'normal': + if phase=='train': + transform = transforms.Compose([ + transforms.RandomResizedCrop(224), + transforms.RandomHorizontalFlip(), + transforms.ToTensor(), + transforms.Normalize(mean, std) + ]) + elif phase in ['test', 'val']: + transform = transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + transforms.Normalize(mean, std) + ]) + elif transform_type == 'fixed': + transform = transforms.Compose([ + transforms.Resize(224), + transforms.ToTensor(), + transforms.Normalize(mean, std) + ]) + else: + raise NotImplementedError("transform_type %s"%transform_type) + + return transform + + +def chunks(l, n): + """Yield successive n-sized chunks from l.""" + for i in range(0, len(l), n): + yield l[i:i + n] \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/evaluator.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/evaluator.py new file mode 100644 index 0000000000000000000000000000000000000000..eff09727b55ae5b4bc6f248dd47cc3019af2e01a --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/evaluator.py @@ -0,0 +1,307 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision.models as tmodels +import numpy as np +from . 
import utils +import itertools +import math +import collections +import logging + + +class CZSL_Evaluator: + """modified from AttrOperator""" + + def __init__(self, dset, model): + + self.dset = dset + + # convert text pairs to idx tensors: [('sliced', 'apple'), ('ripe', 'apple'), ...] --> torch.LongTensor([[0,1],[1,1], ...]) + pairs = [(dset.attr2idx[attr], dset.obj2idx[obj]) + for attr, obj in dset.pairs] + self.pairs = torch.LongTensor(pairs) + + # mask over pairs that occur in closed world + test_pair_set = set(dset.test_pairs) + mask = [1 if pair in test_pair_set else 0 for pair in dset.pairs] + self.closed_mask = torch.ByteTensor(mask) + + # object specific mask over which pairs occur in the object oracle setting + oracle_obj_mask = [] + for _obj in dset.objs: + mask = [1 if _obj==obj else 0 for attr, obj in dset.pairs] + oracle_obj_mask.append(torch.ByteTensor(mask)) + self.oracle_obj_mask = torch.stack(oracle_obj_mask, 0) + + + # generate masks for each setting, mask scores, and get prediction labels + def generate_predictions(self, scores, obj_truth): # (B, #pairs) + + def get_pred_from_scores(_scores): + _, pair_pred = _scores.max(1) + attr_pred, obj_pred = self.pairs[pair_pred][:,0], self.pairs[pair_pred][:,1] + return (attr_pred, obj_pred) # attr/obj word id (not name) + + def get_pred_from_scores_and_mask_best(_scores): + _, pair_pred = _scores.max(1) + attr_pred, obj_pred = self.pairs[pair_pred][:,0], self.pairs[pair_pred][:,1] + _scores[range(pair_pred.shape[0]),pair_pred] = -1e10 + return _scores, (attr_pred, obj_pred) # attr/obj word id (not name) + + results = {} + + # open world setting -- no mask + results.update({'open': get_pred_from_scores(scores)}) + + + # closed world setting - set the score for all NON test-pairs to -1e10 + mask = self.closed_mask.repeat(scores.shape[0], 1) + closed_scores = scores.clone() + if hasattr(mask, 'bool'): + closed_scores[(1-mask).bool()] = -1e10 + else: + closed_scores[(1-mask).byte()] = -1e10 + closed_scores, closed1 = get_pred_from_scores_and_mask_best(closed_scores) + results.update({'closed1': closed1}) + closed_scores, closed2 = get_pred_from_scores_and_mask_best(closed_scores) + results.update({'closed2': closed2}) + closed_scores, closed3 = get_pred_from_scores_and_mask_best(closed_scores) + results.update({'closed3': closed3}) + + + # object_oracle setting - set the score to -1e10 for all pairs where the true object does NOT participate + mask = self.oracle_obj_mask[obj_truth] + oracle_obj_scores = scores.clone() + if hasattr(mask, 'bool'): + oracle_obj_scores[(1-mask).bool()] = -1e10 + else: + oracle_obj_scores[(1-mask).byte()] = -1e10 + + results.update({'object_oracle': get_pred_from_scores(oracle_obj_scores)}) + + return results + + def score_model(self, scores, obj_truth): + + # put everything on CPU + #scores = {k:v.cpu() for k,v in scores.items()} + #obj_truth = obj_truth.cpu() + + # gather scores for all relevant (a,o) pairs + scores = torch.stack([ + scores[(self.dset.attr2idx[attr], self.dset.obj2idx[obj])] + for attr, obj in self.dset.pairs + ], 1) # (B, #pairs) + results = self.generate_predictions(scores, obj_truth) + return results + + def evaluate_predictions(self, predictions, attr_truth, obj_truth, histogram=False, synonym_mode=False): + assert not histogram + + # put everything on cpu + #attr_truth, obj_truth = attr_truth.cpu(), obj_truth.cpu() + + # top 1 pair accuracy + # open world: attribute, object and pair + attr_match = (attr_truth==predictions['open'][0]).float() + obj_match = 
(obj_truth==predictions['open'][1]).float() + open_match = attr_match*obj_match + + # closed world, obj_oracle: pair + closed_1_match = (attr_truth==predictions['closed1'][0]).float() * (obj_truth==predictions['closed1'][1]).float() + closed_2_match = (attr_truth==predictions['closed2'][0]).float() * (obj_truth==predictions['closed2'][1]).float() + closed_1_match + closed_3_match = (attr_truth==predictions['closed3'][0]).float() * (obj_truth==predictions['closed3'][1]).float() + closed_2_match + + if synonym_mode: + closed_2_match[closed_2_match>1] = 1 + closed_3_match[closed_3_match>1] = 1 + + assert torch.max(closed_1_match).item()<=1, torch.max(closed_1_match).item() + assert torch.max(closed_2_match).item()<=1, torch.max(closed_2_match).item() + assert torch.max(closed_3_match).item()<=1, torch.max(closed_3_match).item() + + + obj_oracle_match = (attr_truth==predictions['object_oracle'][0]).float() * (obj_truth==predictions['object_oracle'][1]).float() + + return attr_match, obj_match, closed_1_match, closed_2_match, closed_3_match, open_match, obj_oracle_match + + + def evaluate_only_attr_obj(self, prob_a, gt_a, prob_o, gt_o): + prob_a, prob_o = torch.from_numpy(prob_a), torch.from_numpy(prob_o) + _, pred_a = prob_a.max(1) + _, pred_o = prob_o.max(1) + + attr_match = (pred_a == gt_a).float() + obj_match = (pred_o == gt_o).float() + + return attr_match, obj_match + + + + + + + +class GCZSL_Evaluator: + """modified from TMN""" + + def __init__(self, dset): + + self.dset = dset + + # convert text pairs to idx tensors: [('sliced', 'apple'), ('ripe', 'apple'), ...] --> torch.LongTensor([[0,1],[1,1], ...]) + pairs = [(dset.attr2idx[attr], dset.obj2idx[obj]) + for attr, obj in dset.pairs] + self.train_pairs = [(dset.attr2idx[attr], dset.obj2idx[obj]) + for attr, obj in dset.train_pairs] + self.pairs = torch.LongTensor(pairs) + + # mask over pairs that occur in closed world + if dset.phase == 'train': + print('Evaluating with train pairs') + test_pair_set = set(dset.train_pairs) + elif dset.phase == 'val': + print('Evaluating with val pairs') + test_pair_set = set(dset.val_pairs + dset.train_pairs) + else: + print('Evaluating with test pairs') + test_pair_set = set(dset.test_pairs + dset.train_pairs) + self.test_pairs = [(dset.attr2idx[attr], dset.obj2idx[obj]) + for attr, obj in list(test_pair_set)] + mask = [1 if pair in test_pair_set else 0 for pair in dset.pairs] + self.closed_mask = torch.ByteTensor(mask) + + seen_pair_set = set(dset.train_pairs) + mask = [1 if pair in seen_pair_set else 0 for pair in dset.pairs] + self.seen_mask = torch.ByteTensor(mask) + + # object specific mask over which pairs occur in the object oracle setting + oracle_obj_mask = [] + for _obj in dset.objs: + mask = [1 if _obj == obj else 0 for attr, obj in dset.pairs] + oracle_obj_mask.append(torch.ByteTensor(mask)) + self.oracle_obj_mask = torch.stack(oracle_obj_mask, 0) + + + # generate masks for each setting, mask scores, and get prediction labels + def generate_predictions(self, scores, obj_truth): # (B, #pairs) + def get_pred_from_scores(_scores): + _, pair_pred = _scores.topk(10, dim=1) #sort(1, descending=True) + pair_pred = pair_pred[:, :10].contiguous().view(-1) + attr_pred, obj_pred = self.pairs[pair_pred][:, 0].view( + -1, 10), self.pairs[pair_pred][:, 1].view(-1, 10) + return (attr_pred, obj_pred) + + results = {} + + # open world setting -- no mask + mask = self.closed_mask.repeat(scores.shape[0], 1) + mask = 1 - mask + if hasattr(mask, "bool"): + mask = mask.bool() + closed_scores = scores.clone() + 
closed_scores[mask] = -1e10 + results.update({'open': get_pred_from_scores(closed_scores)}) + + # closed world setting - set the score for all NON test-pairs to -1e10 + #results.update({'closed': get_pred_from_scores(closed_scores)}) + results.update({'closed': results['open']}) + + # object_oracle setting - set the score to -1e10 for all pairs where the true object does NOT participate + mask = self.oracle_obj_mask[obj_truth] + oracle_obj_scores = scores.clone() + + mask = 1 - mask + if hasattr(mask, "bool"): + mask = mask.bool() + oracle_obj_scores[mask] = -1e10 + + results.update({ + 'object_oracle': get_pred_from_scores(oracle_obj_scores) + }) + + return results + + + def score_model(self, scores, obj_truth, bias=0.0): + # put everything on CPU + scores = {k: v.cpu() for k, v in scores.items()} + obj_truth = obj_truth.cpu() + # gather scores for all relevant (a,o) pairs + scores = torch.stack( + [scores[(self.dset.attr2idx[attr], self.dset.obj2idx[obj])] for attr, obj in self.dset.pairs], + 1) # (B, #pairs) + orig_scores = scores.clone() + mask = self.seen_mask.repeat(scores.shape[0], 1) + mask = 1 - mask + if hasattr(mask, "bool"): + mask = mask.bool() + scores[mask] += bias + results = self.generate_predictions(scores, obj_truth) + results['biased_scores'] = scores + results['scores'] = orig_scores + return results + + def evaluate_predictions(self, predictions, attr_truth, obj_truth, topk=1): + + # put everything on cpu + attr_truth, obj_truth = attr_truth.cpu(), obj_truth.cpu() + pairs = list( + zip(list(attr_truth.cpu().numpy()), list(obj_truth.cpu().numpy()))) + seen_ind = torch.LongTensor([ + i for i in range(len(attr_truth)) if pairs[i] in self.train_pairs + ]) + unseen_ind = torch.LongTensor([ + i for i in range(len(attr_truth)) + if pairs[i] not in self.train_pairs + ]) + + # top 1 pair accuracy + # open world: attribute, object and pair + attr_match = (attr_truth.unsqueeze(1).repeat( + 1, topk) == predictions['open'][0][:, :topk]) + obj_match = (obj_truth.unsqueeze(1).repeat( + 1, topk) == predictions['open'][1][:, :topk]) + open_match = (attr_match * obj_match).any(1).float() + attr_match = attr_match.any(1).float() + obj_match = obj_match.any(1).float() + open_seen_match = open_match[seen_ind] + open_unseen_match = open_match[unseen_ind] + + # closed world, obj_oracle: pair + closed_match = (attr_truth == predictions['closed'][0][:, 0]).float( + ) * (obj_truth == predictions['closed'][1][:, 0]).float() + + obj_oracle_match = ( + attr_truth == predictions['object_oracle'][0][:, 0]).float() * ( + obj_truth == predictions['object_oracle'][1][:, 0]).float() + + return attr_match, obj_match, closed_match, open_match, obj_oracle_match, open_seen_match, open_unseen_match \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/utils.py b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ea680f47be437c91e9514127ffe4bbf1527e3e09 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/SYMNET_ID1292_for_ACL/utils/utils.py @@ -0,0 +1,193 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import tensorflow as tf
+import os.path as osp
+import numpy as np
+
+from . import config as cfg
+from . import aux_data
+from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig
+
+################################################################################
+#                               tools for solvers                              #
+################################################################################
+
+def create_session():
+    """create tensorflow session"""
+    ####################### add ########################
+    configs = tf.ConfigProto()
+    custom_op = configs.graph_options.rewrite_options.custom_optimizers.add()
+    custom_op.name = "NpuOptimizer"
+    custom_op.parameter_map["use_off_line"].b = True
+
+    custom_op.parameter_map["dynamic_input"].b = True
+    custom_op.parameter_map["dynamic_graph_execute_mode"].s = tf.compat.as_bytes("lazy_recompile")
+
+    # mixed precision: allow_mix_precision; the op white list can be adjusted via modify_mixlist
+    custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
+    # custom_op.parameter_map["modify_mixlist"].s = tf.compat.as_bytes("/home/test/ops_info.json")
+
+    custom_op.parameter_map["profiling_mode"].b = True
+    custom_op.parameter_map["profiling_options"].s = tf.compat.as_bytes(
+        '{"output":"/cache/profiling","task_trace":"on", "aicpu":"on"}')
+    # custom_op.parameter_map["auto_tune_mode"].s = tf.compat.as_bytes("RL,GA")
+
+    # turn off graph remapping
+    configs.graph_options.rewrite_options.remapping = RewriterConfig.OFF
+    # turn off memory optimization
+    configs.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF
+    ####################### add ########################
+    return tf.Session(config=configs)
+
+
+
+def display_args(args, logger, verbose=False):
+    """print some essential arguments"""
+    if verbose:
+        ignore = []
+        for k,v in args.__dict__.items():
+            if not callable(v) and not k.startswith('__') and k not in ignore:
+                logger.info("{:30s}{}".format(k,v))
+    else:
+        logger.info('Name: %s'%args.name)
+        logger.info('Network: %s'%args.network)
+        logger.info('Data: %s'%args.data)
+        logger.info('FC layers: At {fc_att}, Cm {fc_compress}, Cls {fc_cls}'.format(
+            **args.__dict__))
+
+
+
+def duplication_check(args):
+    if args.force:
+        return
+    elif args.trained_weight is None or args.trained_weight.split('/')[0] != args.name:
+        assert not osp.exists(osp.join(cfg.WEIGHT_ROOT_DIR, args.name)), \
+            "weight dir with same name exists (%s)"%(args.name)
+        assert not osp.exists(osp.join(cfg.LOG_ROOT_DIR, args.name)), \
+            "log dir with same name exists (%s)"%(args.name)
+
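+
+# Illustrative usage of the solver helpers above (comment sketch only):
+#   sess = create_session()     # NPU-configured tf.Session
+#   display_args(args, logger)  # log the essential arguments
+#   duplication_check(args)     # abort when weight/log dirs already exist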
+ +def formated_czsl_result(report): + fstr = '[{name}/{epoch}] rA:{real_attr_acc:.4f}|rO:{real_obj_acc:.4f}|Cl/T1:{top1_acc:.4f}|T2:{top2_acc:.4f}|T3:{top3_acc:.4f}' + + return fstr.format(**report) + + +################################################################################ +# glove embedder # +################################################################################ + +class Embedder: + """word embedder (for various vector type) + __init__(self) + """ + + def __init__(self, vec_type, vocab, data): + self.vec_type = vec_type + + if vec_type != 'onehot': + self.embeds = self.load_word_embeddings(vec_type, vocab, data) + self.emb_dim = self.embeds.shape[1] + else: + self.emb_dim = len(vocab) + + def get_embedding(self, i): + """actually implements __getitem__() function""" + if self.vec_type == 'onehot': + return tf.one_hot(i, depth=self.emb_dim, axis=1) + else: + i_onehot = tf.one_hot(i, depth=self.embeds.shape[0], axis=1) + return tf.matmul(i_onehot, self.embeds) + + + def load_word_embeddings(self, vec_type, vocab, data): + tmp = aux_data.load_wordvec_dict(data, vec_type) + if type(tmp) == tuple: + attr_dict, obj_dict = tmp + attr_dict.update(obj_dict) + embeds = attr_dict + else: + embeds = tmp + + embeds_list = [] + for k in vocab: + if k in embeds: + embeds_list.append(embeds[k]) + else: + raise NotImplementedError('some vocabs are not in dictionary: %s'%k) + + embeds = np.array(embeds_list, dtype=np.float32) + + print ('Embeddings shape = %s'%str(embeds.shape)) + return embeds + + + + + +################################################################################ +# network utils # +################################################################################ + + +def repeat_tensor(tensor, axis, multiple): + """e.g. (1,2,3)x3 = (1,1,1,2,2,2,3,3,3)""" + + result_shape = tensor.shape.as_list() + for i,v in enumerate(result_shape): + if v is None: + result_shape[i] = tf.shape(tensor)[i] + result_shape[axis] *= multiple + + tensor = tf.expand_dims(tensor, axis+1) + mul = [1]*len(tensor.shape) + mul[axis+1] = multiple + tensor = tf.tile(tensor, mul) + tensor = tf.reshape(tensor, result_shape) + + return tensor + + +def tile_tensor(tensor, axis, multiple): + """e.g. (1,2,3)x3 = (1,2,3,1,2,3,1,2,3)""" + mul = [1]*len(tensor.shape) + mul[axis] = multiple + + return tf.tile(tensor, mul) + + +def activation_func(name): + if name == "none": + return (lambda x:x) + elif name == "sigmoid": + return tf.sigmoid + elif name == "relu": + return tf.nn.relu + else: + raise NotImplementedError("activation function %s not implemented"%name) + diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/LICENSE b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..4246e35a2d0b6c4d6fa2939d57cb4a689f62e336 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/LICENSE @@ -0,0 +1,251 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+
+## Some of TensorFlow's code is derived from Caffe, which is subject to the following copyright notice:
+
+COPYRIGHT
+
+All contributions by the University of California:
+
+Copyright (c) 2014, The Regents of the University of California (Regents)
+All rights reserved.
+
+All other contributions:
+
+Copyright (c) 2014, the respective contributors
+All rights reserved.
+
+Caffe uses a shared copyright model: each contributor holds copyright over
+their contributions to Caffe. The project versioning records all such
+contribution and copyright details. If a contributor wants to further mark
+their specific copyright on a particular contribution, they should indicate
+their copyright solely in the commit message of the change when it is
+committed.
+
+LICENSE
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+CONTRIBUTION AGREEMENT
+
+By contributing to the BVLC/caffe repository through pull-request, comment,
+or otherwise, the contributor releases their content to the
+license and copyright terms herein.
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/README.md b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..838d9efeb1e0fb93518d0426be4ecd6aad7c4f9a
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/README.md
@@ -0,0 +1,67 @@
+## UGATIT offline inference steps
+
+### Step 0: Download the required files and install dependencies
+#### Checkpoint files:
+> obs://cann-id0722/ACL_Tensorflow/checkpoint/
+
+#### Bin files:
+> obs://cann-id0722/ACL_Tensorflow/data/testA
+> obs://cann-id0722/ACL_Tensorflow/data/testB
+
+Install the post-processing dependencies listed in the requirements file.
+
+**Note**: the bin files are obtained by preprocessing the image files; they can also be generated with the make_bin.py script.
+
+### Step 1: Freeze the checkpoint into pb files
+Set the input_checkpoint variable in convert_pb.py to the checkpoint path, then
+run convert_pb.py to freeze the graph and generate the pb files:
+UGATIT_AtoB.pb and UGATIT_BtoA.pb. A quick way to verify the freeze is sketched below.
+
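+Before moving on to Step 2, it can help to list the node names of the frozen
+graphs and confirm that the inputs (test_domain_A / test_domain_B) and outputs
+(generator_A/Tanh / generator_B/Tanh) are present. This is a minimal sketch
+(not part of the original scripts), assuming TensorFlow 1.x:
+
+```python
+import tensorflow as tf
+
+def list_graph_nodes(pb_path):
+    """Print every op so atc's --input_shape and output node names can be checked."""
+    graph_def = tf.GraphDef()
+    with tf.io.gfile.GFile(pb_path, "rb") as f:
+        graph_def.ParseFromString(f.read())
+    for node in graph_def.node:
+        print(node.op, node.name)
+
+list_graph_nodes('./UGATIT_AtoB.pb')
+list_graph_nodes('./UGATIT_BtoA.pb')
+```
+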
+### Step 2: Convert the pb files to om files
+This step requires the atc tool.
+
+Command for converting UGATIT_AtoB.pb:
+```
+atc --model=./model/pb/UGATIT_AtoB.pb --framework=3 \
+    --output=./model/om/UGATIT_AtoB --soc_version=Ascend310 \
+    --input_shape="test_domain_A:1,256,256,3" --precision_mode="force_fp32"
+```
+
+Command for converting UGATIT_BtoA.pb:
+```
+atc --model=./model/pb/UGATIT_BtoA.pb --framework=3 \
+    --output=./model/om/UGATIT_BtoA --soc_version=Ascend310 \
+    --input_shape="test_domain_B:1,256,256,3" --precision_mode="force_fp32"
+```
+
+
+### Step 3: Run inference
+This step requires the msame tool.
+Inference command for UGATIT_AtoB.om:
+```
+./msame --model ./model/om/UGATIT_AtoB.om \
+        --input ./data/testA/female_13138.bin \
+        --output ./output/AtoB --outfmt BIN
+```
+
+Inference command for UGATIT_BtoA.om:
+```
+./msame --model ./model/om/UGATIT_BtoA.om \
+        --input ./data/testB/0000.bin \
+        --output ./output/BtoA --outfmt BIN
+```
+
+### Step 4: Post-processing
+This step uses the UGATIT_output_0.bin produced by the inference in Step 3.
+In process.py, point the path inside the fake variable at the corresponding output file (update the timestamp part of the path).
+
+Run process.py to obtain the inference result images.
+
+## UGATIT offline inference accuracy and time
+Inference runs on Ascend 310 with a bin file containing one image sample.
+### Inference time:
+2126.14 ms
+### Inference results
+Identical to the GPU inference results; the accuracy requirement is met.
diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/checkpoint/.keep b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/checkpoint/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/convert_pb.py b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/convert_pb.py
new file mode 100644
index 0000000000000000000000000000000000000000..f788e38b78e4db12a13f4f9f76272a49c54786d8
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/convert_pb.py
@@ -0,0 +1,38 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import tensorflow as tf
+
+
+def freeze_graph(output_node_names, output_graph):
+    input_checkpoint = './UGATIT'  # path to the checkpoint files
+    saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=True)
+
+    graph = tf.get_default_graph()
+    input_graph_def = graph.as_graph_def()
+
+    with tf.Session() as sess:
+        saver.restore(sess, input_checkpoint)
+        output_graph_def = tf.graph_util.convert_variables_to_constants(
+            sess=sess,
+            input_graph_def=input_graph_def,
+            output_node_names=output_node_names)
+        with tf.io.gfile.GFile(output_graph, "wb") as f:
+            f.write(output_graph_def.SerializeToString())
+        print("%d ops in the final graph." 
% len(output_graph_def.node)) + + +if __name__ == '__main__': + freeze_graph(['generator_A/Tanh'], './UGATIT_BtoA.pb') + freeze_graph(['generator_B/Tanh'], './UGATIT_AtoB.pb') \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/data/.keep b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/data/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/data/testA/.keep b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/data/testA/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/data/testA/female_13138.jpg b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/data/testA/female_13138.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8ea0024d88635f922ca4d017cf80ba255dec62bc Binary files /dev/null and b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/data/testA/female_13138.jpg differ diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/data/testB/.keep b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/data/testB/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/data/testB/0000.jpg b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/data/testB/0000.jpg new file mode 100644 index 0000000000000000000000000000000000000000..904107ab9a1b80a930b0bd4c2877d195d85a685e Binary files /dev/null and b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/data/testB/0000.jpg differ diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/make_bin.py b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/make_bin.py new file mode 100644 index 0000000000000000000000000000000000000000..0e2c61f065703dc1949ded1f592a46436d3dd92a --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/make_bin.py @@ -0,0 +1,22 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
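+# Preprocessing sketch: read a test image, scale its pixels to [-1, 1]
+# (matching the model's input range), and dump the float32 array as a
+# .bin file for msame.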
+import os +import numpy as np +import imageio + +test_image_path = './data/testA/female_13138.jpg' +test_image = np.float32(imageio.imread(test_image_path)) +test_image = test_image/127.5 - 1 +test_image.tofile('./data/testA/female_13138.bin') + diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/modelzoo_level.txt b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/modelzoo_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..32e82ed2c20f35eaa19986ae17a990a26069a318 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/modelzoo_level.txt @@ -0,0 +1,5 @@ +FuncStatus:OK +PrecisionStatus:OK +PerfStatus:OK +ModelConvert:OK +QuantStatus:OK diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/process.py b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/process.py new file mode 100644 index 0000000000000000000000000000000000000000..cc2953e6a11c5faf92f06c3e5acc2d1ffe379d07 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/process.py @@ -0,0 +1,20 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import numpy as np +import imageio + + +fake = np.fromfile('./output/BtoA/20220509_212345/UGATIT_BtoA_output_0.bin',np.float32).reshape(256,256,3) +fake = ((fake+1)/2)*255 +imageio.imsave('./result/BtoA/0000.png',fake) diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/requirements.txt b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..5ab90767dd47c3886bd35d9dcb716682e122c3aa --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/requirements.txt @@ -0,0 +1,2 @@ +numpy==1.21.5 +imageio==2.9.0 diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/result/.keep b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/result/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/result/AtoB/.keep b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/result/AtoB/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/result/AtoB/female_13138.png b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/result/AtoB/female_13138.png new file mode 100644 index 0000000000000000000000000000000000000000..19a1e4cf1f4b667f78bd900989e683a146956ebb Binary files /dev/null and b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/result/AtoB/female_13138.png differ diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/result/BtoA/.keep b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/result/BtoA/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/result/BtoA/0000.jpg b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/result/BtoA/0000.jpg new file 
mode 100644
index 0000000000000000000000000000000000000000..01846f692f80c7e0a54ea162adcfad3c086e5876
Binary files /dev/null and b/ACL_TensorFlow/contrib/cv/UGATIT_ID0722_for_ACL/result/BtoA/0000.jpg differ
diff --git a/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/README.md b/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..0fb9401a0c283fb62ff199438704def06413a858
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/README.md
@@ -0,0 +1,86 @@
+# Model description
+White-box-cartoonize is a model that cartoonizes images. It identifies three white-box representations from an image: a surface representation that contains the smooth surfaces of cartoon images, a structure representation that refers to the sparse color blocks and flattened global content of the celluloid-style workflow, and a texture representation that reflects the high-frequency textures, contours and details of cartoon images. A generative adversarial network (GAN) framework is used to learn the extracted representations and cartoonize the image.
+
+- Reference paper:
+
+  [Learning to Cartoonize Using White-box Cartoon Representations](https://github.com/SystemErrorWang/White-box-Cartoonization/tree/master/paper)
+
+  For more detailed results, see the [project page](https://github.com/SystemErrorWang/White-box-Cartoonization)
+
+# Freezing the pb model
+Run this on the Ascend310 inference server or on an NPU server. Because the training code is needed, place the pb conversion script under code/train_code of the original code and pass in the ckpt_path when running:
+```bash
+python3 ckpt2pb.py --ckpt_path=output/train_cartoon/saved_models
+```
+# om model conversion
+Perform the om model conversion on the Ascend310 inference server.
+```bash
+. /usr/local/Ascend/ascend-toolkit/set_env.sh # source the environment variables
+export ASCEND_SLOG_PRINT_TO_STDOUT=1
+
+atc --model=/home/HwHiAiUser/AscendProjects/1/wbcnet.pb --framework=3 --output=/home/HwHiAiUser/AscendProjects/wbc/wbcnet --soc_version=Ascend310 --input_shape="input:1,256,256,3" --log=info --out_nodes="add_1:0"
+```
+Download the [pb model](https://canntf.obs.myhuaweicloud.com:443/vsl_zwt/vsl/pb_om/modelnet10.pb?AccessKeyId=NLVKVVAQHOUIA7ROJBEZ&Expires=1670766198&Signature=H4GOMDBr7ak8HGXRT4S03K/rJDc%3D) from here
+
+Download the [om model](https://canntf.obs.myhuaweicloud.com:443/vsl_zwt/vsl/pb_om/modelnet10.om?AccessKeyId=NLVKVVAQHOUIA7ROJBEZ&Expires=1670766284&Signature=OuArsad0gLTjPmXi%2BPM4BbJUMYI%3D) from here
+
+
+# Inference with the msame tool
+
+See https://gitee.com/ascend/tools/tree/master/msame for the msame inference tool and how to use it.
+
+After obtaining the msame executable, run the inference test.
+
+
+## 1. Converting the dataset to bin
+The original dataset contains four categories of images, which must be converted to the bin data type for inference. Only scenery_photo is used when validating accuracy.
+
+After downloading the original dataset, save it under the dataset directory and run `preprocess.py` to convert the jpg data into the [bin data](obs://cann--id2089/dataset/scenery_photo/) required for inference.
+
+
+## 2. Inference
+
+Use the msame inference tool to start the inference test with the following command:
+
+```bash
+./msame --model "gannet.om" --input "input_bin" --output "./output_final" --outfmt TXT
+```
+
+## 3. Inference results
+
+```
+[INFO] acl init success
+[INFO] open device 0 success
+[INFO] create context success
+[INFO] create stream success
+[INFO] get run mode success
+[INFO] load model /home/HwHiAiUser/AscendProjects/1/wbcnet.om success
+[INFO] create model description success
+[INFO] get input dynamic gear count success
+[INFO] create model output success
+/home/HwHiAiUser/AscendProjects/VSL/out/modelnet10//20211215_215629
+[INFO] start to process file:/home/HwHiAiUser/AscendProjects/1/input_bin/2013-11-08 16_45_24.jpg.bin
+[INFO] model execute success
+Inference time: 11.002ms
+[INFO] get max dynamic batch size success
+[INFO] output data success
+
+
+Inference average time: 12.002000 ms
+[INFO] destroy model input success
+[INFO] unload model success, model Id is 1
+[INFO] Execute sample success
+[INFO] end to destroy stream
+[INFO] end to destroy context
+[INFO] end to reset device is 0
+[INFO] end to finalize acl
+```
+
+Download the txt files generated by inference and store them under the `dataset` directory for the later accuracy test.
+
+## 4. Post-processing
+The raw outputs of the om inference still have to be converted into images. Run process.py; the generated images are saved by default under the cartoonized folder next to the script. To change this, edit the path variable inside the script.
+```
+python3 process.py
+```
+To compare offline inference against the online training results, read the images generated by both and compare the difference. The fid metric can also be used to measure accuracy.
\ No 
newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/ckpt2pb.py b/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/ckpt2pb.py new file mode 100644 index 0000000000000000000000000000000000000000..dde369598537a58ee5c22e7aa64c647b7800a91b --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/ckpt2pb.py @@ -0,0 +1,73 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +import tensorflow as tf +from tensorflow.python.tools import freeze_graph +from tensorflow.python.framework import graph_util +import os, sys +import argparse +from npu_bridge.estimator import npu_ops + +base_path=os.path.split(os.path.realpath(__file__))[0] +sys.path.append(base_path + "/../") + +import network +import guided_filter + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--ckpt_path', default="", help="""set checkpoint path""") + parser.add_argument("--patch_size", default=256, type=int) + args, unknown_args = parser.parse_known_args() + if len(unknown_args) > 0: + for bad_arg in unknown_args: + print("ERROR: Unknown command line arg: %s" % bad_arg) + raise ValueError("Invalid command line arg(s)") + return args + +def main(): + args = parse_args() + + tf.reset_default_graph() + + # set inputs node + inputs = tf.placeholder(tf.float32, shape=[None, args.patch_size, args.patch_size, 3], name="input") + + output = network.unet_generator(inputs) + final_out = guided_filter.guided_filter(inputs, output, r=1, eps=5e-3) + all_vars = tf.trainable_variables() + gene_vars = [var for var in all_vars if 'generator' in var.name] + + graph = tf.get_default_graph() + input_graph_def = graph.as_graph_def() + + output_graph="wbcnet.pb" + + with tf.Session() as sess: + sess.run(tf.global_variables_initializer()) + + saver = tf.train.Saver(var_list=gene_vars) + saver.restore(sess, tf.train.latest_checkpoint(args.ckpt_path)) + output_graph_def = graph_util.convert_variables_to_constants( + sess=sess, + input_graph_def=input_graph_def, + output_node_names=["add_1"]) + + with tf.gfile.GFile(output_graph, "wb") as f: + f.write(output_graph_def.SerializeToString()) + + print("done") + +if __name__ == '__main__': + main() diff --git a/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/modelzoo_level.txt b/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/modelzoo_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..17b0057658a2119280a4f2d87c59f24f83618c15 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/modelzoo_level.txt @@ -0,0 +1,5 @@ +ModelConvert:OK +QuantStatus:POK +FuncStatus:OK +PerfStatus:OK +PrecisionStatus:OK \ No newline at end of file diff --git a/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/preprocess.py 
b/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/preprocess.py new file mode 100644 index 0000000000000000000000000000000000000000..60fe57f83ad0e109055abd5b124088646617c66e --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/preprocess.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python +# coding=utf-8 + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +import os +import cv2 +import argparse +import numpy as np + +def arg_parser(): + parser = argparse.ArgumentParser() + parser.add_argument("--data_path", default='./dataset/scenery_photo', type=str) + parser.add_argument("--save_folder", default='./input_bin', type=str) + + args = parser.parse_args() + + return args + + +def resize_crop(image): + h, w, c = np.shape(image) + if min(h, w) > 720: + if h > w: + h, w = int(720 * h / w), 720 + else: + h, w = 720, int(720 * w / h) + image = cv2.resize(image, (w, h), + interpolation=cv2.INTER_AREA) + h, w = (h // 8) * 8, (w // 8) * 8 + image = image[:h, :w, :] + return image + + +def cartoonize(load_folder, save_folder): + + name_list = os.listdir(load_folder) + for name in name_list: + load_path = os.path.join(load_folder, name) + save_path = os.path.join(save_folder, name+ ".bin") + image = cv2.imread(load_path) + image = resize_crop(image) + batch_image = image.astype(np.float32) / 127.5 - 1 + batch_image = np.expand_dims(batch_image, axis=0).reshape(-1) + batch_image.tofile(save_path) + file = np.fromfile(save_path, dtype=np.float32) + +if __name__ == '__main__': + args = arg_parser() + if not os.path.exists(args.save_folder): + os.mkdir(args.save_folder) + cartoonize(args.data_path, args.save_folder) diff --git a/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/process.py b/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/process.py new file mode 100644 index 0000000000000000000000000000000000000000..9c32e7a554cd0c294f6b0b82295ecade4ecbbe60 --- /dev/null +++ b/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/process.py @@ -0,0 +1,25 @@ +import numpy as np +import os +import cv2 +from tqdm import tqdm + + +name_list = os.listdir("./output_final") +path = "./cartoonized" + +if not 
+    os.makedirs(path)
+
+# Convert the msame TXT outputs back into cartoonized images.
+for item in tqdm(name_list):
+    name = os.path.join("./output_final", item)
+    with open(name) as f:
+        file = np.array(f.read().split(), dtype=np.float32)
+
+    file = file.reshape(1, 256, 256, 3)
+    save_path = os.path.join(path, item.replace("_output_0.txt", ''))
+
+    output = (np.squeeze(file) + 1) * 127.5
+    output = np.clip(output, 0, 255).astype(np.uint8)
+    cv2.imwrite(save_path, output)
+
diff --git a/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/requirements.txt b/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..720dd2a7a442f8b69ee311d030fbdae334fdf657
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/White-Box-Cartoonization_ID2089_for_ACL/requirements.txt
@@ -0,0 +1,7 @@
+tensorflow-gpu==1.12.0  # or 1.13
+pytorch-fid
+numpy==1.19.2
+opencv-python
+tqdm
+joblib
+scikit-image==0.14.5
diff --git a/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/.keep b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/README.md b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..e00e198bb23fda3eea7b1dd48f8f8d5001a69368
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/README.md
@@ -0,0 +1,97 @@
+## Model Function
+
+Object detection (YOLOv5)
+
+## Original Model
+
+Reference:
+
+https://github.com/ultralytics/yolov5
+
+Original implementation:
+
+https://gitee.com/dw8023/ModelZoo-TensorFlow/tree/master/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow
+
+PB file download:
+
+Link: https://pan.baidu.com/s/1lgZmbp8SlZGSkLluzyM5mg
+Extraction code: ofwm
+
+## OM Model
+
+OM model download:
+
+Link: https://pan.baidu.com/s/1SXq5KX8qZEEQi_JTDji-XQ
+Extraction code: 214y
+
+The following command can be used as a reference when converting the model with the ATC model conversion tool:
+
+```
+atc --model=/root/yolov5/model/yolov5.pb --framework=3 --output=/root/yolov5/yolov5 --soc_version=Ascend310 --input_shape="input:1,640,640,3"
+```
+
+## Dataset Preparation
+
+See img2bin.py for converting the images of the original VOC validation set to bin files.
+
+Bin-format dataset address (bin.zip):
+
+obs://yolov5-id0378/dataset/
+
+
+
+## Inference with the msame Tool
+
+See https://gitee.com/ascend/tools/tree/master/msame to obtain the msame inference tool and its usage instructions.
+
+After obtaining the msame executable, run the performance test.
+
+
+
+## Performance Test
+
+Use the msame inference tool with a command like the following to start an inference performance test:
+
+```
+msame --model /root/yolov5/yolov5.om --input /root/yolov5/bin --output /root/yolov5/output/ --outfmt TXT
+```
+
+```
+...
+[INFO] get max dynamic batch size success
+[INFO] output data success
+[INFO] destroy model input success
+Inference average time : 89.70 ms
+Inference average time without first time: 89.70 ms
+[INFO] unload model success, model Id is 1
+[INFO] Execute sample success
+[INFO] end to destroy stream
+[INFO] end to destroy context
+[INFO] end to reset device is 0
+[INFO] end to finalize acl
+...
+```
+
+The average inference time is 89.70 ms.
+
+## Accuracy Test
+
+Run the accuracy comparison script:
+
+```
+python3 compare.py
+```
+
+Final accuracy (target not yet met):
+
+```
+Ascend310 inference result:
+GPU result:
+NPU result:
+```
+
+
+
+
+
diff --git a/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/eval.py b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/eval.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d660469168e09375f279c330a208689d2d61ba9
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/eval.py
@@ -0,0 +1,333 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import tensorflow as tf
+import numpy as np
+from tensorflow.keras import backend as K
+from tqdm import tqdm
+from utils.utils import (cvtColor, get_anchors, get_classes, preprocess_input,
+                         resize_image)
+import xml.etree.ElementTree as ET
+from utils.utils_bbox import DecodeBox
+from utils.utils_map import get_map
+from PIL import Image
+
+MINOVERLAP = 0.5
+classes_path = 'model_data/voc_classes.txt'
+output_path = 'out/2022410_0_32_31_672278'
+VOCdevkit_path = 'VOCdevkit'
+map_out_path = 'map_out_om'
+anchors_path = 'model_data/yolo_anchors.txt'
+anchors, num_anchors = get_anchors(anchors_path)
+class_names, num_classes = get_classes(classes_path)
+
+
+def yolo_correct_boxes(box_xy, box_wh, input_shape, image_shape, letterbox_image):
+    #-----------------------------------------------------------------#
+    #   The y axis is put first so that the box coordinates can be
+    #   conveniently multiplied by the image height and width.
+    #-----------------------------------------------------------------#
+    box_yx = box_xy[..., ::-1]
+    box_hw = box_wh[..., ::-1]
+    input_shape = K.cast(input_shape, K.dtype(box_yx))
+    image_shape = K.cast(image_shape, K.dtype(box_yx))
+
+    if letterbox_image:
+        #-----------------------------------------------------------------#
+        #   offset is the offset of the valid image area from the top-left
+        #   corner of the padded image; new_shape is the scaled height and
+        #   width of the valid area.
+        #-----------------------------------------------------------------#
+        new_shape = K.round(image_shape * K.min(input_shape/image_shape))
+        offset = (input_shape - new_shape)/2./input_shape
+        scale = input_shape/new_shape
+
+        box_yx = (box_yx - offset) * scale
+        box_hw *= scale
+
+    box_mins = box_yx - (box_hw / 2.)
+    box_maxes = box_yx + (box_hw / 2.)
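+    # Worked example of the letterbox correction above (illustrative numbers,
+    # not from the dataset): with input_shape = (640, 640) and
+    # image_shape = (375, 500) as (h, w), min(640/375, 640/500) = 1.28, so
+    # new_shape = (480, 640); offset = ((640 - 480) / 2) / 640 = 0.125 on y and
+    # 0 on x, and scale = (640/480, 1). A normalized center of y = 0.5 maps
+    # back to (0.5 - 0.125) * 640/480 = 0.5, the middle of the original image.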
+    boxes = K.concatenate([box_mins[..., 0:1], box_mins[..., 1:2], box_maxes[..., 0:1], box_maxes[..., 1:2]])
+    boxes *= K.concatenate([image_shape, image_shape])
+    return boxes
+
+
+def get_anchors_and_decode(feats, anchors, num_classes, input_shape, calc_loss=False):
+    num_anchors = len(anchors)
+    # ------------------------------------------#
+    #   grid_shape is the height and width of the feature map
+    # ------------------------------------------#
+    grid_shape = K.shape(feats)[1:3]
+    # --------------------------------------------------------------------#
+    #   Get the coordinates of each feature-map point.
+    #   The generated shape is (20, 20, num_anchors, 2).
+    # --------------------------------------------------------------------#
+    grid_x = K.tile(K.reshape(K.arange(0, stop=grid_shape[1]), [1, -1, 1, 1]), [grid_shape[0], 1, num_anchors, 1])
+    grid_y = K.tile(K.reshape(K.arange(0, stop=grid_shape[0]), [-1, 1, 1, 1]), [1, grid_shape[1], num_anchors, 1])
+    grid = K.cast(K.concatenate([grid_x, grid_y]), K.dtype(feats))
+    # ---------------------------------------------------------------#
+    #   Tile the anchors; the resulting shape is (20, 20, num_anchors, 2).
+    # ---------------------------------------------------------------#
+    anchors_tensor = K.reshape(K.constant(anchors), [1, 1, num_anchors, 2])
+    anchors_tensor = K.tile(anchors_tensor, [grid_shape[0], grid_shape[1], 1, 1])
+
+    # ---------------------------------------------------#
+    #   Reshape the prediction to (batch_size, 20, 20, 3, 85).
+    #   85 splits into 4 + 1 + 80:
+    #   4 are the center/size adjustment parameters,
+    #   1 is the box confidence,
+    #   80 are the class confidences.
+    # ---------------------------------------------------#
+    feats = K.reshape(feats, [-1, grid_shape[0], grid_shape[1], num_anchors, num_classes + 5])
+    # ------------------------------------------#
+    #   Decode the anchors and normalize the result.
+    # ------------------------------------------#
+    box_xy = (K.sigmoid(feats[..., :2]) * 2 - 0.5 + grid) / K.cast(grid_shape[::-1], K.dtype(feats))
+    box_wh = (K.sigmoid(feats[..., 2:4]) * 2) ** 2 * anchors_tensor / K.cast(input_shape[::-1], K.dtype(feats))
+    # ------------------------------------------#
+    #   Get the confidence of the predicted boxes.
+    # ------------------------------------------#
+    box_confidence = K.sigmoid(feats[..., 4:5])
+    box_class_probs = K.sigmoid(feats[..., 5:])
+
+    # ---------------------------------------------------------------------#
+    #   When computing the loss, return grid, feats, box_xy, box_wh.
+    #   When predicting, return box_xy, box_wh, box_confidence, box_class_probs.
+    # ---------------------------------------------------------------------#
+    if calc_loss == True:
+        return grid, feats, box_xy, box_wh
+    return box_xy, box_wh, box_confidence, box_class_probs
+
+
+def Decodebox(outputs,
+              anchors,
+              num_classes,
+              image_shape,
+              input_shape,
+              #-----------------------------------------------------------#
+              #   The 13x13 feature map uses anchors [116,90],[156,198],[373,326]
+              #   The 26x26 feature map uses anchors [30,61],[62,45],[59,119]
+              #   The 52x52 feature map uses anchors [10,13],[16,30],[33,23]
+              #-----------------------------------------------------------#
+              anchor_mask = [[6, 7, 8], [3, 4, 5], [0, 1, 2]],
+              max_boxes = 100,
+              confidence = 0.5,
+              nms_iou = 0.3,
+              letterbox_image = True):
+
+    box_xy = []
+    box_wh = []
+    box_confidence = []
+    box_class_probs = []
+    for i in range(len(outputs)):
+        sub_box_xy, sub_box_wh, sub_box_confidence, sub_box_class_probs = \
+            get_anchors_and_decode(outputs[i], anchors[anchor_mask[i]], num_classes, input_shape)
+        box_xy.append(K.reshape(sub_box_xy, [-1, 2]))
+        box_wh.append(K.reshape(sub_box_wh, [-1, 2]))
+        box_confidence.append(K.reshape(sub_box_confidence, [-1, 1]))
+        box_class_probs.append(K.reshape(sub_box_class_probs, [-1, num_classes]))
+    box_xy = K.concatenate(box_xy, axis = 0)
+    box_wh = K.concatenate(box_wh, axis = 0)
+    box_confidence = K.concatenate(box_confidence, axis = 0)
+    box_class_probs = K.concatenate(box_class_probs, axis = 0)
+
+    #------------------------------------------------------------------------------------------------------------#
+    #   Before the image is fed to the network, letterbox_image pads it with
+    #   gray bars, so box_xy and box_wh are relative to the padded image.
+    #   They are corrected here to remove the padding and converted to
+    #   y_min, x_min, y_max, x_max. If letterbox_image is not used, the
+    #   normalized box_xy and box_wh still have to be rescaled to the
+    #   original image size.
+    #------------------------------------------------------------------------------------------------------------#
+    boxes = yolo_correct_boxes(box_xy, box_wh, input_shape, image_shape, letterbox_image)
+    box_scores = box_confidence * box_class_probs
+
+    #-----------------------------------------------------------#
+    #   Keep only boxes whose score is at least the confidence threshold.
+    #-----------------------------------------------------------#
+    mask = box_scores >= confidence
+    max_boxes_tensor = K.constant(max_boxes, dtype='int32')
+    boxes_out = []
+    scores_out = []
+    classes_out = []
+
+    for c in range(num_classes):
+        #-----------------------------------------------------------#
+        #   Take all boxes (and their scores) with box_scores >= confidence.
+        #-----------------------------------------------------------#
+        class_boxes = tf.boolean_mask(boxes, mask[:, c])
+        class_box_scores = tf.boolean_mask(box_scores[:, c], mask[:, c])
+
+        #-----------------------------------------------------------#
+        #   Non-maximum suppression: keep only the highest-scoring box
+        #   within each overlapping neighborhood.
+        #-----------------------------------------------------------#
+        nms_index = tf.image.non_max_suppression(class_boxes, class_box_scores, max_boxes_tensor, iou_threshold=nms_iou)
+
+        #-----------------------------------------------------------#
+        #   Gather the NMS results; the three tensors are the box
+        #   positions, the scores and the classes.
+        #-----------------------------------------------------------#
+        class_boxes = K.gather(class_boxes, nms_index)
+        class_box_scores = K.gather(class_box_scores, nms_index)
+        classes = K.ones_like(class_box_scores, 'int32') * c
+
+        boxes_out.append(class_boxes)
+        scores_out.append(class_box_scores)
+        classes_out.append(classes)
+    boxes_out = K.concatenate(boxes_out, axis=0)
+    scores_out = K.concatenate(scores_out, axis=0)
+    classes_out = K.concatenate(classes_out, axis=0)
+
+    return boxes_out, scores_out, classes_out
+
+
+# def generate(model_path, anchors_mask, num_classes, phi, output):
+#     model_path = os.path.expanduser(model_path)
+#     assert model_path.endswith('.h5'), 'Keras model or weights must be a .h5 file.'
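+#     # NOTE: this commented-out generate() helper is kept for reference only;
+#     # Decodebox above already performs the same decoding, NMS and score
+#     # thresholding directly on the three msame output feature maps.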
+#     #
+#     # yolo_model = yolo_body([None, None, 3], anchors_mask, num_classes, phi)
+#     # yolo_model.load_weights(model_path)
+#     # print('{} model, anchors, and classes loaded.'.format(model_path))
+#     #
+#     # # anchors, num_anchors = get_anchors(anchors_path)
+#     # # class_names, num_classes = get_classes(classes_path)
+#     # # ---------------------------------------------------------#
+#     # #   In the yolo_eval function we post-process the predictions;
+#     # #   this includes decoding, non-maximum suppression and
+#     # #   score thresholding.
+#     # # ---------------------------------------------------------#
+#     boxes, scores, classes = Decodebox(
+#         outputs=output,
+#         anchors=anchors,
+#         num_classes=num_classes,
+#         image_shape=K.placeholder(shape=(2, )),
+#         input_shape=[640, 640],
+#         anchor_mask=anchors_mask,
+#         max_boxes=100,
+#         confidence=0.5,
+#         nms_iou=0.3,
+#         letterbox_image=True
+#     )
+#     return boxes, scores, classes
+
+def main():
+    if not os.path.exists(map_out_path):
+        os.makedirs(map_out_path)
+    if not os.path.exists(os.path.join(map_out_path, 'ground-truth')):
+        os.makedirs(os.path.join(map_out_path, 'ground-truth'))
+    if not os.path.exists(os.path.join(map_out_path, 'detection-results')):
+        os.makedirs(os.path.join(map_out_path, 'detection-results'))
+    if not os.path.exists(os.path.join(map_out_path, 'images-optional')):
+        os.makedirs(os.path.join(map_out_path, 'images-optional'))
+
+
+
+    image_ids = open(os.path.join(VOCdevkit_path, "VOC2007/ImageSets/Main/test.txt")).read().strip().split()
+    for image_id in tqdm(image_ids):
+        image_path = os.path.join(VOCdevkit_path, "VOC2007/JPEGImages/" + image_id + ".jpg")
+        image = Image.open(image_path)
+        image = cvtColor(image)
+        f = open(os.path.join(map_out_path, "detection-results/" + image_id + ".txt"), "w", encoding='utf-8')
+        # Reload the three msame output feature maps (80x80, 40x40 and 20x20)
+        # for this image and decode them into boxes.
+        feats = []
+        feats_path0 = os.path.join(output_path, "image_" + image_id + "_output_0.txt")
+        feats_path1 = os.path.join(output_path, "image_" + image_id + "_output_1.txt")
+        feats_path2 = os.path.join(output_path, "image_" + image_id + "_output_2.txt")
+        feats0 = np.loadtxt(feats_path0)
+        feats0 = np.reshape(feats0, newshape=(1, 80, 80, 75))
+        feats0 = feats0.astype("float32")
+        feats0 = tf.convert_to_tensor(feats0)
+        feats.append(feats0)
+
+        feats1 = np.loadtxt(feats_path1)
+        feats1 = np.reshape(feats1, newshape=(1, 40, 40, 75))
+        feats1 = feats1.astype("float32")
+        feats1 = tf.convert_to_tensor(feats1)
+        feats.append(feats1)
+
+        feats2 = np.loadtxt(feats_path2)
+        feats2 = np.reshape(feats2, newshape=(1, 20, 20, 75))
+        feats2 = feats2.astype("float32")
+        feats2 = tf.convert_to_tensor(feats2)
+        feats.append(feats2)
+
+        out_boxes, out_scores, out_classes = Decodebox(outputs=feats,
+                                                       anchors=anchors,
+                                                       num_classes=num_classes,
+                                                       image_shape=[image.size[1], image.size[0]],
+                                                       input_shape=[640, 640],
+                                                       # anchor_mask=anchors_mask,
+                                                       # max_boxes=100,
+                                                       # confidence=0.5,
+                                                       # nms_iou=0.3,
+                                                       # letterbox_image=True
+                                                       )
+        out_boxes = K.eval(out_boxes)
+        out_scores = K.eval(out_scores)
+        out_classes = K.eval(out_classes)
+
+        # with tf.Session() as sess:
+        #     out_boxes = out_boxes.eval(session=sess, feed_dict={out_boxes: zero_array1})
+        #     out_scores = out_scores.eval(session=sess, feed_dict={out_scores: zero_array2})
+        #     out_classes = out_classes.eval(session=sess, feed_dict={out_classes: zero_array3})
+
+        for i, c in enumerate(out_classes):
+            predicted_class = class_names[int(c)]
+            score = str(out_scores[i])
+            top, left, bottom, right = out_boxes[i]
+            if predicted_class not in class_names:
+                continue
+            f.write("%s %s %s %s %s %s\n" % (
+                predicted_class, score[:6], str(int(left)), str(int(top)), str(int(right)), str(int(bottom))))
+        f.close()
+    print("Get detection results done.")
+
+    for image_id in tqdm(image_ids):
+        with open(os.path.join(map_out_path, "ground-truth/" + image_id + ".txt"), "w") as new_f:
+            root = ET.parse(os.path.join(VOCdevkit_path, "VOC2007/Annotations/" + image_id + ".xml")).getroot()
+            for obj in root.findall('object'):
+                difficult_flag = False
+                if obj.find('difficult') is not None:
+                    difficult = obj.find('difficult').text
+                    if int(difficult) == 1:
+                        difficult_flag = True
+                obj_name = obj.find('name').text
+                if obj_name not in class_names:
+                    continue
+                bndbox = obj.find('bndbox')
+                left = bndbox.find('xmin').text
+                top = bndbox.find('ymin').text
+                right = bndbox.find('xmax').text
+                bottom = bndbox.find('ymax').text
+
+                if difficult_flag:
+                    new_f.write("%s %s %s %s %s difficult\n" % (obj_name, left, top, right, bottom))
+                else:
+                    new_f.write("%s %s %s %s %s\n" % (obj_name, left, top, right, bottom))
+    print("Get ground truth result done.")
+
+    print("Get map.")
+    get_map(MINOVERLAP, True, path=map_out_path)
+    print("Get map done.")
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/img2bin.py b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/img2bin.py
new file mode 100644
index 0000000000000000000000000000000000000000..df12663c2f4ae8909b189b98ee61a33a7ac1cd49
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/img2bin.py
@@ -0,0 +1,44 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
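+# This script converts the VOC2007 test images into flat float32 .bin inputs
+# for msame: each image is letterboxed to 640x640 and scaled to [0, 1].
+# The files are written as bin/image_<id>.bin, on the assumption that msame
+# names its outputs after the input files (image_<id>_output_N.txt, which is
+# what eval.py reads back).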
+import numpy as np
+import os
+from tqdm import tqdm
+from PIL import Image
+from utils.utils import (cvtColor, get_anchors, get_classes, preprocess_input, resize_image)
+from utils.utils_bbox import DecodeBox
+
+VOCdevkit_path = 'VOCdevkit'
+image_ids = open(os.path.join(VOCdevkit_path, "VOC2007/ImageSets/Main/test.txt")).read().strip().split()
+
+if not os.path.exists('bin'):
+    os.makedirs('bin')
+
+for image_id in tqdm(image_ids):
+    image_path = os.path.join(VOCdevkit_path, "VOC2007/JPEGImages/" + image_id + ".jpg")
+    image = Image.open(image_path)
+    image = cvtColor(image)
+    image_data = resize_image(image, (640, 640), True)
+    image_data = np.expand_dims(preprocess_input(np.array(image_data, dtype='float32')), 0)
+    image_data.tofile('bin/image_' + image_id + '.bin')
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/.keep b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/__init__.py b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ef5ce451c77930159fb61d7447db731e4bbcc47
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/__init__.py
@@ -0,0 +1,28 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/utils.py b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..690d8505fa63395ac4db9ff3f4f36e025d709da8
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/utils.py
@@ -0,0 +1,91 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from functools import reduce
+
+import numpy as np
+from PIL import Image
+
+
+def compose(*funcs):
+    if funcs:
+        return reduce(lambda f, g: lambda *a, **kw: g(f(*a, **kw)), funcs)
+    else:
+        raise ValueError('Composition of empty sequence not supported.')
+
+#---------------------------------------------------------#
+#   Convert the image to RGB, to avoid errors from grayscale
+#   images at prediction time. The code only supports RGB
+#   prediction, so all other image types are converted to RGB.
+#---------------------------------------------------------#
+def cvtColor(image):
+    if len(np.shape(image)) == 3 and np.shape(image)[2] == 3:
+        return image
+    else:
+        image = image.convert('RGB')
+        return image
+
+#---------------------------------------------------#
+#   Resize the input image (optionally letterboxed)
+#---------------------------------------------------#
+def resize_image(image, size, letterbox_image):
+    iw, ih = image.size
+    w, h = size
+    if letterbox_image:
+        scale = min(w/iw, h/ih)
+        nw = int(iw*scale)
+        nh = int(ih*scale)
+
+        image = image.resize((nw, nh), Image.BICUBIC)
+        new_image = Image.new('RGB', size, (128, 128, 128))
+        new_image.paste(image, ((w-nw)//2, (h-nh)//2))
+    else:
+        new_image = image.resize((w, h), Image.BICUBIC)
+    return new_image
+
+#---------------------------------------------------#
+#   Get the classes
+#---------------------------------------------------#
+def get_classes(classes_path):
+    with open(classes_path, encoding='utf-8') as f:
+        class_names = f.readlines()
+    class_names = [c.strip() for c in class_names]
+    return class_names, len(class_names)
+
+#---------------------------------------------------#
+#   Get the anchors
+#---------------------------------------------------#
+def get_anchors(anchors_path):
+    '''loads the anchors from a file'''
+    with open(anchors_path, encoding='utf-8') as f:
+        anchors = f.readline()
+    anchors = [float(x) for x in anchors.split(',')]
+    anchors = np.array(anchors).reshape(-1, 2)
+    return anchors, len(anchors)
+
+def preprocess_input(image):
+    image /= 255.0
+    return image
diff --git a/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/utils_bbox.py b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/utils_bbox.py
new file mode 100644
index 0000000000000000000000000000000000000000..16cfadcb1db8a42e16290bb6ab134a10a22bbe51
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/utils_bbox.py
@@ -0,0 +1,298 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import tensorflow as tf
+from tensorflow.keras import backend as K
+
+
+#---------------------------------------------------#
+#   Adjust the boxes so that they match the original image.
+#---------------------------------------------------#
+def yolo_correct_boxes(box_xy, box_wh, input_shape, image_shape, letterbox_image):
+    #-----------------------------------------------------------------#
+    #   The y axis is put first so that the box coordinates can be
+    #   conveniently multiplied by the image height and width.
+    #-----------------------------------------------------------------#
+    box_yx = box_xy[..., ::-1]
+    box_hw = box_wh[..., ::-1]
+    input_shape = K.cast(input_shape, K.dtype(box_yx))
+    image_shape = K.cast(image_shape, K.dtype(box_yx))
+
+    if letterbox_image:
+        #-----------------------------------------------------------------#
+        #   offset is the offset of the valid image area from the top-left
+        #   corner of the padded image; new_shape is the scaled height and
+        #   width of the valid area.
+        #-----------------------------------------------------------------#
+        new_shape = K.round(image_shape * K.min(input_shape/image_shape))
+        offset = (input_shape - new_shape)/2./input_shape
+        scale = input_shape/new_shape
+
+        box_yx = (box_yx - offset) * scale
+        box_hw *= scale
+
+    box_mins = box_yx - (box_hw / 2.)
+    box_maxes = box_yx + (box_hw / 2.)
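+    # The two lines below concatenate the corners as [y_min, x_min, y_max, x_max]
+    # and multiply by (h, w, h, w), so the returned boxes are in pixel
+    # coordinates of the original image, in the [y1, x1, y2, x2] order that
+    # tf.image.non_max_suppression expects.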
+    boxes = K.concatenate([box_mins[..., 0:1], box_mins[..., 1:2], box_maxes[..., 0:1], box_maxes[..., 1:2]])
+    boxes *= K.concatenate([image_shape, image_shape])
+    return boxes
+
+#---------------------------------------------------#
+#   Convert each predicted feature map into real values
+#---------------------------------------------------#
+def get_anchors_and_decode(feats, anchors, num_classes, input_shape, calc_loss=False):
+    num_anchors = len(anchors)
+    #------------------------------------------#
+    #   grid_shape is the height and width of the feature map
+    #------------------------------------------#
+    grid_shape = K.shape(feats)[1:3]
+    #--------------------------------------------------------------------#
+    #   Get the coordinates of each feature-map point.
+    #   The generated shape is (20, 20, num_anchors, 2).
+    #--------------------------------------------------------------------#
+    grid_x = K.tile(K.reshape(K.arange(0, stop=grid_shape[1]), [1, -1, 1, 1]), [grid_shape[0], 1, num_anchors, 1])
+    grid_y = K.tile(K.reshape(K.arange(0, stop=grid_shape[0]), [-1, 1, 1, 1]), [1, grid_shape[1], num_anchors, 1])
+    grid = K.cast(K.concatenate([grid_x, grid_y]), K.dtype(feats))
+    #---------------------------------------------------------------#
+    #   Tile the anchors; the resulting shape is (20, 20, num_anchors, 2).
+    #---------------------------------------------------------------#
+    anchors_tensor = K.reshape(K.constant(anchors), [1, 1, num_anchors, 2])
+    anchors_tensor = K.tile(anchors_tensor, [grid_shape[0], grid_shape[1], 1, 1])
+
+    #---------------------------------------------------#
+    #   Reshape the prediction to (batch_size, 20, 20, 3, 85).
+    #   85 splits into 4 + 1 + 80:
+    #   4 are the center/size adjustment parameters,
+    #   1 is the box confidence,
+    #   80 are the class confidences.
+    #---------------------------------------------------#
+    feats = K.reshape(feats, [-1, grid_shape[0], grid_shape[1], num_anchors, num_classes + 5])
+    #------------------------------------------#
+    #   Decode the anchors and normalize the result.
+    #------------------------------------------#
+    box_xy = (K.sigmoid(feats[..., :2]) * 2 - 0.5 + grid) / K.cast(grid_shape[::-1], K.dtype(feats))
+    box_wh = (K.sigmoid(feats[..., 2:4]) * 2) ** 2 * anchors_tensor / K.cast(input_shape[::-1], K.dtype(feats))
+    #------------------------------------------#
+    #   Get the confidence of the predicted boxes.
+    #------------------------------------------#
+    box_confidence = K.sigmoid(feats[..., 4:5])
+    box_class_probs = K.sigmoid(feats[..., 5:])
+
+    #---------------------------------------------------------------------#
+    #   When computing the loss, return grid, feats, box_xy, box_wh.
+    #   When predicting, return box_xy, box_wh, box_confidence, box_class_probs.
+    #---------------------------------------------------------------------#
+    if calc_loss == True:
+        return grid, feats, box_xy, box_wh
+    return box_xy, box_wh, box_confidence, box_class_probs
+
+#---------------------------------------------------#
+#   Image prediction
+#---------------------------------------------------#
+def DecodeBox(outputs,
+              anchors,
+              num_classes,
+              image_shape,
+              input_shape,
+              #-----------------------------------------------------------#
+              #   The 13x13 feature map uses anchors [116,90],[156,198],[373,326]
+              #   The 26x26 feature map uses anchors [30,61],[62,45],[59,119]
+              #   The 52x52 feature map uses anchors [10,13],[16,30],[33,23]
+              #-----------------------------------------------------------#
+              anchor_mask = [[6, 7, 8], [3, 4, 5], [0, 1, 2]],
+              max_boxes = 100,
+              confidence = 0.5,
+              nms_iou = 0.3,
+              letterbox_image = True):
+
+    box_xy = []
+    box_wh = []
+    box_confidence = []
+    box_class_probs = []
+    for i in range(len(outputs)):
+        # test = K.eval(outputs[i])
+        sub_box_xy, sub_box_wh, sub_box_confidence, sub_box_class_probs = \
+            get_anchors_and_decode(outputs[i], anchors[anchor_mask[i]], num_classes, input_shape)
+        box_xy.append(K.reshape(sub_box_xy, [-1, 2]))
+        box_wh.append(K.reshape(sub_box_wh, [-1, 2]))
+        box_confidence.append(K.reshape(sub_box_confidence, [-1, 1]))
+        box_class_probs.append(K.reshape(sub_box_class_probs, [-1, num_classes]))
+    box_xy = K.concatenate(box_xy, axis = 0)
+    box_wh = K.concatenate(box_wh, axis = 0)
+    box_confidence = K.concatenate(box_confidence, axis = 0)
+    box_class_probs = K.concatenate(box_class_probs, axis = 0)
+
+    #------------------------------------------------------------------------------------------------------------#
+    #   Before the image is fed to the network, letterbox_image pads it with
+    #   gray bars, so box_xy and box_wh are relative to the padded image.
+    #   They are corrected here to remove the padding and converted to
+    #   y_min, x_min, y_max, x_max. If letterbox_image is not used, the
+    #   normalized box_xy and box_wh still have to be rescaled to the
+    #   original image size.
+    #------------------------------------------------------------------------------------------------------------#
+    boxes = yolo_correct_boxes(box_xy, box_wh, input_shape, image_shape, letterbox_image)
+    box_scores = box_confidence * box_class_probs
+
+    #-----------------------------------------------------------#
+    #   Keep only boxes whose score is at least the confidence threshold.
+    #-----------------------------------------------------------#
+    mask = box_scores >= confidence
+    max_boxes_tensor = K.constant(max_boxes, dtype='int32')
+    boxes_out = []
+    scores_out = []
+    classes_out = []
+    for c in range(num_classes):
+        #-----------------------------------------------------------#
+        #   Take all boxes (and their scores) with box_scores >= confidence.
+        #-----------------------------------------------------------#
+        class_boxes = tf.boolean_mask(boxes, mask[:, c])
+        class_box_scores = tf.boolean_mask(box_scores[:, c], mask[:, c])
+
+        #-----------------------------------------------------------#
+        #   Non-maximum suppression: keep only the highest-scoring box
+        #   within each overlapping neighborhood.
+        #-----------------------------------------------------------#
+        nms_index = tf.image.non_max_suppression(class_boxes, class_box_scores, max_boxes_tensor, iou_threshold=nms_iou)
+
+        #-----------------------------------------------------------#
+        #   Gather the NMS results; the three tensors are the box
+        #   positions, the scores and the classes.
+        #-----------------------------------------------------------#
+        class_boxes = K.gather(class_boxes, nms_index)
+        class_box_scores = K.gather(class_box_scores, nms_index)
+        classes = K.ones_like(class_box_scores, 'int32') * c
+
+        boxes_out.append(class_boxes)
+        scores_out.append(class_box_scores)
+        classes_out.append(classes)
+    boxes_out = K.concatenate(boxes_out, axis=0)
+    scores_out = K.concatenate(scores_out, axis=0)
+    classes_out = K.concatenate(classes_out, axis=0)
+
+    return boxes_out, scores_out, classes_out
+
+
+if __name__ == "__main__":
+    import matplotlib.pyplot as plt
+    import numpy as np
+
+    def sigmoid(x):
+        s = 1 / (1 + np.exp(-x))
+        return s
+    #---------------------------------------------------#
+    #   Convert each predicted feature map into real values
+    #---------------------------------------------------#
+    def get_anchors_and_decode(feats, anchors, num_classes):
+        # feats        [batch_size, 20, 20, 3 * (5 + num_classes)]
+        # anchors      [3, 2]
+        # num_classes  scalar (20 in the demo call below)
+        num_anchors = len(anchors)
+        #------------------------------------------#
+        #   grid_shape is the height and width of the feature map
+        #   grid_shape [20, 20]
+        #------------------------------------------#
+        grid_shape = np.shape(feats)[1:3]
+        #--------------------------------------------------------------------#
+        #   Get the coordinates of each feature-map point.
+        #   The generated shape is (20, 20, num_anchors, 2).
+        #   grid_x [20, 20, 3, 1]
+        #   grid_y [20, 20, 3, 1]
+        #   grid   [20, 20, 3, 2]
+        #--------------------------------------------------------------------#
+        grid_x = np.tile(np.reshape(np.arange(0, stop=grid_shape[1]), [1, -1, 1, 1]), [grid_shape[0], 1, num_anchors, 1])
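+        # For example, with grid_shape = (20, 20) and num_anchors = 3:
+        # grid_x[i, j, k, 0] == j and (after the next line) grid_y[i, j, k, 0] == i,
+        # so grid[i, j, k] == (j, i) is the (x, y) cell offset that is added to
+        # the sigmoid outputs further down.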
+        grid_y = np.tile(np.reshape(np.arange(0, stop=grid_shape[0]), [-1, 1, 1, 1]), [1, grid_shape[1], num_anchors, 1])
+        grid = np.concatenate([grid_x, grid_y], -1)
+        #---------------------------------------------------------------#
+        #   Tile the anchors; the resulting shape is (20, 20, num_anchors, 2).
+        #   [1, 1, 3, 2]
+        #   [20, 20, 3, 2]
+        #---------------------------------------------------------------#
+        anchors_tensor = np.reshape(anchors, [1, 1, num_anchors, 2])
+        anchors_tensor = np.tile(anchors_tensor, [grid_shape[0], grid_shape[1], 1, 1])
+
+        #---------------------------------------------------#
+        #   Reshape the prediction to (batch_size, 20, 20, 3, 85).
+        #   85 splits into 4 + 1 + 80:
+        #   4 are the center/size adjustment parameters,
+        #   1 is the box confidence,
+        #   80 are the class confidences.
+        #   [batch_size, 20, 20, 3 * (5 + num_classes)]
+        #   [batch_size, 20, 20, 3, 5 + num_classes]
+        #---------------------------------------------------#
+        feats = np.reshape(feats, [-1, grid_shape[0], grid_shape[1], num_anchors, num_classes + 5])
+
+        #------------------------------------------#
+        #   Decode the anchors and normalize the result.
+        #------------------------------------------#
+        box_xy = (sigmoid(feats[..., :2]) * 2 - 0.5 + grid)
+        box_wh = (sigmoid(feats[..., 2:4]) * 2) ** 2 * anchors_tensor
+        #------------------------------------------#
+        #   Get the confidence of the predicted boxes.
+        #------------------------------------------#
+        box_confidence = sigmoid(feats[..., 4:5])
+        box_class_probs = sigmoid(feats[..., 5:])
+
+        box_wh = box_wh / 32
+        anchors_tensor = anchors_tensor / 32
+        fig = plt.figure()
+        ax = fig.add_subplot(121)
+        plt.ylim(-2, 22)
+        plt.xlim(-2, 22)
+        plt.scatter(grid_x, grid_y)
+        plt.scatter(5, 5, c='black')
+        plt.gca().invert_yaxis()
+
+        anchor_left = grid_x - anchors_tensor/2
+        anchor_top = grid_y - anchors_tensor/2
+        print(np.shape(anchors_tensor))
+        print(np.shape(box_xy))
+        rect1 = plt.Rectangle([anchor_left[5,5,0,0],anchor_top[5,5,0,1]],anchors_tensor[0,0,0,0],anchors_tensor[0,0,0,1],color="r",fill=False)
+        rect2 = plt.Rectangle([anchor_left[5,5,1,0],anchor_top[5,5,1,1]],anchors_tensor[0,0,1,0],anchors_tensor[0,0,1,1],color="r",fill=False)
+        rect3 = plt.Rectangle([anchor_left[5,5,2,0],anchor_top[5,5,2,1]],anchors_tensor[0,0,2,0],anchors_tensor[0,0,2,1],color="r",fill=False)
+
+        ax.add_patch(rect1)
+        ax.add_patch(rect2)
+        ax.add_patch(rect3)
+
+        ax = fig.add_subplot(122)
+        plt.ylim(-2, 22)
+        plt.xlim(-2, 22)
+        plt.scatter(grid_x, grid_y)
+        plt.scatter(5, 5, c='black')
+        plt.scatter(box_xy[0, 5, 5, :, 0], box_xy[0, 5, 5, :, 1], c='r')
+        plt.gca().invert_yaxis()
+
+        pre_left = box_xy[..., 0] - box_wh[..., 0] / 2
+        pre_top = box_xy[..., 1] - box_wh[..., 1] / 2
+
+        rect1 = plt.Rectangle([pre_left[0,5,5,0],pre_top[0,5,5,0]],box_wh[0,5,5,0,0],box_wh[0,5,5,0,1],color="r",fill=False)
+        rect2 = plt.Rectangle([pre_left[0,5,5,1],pre_top[0,5,5,1]],box_wh[0,5,5,1,0],box_wh[0,5,5,1,1],color="r",fill=False)
+        rect3 = plt.Rectangle([pre_left[0,5,5,2],pre_top[0,5,5,2]],box_wh[0,5,5,2,0],box_wh[0,5,5,2,1],color="r",fill=False)
+
+        ax.add_patch(rect1)
+        ax.add_patch(rect2)
+        ax.add_patch(rect3)
+
+        plt.show()
+        #
+    feat = np.random.normal(-0.5, 0.5, [4, 20, 20, 75])
+    anchors = [[116, 90], [156, 198], [373, 326]]
+    get_anchors_and_decode(feat, anchors, 20)
diff --git a/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/utils_map.py b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/utils_map.py
new file mode 100644
index 0000000000000000000000000000000000000000..24dc903c86787c89f065b5e84bf2e74ae0688a64
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/YOLOV5_ID0378_for_ACL/utils/utils_map.py
@@ -0,0 +1,928 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import glob
+import json
+import math
+import operator
+import os
+import shutil
+import sys
+
+import cv2
+import matplotlib.pyplot as plt
+import numpy as np
+
+'''
+    0,0 ------> x (width)
+     |
+     |  (Left,Top)
+     |      *_________
+     |      |         |
+            |         |
+     y      |_________|
+  (height)            *
+                (Right,Bottom)
+'''
+
+def log_average_miss_rate(precision, fp_cumsum, num_images):
+    """
+        log-average miss rate:
+            Calculated by averaging miss rates at 9 evenly spaced FPPI points
+            between 1e-2 and 1e0, in log-space.
+
+        output:
+                lamr | log-average miss rate
+                mr   | miss rate
+                fppi | false positives per image
+
+        references:
+            [1] Dollar, Piotr, et al. "Pedestrian Detection: An Evaluation of the
+                State of the Art." Pattern Analysis and Machine Intelligence, IEEE
+                Transactions on 34.4 (2012): 743 - 761.
+    """
+
+    if precision.size == 0:
+        lamr = 0
+        mr = 1
+        fppi = 0
+        return lamr, mr, fppi
+
+    fppi = fp_cumsum / float(num_images)
+    mr = (1 - precision)
+
+    fppi_tmp = np.insert(fppi, 0, -1.0)
+    mr_tmp = np.insert(mr, 0, 1.0)
+
+    ref = np.logspace(-2.0, 0.0, num = 9)
+    for i, ref_i in enumerate(ref):
+        j = np.where(fppi_tmp <= ref_i)[-1][-1]
+        ref[i] = mr_tmp[j]
+
+    lamr = math.exp(np.mean(np.log(np.maximum(1e-10, ref))))
+
+    return lamr, mr, fppi
+
+"""
+ throw error and exit
+"""
+def error(msg):
+    print(msg)
+    sys.exit(0)
+
+"""
+ check if the number is a float between 0.0 and 1.0
+"""
+def is_float_between_0_and_1(value):
+    try:
+        val = float(value)
+        if val > 0.0 and val < 1.0:
+            return True
+        else:
+            return False
+    except ValueError:
+        return False
+
+"""
+ Calculate the AP given the recall and precision array
+    1st) We compute a version of the measured precision/recall curve with
+         precision monotonically decreasing
+    2nd) We compute the AP as the area under this curve by numerical integration.
+"""
+def voc_ap(rec, prec):
+    """
+    --- Official matlab code VOC2012---
+    mrec=[0 ; rec ; 1];
+    mpre=[0 ; prec ; 0];
+    for i=numel(mpre)-1:-1:1
+        mpre(i)=max(mpre(i),mpre(i+1));
+    end
+    i=find(mrec(2:end)~=mrec(1:end-1))+1;
+    ap=sum((mrec(i)-mrec(i-1)).*mpre(i));
+    """
+    rec.insert(0, 0.0) # insert 0.0 at beginning of list
+    rec.append(1.0) # insert 1.0 at end of list
+    mrec = rec[:]
+    prec.insert(0, 0.0) # insert 0.0 at beginning of list
+    prec.append(0.0) # insert 0.0 at end of list
+    mpre = prec[:]
+    """
+    This part makes the precision monotonically decreasing
+        (goes from the end to the beginning)
+    matlab: for i=numel(mpre)-1:-1:1
+                mpre(i)=max(mpre(i),mpre(i+1));
+    """
+    for i in range(len(mpre)-2, -1, -1):
+        mpre[i] = max(mpre[i], mpre[i+1])
+    """
+    This part creates a list of indexes where the recall changes
+    matlab: i=find(mrec(2:end)~=mrec(1:end-1))+1;
+    """
+    i_list = []
+    for i in range(1, len(mrec)):
+        if mrec[i] != mrec[i-1]:
+            i_list.append(i) # if it was matlab, it would be i + 1
+    """
+    The Average Precision (AP) is the area under the curve
+    (numerical integration)
+    matlab: ap=sum((mrec(i)-mrec(i-1)).*mpre(i));
+    """
+    ap = 0.0
+    for i in i_list:
+        ap += ((mrec[i]-mrec[i-1])*mpre[i])
+    return ap, mrec, mpre
+
+
+"""
+ Convert the lines of a file to a list
+"""
+def file_lines_to_list(path):
+    # open txt file lines to a list
+    with open(path) as f:
+        content = f.readlines()
+    # remove whitespace characters like `\n` at the end of each line
+    content = [x.strip() for x in content]
+    return content
+
+"""
+ Draws text in image
+"""
+def draw_text_in_image(img, text, pos, color, line_width):
+    font = cv2.FONT_HERSHEY_PLAIN
+    fontScale = 1
+    lineType = 1
+    bottomLeftCornerOfText = pos
+    cv2.putText(img, text,
+            bottomLeftCornerOfText,
+            font,
+            fontScale,
+            color,
+            lineType)
+    text_width, _ = cv2.getTextSize(text, font, fontScale, lineType)[0]
+    return img, (line_width + text_width)
+
+"""
+ Plot - adjust axes
+"""
+def adjust_axes(r, t, fig, axes):
+    # get text width for re-scaling
+    bb = t.get_window_extent(renderer=r)
+    text_width_inches = bb.width / fig.dpi
+    # get axis width in inches
+    current_fig_width = fig.get_figwidth()
+    new_fig_width = current_fig_width + text_width_inches
+    proportion = new_fig_width / current_fig_width
+    # get axis limit
+    x_lim = axes.get_xlim()
+    axes.set_xlim([x_lim[0], x_lim[1]*proportion])
+
+"""
+ Draw plot using Matplotlib
+"""
+def draw_plot_func(dictionary, n_classes, window_title, plot_title, x_label, output_path, to_show, plot_color, true_p_bar):
+    # sort the dictionary by increasing value, into a list of tuples
+    sorted_dic_by_value = sorted(dictionary.items(), key=operator.itemgetter(1))
+    # unpacking the list of tuples into two lists
+    sorted_keys, sorted_values = zip(*sorted_dic_by_value)
+    #
+    if true_p_bar != "":
+        """
+        Special case to draw in:
+            - green -> TP: True Positives (object detected and matches ground-truth)
+            - red -> FP: False Positives (object detected but does not match ground-truth)
+            - orange -> FN: False Negatives (object not detected but present in the ground-truth)
+        """
+        fp_sorted = []
+        tp_sorted = []
+        for key in sorted_keys:
+            fp_sorted.append(dictionary[key] - true_p_bar[key])
+            tp_sorted.append(true_p_bar[key])
+        plt.barh(range(n_classes), fp_sorted, align='center', color='crimson', label='False Positive')
+        plt.barh(range(n_classes), tp_sorted, align='center', color='forestgreen', label='True Positive', left=fp_sorted)
+        # add legend
+        plt.legend(loc='lower right')
+        """
+        Write number on side of bar
+        """
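+        # The values below are drawn just past the end of each bar; for the
+        # longest bar, adjust_axes() widens the x-limit so the label still
+        # fits inside the figure.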
+        fig = plt.gcf() # gcf - get current figure
+        axes = plt.gca()
+        r = fig.canvas.get_renderer()
+        for i, val in enumerate(sorted_values):
+            fp_val = fp_sorted[i]
+            tp_val = tp_sorted[i]
+            fp_str_val = " " + str(fp_val)
+            tp_str_val = fp_str_val + " " + str(tp_val)
+            # trick to paint multicolor with offset:
+            # first paint everything and then repaint the first number
+            t = plt.text(val, i, tp_str_val, color='forestgreen', va='center', fontweight='bold')
+            plt.text(val, i, fp_str_val, color='crimson', va='center', fontweight='bold')
+            if i == (len(sorted_values)-1): # largest bar
+                adjust_axes(r, t, fig, axes)
+    else:
+        plt.barh(range(n_classes), sorted_values, color=plot_color)
+        """
+        Write number on side of bar
+        """
+        fig = plt.gcf() # gcf - get current figure
+        axes = plt.gca()
+        r = fig.canvas.get_renderer()
+        for i, val in enumerate(sorted_values):
+            str_val = " " + str(val) # add a space before
+            if val < 1.0:
+                str_val = " {0:.2f}".format(val)
+            t = plt.text(val, i, str_val, color=plot_color, va='center', fontweight='bold')
+            # re-set axes to show number inside the figure
+            if i == (len(sorted_values)-1): # largest bar
+                adjust_axes(r, t, fig, axes)
+    # set window title
+    fig.canvas.set_window_title(window_title)
+    # write classes in y axis
+    tick_font_size = 12
+    plt.yticks(range(n_classes), sorted_keys, fontsize=tick_font_size)
+    """
+    Re-scale height accordingly
+    """
+    init_height = fig.get_figheight()
+    # compute the matrix height in points and inches
+    dpi = fig.dpi
+    height_pt = n_classes * (tick_font_size * 1.4) # 1.4 (some spacing)
+    height_in = height_pt / dpi
+    # compute the required figure height
+    top_margin = 0.15 # in percentage of the figure height
+    bottom_margin = 0.05 # in percentage of the figure height
+    figure_height = height_in / (1 - top_margin - bottom_margin)
+    # set new height
+    if figure_height > init_height:
+        fig.set_figheight(figure_height)
+
+    # set plot title
+    plt.title(plot_title, fontsize=14)
+    # set axis titles
+    # plt.xlabel('classes')
+    plt.xlabel(x_label, fontsize='large')
+    # adjust size of window
+    fig.tight_layout()
+    # save the plot
+    fig.savefig(output_path)
+    # show image
+    if to_show:
+        plt.show()
+    # close the plot
+    plt.close()
+
+def get_map(MINOVERLAP, draw_plot, path = './map_out'):
+    GT_PATH = os.path.join(path, 'ground-truth')
+    DR_PATH = os.path.join(path, 'detection-results')
+    IMG_PATH = os.path.join(path, 'images-optional')
+    TEMP_FILES_PATH = os.path.join(path, '.temp_files')
+    RESULTS_FILES_PATH = os.path.join(path, 'results')
+
+    show_animation = True
+    if os.path.exists(IMG_PATH):
+        for dirpath, dirnames, files in os.walk(IMG_PATH):
+            if not files:
+                show_animation = False
+    else:
+        show_animation = False
+
+    if not os.path.exists(TEMP_FILES_PATH):
+        os.makedirs(TEMP_FILES_PATH)
+
+    if os.path.exists(RESULTS_FILES_PATH):
+        shutil.rmtree(RESULTS_FILES_PATH)
+    if draw_plot:
+        os.makedirs(os.path.join(RESULTS_FILES_PATH, "AP"))
+        os.makedirs(os.path.join(RESULTS_FILES_PATH, "F1"))
+        os.makedirs(os.path.join(RESULTS_FILES_PATH, "Recall"))
+        os.makedirs(os.path.join(RESULTS_FILES_PATH, "Precision"))
+    if show_animation:
+        os.makedirs(os.path.join(RESULTS_FILES_PATH, "images", "detections_one_by_one"))
+
+    ground_truth_files_list = glob.glob(GT_PATH + '/*.txt')
+    if len(ground_truth_files_list) == 0:
+        error("Error: No ground-truth files found!")
+    ground_truth_files_list.sort()
+    gt_counter_per_class = {}
+    counter_images_per_class = {}
+
+    for txt_file in ground_truth_files_list:
+        file_id = txt_file.split(".txt", 1)[0]
+        
file_id = os.path.basename(os.path.normpath(file_id))
+        temp_path = os.path.join(DR_PATH, (file_id + ".txt"))
+        if not os.path.exists(temp_path):
+            error_msg = "Error. File not found: {}\n".format(temp_path)
+            error(error_msg)
+        lines_list = file_lines_to_list(txt_file)
+        bounding_boxes = []
+        is_difficult = False
+        already_seen_classes = []
+        for line in lines_list:
+            try:
+                if "difficult" in line:
+                    class_name, left, top, right, bottom, _difficult = line.split()
+                    is_difficult = True
+                else:
+                    class_name, left, top, right, bottom = line.split()
+            except:
+                if "difficult" in line:
+                    line_split = line.split()
+                    _difficult = line_split[-1]
+                    bottom = line_split[-2]
+                    right = line_split[-3]
+                    top = line_split[-4]
+                    left = line_split[-5]
+                    class_name = ""
+                    for name in line_split[:-5]:
+                        class_name += name + " "
+                    class_name = class_name[:-1]
+                    is_difficult = True
+                else:
+                    line_split = line.split()
+                    bottom = line_split[-1]
+                    right = line_split[-2]
+                    top = line_split[-3]
+                    left = line_split[-4]
+                    class_name = ""
+                    for name in line_split[:-4]:
+                        class_name += name + " "
+                    class_name = class_name[:-1]
+
+            bbox = left + " " + top + " " + right + " " + bottom
+            if is_difficult:
+                bounding_boxes.append({"class_name":class_name, "bbox":bbox, "used":False, "difficult":True})
+                is_difficult = False
+            else:
+                bounding_boxes.append({"class_name":class_name, "bbox":bbox, "used":False})
+                if class_name in gt_counter_per_class:
+                    gt_counter_per_class[class_name] += 1
+                else:
+                    gt_counter_per_class[class_name] = 1
+
+                if class_name not in already_seen_classes:
+                    if class_name in counter_images_per_class:
+                        counter_images_per_class[class_name] += 1
+                    else:
+                        counter_images_per_class[class_name] = 1
+                    already_seen_classes.append(class_name)
+
+        with open(TEMP_FILES_PATH + "/" + file_id + "_ground_truth.json", 'w') as outfile:
+            json.dump(bounding_boxes, outfile)
+
+    gt_classes = list(gt_counter_per_class.keys())
+    gt_classes = sorted(gt_classes)
+    n_classes = len(gt_classes)
+
+    dr_files_list = glob.glob(DR_PATH + '/*.txt')
+    dr_files_list.sort()
+    for class_index, class_name in enumerate(gt_classes):
+        bounding_boxes = []
+        for txt_file in dr_files_list:
+            file_id = txt_file.split(".txt",1)[0]
+            file_id = os.path.basename(os.path.normpath(file_id))
+            temp_path = os.path.join(GT_PATH, (file_id + ".txt"))
+            if class_index == 0:
+                if not os.path.exists(temp_path):
+                    error_msg = "Error. File not found: {}\n".format(temp_path)
+                    error(error_msg)
+            lines = file_lines_to_list(txt_file)
+            for line in lines:
+                try:
+                    tmp_class_name, confidence, left, top, right, bottom = line.split()
+                except:
+                    line_split = line.split()
+                    bottom = line_split[-1]
+                    right = line_split[-2]
+                    top = line_split[-3]
+                    left = line_split[-4]
+                    confidence = line_split[-5]
+                    tmp_class_name = ""
+                    for name in line_split[:-5]:
+                        tmp_class_name += name + " "
+                    tmp_class_name = tmp_class_name[:-1]
+
+                if tmp_class_name == class_name:
+                    bbox = left + " " + top + " " + right + " " + bottom
+                    bounding_boxes.append({"confidence":confidence, "file_id":file_id, "bbox":bbox})
+
+        bounding_boxes.sort(key=lambda x:float(x['confidence']), reverse=True)
+        with open(TEMP_FILES_PATH + "/" + class_name + "_dr.json", 'w') as outfile:
+            json.dump(bounding_boxes, outfile)
+
+    sum_AP = 0.0
+    ap_dictionary = {}
+    lamr_dictionary = {}
+    with open(RESULTS_FILES_PATH + "/results.txt", 'w') as results_file:
+        results_file.write("# AP and precision/recall per class\n")
+        count_true_positives = {}
+
+        for class_index, class_name in enumerate(gt_classes):
+            count_true_positives[class_name] = 0
+            dr_file = TEMP_FILES_PATH + "/" + class_name + "_dr.json"
+            dr_data = json.load(open(dr_file))
+
+            nd = len(dr_data)
+            tp = [0] * nd
+            fp = [0] * nd
+            score = [0] * nd
+            score05_idx = 0
+            for idx, detection in enumerate(dr_data):
+                file_id = detection["file_id"]
+                score[idx] = float(detection["confidence"])
+                if score[idx] > 0.5:
+                    score05_idx = idx
+
+                if show_animation:
+                    ground_truth_img = glob.glob1(IMG_PATH, file_id + ".*")
+                    if len(ground_truth_img) == 0:
+                        error("Error. Image not found with id: " + file_id)
+                    elif len(ground_truth_img) > 1:
+                        error("Error. Multiple image with id: " + file_id)
+                    else:
+                        img = cv2.imread(IMG_PATH + "/" + ground_truth_img[0])
+                        img_cumulative_path = RESULTS_FILES_PATH + "/images/" + ground_truth_img[0]
+                        if os.path.isfile(img_cumulative_path):
+                            img_cumulative = cv2.imread(img_cumulative_path)
+                        else:
+                            img_cumulative = img.copy()
+                        bottom_border = 60
+                        BLACK = [0, 0, 0]
+                        img = cv2.copyMakeBorder(img, 0, bottom_border, 0, 0, cv2.BORDER_CONSTANT, value=BLACK)
+
+                gt_file = TEMP_FILES_PATH + "/" + file_id + "_ground_truth.json"
+                ground_truth_data = json.load(open(gt_file))
+                ovmax = -1
+                gt_match = -1
+                bb = [float(x) for x in detection["bbox"].split()]
+                for obj in ground_truth_data:
+                    if obj["class_name"] == class_name:
+                        bbgt = [ float(x) for x in obj["bbox"].split() ]
+                        bi = [max(bb[0],bbgt[0]), max(bb[1],bbgt[1]), min(bb[2],bbgt[2]), min(bb[3],bbgt[3])]
+                        iw = bi[2] - bi[0] + 1
+                        ih = bi[3] - bi[1] + 1
+                        if iw > 0 and ih > 0:
+                            ua = (bb[2] - bb[0] + 1) * (bb[3] - bb[1] + 1) + (bbgt[2] - bbgt[0]
+                                + 1) * (bbgt[3] - bbgt[1] + 1) - iw * ih
+                            ov = iw * ih / ua
+                            if ov > ovmax:
+                                ovmax = ov
+                                gt_match = obj
+
+                if show_animation:
+                    status = "NO MATCH FOUND!"
+
+                min_overlap = MINOVERLAP
+                if ovmax >= min_overlap:
+                    if "difficult" not in gt_match:
+                        if not bool(gt_match["used"]):
+                            tp[idx] = 1
+                            gt_match["used"] = True
+                            count_true_positives[class_name] += 1
+                            with open(gt_file, 'w') as f:
+                                f.write(json.dumps(ground_truth_data))
+                            if show_animation:
+                                status = "MATCH!"
+                        else:
+                            fp[idx] = 1
+                            if show_animation:
+                                status = "REPEATED MATCH!"
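+                # Detections were sorted by descending confidence, so each
+                # ground-truth box can be claimed at most once: the first
+                # detection with IoU >= MINOVERLAP marks it "used" (a TP), and
+                # any later detection matching the same box is counted as a
+                # false positive ("REPEATED MATCH!").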
+ else: + fp[idx] = 1 + if ovmax > 0: + status = "INSUFFICIENT OVERLAP" + + """ + Draw image to show animation + """ + if show_animation: + height, widht = img.shape[:2] + white = (255,255,255) + light_blue = (255,200,100) + green = (0,255,0) + light_red = (30,30,255) + margin = 10 + # 1nd line + v_pos = int(height - margin - (bottom_border / 2.0)) + text = "Image: " + ground_truth_img[0] + " " + img, line_width = draw_text_in_image(img, text, (margin, v_pos), white, 0) + text = "Class [" + str(class_index) + "/" + str(n_classes) + "]: " + class_name + " " + img, line_width = draw_text_in_image(img, text, (margin + line_width, v_pos), light_blue, line_width) + if ovmax != -1: + color = light_red + if status == "INSUFFICIENT OVERLAP": + text = "IoU: {0:.2f}% ".format(ovmax*100) + "< {0:.2f}% ".format(min_overlap*100) + else: + text = "IoU: {0:.2f}% ".format(ovmax*100) + ">= {0:.2f}% ".format(min_overlap*100) + color = green + img, _ = draw_text_in_image(img, text, (margin + line_width, v_pos), color, line_width) + # 2nd line + v_pos += int(bottom_border / 2.0) + rank_pos = str(idx+1) + text = "Detection #rank: " + rank_pos + " confidence: {0:.2f}% ".format(float(detection["confidence"])*100) + img, line_width = draw_text_in_image(img, text, (margin, v_pos), white, 0) + color = light_red + if status == "MATCH!": + color = green + text = "Result: " + status + " " + img, line_width = draw_text_in_image(img, text, (margin + line_width, v_pos), color, line_width) + + font = cv2.FONT_HERSHEY_SIMPLEX + if ovmax > 0: + bbgt = [ int(round(float(x))) for x in gt_match["bbox"].split() ] + cv2.rectangle(img,(bbgt[0],bbgt[1]),(bbgt[2],bbgt[3]),light_blue,2) + cv2.rectangle(img_cumulative,(bbgt[0],bbgt[1]),(bbgt[2],bbgt[3]),light_blue,2) + cv2.putText(img_cumulative, class_name, (bbgt[0],bbgt[1] - 5), font, 0.6, light_blue, 1, cv2.LINE_AA) + bb = [int(i) for i in bb] + cv2.rectangle(img,(bb[0],bb[1]),(bb[2],bb[3]),color,2) + cv2.rectangle(img_cumulative,(bb[0],bb[1]),(bb[2],bb[3]),color,2) + cv2.putText(img_cumulative, class_name, (bb[0],bb[1] - 5), font, 0.6, color, 1, cv2.LINE_AA) + + cv2.imshow("Animation", img) + cv2.waitKey(20) + output_img_path = RESULTS_FILES_PATH + "/images/detections_one_by_one/" + class_name + "_detection" + str(idx) + ".jpg" + cv2.imwrite(output_img_path, img) + cv2.imwrite(img_cumulative_path, img_cumulative) + + cumsum = 0 + for idx, val in enumerate(fp): + fp[idx] += cumsum + cumsum += val + + cumsum = 0 + for idx, val in enumerate(tp): + tp[idx] += cumsum + cumsum += val + + rec = tp[:] + for idx, val in enumerate(tp): + rec[idx] = float(tp[idx]) / np.maximum(gt_counter_per_class[class_name], 1) + + prec = tp[:] + for idx, val in enumerate(tp): + prec[idx] = float(tp[idx]) / np.maximum((fp[idx] + tp[idx]), 1) + + ap, mrec, mprec = voc_ap(rec[:], prec[:]) + F1 = np.array(rec)*np.array(prec)*2 / np.where((np.array(prec)+np.array(rec))==0, 1, (np.array(prec)+np.array(rec))) + + sum_AP += ap + text = "{0:.2f}%".format(ap*100) + " = " + class_name + " AP " #class_name + " AP = {0:.2f}%".format(ap*100) + + if len(prec)>0: + F1_text = "{0:.2f}".format(F1[score05_idx]) + " = " + class_name + " F1 " + Recall_text = "{0:.2f}%".format(rec[score05_idx]*100) + " = " + class_name + " Recall " + Precision_text = "{0:.2f}%".format(prec[score05_idx]*100) + " = " + class_name + " Precision " + else: + F1_text = "0.00" + " = " + class_name + " F1 " + Recall_text = "0.00%" + " = " + class_name + " Recall " + Precision_text = "0.00%" + " = " + class_name + " Precision " + + rounded_prec = [ 
+            rounded_prec = [ '%.2f' % elem for elem in prec ]
+            rounded_rec = [ '%.2f' % elem for elem in rec ]
+            results_file.write(text + "\n Precision: " + str(rounded_prec) + "\n Recall :" + str(rounded_rec) + "\n\n")
+            if len(prec)>0:
+                print(text + "\t||\tscore_threshold=0.5 : " + "F1=" + "{0:.2f}".format(F1[score05_idx])\
+                    + " ; Recall=" + "{0:.2f}%".format(rec[score05_idx]*100) + " ; Precision=" + "{0:.2f}%".format(prec[score05_idx]*100))
+            else:
+                print(text + "\t||\tscore_threshold=0.5 : F1=0.00% ; Recall=0.00% ; Precision=0.00%")
+            ap_dictionary[class_name] = ap
+
+            n_images = counter_images_per_class[class_name]
+            lamr, mr, fppi = log_average_miss_rate(np.array(rec), np.array(fp), n_images)
+            lamr_dictionary[class_name] = lamr
+
+            if draw_plot:
+                plt.plot(rec, prec, '-o')
+                area_under_curve_x = mrec[:-1] + [mrec[-2]] + [mrec[-1]]
+                area_under_curve_y = mprec[:-1] + [0.0] + [mprec[-1]]
+                plt.fill_between(area_under_curve_x, 0, area_under_curve_y, alpha=0.2, edgecolor='r')
+
+                fig = plt.gcf()
+                fig.canvas.set_window_title('AP ' + class_name)
+
+                plt.title('class: ' + text)
+                plt.xlabel('Recall')
+                plt.ylabel('Precision')
+                axes = plt.gca()
+                axes.set_xlim([0.0,1.0])
+                axes.set_ylim([0.0,1.05])
+                fig.savefig(RESULTS_FILES_PATH + "/AP/" + class_name + ".png")
+                plt.cla()
+
+                plt.plot(score, F1, "-", color='orangered')
+                plt.title('class: ' + F1_text + "\nscore_threshold=0.5")
+                plt.xlabel('Score_Threshold')
+                plt.ylabel('F1')
+                axes = plt.gca()
+                axes.set_xlim([0.0,1.0])
+                axes.set_ylim([0.0,1.05])
+                fig.savefig(RESULTS_FILES_PATH + "/F1/" + class_name + ".png")
+                plt.cla()
+
+                plt.plot(score, rec, "-H", color='gold')
+                plt.title('class: ' + Recall_text + "\nscore_threshold=0.5")
+                plt.xlabel('Score_Threshold')
+                plt.ylabel('Recall')
+                axes = plt.gca()
+                axes.set_xlim([0.0,1.0])
+                axes.set_ylim([0.0,1.05])
+                fig.savefig(RESULTS_FILES_PATH + "/Recall/" + class_name + ".png")
+                plt.cla()
+
+                plt.plot(score, prec, "-s", color='palevioletred')
+                plt.title('class: ' + Precision_text + "\nscore_threshold=0.5")
+                plt.xlabel('Score_Threshold')
+                plt.ylabel('Precision')
+                axes = plt.gca()
+                axes.set_xlim([0.0,1.0])
+                axes.set_ylim([0.0,1.05])
+                fig.savefig(RESULTS_FILES_PATH + "/Precision/" + class_name + ".png")
+                plt.cla()
+
+        if show_animation:
+            cv2.destroyAllWindows()
+
+        results_file.write("\n# mAP of all classes\n")
+        mAP = sum_AP / n_classes
+        text = "mAP = {0:.2f}%".format(mAP*100)
+        results_file.write(text + "\n")
+        print(text)
+
+    shutil.rmtree(TEMP_FILES_PATH)
+
+    """
+    Count total of detection-results
+    """
+    det_counter_per_class = {}
+    for txt_file in dr_files_list:
+        lines_list = file_lines_to_list(txt_file)
+        for line in lines_list:
+            class_name = line.split()[0]
+            if class_name in det_counter_per_class:
+                det_counter_per_class[class_name] += 1
+            else:
+                det_counter_per_class[class_name] = 1
+    dr_classes = list(det_counter_per_class.keys())
+
+    """
+    Write number of ground-truth objects per class to results.txt
+    """
+    with open(RESULTS_FILES_PATH + "/results.txt", 'a') as results_file:
+        results_file.write("\n# Number of ground-truth objects per class\n")
+        for class_name in sorted(gt_counter_per_class):
+            results_file.write(class_name + ": " + str(gt_counter_per_class[class_name]) + "\n")
+
+    """
+    Finish counting true positives
+    """
+    for class_name in dr_classes:
+        if class_name not in gt_classes:
+            count_true_positives[class_name] = 0
+    """
+    Write number of detected objects per class to results.txt
+    """
+    with open(RESULTS_FILES_PATH + "/results.txt", 'a') as results_file:
+        results_file.write("\n# Number of detected objects per class\n")
+        for class_name in sorted(dr_classes):
+            n_det = det_counter_per_class[class_name]
+            text = class_name + ": " + str(n_det)
+            text += " (tp:" + str(count_true_positives[class_name]) + ""
+            text += ", fp:" + str(n_det - count_true_positives[class_name]) + ")\n"
+            results_file.write(text)
+
+    """
+    Plot the total number of occurrences of each class in the ground-truth
+    """
+    if draw_plot:
+        window_title = "ground-truth-info"
+        plot_title = "ground-truth\n"
+        plot_title += "(" + str(len(ground_truth_files_list)) + " files and " + str(n_classes) + " classes)"
+        x_label = "Number of objects per class"
+        output_path = RESULTS_FILES_PATH + "/ground-truth-info.png"
+        to_show = False
+        plot_color = 'forestgreen'
+        draw_plot_func(
+            gt_counter_per_class,
+            n_classes,
+            window_title,
+            plot_title,
+            x_label,
+            output_path,
+            to_show,
+            plot_color,
+            '',
+            )
+
+    # """
+    # Plot the total number of occurrences of each class in the "detection-results" folder
+    # """
+    # if draw_plot:
+    #     window_title = "detection-results-info"
+    #     # Plot title
+    #     plot_title = "detection-results\n"
+    #     plot_title += "(" + str(len(dr_files_list)) + " files and "
+    #     count_non_zero_values_in_dictionary = sum(int(x) > 0 for x in list(det_counter_per_class.values()))
+    #     plot_title += str(count_non_zero_values_in_dictionary) + " detected classes)"
+    #     # end Plot title
+    #     x_label = "Number of objects per class"
+    #     output_path = RESULTS_FILES_PATH + "/detection-results-info.png"
+    #     to_show = False
+    #     plot_color = 'forestgreen'
+    #     true_p_bar = count_true_positives
+    #     draw_plot_func(
+    #         det_counter_per_class,
+    #         len(det_counter_per_class),
+    #         window_title,
+    #         plot_title,
+    #         x_label,
+    #         output_path,
+    #         to_show,
+    #         plot_color,
+    #         true_p_bar
+    #         )
+
+    """
+    Draw log-average miss rate plot (Show lamr of all classes in decreasing order)
+    """
+    if draw_plot:
+        window_title = "lamr"
+        plot_title = "log-average miss rate"
+        x_label = "log-average miss rate"
+        output_path = RESULTS_FILES_PATH + "/lamr.png"
+        to_show = False
+        plot_color = 'royalblue'
+        draw_plot_func(
+            lamr_dictionary,
+            n_classes,
+            window_title,
+            plot_title,
+            x_label,
+            output_path,
+            to_show,
+            plot_color,
+            ""
+            )
+
+    """
+    Draw mAP plot (Show AP's of all classes in decreasing order)
+    """
+    if draw_plot:
+        window_title = "mAP"
+        plot_title = "mAP = {0:.2f}%".format(mAP*100)
+        x_label = "Average Precision"
+        output_path = RESULTS_FILES_PATH + "/mAP.png"
+        to_show = True
+        plot_color = 'royalblue'
+        draw_plot_func(
+            ap_dictionary,
+            n_classes,
+            window_title,
+            plot_title,
+            x_label,
+            output_path,
+            to_show,
+            plot_color,
+            ""
+            )
+
+def preprocess_gt(gt_path, class_names):
+    image_ids = os.listdir(gt_path)
+    results = {}
+
+    images = []
+    bboxes = []
+    for i, image_id in enumerate(image_ids):
+        lines_list = file_lines_to_list(os.path.join(gt_path, image_id))
+        boxes_per_image = []
+        image = {}
+        image_id = os.path.splitext(image_id)[0]
+        image['file_name'] = image_id + '.jpg'
+        image['width'] = 1
+        image['height'] = 1
+        #-----------------------------------------------------------------#
+        #   Thanks to 多学学英语吧 for the tip that fixed the
+        #   'Results do not correspond to current coco set' problem.
+        #-----------------------------------------------------------------#
+        image['id'] = str(image_id)
+
+        for line in lines_list:
+            difficult = 0
+            if "difficult" in line:
+                line_split = line.split()
+                left, top, right, bottom, _difficult = line_split[-5:]
+                class_name = ""
+                for name in line_split[:-5]:
+                    class_name += name + " "
+                class_name = 
class_name[:-1] + difficult = 1 + else: + line_split = line.split() + left, top, right, bottom = line_split[-4:] + class_name = "" + for name in line_split[:-4]: + class_name += name + " " + class_name = class_name[:-1] + + left, top, right, bottom = float(left), float(top), float(right), float(bottom) + cls_id = class_names.index(class_name) + 1 + bbox = [left, top, right - left, bottom - top, difficult, str(image_id), cls_id, (right - left) * (bottom - top) - 10.0] + boxes_per_image.append(bbox) + images.append(image) + bboxes.extend(boxes_per_image) + results['images'] = images + + categories = [] + for i, cls in enumerate(class_names): + category = {} + category['supercategory'] = cls + category['name'] = cls + category['id'] = i + 1 + categories.append(category) + results['categories'] = categories + + annotations = [] + for i, box in enumerate(bboxes): + annotation = {} + annotation['area'] = box[-1] + annotation['category_id'] = box[-2] + annotation['image_id'] = box[-3] + annotation['iscrowd'] = box[-4] + annotation['bbox'] = box[:4] + annotation['id'] = i + annotations.append(annotation) + results['annotations'] = annotations + return results + +def preprocess_dr(dr_path, class_names): + image_ids = os.listdir(dr_path) + results = [] + for image_id in image_ids: + lines_list = file_lines_to_list(os.path.join(dr_path, image_id)) + image_id = os.path.splitext(image_id)[0] + for line in lines_list: + line_split = line.split() + confidence, left, top, right, bottom = line_split[-5:] + class_name = "" + for name in line_split[:-5]: + class_name += name + " " + class_name = class_name[:-1] + left, top, right, bottom = float(left), float(top), float(right), float(bottom) + result = {} + result["image_id"] = str(image_id) + result["category_id"] = class_names.index(class_name) + 1 + result["bbox"] = [left, top, right - left, bottom - top] + result["score"] = float(confidence) + results.append(result) + return results + +def get_coco_map(class_names, path): + from pycocotools.coco import COCO + from pycocotools.cocoeval import COCOeval + + GT_PATH = os.path.join(path, 'ground-truth') + DR_PATH = os.path.join(path, 'detection-results') + COCO_PATH = os.path.join(path, 'coco_eval') + + if not os.path.exists(COCO_PATH): + os.makedirs(COCO_PATH) + + GT_JSON_PATH = os.path.join(COCO_PATH, 'instances_gt.json') + DR_JSON_PATH = os.path.join(COCO_PATH, 'instances_dr.json') + + with open(GT_JSON_PATH, "w") as f: + results_gt = preprocess_gt(GT_PATH, class_names) + json.dump(results_gt, f, indent=4) + + with open(DR_JSON_PATH, "w") as f: + results_dr = preprocess_dr(DR_PATH, class_names) + json.dump(results_dr, f, indent=4) + + cocoGt = COCO(GT_JSON_PATH) + cocoDt = cocoGt.loadRes(DR_JSON_PATH) + cocoEval = COCOeval(cocoGt, cocoDt, 'bbox') + cocoEval.evaluate() + cocoEval.accumulate() + cocoEval.summarize() diff --git a/TensorFlow/built-in/audio/Jasper_ID0020_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/built-in/audio/Jasper_ID0020_for_TensorFlow/test/train_full_1p.sh index 1fb76bae696dfa940a78123186bc64a353f23e7e..a6dc4fa174b275c6f703b9f4e9dce34118f76425 100644 --- a/TensorFlow/built-in/audio/Jasper_ID0020_for_TensorFlow/test/train_full_1p.sh +++ b/TensorFlow/built-in/audio/Jasper_ID0020_for_TensorFlow/test/train_full_1p.sh @@ -151,7 +151,7 @@ echo "E2E Training Duration sec : $e2e_time" #训练用例信息,不需要修改 BatchSize=${batch_size} DeviceType=`uname -m` -CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' #获取性能数据,不需要修改 #吞吐量 diff --git 
a/TensorFlow/built-in/audio/WaveGlow_ID0024_for_TensorFlow/train.py b/TensorFlow/built-in/audio/WaveGlow_ID0024_for_TensorFlow/train.py index 376775a78a1684da948cf7ad75f1e8749fee488b..a502fe1c4cba23e809f9d09c0b543b07c0a2a6eb 100644 --- a/TensorFlow/built-in/audio/WaveGlow_ID0024_for_TensorFlow/train.py +++ b/TensorFlow/built-in/audio/WaveGlow_ID0024_for_TensorFlow/train.py @@ -223,7 +223,7 @@ def main(): print("#########gpu number:",args.ngpu) args.logdir = os.path.join(hparams.logdir_root, args.run_name) - if not os.path.exists(args.logdir): + if not os.path.exists(args.logdir) and deviceid == 0: os.makedirs(args.logdir) args.gen_wave_dir = os.path.join(args.logdir, 'wave') diff --git a/TensorFlow/built-in/cv/Image_translation/DualGAN_ID1001_for_TensorFlow/main.py b/TensorFlow/built-in/cv/Image_translation/DualGAN_ID1001_for_TensorFlow/main.py index fdb74e8cdc937b8c16d65b1150d5488356bddb6f..5e9ea56baf8beca9c0d4592d8dfe95e1f94f6a3c 100644 --- a/TensorFlow/built-in/cv/Image_translation/DualGAN_ID1001_for_TensorFlow/main.py +++ b/TensorFlow/built-in/cv/Image_translation/DualGAN_ID1001_for_TensorFlow/main.py @@ -85,6 +85,8 @@ def main(_): custom_op = config_proto.graph_options.rewrite_options.custom_optimizers.add() custom_op.name = "NpuOptimizer" custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") + custom_op.parameter_map["enable_data_pre_proc"].b = True + custom_op.parameter_map["iterations_per_loop"].i = 10 config_proto.graph_options.rewrite_options.remapping = RewriterConfig.OFF diff --git a/TensorFlow/built-in/cv/Image_translation/DualGAN_ID1001_for_TensorFlow/model.py b/TensorFlow/built-in/cv/Image_translation/DualGAN_ID1001_for_TensorFlow/model.py index 16b43f95c3658e80fc729cf35ad09cd9c746219d..7d6b025422368936df3b072eb9094346070873ee 100644 --- a/TensorFlow/built-in/cv/Image_translation/DualGAN_ID1001_for_TensorFlow/model.py +++ b/TensorFlow/built-in/cv/Image_translation/DualGAN_ID1001_for_TensorFlow/model.py @@ -91,14 +91,16 @@ class DualNet(object): self.log_freq = log_freq self.gamma = 10. 
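+        # NOTE: graph construction is deferred to train(), which now passes in batch tensors taken directly from the tf.data iterator (needed for the NPU iterations_per_loop optimization), so build_model() is no longer called in __init__.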
self.disc_type = disc_type - self.build_model() + # self.build_model() - def build_model(self): + def build_model(self, imgA_batch, imgB_batch): ### define place holders - self.real_A = tf.placeholder(tf.float32,[self.batch_size, self.image_size, self.image_size, - self.A_channels ],name='real_A') - self.real_B = tf.placeholder(tf.float32, [self.batch_size, self.image_size, self.image_size, - self.B_channels ], name='real_B') + # self.real_A = tf.placeholder(tf.float32,[self.batch_size, self.image_size, self.image_size, + # self.A_channels ],name='real_A') + # self.real_B = tf.placeholder(tf.float32, [self.batch_size, self.image_size, self.image_size, + # self.B_channels ], name='real_B') + self.real_A = imgA_batch + self.real_B = imgB_batch ### define graphs self.A2B = self.A_g_net(self.real_A, reuse = False) @@ -216,24 +218,37 @@ class DualNet(object): return imgA, imgB def process_function(self, a, b): - return tf.py_func(self.read_data, inp=[a, b], Tout=[tf.float32, tf.float32]) - #result_tensor = tf.py_func(self.read_data, inp=[a, b], Tout=[tf.float32, tf.float32]) - #result_tensor[0].set_shape([256,256,3]) - #result_tensor[1].set_shape([256,256,3]) - #return result_tensor + # return tf.py_func(self.read_data, inp=[a, b], Tout=[tf.float32, tf.float32]) + result_tensor = tf.py_func(self.read_data, inp=[a, b], Tout=[tf.float32, tf.float32]) + result_tensor[0].set_shape([256,256,3]) + result_tensor[1].set_shape([256,256,3]) + return result_tensor def make_dataset(self, dataAlist, dataBlist, batch_size, epoch=1): ds = tf.data.Dataset.from_tensor_slices((dataAlist, dataBlist)) ds = ds.map(lambda a, b: self.process_function(a, b), num_parallel_calls=tf.data.experimental.AUTOTUNE) # same with data size for perfect shuffle ds = ds.shuffle(buffer_size=995) - ds = ds.repeat(epoch) ds = ds.batch(batch_size, drop_remainder=True) + ds = ds.repeat() ds = ds.prefetch(buffer_size=tf.contrib.data.AUTOTUNE) + # ds = ds.prefetch(buffer_size=1) return ds def train(self, args): """Train Dual GAN""" + + data_A = glob('{}/train/A/*.*[g|G]'.format(self.dataset_name)) + data_B = glob('{}/train/B/*.*[g|G]'.format(self.dataset_name)) + epoch_size = min(len(data_A), len(data_B)) // (self.batch_size) + train_dataset = self.make_dataset(data_A, data_B, self.batch_size, args.epoch) + iterator = train_dataset.make_initializable_iterator() + # next_element = iterator.get_next() + imgA_batch, imgB_batch = iterator.get_next() + self.sess.run(iterator.initializer) + + self.build_model(imgA_batch, imgB_batch) + decay = 0.9 self.d_optim = npu_tf_optimizer(tf.train.RMSPropOptimizer(args.lr, decay=decay)) \ .minimize(self.d_loss, var_list=self.d_vars) @@ -255,14 +270,17 @@ class DualNet(object): print(" Load failed...ignored...") print(" start training...") - data_A = glob('{}/train/A/*.*[g|G]'.format(self.dataset_name)) - data_B = glob('{}/train/B/*.*[g|G]'.format(self.dataset_name)) - epoch_size = min(len(data_A), len(data_B)) // (self.batch_size) + # data_A = glob('{}/train/A/*.*[g|G]'.format(self.dataset_name)) + # data_B = glob('{}/train/B/*.*[g|G]'.format(self.dataset_name)) + # epoch_size = min(len(data_A), len(data_B)) // (self.batch_size) - train_dataset = self.make_dataset(data_A, data_B, self.batch_size, args.epoch) - iterator = train_dataset.make_initializable_iterator() - next_element = iterator.get_next() - self.sess.run(iterator.initializer) + # train_dataset = self.make_dataset(data_A, data_B, self.batch_size, args.epoch) + # iterator = train_dataset.make_initializable_iterator() + # # next_element = 
iterator.get_next() + # imgA_batch, imgB_batch = iterator.get_next() + # self.sess.run(iterator.initializer) + + # self.build_model(imgA_batch, imgB_batch) for epoch_idx in range(args.epoch): # data_A = glob('{}/train/A/*.*[g|G]'.format(self.dataset_name)) @@ -274,10 +292,10 @@ class DualNet(object): print("#data_A: %d #data_B:%d" %(len(data_A),len(data_B))) print('[*] run optimizor...') - for batch_idx in range(0, epoch_size): + for batch_idx in range(0, epoch_size, 10): # imgA_batch = self.load_training_imgs(data_A, batch_idx) # imgB_batch = self.load_training_imgs(data_B, batch_idx) - imgA_batch, imgB_batch = self.sess.run(next_element) + # imgA_batch, imgB_batch = self.sess.run(next_element) if step % self.log_freq == 0: print("Epoch: [%2d] [%4d/%4d]"%(epoch_idx, batch_idx, epoch_size)) step = step + 1 @@ -298,20 +316,26 @@ class DualNet(object): return batch_imgs def run_optim(self,batch_A_imgs, batch_B_imgs, counter, start_time, batch_idx): - + train_op_d = util.set_iteration_per_loop(self.sess, self.d_optim, 10) + train_op_g = util.set_iteration_per_loop(self.sess, self.g_optim, 10) + + # _, Adfake,Adreal,Bdfake,Bdreal, Ad, Bd = self.sess.run( + # [self.d_optim, self.Ad_loss_fake, self.Ad_loss_real, self.Bd_loss_fake, self.Bd_loss_real, self.Ad_loss, self.Bd_loss], + # feed_dict = {self.real_A: batch_A_imgs, self.real_B: batch_B_imgs}) _, Adfake,Adreal,Bdfake,Bdreal, Ad, Bd = self.sess.run( - [self.d_optim, self.Ad_loss_fake, self.Ad_loss_real, self.Bd_loss_fake, self.Bd_loss_real, self.Ad_loss, self.Bd_loss], - feed_dict = {self.real_A: batch_A_imgs, self.real_B: batch_B_imgs}) + [self.d_optim, self.Ad_loss_fake, self.Ad_loss_real, self.Bd_loss_fake, self.Bd_loss_real, self.Ad_loss, self.Bd_loss]) if 'wgan' == self.GAN_type: - self.sess.run(self.clip_ops) + self.sess.run(self.clip_ops) if 'wgan' in self.GAN_type: if batch_idx % self.n_critic == 0: + # _, Ag, Bg, Aloss, Bloss = self.sess.run( + # [self.g_optim, self.Ag_loss, self.Bg_loss, self.A_loss, self.B_loss], + # feed_dict={ self.real_A: batch_A_imgs, self.real_B: batch_B_imgs}) _, Ag, Bg, Aloss, Bloss = self.sess.run( - [self.g_optim, self.Ag_loss, self.Bg_loss, self.A_loss, self.B_loss], - feed_dict={ self.real_A: batch_A_imgs, self.real_B: batch_B_imgs}) + [self.g_optim, self.Ag_loss, self.Bg_loss, self.A_loss, self.B_loss]) else: Ag, Bg, Aloss, Bloss = self.sess.run( [self.Ag_loss, self.Bg_loss, self.A_loss, self.B_loss], @@ -325,7 +349,7 @@ class DualNet(object): feed_dict={ self.real_A: batch_A_imgs, self.real_B: batch_B_imgs}) if batch_idx % self.log_freq == 0: print("time: %4.4f, Ad: %.2f, Ag: %.2f, Bd: %.2f, Bg: %.2f, U_diff: %.5f, V_diff: %.5f" \ - % (time.time() - start_time, Ad,Ag,Bd,Bg, Aloss, Bloss)) + % ((time.time() - start_time) / 10 / 10, Ad,Ag,Bd,Bg, Aloss, Bloss)) print("Ad_fake: %.2f, Ad_real: %.2f, Bd_fake: %.2f, Bd_real: %.2f" % (Adfake,Adreal,Bdfake,Bdreal)) def A_d_net(self, imgs, y = None, reuse = False): diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/LICENSE b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..12d255f8e0f049d3c3127e71788e219b86cdf55b --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/LICENSE @@ -0,0 +1,251 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+
+## Some of TensorFlow's code is derived from Caffe, which is subject to the following copyright notice:
+
+COPYRIGHT
+
+All contributions by the University of California:
+
+Copyright (c) 2014, The Regents of the University of California (Regents)
+All rights reserved.
+
+All other contributions:
+
+Copyright (c) 2014, the respective contributors
+All rights reserved.
+
+Caffe uses a shared copyright model: each contributor holds copyright over
+their contributions to Caffe. The project versioning records all such
+contribution and copyright details. If a contributor wants to further mark
+their specific copyright on a particular contribution, they should indicate
+their copyright solely in the commit message of the change when it is
+committed.
+
+LICENSE
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+  ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+  (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+  ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+CONTRIBUTION AGREEMENT
+
+By contributing to the BVLC/caffe repository through pull-request, comment,
+or otherwise, the contributor releases their content to the
+license and copyright terms herein.
\ No newline at end of file
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/README.md b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..b11a84220a7356fca01e775d20f2ceaa9f0a10cb
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/README.md
@@ -0,0 +1,84 @@
+English | [中文](README_zh-cn.md)
+
+# Ascend Video Processing
+
+This repository implements a video processing and enhancement framework on the Ascend platform. It aims to lower the barrier to implementing and deploying video restoration and enhancement models, and to help users build their own processing pipelines.
+
+We also provide several open-source enhancement models as samples of how to use this framework for efficient training, online inference, and offline inference on the Ascend platform. For example, [src/networks/edvr.py](src/networks/edvr.py) implements the classic video super-resolution model [EDVR](http://arxiv.org/abs/1905.02716), which can be trained and evaluated with TensorFlow on the Ascend NPU platform and includes a ``deformable convolution`` operator implemented exclusively on NPU.
+We will also provide an example of how to run inference with EDVR offline models (OM) on Ascend, and of how to build a simple processing pipeline with video in and video out.
+
+## Environment
+
+- python3.7
+- training & online inference (with checkpoint file or PB file)
+    - Ascend 910 or Ascend 710
+- offline inference (with OM)
+    - Ascend 310 or Ascend 710
+
+## Requirements
+
+- tensorflow==1.15
+- opencv-python
+- yacs
+- tqdm
+
+## Customize Model
+
+To make your own model fit the framework, define a model class that inherits from the base class ``src.networks.base_model.Base`` and put it in the ``src/networks`` folder:
+
+```python
+from src.networks.base_model import Base
+
+class YOUR_MODEL(Base):
+    pass
+```
+
+You can then select your customized model by setting ``cfg.model.name=YOUR_MODEL`` in ``configs/models/YOUR_MODEL.py``. All model, training, and inference details can be configured in this file as well; its settings override the default config terms in [src/utils/defaults.py](src/utils/defaults.py).
+
+## Training
+
+Enter the repository folder:
+
+```sh
+cd AscendVideo
+```
+
+Modify [scripts/env.sh](scripts/env.sh) to make sure you can import the ``npu_bridge`` python package:
+```sh
+source scripts/env.sh
+python3 -c "import npu_bridge"
+```
+
+Run training on a single device 0 with the configuration ``configs/models/YOUR_MODEL.py``:
+
+```sh
+# On a single device 0
+bash scripts/run_train.sh 0 configs/models/YOUR_MODEL.py
+```
+
+Run training on two devices 1,2 with the configuration ``configs/models/YOUR_MODEL.py``:
+
+```sh
+# On multiple devices, e.g., 1,2
+bash scripts/run_train.sh 1,2 configs/models/YOUR_MODEL.py
+```
+
+## Inference
+
+Once you have trained the model, the checkpoint files will be saved in the output directory (specified by ``cfg.train.output_dir``). Each checkpoint consists of three files:
+- ``YOUR_MODEL-10000.data****``
+- ``YOUR_MODEL-10000.meta``
+- ``YOUR_MODEL-10000.index``
+
+Suppose the video frames lie in ``/path/to/frames``, where each frame is indexed following some pattern like ``0001.png``, ``0002.png``, etc. Inference then takes a single command:
+
+```bash
+bash scripts/run_inference.sh 0 configs/models/YOUR_MODEL.py /path/to/YOUR_MODEL-10000 /path/to/frames
+```
+
+The inference result will be saved in ``/path/to/frames_YOUR_MODEL``.
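+If you still need to produce such an indexed frame folder from a raw video, a few lines of OpenCV are enough. The sketch below uses hypothetical paths and a hypothetical helper name; the repository's own ``scripts/video2frames.sh`` does the same job more robustly, including deinterlacing detection:
+
+```python
+import os
+import cv2  # opencv-python is already listed in requirements.txt
+
+def video_to_frames(video_path, out_dir):
+    """Dump every frame of a video into out_dir as 0001.png, 0002.png, ..."""
+    os.makedirs(out_dir, exist_ok=True)
+    cap = cv2.VideoCapture(video_path)
+    idx = 0
+    while True:
+        ok, frame = cap.read()
+        if not ok:  # end of stream
+            break
+        idx += 1
+        cv2.imwrite(os.path.join(out_dir, "{:04d}.png".format(idx)), frame)
+    cap.release()
+
+video_to_frames("/path/to/input.mp4", "/path/to/frames")
+```
+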
+### Freeze ckpt to PB
+
+To freeze a trained checkpoint into a PB file, pass the checkpoint prefix first and the config file second (the argument order expected by ``scripts/run_freeze.sh``):
+```shell
+bash scripts/run_freeze.sh /path/to/YOUR_MODEL-10000 configs/models/YOUR_MODEL.py
+```
+
+
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/README_zh-cn.md b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/README_zh-cn.md
new file mode 100644
index 0000000000000000000000000000000000000000..ec29bf1b5bae3fb5587d158e1cd4ff39b0711e3c
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/README_zh-cn.md
@@ -0,0 +1,109 @@
+[English](README.md) | 中文
+
+# Ascend Video Enhancement
+
+This repository is a video processing, restoration, and enhancement framework based on the Ascend platform. It aims to lower the barrier for developers in this field to research, implement, and deploy algorithms and models on the Ascend platform, to speed up algorithm iteration, and to help developers build their own video restoration and enhancement pipelines.
+
+This repository provides several open-source enhancement model samples that show developers how to use the framework and the Ascend platform for efficient training, online inference, and offline inference. It supports many different video processing tasks, such as denoising, super-resolution, frame interpolation, HDR, face enhancement, and other tasks that can be accomplished with or without a model. Developers can refer to [src/networks/edvr.py](src/networks/edvr.py) to see how a classic video super-resolution model, [EDVR](http://arxiv.org/abs/1905.02716), is built, trained, and evaluated on the Ascend platform.
+
+The EDVR model contains a special operator, ``deformable_convolution``, for which the Ascend platform has an exclusive implementation that optimizes the deformable convolution computation in TensorFlow. We will also provide an EDVR offline model (OM) as a reference for how to run offline inference on the Ascend platform and how to build an end-to-end video enhancement pipeline.
+
+## Environment
+- Python version: python3.7
+- Training and online inference hardware: Ascend 910
+
+## Requirements
+- tensorflow==1.15
+- opencv-python
+- yacs
+- tqdm
+
+## Customize Model
+Adding a custom model is straightforward: create a new model class that inherits from ``src.networks.base_model.Base`` and put it under ``src/networks``:
+
+```python
+from src.networks.base_model import Base
+
+class YOUR_MODEL(Base):
+    # Define your own structure.
+    pass
+```
+
+Then use ``configs/models/YOUR_MODEL.py`` as the configuration file to select the custom model:
+```yaml
+model:
+  name: YOUR_MODEL
+# other configurations
+```
+This file can also configure the model structure and the training and inference strategies; the program applies it on top of, and overrides, the defaults in [src/utils/defaults.py](src/utils/defaults.py).
+
+## Training
+Enter the repository folder:
+
+```sh
+cd AscendVideo
+```
+
+Modify the environment file [scripts/env.sh](scripts/env.sh) according to your hardware:
+```sh
+vim scripts/env.sh
+
+# Adjust the environment variables so that npu_bridge can be imported
+```
+
+Train on NPU device 0 with the configuration file ``configs/models/YOUR_MODEL.py``:
+
+```sh
+# On a single device 0
+bash scripts/run_train.sh 0 configs/models/YOUR_MODEL.py
+```
+
+Run multi-device training on NPU devices 1 and 2 with ``configs/models/YOUR_MODEL.py``:
+
+```sh
+# On multiple devices, e.g., 1,2
+bash scripts/run_train.sh 1,2 configs/models/YOUR_MODEL.py
+```
+
+## Inference
+After training, checkpoint files saved at fixed intervals will appear in the output directory (specified by ``cfg.train.output_dir``), for example:
+- ``YOUR_MODEL-10000.data****``
+- ``YOUR_MODEL-10000.meta``
+- ``YOUR_MODEL-10000.index``
+
+Given any input frame folder ``/path/to/frames``, with frames numbered sequentially as ``0001.png``, ``0002.png``, and so on, online inference only takes the following command:
+
+```bash
+bash scripts/run_inference.sh 0 configs/models/YOUR_MODEL.py /path/to/YOUR_MODEL-10000 /path/to/frames
+```
+The inference results are saved under ``/path/to/frames_YOUR_MODEL``.
+
+### Freeze Model
+Freeze a checkpoint into a PB file (checkpoint prefix first, then the config file):
+```shell
+bash scripts/run_freeze.sh /path/to/YOUR_MODEL-10000 configs/models/YOUR_MODEL.py
+```
+The size of the frozen input placeholder can be configured by modifying ``configs/models/YOUR_MODEL.py``.
+
+## Demo Clips
+We provide several video clips for testing the enhancement effects, together with the results produced by Ascend video enhancement on these clips, covering individual algorithms such as denoising, frame interpolation, face enhancement, HDR color enhancement, and super-resolution.
+
+| Clip | Resolution | Frame rate | Link | Notes |
+| ------------- | --- | --- | ------------------------------------------------------------ | --- |
+| Super-resolution source | 1080P | 25 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/City-1080p.mp4 | |
+| 2x super-resolution | 2160P | 25 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/City-1080p-2x_vsr.mp4 | |
+| 4x super-resolution | 4320P | 25 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/City-1080p-4x_vsr.mp4 | |
+| Noisy source | 1080P | 25 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/City-Noisy-1080p.mp4 | |
+| Denoised result | 1080P | 25 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/City-Noisy-1080p_Denoised.mp4 | |
+| Face source | 1080P | 25 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/Face.mp4 | |
+| Face enhancement result | 1080P | 25 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/Face-Enhancement.mp4 | |
+| Frame interpolation source | 1080P | 23.976 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/Waterdrop-24FPS-1080p.mp4 | |
+| 2x frame interpolation | 1080P | 47.952 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/Waterdrop-48FPS-1080p.mp4 | |
+| 4x frame interpolation | 1080P | 95.904 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/Waterdrop-96FPS-1080p.mp4 | |
+| SDR source | 2160P | 25 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/Color-SDR-1080p.mp4 | |
+| HDR without color enhancement | 2160P | 25 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/Color_HLG-1080p.mp4 | Requires a player or display that supports HLG |
+| HDR with Ascend color enhancement | 2160P | 25 | https://obs-ascend-test.obs.cn-east-2.myhuaweicloud.com/vsr/Color-Enhanced-HLG-1080p.mp4 | Requires a player or display that supports HLG |
+
+## Offline Inference Reference
+
+The Ascend [samples repository](https://gitee.com/ascend/samples) provides an [offline inference example](https://gitee.com/ascend/samples/tree/master/python/level2_simple_inference/6_other/video_super_resolution) of the EDVR super-resolution model for reference.
+
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/configs/codecs/default_sdr_x264.json b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/configs/codecs/default_sdr_x264.json
new file mode 100644
index 0000000000000000000000000000000000000000..e084257ba1211725a4ec82a2da782679397c752d
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/configs/codecs/default_sdr_x264.json
@@ -0,0 +1,9 @@
+{
+    "codec": {
+        "-crf": "10",
+        "-c:v": "libx264",
+        "-pix_fmt": "yuv420p",
+        "-vf": "zscale=rangein=full:range=limited:transferin=709:matrixin=709:primariesin=709:matrix=709:transfer=709:primaries=709:agamma=0"
+    },
+    "format": "mp4"
+}
\ No newline at end of file
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/configs/models/edvr_config.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/configs/models/edvr_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..d38fe2627f78abe3737f2ec7a39acc96c4df96ae
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/configs/models/edvr_config.py
@@ -0,0 +1,60 @@
+cfg = dict(
+    data=dict(
+        data_dir='/data2/vsr_datasets/reds',
+        train=dict(
+            batch_size=16,
+            input_size=[64, 64],
+            augmentation=dict(
+                apply=True,
+                interval_list=[1,2,3],
+                options="""
+                    RandomCrop:
+                        input_dim: 4
+                    RandomTemporalReverse:
+                        input_dim: 4
+                    RandomFlipLeftRight:
+                        input_dim: 4
+                    RandomFlipUpDown:
+                        input_dim: 4
+                """,
+            ),
+        ),
+    ),
+    edvr=dict(
+        with_tsa=True,
+        mid_channels=64,
+        use_dcn=False,
+        num_groups=1,
+        num_deform_groups=1,
+        num_blocks_extraction=5,
+        num_blocks_reconstruction=10,
+        upsampling='bilinear',
+        align_corners=False
+    ),
+    model=dict(
+        content_loss_reduction='mean',
+        content_loss_type='l1',
+        factor_for_adapt_input=4,
+        name='EDVR',
+        num_net_input_frames=5,
+        num_net_output_frames=1,
+        scale=4,
+        scope='G'
+    ),
+    loss=dict(
+        content=dict(
+            loss_type='L1Loss',
+            loss_reduction='mean'
+        ),
+    ),
+    train=dict(
+        print_interval=100,
+        output_dir='output/edvr',
+        generator=dict(
+            lr_schedule=dict(
+                total_steps=[10000]
+            )
+        )
+    ),
+    log_file='edvr_train.log',
+)
\ No newline at end of file
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/requirements.txt
b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..a0cff5f86ffff8ad9465fc71c20b9c28d85c89d2 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/requirements.txt @@ -0,0 +1,4 @@ +tensorflow==1.15 +opencv-python +yacs +tqdm \ No newline at end of file diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/8p.json b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/8p.json new file mode 100644 index 0000000000000000000000000000000000000000..4532a33910ee4aff2ccde7779255c5373f34ad88 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/8p.json @@ -0,0 +1,109 @@ +{ + "board_id": "0x002f", + "chip_info": "910", + "deploy_mode": "lab", + "group_count": "1", + "group_list": [ + { + "device_num": "8", + "server_num": "1", + "group_name": "", + "instance_count": "8", + "instance_list": [ + { + "devices": [ + { + "device_id": "0", + "device_ip": "192.168.100.101" + } + ], + "rank_id": "0", + "server_id": "172.17.1.120" + }, + { + "devices": [ + { + "device_id": "1", + "device_ip": "192.168.101.101" + } + ], + "rank_id": "1", + "server_id": "172.17.1.120" + }, + { + "devices": [ + { + "device_id": "2", + "device_ip": "192.168.102.101" + } + ], + "rank_id": "2", + "server_id": "172.17.1.120" + }, + { + "devices": [ + { + "device_id": "3", + "device_ip": "192.168.103.101" + } + ], + "rank_id": "3", + "server_id": "172.17.1.120" + }, + { + "devices": [ + { + "device_id": "4", + "device_ip": "192.168.100.100" + } + ], + "rank_id": "4", + "server_id": "172.17.1.120" + }, + { + "devices": [ + { + "device_id": "5", + "device_ip": "192.168.101.100" + } + ], + "rank_id": "5", + "server_id": "172.17.1.120" + }, + { + "devices": [ + { + "device_id": "6", + "device_ip": "192.168.102.100" + } + ], + "rank_id": "6", + "server_id": "172.17.1.120" + }, + { + "devices": [ + { + "device_id": "7", + "device_ip": "192.168.103.100" + } + ], + "rank_id": "7", + "server_id": "172.17.1.120" + } + ] + } + ], + "para_plane_nic_location": "device", + "para_plane_nic_name": [ + "eth0", + "eth1", + "eth2", + "eth3", + "eth4", + "eth5", + "eth6", + "eth7" + ], + "para_plane_nic_num": "8", + "status": "completed" +} diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/create_new_experiment.sh b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/create_new_experiment.sh new file mode 100644 index 0000000000000000000000000000000000000000..dfef0967bd4b2f59c87b08eb28dd66f010b31464 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/create_new_experiment.sh @@ -0,0 +1,12 @@ +#!/bin/bash +new_env=$1 +cur_dir=`pwd` +root_dir=${cur_dir} + +if [ ! 
-d ${new_env} ];then + mkdir ${root_dir}/${new_env} + cd ${root_dir}/${new_env} + ln -s ../src src + ln -s ../configs configs + ln -s ../scripts scripts +fi diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/env.sh b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/env.sh new file mode 100644 index 0000000000000000000000000000000000000000..789fb1a92be5248f26184f20edf2dcced7b3d5cd --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/env.sh @@ -0,0 +1,11 @@ +# !/bin/bash + +export CUSTOM_OP_LIB_PATH=/usr/local/Ascend/fwkacllib/ops/framework/built-in/tensorflow/ +export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/lib/:/usr/lib/:/usr/local/Ascend/fwkacllib/lib64/:/usr/local/Ascend/driver/lib64/common/:/usr/local/Ascend/driver/lib64/driver/:/usr/local/Ascend/add-ons/:/usr/local/Ascend/fwkacllib/lib64/plugin/opskernel:/usr/local/Ascend/fwkacllib/lib64/plugin/nnengine:/usr/local/Ascend/atc/lib64/plugin/opskernel:/usr/local/Ascend/atc/lib64/plugin/nnengine:/usr/local/Ascend/atc/lib64/stub:/usr/local/Ascend/acllib/lib64:/usr/local/python3.7/lib/:/usr/local/python3.7/lib/python3.7/site-packages/torch/lib/ +export PYTHONPATH=$PYTHONPATH:/usr/local/Ascend/atc/python/site-packages:/usr/local/Ascend/python/site-packages:/usr/local/Ascend/fwkacllib/python/site-packages:/usr/local/Ascend/fwkacllib/python/site-packages/auto_tune.egg/auto_tune:/usr/local/Ascend/fwkacllib/python/site-packages/schedule_search.egg:/usr/local/Ascend/opp/op_impl/built-in/ai_core/tbe:usr/local/Ascend/tfplugin/latest/tfplugin/python/site-packages:${PYTHONPATH} +export TOOLCHAIN_HOME=/usr/local/Ascend/toolkit +export PATH=$PATH:/usr/local/Ascend/fwkacllib/ccec_compiler/bin:/usr/local/Ascend/toolkit/bin:/usr/local/Ascend/fwkacllib/bin:/usr/local/Ascend/atc/bin:/usr/local/python3.7/bin/ +export ASCEND_OPP_PATH=/usr/local/Ascend/opp +export ASCEND_AICPU_PATH=/usr/local/Ascend +export SOC_VERSION=Ascend910 +export HCCL_CONNECT_TIMEOUT=600 diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/prepare_8p.sh b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/prepare_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..82bea58482c10cbcbb0a6a517eafe1a3fdedafcb --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/prepare_8p.sh @@ -0,0 +1,11 @@ +#!/bin/bash +cur_dir=`pwd` +root_dir=${cur_dir} + +mkdir data +for i in $(seq 0 7) +do + if [ ! 
-d "D$i" ];then + bash scripts/create_new_experiment.sh D${i} + fi +done diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/prepare_hccl_json.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/prepare_hccl_json.py new file mode 100644 index 0000000000000000000000000000000000000000..e0a6c2efdb659a8a2fad2e0420a9b563b7fc938b --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/prepare_hccl_json.py @@ -0,0 +1,41 @@ +import os +import sys +import copy +from collections import OrderedDict +import json + + +def parse_json(json_file): + with open(json_file, 'r') as f: + config = json.load(f, object_pairs_hook=OrderedDict) + return config + + +def generate_json(device_list, config, target_file): + new_config = copy.deepcopy(config) + device_insts = [] + insta_list = new_config["group_list"][0]["instance_list"] + rank = 0 + for inst in insta_list: + if inst["devices"][0]["device_id"] in device_list: + inst["rank_id"] = str(rank) + device_insts.append(inst) + rank += 1 + new_config["group_list"][0]["device_num"] = str(rank) + new_config["group_list"][0]["instance_count"] = str(rank) + new_config["group_list"][0]["instance_list"] = device_insts + + print(f'[INFO] Writing out hccl config json file to {target_file}') + with open(target_file, 'w') as f: + json.dump(new_config, f) + + +if __name__ == '__main__': + device_lists = sys.argv[1] + source_json_file = sys.argv[2] + target_file = sys.argv[3] + + device_lists = device_lists.strip().split(',') + config = parse_json(source_json_file) + + generate_json(device_lists, config, target_file) diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/run_freeze.sh b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/run_freeze.sh new file mode 100644 index 0000000000000000000000000000000000000000..d3bb74eb94bc7df02af04548d0382c4375cacee3 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/run_freeze.sh @@ -0,0 +1,38 @@ +#!/bin/bash +CKPT=$1 +CONFIG_FILE=$2 +DEVICE_ID=0 +DEVICE_RANK=1 + +source scripts/env.sh + +#export PRINT_MODEL=1 +export MOX_USE_NPU=1 +export FUSION_TENSOR_SIZE=2000000000 +export MOX_USE_TF_ESTIMATOR=0 +export MOX_USE_TDT=1 + +export HEARTBEAT=1 +export CONITNUE_TRAIN=true +export LOG_DIR=./log + +export ASCEND_GLOBAL_EVENT_ENABLE=0 +export ASCEND_GLOBAL_LOG_LEVEL=3 +export TF_CPP_MIN_LOG_LEVEL=3 + +# Turn profiling on +export JOB_ID=123456789 +export DEVICE_ID=${DEVICE_ID} +export DEVICE_INDEX=${DEVICE_ID} +export RANK_ID=${DEVICE_ID} +export RANK_SIZE=${DEVICE_RANK} +if [ ${DEVICE_RANK} -gt 1 ]; then + export RANK_TABLE_FILE=scripts/${DEVICE_RANK}p.json +fi + +rm -rf kernel_meta + +python3 src/main.py \ + --config-file ${CONFIG_FILE} \ + mode freeze \ + checkpoint ${CKPT} diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/run_inference.sh b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/run_inference.sh new file mode 100644 index 0000000000000000000000000000000000000000..a20db0742dd92b32b49d37e69fd5f08f281bb5bb --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/run_inference.sh @@ -0,0 +1,179 @@ +#!/bin/bash + +devices=$1 +models=$2 +dir=$3 +codec_file=$4 + +resource_file='resource.json' +if [ -z "$4" ]; then + 
+    codec_file=configs/codecs/default_sdr_x264.json
+fi
+video_file_ext=$(cat ${codec_file} | python3 -c "import sys, json; print(json.load(sys.stdin)['format'])")
+
+# Set models
+declare -A edvr=(
+["config"]="configs/models/edvr_config.py"
+["ckpt"]="outputs/edvr/TempoEDVR-280000"
+)
+
+readarray -d , -t models <<< "${models},"
+unset 'models[$((${#models[@]}-1))]'
+
+# Remove the last / if it exists
+echo "$dir" | grep '/$'
+if [ $? -eq 0 ]
+then
+    dir=${dir%/}
+fi
+
+# Set FPS
+FPS=$(echo $dir | grep -Eo '[0-9]+[\.]?[0-9]+FPS' | grep -Eo '[0-9]+[\.]?[0-9]+')
+FPS=$(awk -vp=$FPS -vq=1 'BEGIN{printf "%.3f" ,p * q}')
+
+# Check whether the input has already been frame-interpolated (vfi)
+if test "${dir#*vfi}" != "${dir}"
+then
+    FPS=$(awk -vp=${FPS} -vq=2 'BEGIN{printf "%0.3f" ,p * q}')
+fi
+
+# Create temp txt file to record subvideo names
+cur_dir=`pwd`
+txt_file="temp.txt"
+if [ -e "${dir_out}_videos/${txt_file}" ]
+then
+    rm -f ${dir_out}_videos/${txt_file}
+fi
+
+source scripts/env.sh
+
+function cmd() {
+    device_id=$1
+    device_rank=$2
+    rank_id=$3
+    model_name=$4
+    dir_in=$5
+    dir_out=$6
+    io_backend=$7
+
+    # Turn profiling on
+    export JOB_ID=123456789
+    export DEVICE_ID=${device_id}
+    export DEVICE_INDEX=${device_id}
+    export RANK_ID=${rank_id}
+    export RANK_SIZE=${device_rank}
+
+    export MOX_USE_NPU=1
+    export FUSION_TENSOR_SIZE=2000000000
+    export MOX_USE_TF_ESTIMATOR=0
+    export MOX_USE_TDT=1
+
+    export HEARTBEAT=1
+    export CONITNUE_TRAIN=true
+    export LOG_DIR=./log
+
+    export ASCEND_GLOBAL_EVENT_LEVEL=0
+    export ASCEND_GLOBAL_EVENT_ENABLE=0
+    export ASCEND_GLOBAL_LOG_LEVEL=3
+    export TF_CPP_MIN_LOG_LEVEL=3
+
+    rm -rf kernel_meta
+    rm -rf ~/ascend/log/plog
+
+    declare -n model="$model_name" # model is a reference
+    python3 src/main.py \
+        --config-file ${model["config"]} \
+        mode inference \
+        data.data_dir ${dir_in} \
+        data.inference.auto_adapt_input True \
+        inference.result_dir ${dir_out} \
+        inference.io_backend ${io_backend} \
+        inference.ffmpeg.video_filename ${rank_id}.${video_file_ext} \
+        inference.ffmpeg.codec_file ${codec_file} \
+        inference.ffmpeg.fps ${FPS} \
+        env.rank_size ${RANK_SIZE} \
+        checkpoint ${model["ckpt"]} \
+        env.device 'npu'
+}
+
+# read device id to list
+function mfcb { local val="$4"; "$1"; eval "$2[$3]=\$val;"; };
+function val_ltrim { if [[ "$val" =~ ^[[:space:]]+ ]]; then val="${val:${#BASH_REMATCH[0]}}"; fi; };
+function val_rtrim { if [[ "$val" =~ [[:space:]]+$ ]]; then val="${val:0:${#val}-${#BASH_REMATCH[0]}}"; fi; };
+function val_trim { val_ltrim; val_rtrim; }
+
+if [[ -z "$1" ]]; then
+    echo "[INFO] device_id not set. Input argument could be like '1' or '0,1,2'."
+    echo "[INFO] Set device_id=0 by default."
+    device_list=0
+    device_rank=1
+else
+    readarray -c1 -C 'mfcb val_trim device_list' -td, <<<"$devices,"; unset 'device_list[-1]'; declare -a device_list;
+    device_rank=${#device_list[@]}
+fi
+echo "[INFO] device_list: ${device_list[@]}"
+echo "[INFO] device_rank: ${device_rank}"
+
+
+cnt=0
+io_backend="disk"
+for model_name in "${models[@]}"; do
+    if [[ "$model_name" =~ "vfi" ]]; then
+        # if model_name contains "vfi", multiply the fps
+        # bash does not support floating point
+        FPS=$(awk -vp=$FPS -vq=2 'BEGIN{printf "%.3f" ,p * q}')
+    fi
+
+    if [[ "$model_name" =~ "hdr" && "$4" = "" ]]; then
+        codec_file=configs/codecs/exr2020_to_hlg_hdr_x264.json
+        video_file_ext=$(cat ${codec_file} | python3 -c "import sys, json; print(json.load(sys.stdin)['format'])")
+    fi
+
+    cnt=$(( $cnt + 1 ))
+
+    dir_out="${dir}_${model_name}"
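+    # Each model writes its outputs to <input_dir>_<model_name>; at the end of the
+    # loop this folder becomes the input of the next model in the chain.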
+    if [ ! -d ${dir_out} ]; then
+        mkdir ${dir_out}
+    fi
+
+    # set video output for the last model
+    if [ $cnt -eq ${#models[@]} ]; then
+        io_backend="disk:ffmpeg"
+        if [ ! -d "${dir_out}_videos" ]; then
+            mkdir ${dir_out}_videos
+        fi
+    fi
+
+    if [ $device_rank -gt 1 ]; then
+        max_device_rank=`expr ${device_rank} - 1`
+        for d_id in ${!device_list[@]}; do
+            cd ${cur_dir}
+            bash scripts/create_new_experiment.sh D_${d_id}
+            cd D_${d_id}
+            # set video output for the last model
+            if [ $cnt -eq ${#models[@]} ]; then
+                # write video name to text file
+                echo "file ${dir_out}_videos/${d_id}.${video_file_ext}" >> ${dir_out}_videos/${txt_file}
+            fi
+            # inference
+            if [ $d_id -ne ${max_device_rank} ]; then
+                cmd ${device_list[$d_id]} ${device_rank} ${d_id} ${model_name} ${dir} ${dir_out} ${io_backend} &
+            else
+                cmd ${device_list[$d_id]} ${device_rank} ${d_id} ${model_name} ${dir} ${dir_out} ${io_backend} || exit 1
+            fi
+
+        done
+        # wait until all jobs are done
+        wait < <(jobs -p)
+        # concat all subvideos after the last model inference
+        if [ $cnt -eq ${#models[@]} ]; then
+            ffmpeg -y -f concat -safe 0 -i ${dir_out}_videos/${txt_file} -c copy ${dir_out}.${video_file_ext}
+        fi
+    else
+        cmd ${device_list[0]} ${device_rank} ${device_list[0]} ${model_name} ${dir} ${dir_out} ${io_backend} || exit 1
+        mv ${dir_out}_videos/${device_list[0]}.${video_file_ext} ${dir_out}.${video_file_ext}
+    fi
+    # update path
+    dir="${dir_out}"
+done
+
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/run_train.sh b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/run_train.sh
new file mode 100644
index 0000000000000000000000000000000000000000..583f8443828b5ef73923cb1f989defe569e8b360
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/run_train.sh
@@ -0,0 +1,89 @@
+#!/bin/bash
+
+devices=$1
+config_file=$2
+
+source scripts/env.sh
+
+function cmd() {
+    device_id=$1
+    device_rank=$2
+    rank_id=$3
+    config_file=$4
+    rank_table_file=$5
+
+    # Turn profiling on
+    export JOB_ID=123456789
+    export DEVICE_ID=${device_id}
+    export DEVICE_INDEX=${device_id}
+    export RANK_ID=${rank_id}
+    export RANK_SIZE=${device_rank}
+    if [ -n "$rank_table_file" ]; then
+        export RANK_TABLE_FILE=${rank_table_file}
+    fi
+
+    export MOX_USE_NPU=1
+    export FUSION_TENSOR_SIZE=2000000000
+    export MOX_USE_TF_ESTIMATOR=0
+    export MOX_USE_TDT=1
+
+    export HEARTBEAT=1
+    export CONITNUE_TRAIN=true
+    export LOG_DIR=./log
+
+    export ASCEND_GLOBAL_EVENT_LEVEL=0
+    export ASCEND_GLOBAL_EVENT_ENABLE=0
+    export ASCEND_GLOBAL_LOG_LEVEL=3
+    export TF_CPP_MIN_LOG_LEVEL=3
+
+    python3 src/main.py \
+        --config-file ${config_file} \
+        env.rank_size ${device_rank} \
+        env.device 'npu'
+}
+
+# read device id to list
+function mfcb { local val="$4"; "$1"; eval "$2[$3]=\$val;"; };
+function val_ltrim { if [[ "$val" =~ ^[[:space:]]+ ]]; then val="${val:${#BASH_REMATCH[0]}}"; fi; };
+function val_rtrim { if [[ "$val" =~ [[:space:]]+$ ]]; then val="${val:0:${#val}-${#BASH_REMATCH[0]}}"; fi; };
+function val_trim { val_ltrim; val_rtrim; }
+
+if [[ -z "$1" ]]; then
+    echo "[INFO] device_id not set. Input argument could be like '1' or '0,1,2'."
+    echo "[INFO] Set device_id=0 by default."
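+    # With no argument, fall back to a single-device run on device 0.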
+ device_list=0 + device_rank=1 +else + readarray -c1 -C 'mfcb val_trim device_list' -td, <<<"$devices,"; unset 'device_list[-1]'; declare -a device_list; + device_rank=${#device_list[@]} +fi +echo "[INFO] device_list: ${device_list[@]}" +echo "[INFO] device_rank: ${device_rank}" + +cur_dir=`pwd` +if [ $device_rank -gt 1 ]; then + source_json=scripts/8p.json + trimmed_dev_list=`echo ${device_list[@]} | tr -d ' '` + if [ ${device_rank} -eq 8 ]; then + target_json=$source_json + echo "[INFO] 8p using source hccl config file: ${target_json} ..." + else + target_json=scripts/${device_rank}p_${trimmed_dev_list}.json + echo "[INFO] (Re)Generating hccl config file: ${target_json} ..." + python3 scripts/prepare_hccl_json.py ${devices} ${source_json} ${target_json} + fi + + max_device_rank=`expr ${device_rank} - 1` + for d_id in ${!device_list[@]}; do + cd ${cur_dir} + bash scripts/create_new_experiment.sh D_${device_list[$d_id]} + cd D_${device_list[$d_id]} + if [ $d_id -ne ${max_device_rank} ]; then + cmd ${device_list[$d_id]} ${device_rank} ${d_id} ${config_file} ${target_json} & + else + cmd ${device_list[$d_id]} ${device_rank} ${d_id} ${config_file} ${target_json} && echo "[INFO] Train done." + fi + done +else + cmd ${device_list[$d_id]} ${device_rank} 0 ${config_file} +fi diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/video2frames.sh b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/video2frames.sh new file mode 100644 index 0000000000000000000000000000000000000000..edec343ca80fa558e29b273c5dd1b129e5d51cf9 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/scripts/video2frames.sh @@ -0,0 +1,170 @@ +#!/bin/bash + +helpFunction() +{ + echo "" + echo "Usage: $0 [-h] [-i VIDEO_PATH] [-o OUTPUT_PATH] [-f FILTERS] [-l LOG_DIR] [-q]" + echo -e "\t-i: Input source video file." + echo -e "\t-o: Output frames directory." + echo -e "\t-f: Filters for deinterlacing the video when it's interlaced. Default 'bobweaver'." + echo -e "\t-r: Frame rate if interlaced. Default '1' " + echo -e "\t-l: Log directory." + echo -q "\t-q: Quiet mode. Will not print any information." + exit 1 # Exit script after printing help +} + +while getopts "i:o:f:l:r:hq" opt +do + case "$opt" in + i ) video_path="$OPTARG" ;; + o ) frames_dir="$OPTARG" ;; + f ) filter="$OPTARG" ;; + l ) log_dir="$OPTARG" ;; + r ) rate="$OPTARG" ;; + q ) quiet=true ;; + h ) helpFunction ;; # Print helpFunction in case parameter is non-existent + esac +done + +if [ x"$video_path" = x"" -o ! -f "$video_path" ]; then + echo "[ERROR] Invalid video path: $video_path" + helpFunction +fi + +if [ x"$quiet" = x"" ]; then + quiet=false +fi + +if [ x"$rate" = x"" ]; then + rate='1' +fi + +timestamp=$(date '+%Y%m%dR%H%M%S') +if [ x"$log_dir" = x"" ]; then + log_dir=/tmp/.mindvideo +fi + +if [ ! 
-d "$log_dir" ]; then + mkdir -p $log_dir +fi +report_log=${log_dir}/${timestamp}_probe.log +extract_log=${log_dir}/${timestamp}_video.log + +# ======================================================================================= +# check which type of videos: +# progressive, pseudo-interlaced (will be treated as progressive), truly interlaced +# ======================================================================================= +test_nframes=400 +export FFREPORT=file=$extract_log +if [ -e $report_log ]; then + rm $report_log +fi + +ffmpeg -report -i ${video_path} -vframes $test_nframes -vf idet -f null - 2> $report_log +wait < <(jobs -p) + +nframes=( $(cat $report_log | grep 'Multi frame detection: ' | grep -woP '(\d+)') ) +# nframes: [tff, bff, progressive, undetermined] + +n_frame_interlaced=$(awk -vp=${nframes[0]} -vq=${nframes[1]} 'BEGIN{printf "%d" ,p + q}') +# echo $n_frame_interlaced + +if [ $n_frame_interlaced -gt ${nframes[2]} ]; then + type=interlaced +else + type=progressive +fi + +# if it's progressive, use none filter regardless of the previous settings +if [ $type = "progressive" ]; then + filter_name=none +elif [ x"$filter" = x"" ]; then + # else if it's interlaced, used bobweaver as the default deintelacing filter + filter_name=bobweaver +else + filter_name=$filter +fi + +if [ $quiet = "false" ]; then + echo "[INFO] Video type: $type; Filter: ${filter_name}" +fi + +# ====================================================================================== +# extract frames from video with the given deinterlacing filter. +# record the fps first. +# ====================================================================================== +fps=$(ffprobe -v error -select_streams v -of default=noprint_wrappers=1:nokey=1 -show_entries stream=r_frame_rate $video_path) +fps=$(echo "print(f'{$fps:.3f}')" | python3) +frames_fps=$fps +#n_total_frames=$(ffprobe -v error -select_streams v:0 -count_packets -of default=noprint_wrappers=1:nokey=1 -show_entries stream=nb_read_packets $video_path) + +# determine the filter +if [ "$filter_name" = "bobweaver" ]; then + if [ $type = "interlaced" ] && [ $rate = '2' ] ; then + # deinterlace with 2x fps + filter_cmd="-vf bwdif=1:0:0" + frames_fps=$(echo "print(f'{2*$frames_fps:.3f}')" | python3) + else + filter_cmd="-vf bwdif=0:0:0" + fi +elif [ "$filter_name" = "yadif" ]; then + if [ $type = "interlaced" ] && [ $rate = '2' ] ; then + # deinterlace with 2x fps + filter_cmd="-vf yadif=1:0:0" + frames_fps=$(echo "print(f'{2*$frames_fps:.3f}')" | python3) + else + filter_cmd="-vf yadif=0:0:0" + fi +elif [ "$filter_name" = "QTGMC" ]; then + if [ $type = "interlaced" ] && [ $rate = '2' ] ; then + filter_cmd="50fps.vpy" + frames_fps=$(echo "print(f'{2*$frames_fps:.3f}')" | python3) + else + filter_cmd="25fps.vpy" + fi +elif [ "$filter_name" = "none" ]; then + filter_cmd="" +fi + +frames_dir=$frames_dir/${frames_fps}FPS_frames + +if [ ! 
-d "$frames_dir" ]; then + mkdir -p $frames_dir +fi + +# ============================================================================== +# check whether is HDR +# ============================================================================== +COLORS=$(ffprobe -show_streams -v error "${video_path}" |egrep "^color_transfer|^color_space=|^color_primaries=" |head -3) +for C in $COLORS; do + if [[ "$C" = "color_space="* ]]; then + COLORSPACE=${C##*=} + elif [[ "$C" = "color_transfer="* ]]; then + COLORTRANSFER=${C##*=} + elif [[ "$C" = "color_primaries="* ]]; then + COLORPRIMARIES=${C##*=} + fi +done + +if [ "${COLORSPACE}" = "bt2020nc" ] && [ "${COLORTRANSFER}" = "smpte2084" ] && [ "${COLORPRIMARIES}" = "bt2020" ]; then + ext='exr' +elif [ "${COLORSPACE}" = "bt2020nc" ] && [ "${COLORTRANSFER}" = "arib-std-b67" ] && [ "${COLORPRIMARIES}" = "bt2020" ]; then + ext='exr' +else + ext='png' +fi + +if [ $quiet = "false" ]; then + echo "[INFO] Extracting frames from ${video_path}. This may take a while." + echo "[INFO] Cmd: ffmpeg -i ${video_path} $filter_cmd $frames_dir/%08d.${ext}" +fi + +if [ "$filter_name" = "QTGMC" ]; then + # This is only valid when in x86 + vspipe --y4m $filter_cmd -a "video_path=${video_path}" - | ffmpeg -i pipe: $frames_dir/%08d.${ext} +else + ffmpeg -i ${video_path} $filter_cmd $frames_dir/%08d.${ext} +fi +wait < <(jobs -p) + +echo "$type, ${fps}, ${frames_fps}" \ No newline at end of file diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ed995e3d4b86bc4ed44ac806dca7afe1598bd5ce --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/__init__.py @@ -0,0 +1,160 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from src.dataloaders.dataloader import TfDataloader +from src.utils.exceptions import * +from src.utils.logger import logger +from src.utils.world import world + + +def build_train_dataloader(config, _world=None): + """ + Build train dataloader given config. + + Args: + config: yacs node, configuration. + + Returns: + generator of list[tensor]. 
+ """ + + # Import the dataset classes only when needed to avoid import error + from src.dataloaders.train_dataset import ( + OfflineTrainDataset, + OnlineTrainDataset, + DummyTrainDataset, + MixtureDatasets, + ) + + # Support multi-dataset whose path are concated with ':' + # in cfg.data.data_dir + data_dir_list = config.data.data_dir.split(':') + task = config.task + online_degradation_mode = config.data.train.degradation.online + distributed = config.env.rank_size > 1 + device = config.env.device + batchsize = config.data.train.batch_size + + world_inst = _world or world + # _world should be initialized + if not world_inst.is_initialized: + raise WorldUninitializedError('World not initialized.') + + if config.debug_mode: + dataset_cls = DummyTrainDataset + elif online_degradation_mode: + dataset_cls = OnlineTrainDataset + else: + dataset_cls = OfflineTrainDataset + + if len(data_dir_list) > 1: + dataset = MixtureDatasets.from_datadir( + dataset_cls, data_dir_list, cfg=config) + else: + dataset = dataset_cls(data_dir=data_dir_list[0], cfg=config) + + dataloader = TfDataloader(dataset, batchsize, + distributed=distributed, + device=device) + return dataloader.batch_list + + +def build_test_dataloader(config, _world=None): + """ + Build inference dataloader given config. + + Args: + config: yacs node, configuration. + + Returns: + dict, contains the data term. + """ + from src.dataloaders.test_dataset import ( + VSRTestDataset, + DenoiseTestDataset, + VFITestDataset, + HDRTestDataset, + DummyTestDataset, + MixtureTestDataset, + ComposedTestDataset + ) + + TASK_MAP_TO_DATASET = { + 'vsr': VSRTestDataset, + 'denoise': DenoiseTestDataset, + 'vfi': VFITestDataset, + 'hdr': HDRTestDataset, + 'face': DenoiseTestDataset, + } + + data_dir_list = config.data.data_dir.split(':') + distributed = config.env.rank_size > 1 + task = config.task + world_inst = _world or world + # _world should be initialized + if not world_inst.is_initialized: + raise WorldUninitializedError('World not initialized.') + + assert task in TASK_MAP_TO_DATASET + + if config.debug_mode: + dataset_cls = DummyTestDataset + else: + dataset_cls = TASK_MAP_TO_DATASET[task] + + if config.debug_mode: + dataset = dataset_cls(data_dir=data_dir_list[0], cfg=config) + elif len(data_dir_list) > 1: + # For multi-dataset + dataset = MixtureTestDataset.from_datadir( + dataset_cls, data_dir_list, cfg=config) + else: + files = os.listdir(data_dir_list[0]) + if os.path.isdir(os.path.join(data_dir_list[0], files[0])): + # For dataset with multiple clips + dataset = ComposedTestDataset.from_datadir( + dataset_cls, data_dir_list[0], files, cfg=config + ) + else: + # Foe a single dataset with frames + dataset = dataset_cls(data_dir=data_dir_list[0], cfg=config) + + # Manually shard the dataset to inference on multiple devices. + if distributed: + dataset.shard(world_inst.rank_size, world_inst.rank_id) + return dataset + + +def build_dataloader(cfg, **kwargs): + """ + Build dataloader given scenario and configurations. + + Args: + cfg: yacs node, global configuration. + **kwargs: argument dicts. 
+ """ + if cfg.mode in ['train', 'eval']: + dataloader = build_train_dataloader(cfg) + elif cfg.mode in 'inference': + dataloader = build_test_dataloader(cfg, **kwargs) + elif cfg.mode == 'freeze': + dataloader = None + else: + raise KeyError + return dataloader + + +__all__ = ['build_train_dataloader', 'build_test_dataloader', 'build_dataloader'] diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/dataloader.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/dataloader.py new file mode 100644 index 0000000000000000000000000000000000000000..c66399343856674c0b4192adbae1b5be6c4fe5c9 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/dataloader.py @@ -0,0 +1,114 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import tensorflow as tf + + +class TfDataloader: + """A wrapper of the tensorflow Dataset class. + + This class aims to make the dataset construction more convenient. Users + are only required to implement the dataset class and return the specific + information, then TfDataloader will wrap the dataset class and load the + data, similar to Pytorch dataset and dataloader. + + Args: + dataset: Dataset class. See `src.dataloaders.train_dataset` + batch_size: int + drop_remainder: boolean, whether drop the last remainder terms. + Necessary on Ascend NPU. Default is True. + distributed: boolean, whether to use distribute dataloader. + shuffle: boolean, whether to shuffle the dataset. + repeat: boolean, whether to repeat the dataset (usually in training). + device: str, hardware used for accelerating. Optional in ['npu', 'cpu'] + """ + def __init__(self, dataset, batch_size=2, drop_remainder=True, + distributed=False, shuffle=True, repeat=True, device='npu', + _world=None): + self.batch_size = batch_size + self.drop_remainder = drop_remainder + self.distributed = distributed + self.device = device + self.dataset = dataset + self.shuffle = shuffle + self.repeat = repeat + self.world = _world + self.sample_indices = list(range(len(self.dataset))) + + self.build_iterator() + + def get_item(self, index): + """ Tensorflow wrapper of the _get_item method of Dataset class + + Args: + index: int, called by tensorflow.data.Dataset.map function. + """ + # The dtype and shape are defined by the dataset. Otherwise, + # tf does not know the shape. 
+        data = tf.numpy_function(lambda x: self.dataset[x],
+                                 [index],
+                                 self.dataset.data_dtype)
+
+        for d, shape in zip(data, self.dataset.data_shape):
+            d.set_shape(tuple(shape))
+        return data
+
+    def build_iterator(self):
+        """ Build dataloader iterator """
+        video_dataset = tf.data.Dataset.from_tensor_slices(self.sample_indices)
+
+        if self.shuffle:
+            video_dataset = video_dataset.shuffle(len(self.dataset))
+
+        video_dataset = video_dataset.map(self.get_item,
+                                          num_parallel_calls=tf.data.experimental.AUTOTUNE)
+        video_dataset = video_dataset.batch(self.batch_size,
+                                            drop_remainder=self.drop_remainder)
+
+        if self.repeat:
+            video_dataset = video_dataset.repeat()
+
+        if self.distributed:
+            video_dataset = video_dataset.shard(self.world.rank_size,
+                                                self.world.rank_id)
+
+        video_dataset = video_dataset.prefetch(buffer_size=tf.data.experimental.AUTOTUNE)
+
+        iterator = video_dataset.make_one_shot_iterator()
+
+        # tensorflow will wrap all the numpy.ndarray as tensors
+        self.batch_list = iterator.get_next()
+
+
+def build_dataloader(dataset, batch_size=2, drop_remainder=True, distributed=False,
+                     shuffle=True, repeat=True, device='npu'):
+    """
+    Build dataloader given the dataset.
+
+    Args:
+        dataset: Dataset class. See `src.dataloaders.train_dataset`
+        batch_size: int
+        drop_remainder: boolean, whether to drop the last remainder terms.
+            Necessary on Ascend NPU. Default is True.
+        distributed: boolean, whether to use a distributed dataloader.
+        shuffle: boolean, whether to shuffle the dataset.
+        repeat: boolean, whether to repeat the dataset (usually in training).
+        device: str, hardware used for acceleration. Optional in ['npu', 'cpu']
+
+    Returns:
+        list[tensorflow tensor]
+    """
+    dataloader = TfDataloader(dataset, batch_size, drop_remainder,
+                              distributed, shuffle, repeat, device)
+    return dataloader.batch_list
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/test_dataset.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/test_dataset.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6c1a148eb43b7924b04e9c25509336b3ca9fd7a
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/test_dataset.py
@@ -0,0 +1,668 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+import os
+
+import numpy as np
+from src.dataloaders.utils import (
+    get_consecutive_frame_indices,
+    load_batch_image,
+    supported_file_format
+)
+from src.utils.file_io import imread
+from src.utils.logger import logger
+
+
+class _TestDataset:
+    """ The base class for test datasets.
+
+    The derived classes should implement these functions:
+        _get_item: an indexing-like item fetching method.
+        data_shape: returns the shape of each item produced by _get_item.
+            Shapes are like (t, h_lq, w_lq, c), (t, h_gt, w_gt, c).
+        data_dtype: returns the tensorflow dtype of each item produced
+            by _get_item.
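+
+    A minimal subclass sketch (illustrative names only):
+
+        class MyTestDataset(_TestDataset):
+            def _get_item(self, index):
+                meta = self.sample_list[index]
+                lq = ...    # load the frames for this sample
+                return dict(output_file=..., lq=lq)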
+
+    Args:
+        data_dir: str, top data directory of the test data.
+            Contains frames.
+        cfg: yacs node, global configuration.
+    """
+    def __init__(self, data_dir, cfg):
+        """ Initialization of the test dataset.
+
+        """
+        self.cfg = cfg
+        self.scale = cfg.model.scale
+
+        # Record the frame rate value, which will be used when determining
+        # the output filename.
+        self.frame_rate = 1
+        self.num_lq_frames = cfg.model.num_net_input_frames
+        self.color_space = cfg.data.color_space
+
+        # By default, all the frames will be inferred.
+        if len(cfg.data.inference.subset_range) == 2:
+            # If given the frame index range to infer
+            min_index = cfg.data.inference.subset_range[0]
+            max_index = cfg.data.inference.subset_range[1]
+
+            # Frame indices between [min_index, max_index] will be inferred
+            logger.info(f'Inference range {min_index}, {max_index} in {data_dir}.')
+            is_subset = lambda x: min_index <= x <= max_index
+        elif cfg.data.inference.subset_list:
+            # If given the list of indices to infer
+            logger.info(f'Inference list {cfg.data.inference.subset_list} in {data_dir}.')
+            is_subset = lambda x: x in cfg.data.inference.subset_list
+        elif len(cfg.data.inference.subset_range) > 0:
+            # This is an invalid setting
+            logger.error(f'cfg.data.inference.subset_range should be of length 2, '
+                         f'[min_index, max_index], '
+                         f'but is given {cfg.data.inference.subset_range}. '
+                         f'All the images will be inferred.')
+            is_subset = lambda x: True
+        else:
+            # This comp_fn will not be used, since `always_infer` is True
+            is_subset = lambda x: True
+            logger.info(f'Inference all images in {data_dir}.')
+
+        def traverse_folder(dir):
+            file_list = list(
+                sorted(
+                    filter(
+                        lambda x: supported_file_format(x),
+                        os.listdir(dir)
+                    )
+                )
+            )
+            num_frames = len(file_list)
+            base_index = int(file_list[0].split('.')[0])
+            ext = file_list[0].split('.')[1]
+            file_meta = [dict(source_folder=dir,
+                              filename=f,
+                              num_frames=num_frames,
+                              base_index=base_index,
+                              ext=ext)
+                         for f in file_list
+                         if is_subset(int(f.split('.')[0]))]
+            return file_meta
+
+        # Reserved value for nested folder structures.
+        # Will be used when saving the results.
+        self._clipname = ''
+
+        self.sample_list = traverse_folder(data_dir)
+        self.num_clips = 1
+
+        if len(self.sample_list) == 0:
+            raise FileNotFoundError(f'Found no files in {data_dir}')
+
+        # Preload one lq sample to obtain the input shape
+        if self.sample_list:
+            # Record the ext. For VSR/Denoise/VFI tasks, the output will follow
+            # the input ext. For HDR, the input ext could be 'png', the output
+            # ext will be 'exr'.
+            self.ext = self.sample_list[0]['ext']
+            self.output_ext = self.ext
+
+            center_frame_meta = self.sample_list[0]
+            filename = center_frame_meta['filename']
+            center_frame_index = int(filename[:-4])
+            num_digits = len(filename[:-4])
+            lq_path = center_frame_meta['source_folder']
+            lq_file = os.path.join(
+                lq_path,
+                f'{center_frame_index:0{num_digits}d}.{self.ext}')
+            im = imread(lq_file)
+            self.lq_size = im.shape[:2]
+
+            self.base_index = center_frame_meta['base_index']
+
+    @property
+    def raw_image_size(self):
+        """
+        Returns the raw input image size (h, w).
+
+        Returns:
+            tuple, the size of the input image
+        """
+        return self.lq_size
+
+    @property
+    def expect_output_file_ext(self):
+        """
+        Returns the expected output file ext. For tasks other than HDR,
+        the output ext is the same as the input ext. For HDR, the output
+        will be 'exr'.
+
+        Returns:
+            str, file extension.
+ """ + return self.output_ext + + @property + def expect_output_resolution(self): + """ Returns the expected output size (h, w). + + Returns: + tuple, the size of the output image + """ + return (self.scale * self.lq_size[0], self.scale * self.lq_size[1]) + + def __len__(self): + """ + Returns the number of samples to infer. When used in multi-device + inference. + + Returns: + int, the number of samples to infer. + """ + return len(self.sample_list) + + def __getitem__(self, item): + # No clue on the real input size, and thus does not require + # shape checking. + return self._get_item(item) + + def set_clip_name(self, clipname): + """ + Set the clip name for this dataset. + + Args: + clipname: str + """ + self._clipname = clipname + + def _shard_segment(self, rank_size, rank_id): + """ + Shard the data into ${rank_size} segments. For example, + case 1: + num_samples = 10, + rank_size = 3, + shard: [[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]], + shard len: [4, 3, 3] + case 2: + num_samples = 10, + rank_size = 2, + shard: [[0, 1, 2, 3, 4]. [5, 6, 7, 8, 9]], + shard len: [5, 5] + Args: + rank_size: int + rank_id: int, [0, rank_size) + """ + num_samples = len(self.sample_list) + res = num_samples % rank_size + shard_size_base = int(num_samples // rank_size) + + if rank_id < res: + start_idx = (shard_size_base + 1) * rank_id + end_idx = start_idx + shard_size_base + 1 + else: + start_idx = (shard_size_base + 1) * res + \ + shard_size_base * (rank_id - res) + end_idx = start_idx + shard_size_base + + if rank_size == rank_id + 1: + end_idx = num_samples + logger.info(f'Data shard {start_idx} - {end_idx - 1} (total {num_samples})', + force=True) + + self.sample_list = list(self.sample_list[start_idx:end_idx]) + + def _shard_interleave(self, rank_size, rank_id): + """ + Shard the data into ${rank_size} interlaced segments. For example, + case 1: + num_samples = 10, + rank_size = 3, + shard: [[0, 3, 6, 9], [1, 4, 7], [2, 5, 8]], + shard len: [4, 3, 3] + case 2: + num_samples = 10, + rank_size = 2, + shard: [[0, 2, 4, 6, 8], [1, 3, 5, 7, 9]], + shard len: [5, 5] + Args: + rank_size: int + rank_id: int, [0, rank_size) + """ + start_idx = rank_id + self.sample_list = list(self.sample_list[start_idx::rank_size]) + + def shard(self, rank_size, rank_id, segment=True): + """ + Shard the sample list according to the rank_size and rank_id + + Args: + rank_size: int + rank_id: int, [0, rank_size) + segment: boolean, whether to shard into consecutive segments + or interlaced segments. Default 'True' + """ + if segment: + self._shard_segment(rank_size, rank_id) + else: + self._shard_interleave(rank_size, rank_id) + + def _get_item(self, item): + raise NotImplementedError + + +class VSRTestDataset(_TestDataset): + """ + Test dataset for VSR task. + """ + def _get_item(self, index): + center_frame_meta = self.sample_list[index] + folder = center_frame_meta['source_folder'] + filename = center_frame_meta['filename'] + num_frames = center_frame_meta['num_frames'] + base_index = center_frame_meta['base_index'] + center_frame_index = int(filename.split('.')[0]) + num_digits = len(filename.split('.')[0]) + + lq_indices = get_consecutive_frame_indices( + center_frame_index, + self.num_lq_frames, + num_frames, + base_index, interval=1, + pad_mode='reflect' + ) + lq_files = [os.path.join(folder, f'{ind:0{num_digits}d}.{self.ext}') + for ind in lq_indices] + lq = load_batch_image(lq_files, target_color_space=self.color_space) + + if self.cfg.data.normalized and not (self.ext == 'exr'): + lq = np.clip(lq / 255., 0., 1.) 
+ + # Record the center frame id, which will be used when outputing the results. + center_frame_name = \ + f'{lq_indices[self.num_lq_frames//2]:0{num_digits}d}.{self.output_ext}' + + # If self._clipname is not empty, i.e., there exist several folders + # in the source lq folder + if self._clipname != '': + center_frame_name = os.path.join(self._clipname, center_frame_name) + + return dict(output_file=center_frame_name, lq=lq) + + +class DenoiseTestDataset(VSRTestDataset): + """ + Test dataset for Denoise task. + """ + def __init__(self, data_dir, cfg): + super().__init__(data_dir, cfg) + # Note that the output is the same size as the input in denoise. + self.scale = 1 + self.frame_rate = 1 + + +# DummyTestDataset for debug +class DummyTestDataset(_TestDataset): + """ + Dummy test daset for debugging. + """ + def __init__(self, data_dir, cfg): # pylint: disable=super-init-not-called + h = cfg.data.inference.input_size[0] + \ + (cfg.data.inference.patch_pad_size*2 + if cfg.data.inference.eval_using_patch else 0) + + w = cfg.data.inference.input_size[1] + \ + (cfg.data.inference.patch_pad_size*2 + if cfg.data.inference.eval_using_patch else 0) + + c = 1 if (cfg.data.color_space=='gray') else 3 + shape_lq = (cfg.model.num_net_input_frames, h, w, c) + self.sample_list = [np.zeros(shape_lq).astype(np.float32)] * 100 + logger.info(f'Using dummy test dataset with {len(self.sample_list)} ' + f'element (for debug only). with sizeof {shape_lq}') + self.lq_size = (h, w) + + def _get_item(self, index): + lq = self.sample_list[index] + return dict(output_file='dummy.png', lq=lq) + + +class VFITestDataset(VSRTestDataset): + """ + Test dataset for VFI task. + + The total number of output frames will be: + `self.num_sample_list * self.frame_rate` + where `self.num_sample_list + self.frame_rate - 1` frames are directly + copied from the input, and `(self.num_sample_list - 1) * (self.frame_rate - 1)` + frames are interpolated. + + Generally, if a model requires `M` input frames, and output `N*(self.frame_rate-1)` + frames each batch, then we set `self.num_lq_frames=M`, `self.num_interp_frames=N`, + and the num of key frames equals to `N+1`. + + We must be aware that not all input frames will be inserted with interpolated + frames. A model may require 4 input source frames [A, B, C, D], and interpolate + only 1 frame between B and C. The number of required input frames is indicated + by the `num_lq_frames`. Meanwhile in this case, only the center frames B and C + are `key frames` while A and D are just auxiliary information frames. The number + of `key frames` are given by `self.num_interp_frames + 1` with the assumption + that the key frames only lie in the center of the input frames. + """ + def __init__(self, data_dir, cfg): + super().__init__(data_dir, cfg) + # The frame rate is given by the model configuration. + self.frame_rate = cfg.model.frame_rate + # The number of the output frames in each batch, + # **not multiplying the frame_rate**. 
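+        # Worked example (illustrative): with frame_rate=2 and
+        # num_net_output_frames=1, one frame is interpolated between each
+        # pair of key frames, doubling the effective output fps.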
+ self.num_interp_frames = cfg.model.num_net_output_frames + + self.num_final_digits = int( + np.ceil( + np.log10(self.frame_rate * len(self.sample_list)))) + self.scale = 1 + + def __len__(self): + return len(self.sample_list) - self.num_interp_frames + + def _get_item(self, index): + # Get the initial key frame metadata + key_frame_ids = [] + frame_meta = self.sample_list[index] + lq_path = frame_meta['source_folder'] + filename = frame_meta['filename'] + num_total_frames = frame_meta['num_frames'] + base_index = frame_meta['base_index'] + num_digits = len(filename.split('.')[0]) + start_frame_index = int(filename.split('.')[0]) + + # The next self.num_interp_frames+1 frames are key frames + key_frame_ids.append(start_frame_index) + for i in range(start_frame_index+1, start_frame_index+self.num_interp_frames+1): + key_frame_ids.append(i) + num_final_digits = max(self.num_final_digits, num_digits) + + # Assuming the key frames are in the center of input frames, + # get the auxiliary frames + lq_indices = get_consecutive_frame_indices( + key_frame_ids, + self.num_lq_frames, + num_total_frames, base_index, + interval=1, + pad_mode='replicate' + ) + + lq_files = [os.path.join(lq_path, f'{ind:0{num_digits}d}.{self.ext}') + for ind in lq_indices] + lq = load_batch_image(lq_files, target_color_space=self.color_space) + if self.cfg.data.normalized: + lq = np.clip(lq / 255., 0., 1.) + + def _format_output_filename(frame_id, _num_digits, ext): + output_file = f'{frame_id:0{_num_digits}d}.{ext}' + if self._clipname != '': + # Format the output file with `${clip}/00000.png` like pattern + output_file = os.path.join(self._clipname, output_file) + return output_file + + # Prepare input copies for VFI output. + # Record both the source-target filename, as well as the key frames data, + # in the dict `input_file_copy`: + # key: target_output_file + # value: [source_input_file, target_output_data] + # One can use the copy the source_input_file or write out the data to + # target_file. 
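+        # Layout sketch of input_file_copy (illustrative names):
+        #     {'00000010.png': ['clip/00000005.png', <ndarray frame>], ...}
+        # i.e. target output filename -> [source input file, frame data].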
+        input_file_copy = dict()
+        output_files = []
+        for i, k_id in enumerate(key_frame_ids[:-1]):    # leave the last key frame
+            source_file = os.path.join(lq_path, f'{k_id:0{num_digits}d}.{self.ext}')
+
+            index_in_indices = lq_indices.index(k_id)
+            data = lq[index_in_indices]
+
+            new_frame_id = base_index + (k_id - base_index) * self.frame_rate
+            target_file = _format_output_filename(new_frame_id,
+                                                  num_final_digits,
+                                                  self.output_ext)
+
+            input_file_copy[target_file] = [source_file, data]
+            output_files.extend([_format_output_filename(new_frame_id+j+1,
+                                                         num_final_digits,
+                                                         self.output_ext)
+                                 for j in range(self.frame_rate-1)])
+
+        if index == len(self) - 1:    # copy the last key frame only when reaching the end
+            source_file = os.path.join(lq_path,
+                                       f'{key_frame_ids[-1]:0{num_digits}d}.{self.ext}')
+
+            index_in_indices = lq_indices.index(key_frame_ids[-1])
+            data = lq[index_in_indices]
+
+            new_frame_id = base_index + (key_frame_ids[-1] - base_index) * self.frame_rate
+            target_file = _format_output_filename(
+                new_frame_id,
+                num_final_digits,
+                self.output_ext)
+            input_file_copy[target_file] = [source_file, data]
+
+            # copy the final frame
+            for i in range(self.frame_rate-1):
+                new_frame_id = new_frame_id + 1
+                target_file = _format_output_filename(
+                    new_frame_id,
+                    num_final_digits,
+                    self.output_ext)
+                input_file_copy[target_file] = [source_file, data]
+
+        if len(output_files) == 1:
+            output_files = output_files[0]
+
+        return dict(output_file=output_files,
+                    lq=lq,
+                    input_copies=input_file_copy)
+
+    def _shard_segment(self, rank_size, rank_id):
+        num_samples = len(self.sample_list)
+        res = (num_samples - 1) % rank_size
+        shard_size_base = int((num_samples - 1) // rank_size)
+
+        if rank_id < res:
+            start_idx = (shard_size_base + 1) * rank_id
+            # enclose the last as the key frame
+            end_idx = start_idx + (shard_size_base + 1) + 1
+        else:
+            start_idx = (shard_size_base + 1) * res + shard_size_base * (rank_id - res)
+            # enclose the last as the key frame
+            end_idx = start_idx + shard_size_base + 1
+
+        if rank_size == rank_id + 1:
+            end_idx = num_samples
+        logger.info(f'Data shard {start_idx} - {end_idx - 1} (total {num_samples})', force=True)
+        self.sample_list = list(self.sample_list[start_idx:end_idx])
+        self.num_samples_shard = len(self.sample_list)
+        self.shard_flag = True
+
+
+class HDRTestDataset(VSRTestDataset):
+    """
+    Test dataset for the HDR task. The output ext should be set in
+    'cfg.data.extension'.
+    """
+    def __init__(self, data_dir, cfg):
+        super().__init__(data_dir, cfg)
+        self.output_ext = cfg.data.extension    # regardless of the input ext
+        self.frame_rate = 1
+        self.scale = 1
+
+
+class ComposedTestDataset(_TestDataset):
+    """ Test dataset for a test directory with multiple clips.
+    """
+    def __init__(self):    # pylint: disable=super-init-not-called
+        self._datasets = None
+        self.num_samples_list = []
+
+    @staticmethod
+    def from_datasets(*datasets):
+        """
+        Construct a composed dataset from a collection of sub-dataset instances.
+
+        Args:
+            *datasets: list of test datasets.
+
+        Returns:
+            a ComposedTestDataset instance
+        """
+        cls = ComposedTestDataset()
+        cls._datasets = list(datasets)
+        cls.num_samples_list = [len(d) for d in cls._datasets]
+        return cls
+
+    @staticmethod
+    def from_datadir(subcls, data_dir, clip_list, cfg):
+        """
+        Construct a composed dataset from a collection of dataset folders.
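+
+        Example (illustrative; one sub-folder per clip under data_dir):
+            dataset = ComposedTestDataset.from_datadir(
+                VSRTestDataset, data_dir, ['clip_000', 'clip_001'], cfg)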
+
+        Args:
+            subcls: class type, task class
+            data_dir: str, top data folder
+            clip_list: list of str, clips in the data_dir
+            cfg: yacs node
+        """
+        cls = ComposedTestDataset()
+        datasets = []
+        for clip_name in clip_list:
+            data_clip = os.path.join(data_dir, clip_name)
+            sub_datasets = subcls(data_clip, cfg)
+            sub_datasets.set_clip_name(clip_name)
+            datasets.append(sub_datasets)
+        cls._datasets = datasets
+        cls.num_samples_list = [len(d) for d in cls._datasets]
+        return cls
+
+    def get_datasets(self, index):
+        # Iterate over the datasets to locate the queried sample
+        dataset_id = 0
+        for dataset_id, num in enumerate(self.num_samples_list):
+            if index - num < 0:
+                break
+            index -= num
+        return dataset_id, index
+
+    def _get_item(self, item):
+        dataset_id, index = self.get_datasets(item)
+        data = self._datasets[dataset_id][index]
+        return data
+
+    @property
+    def expect_task_output_meta_info(self):
+        return self._datasets[0].expect_task_output_meta_info
+
+    @property
+    def raw_image_size(self):
+        return self._datasets[0].raw_image_size
+
+    @property
+    def expect_output_file_ext(self):
+        return self._datasets[0].expect_output_file_ext
+
+    def __len__(self):
+        return np.sum(self.num_samples_list)
+
+    def shard(self, rank_size, rank_id):
+        # Shard not supported for now.
+        raise NotImplementedError('Composed dataset does not support data sharding.')
+
+
+class MixtureTestDataset(_TestDataset):
+    """
+    Test dataset for a test directory with multiple dataset folders.
+    """
+    def __init__(self):    # pylint: disable=super-init-not-called
+        self._datasets = None
+        self.num_samples_list = [0]
+
+    @staticmethod
+    def from_datasets(*datasets):
+        """
+        Construct a mixture dataset from a list of test datasets.
+
+        Args:
+            *datasets: list of test dataset instances. Each should return
+                data terms with the same dtype and shape.
+
+        Returns:
+            a MixtureTestDataset instance
+        """
+        cls = MixtureTestDataset()
+        cls._datasets = list(datasets)
+        cls.num_samples_list = [len(d) for d in cls._datasets]
+        return cls
+
+    @staticmethod
+    def from_datadir(subcls, data_dir_list, cfg):
+        """
+        Construct a mixture dataset from a list of data directories.
+
+        Args:
+            subcls: test dataset class type
+            data_dir_list: list(str), each is a top directory of a dataset.
+                Each should return data terms with the same dtype and shape.
+ cfg: yacs Node, global configuration + + Returns: + a MixtureDatasets instance + """ + cls = MixtureTestDataset() + datasets = [] + for data_dir in data_dir_list: + files = os.listdir(data_dir) + if os.path.isdir(os.path.join(data_dir, files[0])): + sub_datasets = ComposedTestDataset.from_datadir( + subcls, + data_dir, + files, + cfg + ) + else: + sub_datasets = subcls(data_dir, cfg) + datasets.append(sub_datasets) + cls._datasets = list(datasets) + cls.num_samples_list = [len(d) for d in cls._datasets] + return cls + + @property + def data_dtype(self): + return self._datasets[0].data_dtype + + @property + def data_shape(self): + return self._datasets[0].data_shape + + def get_datasets(self, index): + dataset_id = 0 + for dataset_id, num in enumerate(self.num_samples_list): + if index - num < 0: + break + index -= num + return dataset_id, index + + def _get_item(self, item): + dataset_id, index = self.get_datasets(item) + return self._datasets[dataset_id][index] + + @property + def output_meta_info(self): + return self._datasets[0].output_meta_info + + @property + def raw_image_size(self): + return self._datasets[0].raw_image_size + + def __len__(self): + return np.sum(self.num_samples_list) diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/train_dataset.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/train_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..d7f2355103a2c72a3854e749a031eeb01d89df0f --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/train_dataset.py @@ -0,0 +1,553 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import os +import random + +import cv2 +import numpy as np + +import tensorflow as tf +import yaml +from tqdm import tqdm + +from src.dataloaders.utils import ( + gen_pattern, + pad_list, + get_consecutive_frame_indices, + batch_dim_squeeze, + load_batch_image, + supported_file_format +) + +from src.utils.degradation import Degradation +from src.utils.transform import Compose +from src.utils.file_io import imread +from src.utils.logger import logger + + +class _TrainDataset: + """The base class for training dataset. + + The derived classes should implement these functions: + _get_item: an indexing like item fetching method. + data_shape: returns the shape of each item produced by _get_item. + Shapes are like (t, h_lq, w_lq, c), (t, h_gt, w_gt, c). + data_dtype: returns the tensorflow dtype of each item produced + by _get_item. + + Supported data folder structure: + 1. All train datasets class support this structure + data + `-- reds + |-- images + | |-- lq + | | |-- 000 + | | | |-- 00000000.png + | | | |-- 00000001.png + | | | |-- 00000002.png + | | | |-- ... + | | | `-- 00000099.png + | | `-- 001 + | | |-- 00000000.png + | | |-- 00000001.png + | | |-- 00000002.png + | | |-- ... 
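+
+    For instance (illustrative), a subclass yielding (lq, gt) pairs reports
+    data_shape like ((t, h, w, c), (t, h*scale, w*scale, c)) and data_dtype
+    like (tf.float32, tf.float32), as OfflineTrainDataset does below.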
+ | | `-- 00000099.png + | `-- gt + | |-- 000 + | | |-- 00000000.png + | | |-- 00000001.png + | | |-- 00000002.png + | | |-- ... + | | `-- 00000099.png + | `-- 001 + | |-- 00000000.png + | |-- 00000001.png + | |-- 00000002.png + | |-- ... + | `-- 00000099.png + `-- sets + |-- train.json + `-- val.json + 2. Online datasets class support this: + reds_gt + |-- 000 + | |-- 00000000.png + | |-- 00000001.png + | |-- 00000002.png + | |-- ... + | `-- 00000099.png + `-- 001 + |-- 00000000.png + |-- 00000001.png + |-- 00000002.png + |-- ... + `-- 00000099.png + + Args: + data_dir: str, top data directory of the train dataset. + Should include the `images` and `sets` sub-folders. + cfg: yacs node, global configuration. + """ + def __init__(self, data_dir, cfg): + """ + Initialization of train dataset. + + """ + self.cfg = cfg + self.num_lq_frames = cfg.data.num_data_lq_frames + self.num_gt_frames = cfg.data.num_data_gt_frames + self.interval_list = cfg.data.train.augmentation.interval_list + self.augment = cfg.data.train.augmentation.apply + self.scale = cfg.model.scale + self.set_file = os.path.join(data_dir, cfg.data.train.set_file) + self.crop_size = cfg.data.train.input_size + self.color_space = cfg.data.color_space + + # TODO: change the data structure + clip_list = self.parse_datafolder_structure(data_dir, self.set_file) + self.sample_list = [] + + # Store all the frame metadata of all the clips in the folder + for vid in clip_list: + in_path = self.gt_path_pattern.format(vid) + file_list = list( + sorted( + filter( + lambda x: supported_file_format(x), + os.listdir(in_path) + ) + ) + ) + num_frames = len(file_list) + base_index = int(file_list[0].split('.')[0]) + ext = file_list[0].split('.')[1] + + # File metadata includes `clip` name, file name itself, total number + # of frames in the clip, the starting id of the frames, and file + # extension (png, exr, .etc). + file_meta = [dict(clip=vid, + filename=f, + num_frames=num_frames, + base_index=base_index, + ext=ext) + for f in file_list] + + self.sample_list.extend(file_meta) + + if len(self.sample_list) == 0: + raise FileNotFoundError(f'Found no files in {data_dir}') + else: + logger.info(f'Found {len(self.sample_list)} files in {data_dir}') + + def parse_datafolder_structure(self, data_dir, set_file): + """ + Parse the default dataset structure. + + Args: + data_dir: str, the top folder of the dataset. + set_file: str, the json indicating the clips (both lq and gt) + + Returns: + list of str, the names of the clips (in lq and corresponding gt) + """ + if not self.cfg.data.train.degradation.online: + self.lq_path_pattern = os.path.join(data_dir, 'lq', '{}') + self.gt_path_pattern = os.path.join(data_dir, 'gt', '{}') + + if os.path.exists(set_file): + clip_list = [] + with open(set_file, 'r') as f: + for line in f.readlines(): + clip_list.append(line.strip()) + else: + clip_list = sorted(os.listdir(os.path.join(data_dir, 'gt'))) + + return clip_list + + def __len__(self): + """ + Total number of samples for training. + + Returns: + int, the number of training samples + """ + return len(self.sample_list) + + def check_shape(self, data): + """ + Called after '_get_item' to check whether the real shapes are the + same with the expected in 'data_shape'. 
+ + Args: + data: + + Returns: + + """ + for index, d, shape in zip(range(len(data)), data, self.data_shape): + assert tuple(d.shape) == tuple(shape), \ + f'Expect return data at pos {index} to have shape {shape}, ' \ + f'but got {d.shape}' + + @property + def data_shape(self): + """ + Returns the shape of each item produced by _get_item. Shapes + are like (t, h_lq, w_lq, c), (t, h_gt, w_gt, c). + + Returns: + tuple of shapes, each can be like (t, h, w, c) + """ + raise NotImplementedError + + @property + def data_dtype(self): + """ + Returns the tensorflow dtype of each item produced by _get_item. + + Returns: + tuple of data types, each can be like tf.float32 + """ + raise NotImplementedError + + def __getitem__(self, index): + data = self._get_item(index) + + self.check_shape(data) + + return data + + def _get_item(self, index): + """ An indexing-like item fetching method + + Args: + index: int + + Returns: + tuple of data terms (as numpy.ndarray) + """ + raise NotImplementedError + + +# DummyTrainDataset for debug +class DummyTrainDataset(_TrainDataset): + """ A dummy train dataset for debugging. + """ + def __init__(self, data_dir, cfg): # pylint: disable=super-init-not-called + b = cfg.data.train.batch_size + h = cfg.data.train.input_size[0] + w = cfg.data.train.input_size[1] + c = 1 if (cfg.data.color_space=='gray') else 3 + shape_lq = (cfg.data.num_data_lq_frames, h, w, c) + self.lq_shape = shape_lq + self.gt_shape = (cfg.data.num_data_gt_frames, h, w, 3) + + num_samples = 100 + self.sample_list = [np.zeros(self.lq_shape).astype(np.float32) + for _ in range(num_samples)] + self.sample_list_gt = [np.zeros(self.gt_shape).astype(np.float32) + for _ in range(num_samples)] + + def _get_item(self, index): + lq = self.sample_list[index] + gt = self.sample_list_gt[index] + return lq, gt + + @property + def data_dtype(self): + return tf.float32, tf.float32 + + @property + def data_shape(self): + return self.lq_shape, self.gt_shape + + +class OfflineTrainDataset(_TrainDataset): + """ + Offline degradation task training dataset. + Augmentation is always online. + """ + def __init__(self, data_dir, cfg): + super().__init__(data_dir, cfg) + self.num_channels = 3 if self.color_space != 'gray' else 1 + + # Load augmentation options from cfg + options = yaml.safe_load(cfg.data.train.augmentation.options) + self.augment_pipeline = Compose.from_cfgs( + options, + crop_size=self.crop_size, # source crop size + scales=(1, self.scale) # scale of each crop, corresponds to + # returned data terms. + ) + + @property + def data_shape(self): + h, w = self.crop_size # this is the input (lq) crop size + + lq_shape = (self.num_lq_frames, + h, + w, + self.num_channels) + + # Squeeze the batch dim if possible. Single image case + lq_shape = batch_dim_squeeze(lq_shape) + + gt_shape = (self.num_gt_frames, + h*self.scale, + w*self.scale, + self.num_channels) + gt_shape = batch_dim_squeeze(gt_shape) + + return lq_shape, gt_shape + + @property + def data_dtype(self): + return tf.float32, tf.float32 + + def _get_item(self, index): + # Get meta data. 
We take the `index` frame as the center frame
+        center_frame_meta = self.sample_list[index]
+        vid = center_frame_meta['clip']
+        filename = center_frame_meta['filename']
+        num_frames = center_frame_meta['num_frames']
+        base_index = center_frame_meta['base_index']
+        ext = center_frame_meta['ext']
+        center_frame_index = int(filename[:-4])
+        num_digits = len(filename[:-4])
+
+        # Frames interval augmentation
+        if self.augment:
+            interval = random.choice(self.interval_list)
+        else:
+            interval = 1
+
+        # Get the consecutive frame indices
+        lq_indices = get_consecutive_frame_indices(
+            center_frame_index,
+            self.num_lq_frames,
+            num_frames,    # total number of frames in the clip
+            base_index,
+            interval=interval,
+            pad_mode='reflect')
+
+        lq_files = [os.path.join(self.lq_path_pattern.format(vid),
+                                 f'{ind:0{num_digits}d}.{ext}')
+                    for ind in lq_indices]
+
+        gt_indices = get_consecutive_frame_indices(
+            center_frame_index,
+            self.num_gt_frames,
+            num_frames,
+            base_index,
+            interval=interval,
+            pad_mode='reflect')
+
+        gt_files = [os.path.join(self.gt_path_pattern.format(vid),
+                                 f'{ind:0{num_digits}d}.{ext}')
+                    for ind in gt_indices]
+
+        lq = load_batch_image(lq_files, self.color_space)
+        gt = load_batch_image(gt_files, self.color_space)
+
+        if self.augment:
+            lq, gt = self.augment_pipeline(lq, gt)
+
+        if self.num_lq_frames == 1 and lq.shape[0] == 1:
+            lq = lq[0]
+        if self.num_gt_frames == 1 and gt.shape[0] == 1:
+            gt = gt[0]
+
+        if self.cfg.data.normalized:
+            lq = np.clip(lq / 255., 0, 1)
+            gt = np.clip(gt / 255., 0, 1)
+
+        return lq, gt
+
+
+class OnlineTrainDataset(OfflineTrainDataset):
+    """ Online degradation task training dataset.
+    """
+    def __init__(self, data_dir, cfg):
+        super().__init__(data_dir, cfg)
+        # Loading degradation from cfg:
+        # add noise, down-sampling, blur, etc.
+        options = yaml.safe_load(cfg.data.train.degradation.options)
+        assert isinstance(options, dict)
+
+        # TODO: remove preset degradation
+        self.degradation_pipeline = get_degradation_model(
+            scale=self.scale,
+            version=cfg.data.train.degradation.online_version)
+
+        options = yaml.safe_load(cfg.data.train.augmentation.options)
+        assert isinstance(options, dict)
+
+        # Loading augmentation from cfg:
+        # random crop, random flip, random interval, etc.
+        self.augment_pipeline = Compose.from_cfgs(
+            options,
+            crop_size=self.crop_size,
+            scales=(self.scale, )
+        )
+
+        # Loading gt enhancement from cfg:
+        # usm, etc.
+        self.gt_enhancement = cfg.data.train.gt_enhancement
+        if self.gt_enhancement:
+            self.gt_enhancement_module = get_degradation_model(
+                version='gt_enhancement'
+            )
+
+    def load_gt(self, im_files):
+        gt_list = []
+        for i, _im in enumerate(im_files):
+            gt = imread(_im, self.color_space)
+            gt_list.append(gt)
+        return np.array(gt_list)
+
+    def _get_item(self, index):
+        center_frame_meta = self.sample_list[index]
+        vid = center_frame_meta['clip']
+        filename = center_frame_meta['filename']
+        num_frames = center_frame_meta['num_frames']
+        base_index = center_frame_meta['base_index']
+        ext = center_frame_meta['ext']
+        center_frame_index = int(filename[:-4])
+        num_digits = len(filename[:-4])
+
+        if self.augment:
+            interval = random.choice(self.interval_list)
+        else:
+            interval = 1
+
+        # Should load num_lq_frames gt images
+        gt_indices = get_consecutive_frame_indices(
+            center_frame_index,
+            self.num_lq_frames,
+            num_frames, base_index,
+            interval=interval,
+            pad_mode='reflect'
+        )
+        gt_files = [os.path.join(self.gt_path_pattern.format(vid),
+                                 f'{ind:0{num_digits}d}.{ext}')
+                    for ind in gt_indices]
+
+        gt = self.load_gt(gt_files)
+        if self.augment:
+            gt = self.augment_pipeline(gt)[0]
+
+        # Do degradation after augmentation to reduce computation.
+        lq_list = self.degradation_pipeline.apply_batch(
+            np.array(gt),
+            allow_quantization=ext != 'exr'
+        )
+
+        lq = np.array(lq_list).astype(np.float32)
+        gt = gt.astype(np.float32)
+
+        # Select the center num_gt_frames frames
+        gt = gt[(self.num_lq_frames//2-self.num_gt_frames//2):
+                (self.num_lq_frames//2+self.num_gt_frames//2)+1]
+
+        if self.gt_enhancement:
+            gt = [self.gt_enhancement_module.apply(_gt) for _gt in gt]
+            gt = np.array(gt)
+
+        if self.num_lq_frames == 1:
+            lq = lq[0]
+        if self.num_gt_frames == 1:
+            gt = gt[0]
+
+        if self.cfg.data.normalized:
+            lq = np.clip(lq / 255., 0, 1)
+            gt = np.clip(gt / 255., 0, 1)
+        return lq, gt
+
+
+class MixtureDatasets(_TrainDataset):
+    """
+    Mixture dataset containing multiple train datasets.
+    Could be constructed from a list of folders.
+    """
+    def __init__(self):    # pylint: disable=super-init-not-called
+        self._datasets = None
+        self.num_samples_list = []
+
+    @staticmethod
+    def from_datasets(*datasets):
+        """
+        Construct a mixture dataset from a list of train datasets.
+
+        Args:
+            *datasets: list of OfflineTrainDataset or OnlineTrainDataset
+                instances. Each should return data terms with the same dtype
+                and shape.
+
+        Returns:
+            a MixtureDatasets instance
+        """
+        cls = MixtureDatasets()
+        cls._datasets = list(datasets)
+        cls.num_samples_list = [len(d) for d in cls._datasets]
+        return cls
+
+    @staticmethod
+    def from_datadir(subcls, data_dir_list, cfg):
+        """
+        Construct a mixture dataset from a list of data directories.
+
+        Args:
+            subcls: OfflineTrainDataset or OnlineTrainDataset type
+            data_dir_list: list(str), each is a top directory of a dataset.
+                Each should return data terms with the same dtype and shape.
+ cfg: yacs Node, global configuration + + Returns: + a MixtureDatasets instance + """ + cls = MixtureDatasets() + datasets = [] + for data_dir in data_dir_list: + sub_datasets = subcls(data_dir, cfg) + datasets.append(sub_datasets) + cls._datasets = list(datasets) + cls.num_samples_list = [len(d) for d in cls._datasets] + return cls + + @property + def data_dtype(self): + return self._datasets[0].data_dtype + + @property + def data_shape(self): + return self._datasets[0].data_shape + + def get_datasets(self, index): + # Iterate over the datasets to locate the queried sample + dataset_id = 0 + for dataset_id, num in enumerate(self.num_samples_list): + if index - num < 0: + break + index -= num + return dataset_id, index + + def _get_item(self, item): + dataset_id, index = self.get_datasets(item) + return self._datasets[dataset_id][index] + + def __len__(self): + return np.sum(self.num_samples_list) diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/utils.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4724330eea7bec757e2f38bf6023f4139bcb29db --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/dataloaders/utils.py @@ -0,0 +1,205 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import numpy as np +from src.utils.file_io import imread +from src.utils.constant import VALID_FILE_EXT + + +def supported_file_format(filename): + """ + Check whether the image file is supported. + + Args: + filename: str + + Returns: + boolean + """ + ext = filename.split('.')[-1] + return ext in VALID_FILE_EXT + + +def gen_pattern(data_dir, meta, split='lq'): + """ + Generate image pattern given metadata and folder structure. + + Args: + data_dir: str, top dataset folder + meta: dict, loaded metadata from set file + split: str, name of data split + + Returns: + str, file pattern of the images + """ + if meta is None: + return os.path.join(data_dir, '{}') + else: + if meta['prefix']: + return os.path.join(data_dir, + 'images', + meta['{}_folder'.format(split)], + '{}') + else: + return os.path.join(data_dir, + 'images', + '{}', + meta['{}_folder'.format(split)]) + + +def pad_list(src, head_pad_size, tail_pad_size, mode): + """ + Pad the given list to target length. + + Args: + src: list[int], indicies of the frames + head_pad_size: int, length of pad at the head (before `src`) + tail_pad_size: int, length of pad at the tail (after `src`) + mode: str, pad mode. 
Optional in ['reflect', 'replicate'] + + Returns: + list[int], padded indices list which has target size + """ + num_src = len(src) + if mode == 'reflect': + head_pad_value = list(reversed(src[1:])) + tail_pad_value = list(reversed(src[:-1])) + elif mode == 'replicate': + head_pad_value = [src[0]] * head_pad_size + tail_pad_value = [src[-1]] * tail_pad_size + else: + raise NotImplementedError + src = head_pad_value + src + src = src[-(num_src + head_pad_size):] + tail_pad_value + src = src[:num_src+head_pad_size+tail_pad_size] + return src + + +def get_consecutive_frame_indices(given_frame_ids, num_frames_required, + max_frames, base_index=0, interval=1, + pad_mode='reflect'): + """ + Get consecutive indices given the center frame index/indices. + + There will be padding at the border of the list. Two typical cases + used in VSR and VFI model: + Case 1: + If given ids are like: + given_frame_ids=[1,2], + num_frames_required=4, + pad_mode='replicate' + should return [1,1,2,3]. [1, 2] are at the center. + Case 2: + If given ids are like: + given_frame_ids=3, + num_frames_required=5, + interval=2, + pad_mode='reflect' + should return [3,1,3,5,7]. [3] lies at the center. + + Args: + given_frame_ids: int or list[int], the center frame indices. + num_frames_required: int, number of the frames required. + max_frames: int, the total number of the frames in the dataset. + base_index: int, the base index of the frame. Default is 0. + iterval: int, the frame iterval. Default is 1. + pad_mode: str, the pad method if on the border. + Optional in ['reflect', 'replicate'] + + Returns: + list[int], length equals to num_frames_required + """ + + # Find the cosecutive frame indices + if isinstance(given_frame_ids, (list, tuple)): + # Currently only supported in vfi + assert num_frames_required % 2 == 0, \ + f'{len(given_frame_ids)} frame ids are given. ' \ + f'The required number of frames should be even.' + num_extra_frames = num_frames_required - len(given_frame_ids) + min_id = min(given_frame_ids) + max_id = max(given_frame_ids) + + else: + # Supported in other tasks + assert num_frames_required % 2 == 1, \ + f'Only the center frame id is given. ' \ + f'The required number of frames should be odd.' + min_id = max_id = given_frame_ids + num_extra_frames = num_frames_required + + assert min_id >= base_index and max_id < max_frames + base_index + + # Obtain the indices within the range [base_index, base_index + max_frames] + index = [] + left_pad = False + right_pad = False + for k in range(min_id - interval*(num_extra_frames//2), + max_id+interval*(num_extra_frames//2)+1, + interval): + if base_index > k: + left_pad = True + continue + elif k >= (base_index + max_frames): + right_pad = True + continue + else: + index.append(k) + + index_len = len(index) + # When the given frames are on the edge, perform padding, + if left_pad: + index = pad_list(index, num_frames_required-index_len, 0, pad_mode) + + if right_pad: + index = pad_list(index, 0, num_frames_required-index_len, pad_mode) + + return index + + +def batch_dim_squeeze(dim): + """ + Squeeze the batch dimension when possible. + + Args: + dim: list[int], shape of the tensor. + + Returns: + list[int], reduced dimension. + """ + return dim[1:] if dim[0] == 1 else dim + + +def load_batch_image(files, target_color_space, as_array=True): + """ + Load batch images, may return as np.ndarray or just a list. + + Args: + files: list[str], file paths to read. + target_color_space: str, color space to which the images are converted. 
+ as_array: boolean, return as list or np.ndarray. + + Returns: + If as_array is True, return the images are np.ndarray. Else, return + as the list. + """ + + im = [] + for f in files: + assert os.path.exists(f), f"{f} not exists." + _im = imread(f, target_color_space).astype(np.float32) + im.append(_im) + return np.array(im) if as_array else im diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/engine/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/engine/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..98f6c71a50281f91278ec5234aa56c286dc333cf --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/engine/__init__.py @@ -0,0 +1,43 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def build_engine(cfg): + """Returns the engine class given the mode in cfg. + + Args: + cfg: yacs node, global configuration. + + Returns: + engine class. + """ + mode = cfg.mode + ckpt = cfg.checkpoint + if mode == 'train': + from .trainer import SessionTrainer + return SessionTrainer + elif mode == 'inference': + from .inferencer import SessionInferencer, ModelFreeInferencer + if ckpt.endswith(".pb"): + return ModelFreeInferencer + else: + return SessionInferencer + elif mode == 'freeze': + from .freezer import SessionFreezer + return SessionFreezer + else: + raise NotImplementedError + + +__all__ = ['build_engine'] diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/engine/freezer.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/engine/freezer.py new file mode 100644 index 0000000000000000000000000000000000000000..e675f4b460ad162ef3fe434719f37cf131e8c9a4 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/engine/freezer.py @@ -0,0 +1,111 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import numpy as np +import tensorflow as tf +from src.runner.saver import loose_loading +from src.runner.sess_config import get_sess_config +from src.utils.adapter import NetworkIOAdapter +from src.utils.logger import logger +from tensorflow.python.framework import graph_util + + +class _Freezer: + """Freezer engine to freeze ckpt to pb file. + + Args: + dataloader: None, will never be used. 
The input size is determined by the configuration instead.
+        network: network instance, will not be used in this mode.
+        cfg: yacs node, global configuration.
+    """
+    def __init__(self, dataloader, network, cfg, **kwargs):
+        self.cfg = cfg
+        self.network = network
+        self.adapter = NetworkIOAdapter(cfg)  # do not register_raw_size. Use the configured value
+
+        # Different from the inference, we'll fix the input size.
+        # The fixed input size is given by:
+        #   cfg.data.inference.best_patch_size[0] + pads_h + cfg.data.inference.patch_pad_size[0]
+        #   cfg.data.inference.best_patch_size[1] + pads_w + cfg.data.inference.patch_pad_size[1]
+        pads_h, pads_w = self.adapter.cal_adapted_size(self.adapter.best_in_size)
+        self.adapter.limited_in_size = [self.adapter.best_in_size[0] + pads_h + self.adapter.eval_pad_size*2,
+                                        self.adapter.best_in_size[1] + pads_w + self.adapter.eval_pad_size*2]
+        self.adapter.register_raw_size(self.adapter.limited_in_size)
+
+        self.network.build_graph((cfg.data.eval_batch_size, self.adapter.input_size))
+
+    def restore(self):
+        """
+        Restore the graph from ckpt.
+        """
+        raise NotImplementedError
+
+    def run(self):
+        """
+        Execute function to freeze the graph to pb.
+        """
+        raise NotImplementedError
+
+
+class SessionFreezer(_Freezer):
+    """
+    A tf.Session based freezer engine.
+    """
+    def __init__(self, dataloader, network, cfg):
+        super().__init__(dataloader, network, cfg)
+        sess_cfg = get_sess_config(cfg.device,
+                                   cfg.solver.xla,
+                                   cfg.solver.mix_precision,
+                                   False)
+        self.session = tf.Session(config=sess_cfg)
+
+    def restore(self):
+        """
+        Restore the required part of the graph given the ckpt.
+        """
+        loose_loading(self.session, self.cfg.model.scope, self.cfg.output_dir, self.cfg.checkpoint)
+
+    def run(self):
+        """
+        Execute function to freeze the graph to pb.
+        """
+        with self.session as sess:
+            tf.io.write_graph(sess.graph_def, self.cfg.checkpoint.rsplit('/', 1)[0], 'freeze_graph.pbtxt')
+
+            logger.info('Loading trained model ...')
+            self.restore()
+            logger.info('Model loaded successfully.')
+            logger.info('Freezing model to a pb file.')
+
+            pb_path = self.cfg.checkpoint + '.pb'
+            try:
+                if hasattr(self.network, 'inference_func'):
+                    constant_graph = graph_util.convert_variables_to_constants(
+                        sess, sess.graph_def,
+                        self.network.output_node_name
+                    )
+                else:
+                    constant_graph = graph_util.convert_variables_to_constants(
+                        sess, sess.graph_def,
+                        [self.network.output_node.name.split(':')[0]]
+                    )
+                with tf.gfile.FastGFile(pb_path, mode='wb') as f:
+                    f.write(constant_graph.SerializeToString())
+                logger.info('Model frozen successfully.')
+            except Exception as e:
+                logger.error('Failed to freeze model.')
+                logger.info(e)
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/engine/inferencer.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/engine/inferencer.py
new file mode 100644
index 0000000000000000000000000000000000000000..cd96cf4b061e180af2eb82038f10aabb71e19017
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/engine/inferencer.py
@@ -0,0 +1,312 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import os +import shutil +import time +from functools import partial +from multiprocessing import Manager + +import numpy as np +import tensorflow as tf +from src.runner.saver import loose_loading +from src.runner.sess_config import get_sess_config +from src.utils.adapter import NetworkIOAdapter +from src.utils.exceptions import * +from src.utils.file_io import ImageWriter, image_deprocess +from src.utils.logger import logger +from src.utils.moving_avg import MovingAvg +from src.utils.constant import FILE_EXT_TO_PIX_FMT +from src.utils.world import world +from tqdm import trange + + +class _Inferencer: + """Base inference engine. + + Args: + dataloader: dict, inference data dict produced by test dataset instance. + network: network instance, whose class should derive from + src.networks.base_model.Base . + cfg: yacs node, global configuration. + _world: world instance, could be given from the caller function, + or by default, the global world instance (see src.utils.world). + """ + def __init__(self, dataloader, network, cfg, _world=None): + self.device = cfg.env.device + self.is_distributed = cfg.env.rank_size > 1 + self.cfg = cfg + self.dataloader = dataloader + self.network = network + self.step_time = MovingAvg(smooth=0.9) + self.adapter = NetworkIOAdapter(cfg) + # _world should be initialized + self.world = _world or world + if not self.world.is_initialized: + raise WorldUninitializedError('World not initialized.') + + self._total = 0 + + def restore(self, *args, **kwargs): + """ + Restore parameters from ckpt. + """ + raise NotImplementedError + + def run(self): + """ + Execute inference steps. + """ + raise NotImplementedError + + +# Note: we use numpy dataloader instead of tf dataloader in inference +class SessionInferencer(_Inferencer): + """Session based inference engine. + + Args: + dataloader: dict, inference data dict produced by test dataset instance. + network: network instance, whose class should derive from + src.networks.base_model.Base . + cfg: yacs node, global configuration. + _world: world instance, could be given from the caller function, + or by default, the global world instance (see src.utils.world). + """ + def __init__(self, dataloader, network, cfg, _world=None): + super().__init__(dataloader, network, cfg, _world) + self.scale = cfg.model.scale + self.session = None + self.graph = None + + # Get expected output data information. Both are used for ffmpeg io-backend. + output_size = self.dataloader.expect_output_resolution + output_ext = self.dataloader.expect_output_file_ext + + pix_fmt = FILE_EXT_TO_PIX_FMT[output_ext] + + # Prepare image writer if is set. 
+ if not self.cfg.inference.write_out: + logger.warn(f'You have set "write_out" to False, ' + f'hence there will be no outputs to {self.cfg.inference.io_backend}.') + else: + output_dir = self.cfg.inference.result_dir + output_dir = os.path.realpath(output_dir) + # By default, we write results to hard disk + self.image_deprocess_fn = partial( + image_deprocess, + source_color_space=self.cfg.data.color_space, + benormalized=self.cfg.data.normalized) + self.result_writer = ImageWriter( + output_dir, cfg, + benormalized=self.cfg.data.normalized, + source_color_space=self.cfg.data.color_space, + output_resolution=output_size, + pix_fmt=pix_fmt) + + def restore(self): + """Restore parameters from ckpt. + """ + if self.cfg.checkpoint == 'none': + # Reserved for tasks that are not performed using networks. + pass + elif (self.cfg.checkpoint == '' + and len(self.cfg.train.pretrained_scope_list) > 0): + # For models that consists of several sub-networks, e.g., vfi model + # with pretrained optical flow network. + assert len(self.cfg.train.pretrained_scope_list) == \ + len(self.cfg.train.pretrained_scope_ckpt) + for scope, ckpt in zip(self.cfg.train.pretrained_scope_list, + self.cfg.train.pretrained_scope_ckpt): + loose_loading(self.session, scope, '', ckpt) + return 0 + else: + # Commonly used branch. + return loose_loading(self.session, self.cfg.model.scope, + '', self.cfg.checkpoint) + + def network_preparation(self): + """Build network forward graph, and restor from ckpt. + """ + sess_cfg = get_sess_config(self.cfg.env.device, + self.cfg.session.xla, + self.cfg.session.mix_precision, + False) + self.session = tf.Session(config=sess_cfg) + + # Register the image raw size when inference to let the adapter decide + # whether to inference using patchwise strategy or as a whole. + self.adapter.register_raw_size(self.dataloader.raw_image_size) + + # Get the real adapted input size from adapter to build the graph. + self.network.build_graph(input_size=(self.cfg.data.inference.batch_size, + self.adapter.input_size)) + init_op = tf.group(tf.global_variables_initializer(), + tf.local_variables_initializer()) + self.session.run(init_op) + if self.cfg.debug_mode != 'zeroin': + self.restore() + + def run(self): + """Execute inference steps. + """ + self.network_preparation() + + # Dataset shard is done in building dataset, see dataloaders.__init__.py + self._total = len(self.dataloader) + + range_fn = partial(trange, position=self.world.rank_id, desc=f'On DeviceID {self.world.device_id}') + + if self.session is None: + raise SessionUndefinedError(f'{type(self).__name__}.session is not defined.') + + logger.info(f'Start inference.') + + if self.cfg.inference.write_out: + self.result_writer.initialize() + + for i in range_fn(self._total): + data_dict = self.dataloader[i] + st_time = time.time() + hq = self._inf_engine(data_dict) + once_time = time.time() - st_time + # Skip the first step since the elapse time is abnormal due to compilation. + if i > 0: + self.step_time.update(once_time) + + if self.cfg.inference.write_out: + self.write_out(data_dict['output_file'], hq, data_dict.get('input_copies', None)) + + if self.cfg.inference.write_out: + self.result_writer.finalize() + logger.info(f'\tInference time: {self.step_time.avg * 1000:.2f} ms/image') + + def _inf_engine(self, data_dict): + """Determine inference strategy. + """ + # TODO: support multiple feed dict. + lq = data_dict['lq'] + if hasattr(self.network, 'inference_func'): + # Reserved API if the processing of the network is not end-to-end. 
+ # Pass through all the inputs, in case the model requires multiple-inputs. + data_dict['lq'] = self.adapter.adapt_input(data_dict['lq']) + hq = self.network.inference_func(self.session, data_dict, self.graph, self.adapter.mode) + hq = self.adapter.reverse_adapt(hq.squeeze()) + elif self.adapter.patch_mode: + patch_per_step = self.cfg.data.inference.batch_size + img_patches = self.adapter.extract_image_patches(lq, patch_per_step) + num_step = img_patches.shape[0] // patch_per_step + patch_hq = [] + for i in range(num_step): + batch_data = img_patches[i * patch_per_step:(i + 1) * patch_per_step] + if patch_per_step == 1 and batch_data.shape[0] != 1 and self.cfg.model.input_format_dimension == 5: + batch_data = batch_data[None, ...] + elif self.cfg.model.input_format_dimension == 4: + batch_data = np.reshape(batch_data, [-1, *batch_data.shape[2:]]) + _patch_hq = self._inf_func(batch_data) + patch_hq.extend(_patch_hq) + hq = self.adapter.stitching_patches_to_image(patch_hq) + else: + lq = self.adapter.adapt_input(lq) + hq = self._inf_func(lq[None]) + hq = self.adapter.reverse_adapt(hq.squeeze()) + return hq.squeeze() + + def _inf_func(self, lq): + """Real calling inference function. + + Args: + lq: numpy array, input array. + + Returns: + hq: numpy array, processd output array. + """ + # TODO: support multiple feed dict. + hq = self.session.run(self.network.output_node, feed_dict={self.network.input_node: lq}) + return hq + + def write_out(self, output_files, network_outputs, input_copies): + """Write out function. + """ + output_dict = dict() + + if isinstance(output_files, (list, tuple)): + assert len(output_files) == len(network_outputs) + network_outputs_ = [self.image_deprocess_fn(n, hdr=output_files[0].endswith('.exr')) + for n in network_outputs] + output_dict.update(dict(zip(output_files, network_outputs_))) + elif isinstance(output_files, str): + network_outputs_ = self.image_deprocess_fn(network_outputs, hdr=output_files.endswith('.exr')) + output_dict[output_files] = network_outputs_ + else: + raise NotImplementedError + + if input_copies is not None: + # deprocess the copied data + input_copies_deprocess = dict() + for k, v in input_copies.items(): + input_copies_deprocess[k] = [v[0], self.image_deprocess_fn(v[1], hdr=k.endswith('.exr'))] + output_dict.update(input_copies_deprocess) + + self.result_writer.write_out(output_dict) + + +class ModelFreeInferencer(SessionInferencer): + """ + Inferencer using pb file, without model python file. + """ + def restore(self): + """Restore from pb file. + + Returns: + graph: tf.graph, the forward tensorflow graph. + """ + with tf.gfile.GFile(self.cfg.checkpoint, "rb") as gf: + graph_def = tf.GraphDef() + graph_def.ParseFromString(gf.read()) + with tf.Graph().as_default() as graph: + tf.import_graph_def(graph_def, name="") + return graph + + def network_preparation(self): + """Build network forward graph, and restor from pb. Prepare adapter. + """ + sess_cfg = get_sess_config(self.cfg.env.device, + self.cfg.solver.xla, + self.cfg.solver.mix_precision, + False) + + # Load from PB + self.graph = self.restore() + self.session = tf.Session(config=sess_cfg, graph=self.graph) + + # Fix the real eval in size before register image raw size. + # This function will use the + # model.best_in_size + data.eval_padsize * 2 + # as the fixed eval in size + self.adapter.fix_eval_in_size() + self.adapter.register_raw_size(self.dataloader.raw_image_size) + + def _inf_func(self, lq): + """Real calling inference function. + + Args: + lq: numpy array, input array. 
+ + Returns: + hq: numpy array, processd output array. + """ + hq = self.session.run(self.graph.get_tensor_by_name("SR_output:0"), + feed_dict={self.graph.get_tensor_by_name("L_input:0"): lq}) + return hq diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/engine/trainer.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/engine/trainer.py new file mode 100644 index 0000000000000000000000000000000000000000..89b36c47e8a6dbe054bb96306ce026ee96040bfb --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/engine/trainer.py @@ -0,0 +1,499 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import os +import time + +import numpy as np +import tensorflow as tf +from src.losses.modules.perceptual import load_perceptual_module +from src.runner.common import name_space +from src.runner.distributed_variables_broadcast import \ + broadcast_global_variables, allreduce_avg +from src.runner.helper import build_adversarial_train_helper +from src.runner.saver import strict_loading, loose_loading +from src.runner.sess_config import get_sess_config +from src.runner.solver import build_solver +from src.utils.exceptions import * +from src.utils.logger import logger +from src.utils.moving_avg import MovingAvg +from src.utils.world import world + + +class _Trainer: + """Base trainer class. + This class is for tensorflow for now. + + Args: + dataloader: list[tensor] generated from tf dataloader. See `src.dataloaders.dataloder` + for more information. + network: network instance whose class derives from Base network class. + cfg: yacs node, global configuration. + _world: World instance, option reserved for extension. By default, the trainer uses a + preset global `world` instance. + """ + def __init__(self, dataloader, network, cfg, _world=None): + self.device = cfg.env.device + self.is_distributed = cfg.env.rank_size > 1 + self.cfg = cfg + + self.dataloader = dataloader + self.network = network + self.g_train_op = None + self.d_train_op = None + self.g_solver = None + self.d_solver = None + + self.step_time = MovingAvg(smooth=0.9) + self.step_loss = MovingAvg(smooth=0.99) + + self.world = _world or world + if not self.world.is_initialized: + raise WorldUninitializedError('World not initialized.') + + # Call network.build_graph to construct the basic graph. + # Including dataloader, forward graph, and loss + self.network.build_graph(dataloader=self.dataloader) + + # Helper is to coordinate the adversarial training, i.e., + # whether to update the generator or the discriminator according to + # certain strategy. + self.helper = build_adversarial_train_helper(cfg) + + # Build the optimizers + self.build() + + def build(self): + """ + Top building function to prepare optimizers. 
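+        The generator optimizer is always built; the discriminator optimizer
+        is built only when the adversarial loss weight is positive, as the
+        code below shows.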
+ """ + self.build_g_optimizer() + + # Prepare discriminator optimizer if required + if self.cfg.loss.adversarial.loss_weight > 0.: + self.build_d_optimizer() + + # Use GLOBAL_VARIABLES to get both the weights and buffers. + # Do not use tf.GraphKeys.TRAINABLE_VARIABLES here, which will miss the + # bn buffers. + generator_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, + scope=self.cfg.model.scope) + self.saver = tf.train.Saver(var_list=generator_vars, + max_to_keep=10, + keep_checkpoint_every_n_hours=1) + + def build_g_optimizer(self): + """ + Build generator optimizer. + """ + # Build generator solver + self.g_solver = build_solver(self.cfg.train.generator.lr_schedule, + self.cfg.train.optimizer, + self.cfg.session.mix_precision, + self.cfg.train.loss_scale, + self.device, + self.is_distributed) + + # All generator losses are collected in name_space.GeneratorLoss scope. + # Add them to get the final generator loss. + losses_dict = name_space.get_collection(name_space.GeneratorLoss) + losses = tf.add_n(list(losses_dict.values())) + + name_space.add_to_collection(name_space.GeneratorLoss, 'loss_total', losses) + # TODO: encapsulate the learning rate + name_space.add_to_collection(name_space.GeneratorRunOp, 'g_lr', self.g_solver.lr) + + generator_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, + scope=self.cfg.model.scope) + + g_train_op = self.g_solver.opt.minimize(losses, var_list=generator_vars) + + # bn buffer update after the optimization + update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS, + scope=self.cfg.model.scope) + if list(update_ops): + with tf.control_dependencies([g_train_op]): + g_train_op = tf.group(*update_ops) + + self.g_train_op = g_train_op + + # Add to name_space for later query + name_space.add_to_collection(name_space.GeneratorRunOp, 'g_train', self.g_train_op) + + def build_d_optimizer(self): + """ + Build discriminator optimizer. + """ + self.d_solver = build_solver(self.cfg.train.discriminator.lr_schedule, + self.cfg.train.optimizer, + self.cfg.session.mix_precision, + self.cfg.train.loss_scale, + self.device, + self.is_distributed) + # All discriminator losses are collected in name_space.DiscriminatorLoss scope. + # Add them to get the final discriminator loss. + losses_dict = name_space.get_collection(name_space.DiscriminatorLoss) + + self.d_loss = tf.add_n(list(losses_dict.values())) + name_space.add_to_collection(name_space.DiscriminatorRunOp, 'd_lr', self.d_solver.lr) + + discriminator_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, + scope=name_space.DiscriminatorVarScope) + d_train_op = self.d_solver.opt.minimize(self.d_loss, var_list=discriminator_vars) + + # If parameter clip is applied, do it after optimization. + if self.cfg.loss.adversarial.parameter_clip: + amin, amax = self.cfg.loss.adversarial.parameter_clip_range + with tf.control_dependencies([d_train_op]): + d_train_op = tf.group([var.assign(tf.clip_by_value(var, amin, amax)) + for var in discriminator_vars]) + + # bn buffer update + update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS, scope=name_space.DiscriminatorVarScope) + if list(update_ops): + with tf.control_dependencies([d_train_op]): + d_train_op = tf.group(*update_ops) + + self.d_train_op = d_train_op + name_space.add_to_collection(name_space.DiscriminatorRunOp, 'd_train', self.d_train_op) + + def save(self, *args, **kwargs): + """ + Save ckpt during training every certain steps. 
+ """ + raise NotImplementedError + + def print(self, *args, **kwargs): + """ + Print function to dump train information. + """ + raise NotImplementedError + + def restore(self): + """ + Restore from a ckpt for continual training. + """ + raise NotImplementedError + + def load_pretrained(self, *args, **kwargs): + """ + Load pretrained sub-networks for overall training and fine-tune. + """ + raise NotImplementedError + + def run(self): + """ + Execute function to run the train steps. + """ + raise NotImplementedError + + +class SessionTrainer(_Trainer): + """ + Tensorflow trainer using tf.Session. + """ + def __init__(self, dataloader, network, cfg): + super().__init__(dataloader, network, cfg) + sess_cfg = get_sess_config(cfg.env.device, + cfg.session.xla, + cfg.session.mix_precision, + cfg.env.rank_size>1) + self.session = tf.Session(config=sess_cfg) + + # TODO: refactor summary + if cfg.train.use_tensorboard: # for visualization when training drm + self.writer = tf.summary.FileWriter( + os.path.join(self.cfg.train.output_dir, 'summary'), + self.session.graph + ) + else: + self.writer = None + + def save(self, step): + """ + Save checkpoint on the step + + Args: + step: int + """ + if not os.path.exists(self.cfg.train.output_dir): + os.makedirs(self.cfg.train.output_dir) + + self.saver.save(self.session, + os.path.join(self.cfg.train.output_dir, + self.cfg.model.name), + global_step=step) + + def print(self, step, ops_result, loss_ops_result): + """ + Print train step information on the screen + + Args: + step: int, current train step. + ops_result: dict, data obtained by session.run. + loss_ops_result: dict, loss information + """ + loss_str = [f'{k}: {f"{v:3.3f}":>7}' for k, v in loss_ops_result.items()] + + fps = (self.cfg.data.train.batch_size + / (self.step_time.cur_val + 1e-6) + * self.cfg.env.rank_size) + eta = (self.g_solver.total_step - step) * self.step_time.avg + eta = str(datetime.timedelta(seconds=int(eta))) + + solver_info = [f'Step: [{step:>7d} / {self.g_solver.total_step}]'] + # If adversarial, print whether generator or discriminator is updated + if self.cfg.loss.adversarial.loss_weight > 0.: + adv_info = self.helper.info + g_update = adv_info['g_update'] + d_update = adv_info['d_update'] + + solver_info.append(f'g update: {f"{g_update}":>5}') + g_lr = ops_result['g_lr'] + solver_info.append(f'g lr: {f"{g_lr:.7f}":>6}') + + solver_info.append(f'd update: {f"{d_update}":>5}') + d_lr = ops_result['d_lr'] + solver_info.append(f'd lr: {f"{d_lr:.7f}":>6}') + else: + g_lr = ops_result['g_lr'] + solver_info.append(f'g lr: {f"{g_lr:.7f}":>6}') + + misc_info = [f'smooth_total: {f"{self.step_loss.smooth_avg:3.3f}":>7}', + f'step time: {f"{self.step_time.cur_val*1000:5.1f}":>7} ms', + f'fps: {f"{fps:3.2f}":>6}', + f'eta: {eta:>8}', + f'on device: {self.world.device_id:1d}'] + + print_info = ', '.join([*solver_info, *loss_str, *misc_info]) + logger.info(print_info) + + def load_pretrained(self, scope): + """ + Load part of the graph. + + This function is typically used in fine-tune, multi-stage training + scenarios. + + Args: + scope: str, top scope name for pretrained sub-graph. 
+ """ + if self.cfg.checkpoint == '' and ( + len(self.cfg.train.pretrained_scope_list) > 0 + ): + assert len(self.cfg.train.pretrained_scope_list) == \ + len(self.cfg.model.pretrained_scope_ckpt) + for scope, ckpt in zip(self.cfg.train.pretrained_scope_list, + self.cfg.model.pretrained_scope_ckpt): + loose_loading(self.session, scope, + self.cfg.train.output_dir, ckpt) + else: + loose_loading(self.session, self.cfg.model.scope, + self.cfg.train.output_dir, self.cfg.checkpoint) + + def restore(self): + """ + Restore ckpt. + + This function is for continue training scenario. Thus every thing in + the generator will be loaded. + + Returns: + int, recover iteration to continue training. + """ + return strict_loading(self.session, + self.cfg.model.scope, + self.cfg.train.output_dir, + self.cfg.checkpoint) + + def run(self): + """ + Core function for the trainer to execute. + """ + # Initialization parameters. + init_op = tf.group(tf.global_variables_initializer(), + tf.local_variables_initializer()) + self.session.run(init_op) + + # Restore from ckpt if needed. + recover_step = 0 + if self.cfg.train.continue_training: + # For continue training. + recover_step = self.restore() + elif self.cfg.checkpoint != '' or ( + len(self.cfg.train.pretrained_scope_list) > 0): + # For multi-stage training, load pretrained model. + # Each trained with given scope. + self.load_pretrained(self.cfg.model.scope) + + # Load vgg-19 perceptual if needed. + if self.cfg.loss.perceptual.loss_weight > 0: + load_perceptual_module(self.session, self.cfg.loss.perceptual) + + # Synch all the nodes for initialization in distributed training. + if self.is_distributed: + logger.info(f'Broadcast variables from root rank') + broadcast_global_variables(self.session, + self.cfg.env.device, + self.cfg.env.root_rank) + + # Dump the train graph on the root node. + if self.world.is_root_rank: + tf.io.write_graph(self.session.graph_def, + self.cfg.train.output_dir, + 'train_graph.pbtxt') + logger.info(f'Start training.') + + # Train (may continue from recover step) + self._train(recover_step) + + def prepare_adv_adapt_op(self, d_loss_ops): + """ + Prepare auxiliary ops when adversarial training. + + Due to the insufficiency of Ascend platform in dynamic graph which is + common in adversarial training, we do the adaptive balance on session + run level instead of tf graph. To do this, we define a helper class to + determine whether to update generator and discriminator each step. + + On adaptive strategy which adjust the training step according to the + discriminator loss, we we must collect all the decision, aggregate and + sychronize the decision across the nodes. + + """ + + if self.cfg.loss.adversarial.loss_weight > 0. and ( + self.cfg.loss.adversarial.adaptive_strategy): + logger.info('Using adversarial training with adaptive strategy.') + logger.info('There will be some warm-start iterations for ' + 'discriminator, while the generator won\'t update.') + if self.is_distributed: + # In adaptive strategy, we should manually synchronize the + # discriminator losses across the devices. + logger.info('Distributed adversarial adaptive training. Generating ' + 'synchronize nodes.') + adv_helper_criteria = allreduce_avg( + d_loss_ops['discriminator'], + self.cfg.env.device, + self.world.rank_size + ) + else: + adv_helper_criteria = d_loss_ops['discriminator'] + else: + # To unify the interface, define a tf.no_op + adv_helper_criteria = tf.no_op() + return adv_helper_criteria + + def prepare_fetches(self): + """ + Prepare watched tensors. 
In each step, we want to know the + generator total loss, each part of generator losses, discriminator + total loss (if used), and some summary ops. + + Returns: + g_ops: dict, {op_name: op_tensor} of the generator. + d_ops: dict, {op_name: op_tensor} of the discriminator. + losses: dict, {loss_name: loss_tensor} for printing. + summary_ops: dict, {summary_name: summary_op} for visualization. + adv_helper_criteria: tensor, the criteria to tell whether update + generator or discriminator. May be a hccl operator. + """ + # prepare train ops, loss ops, summary ops + g_ops = name_space.get_collection(name_space.GeneratorRunOp) + g_loss_ops = name_space.get_collection(name_space.GeneratorLoss) + + d_ops = name_space.get_collection(name_space.DiscriminatorRunOp) + d_loss_ops = name_space.get_collection(name_space.DiscriminatorLoss) + + summary_ops = name_space.get_collection(name_space.Summary) + + adv_helper_criteria = self.prepare_adv_adapt_op(d_loss_ops) + + return g_ops, d_ops, {**g_loss_ops, **d_loss_ops}, summary_ops, adv_helper_criteria + + def prepare_feeds(self): + """ + Prepare feed dict for session run. + + Returns: + dict, will be fed to session run. + """ + # TODO: remove learning rate feed dict. + feed_dict = {self.g_solver.lr: self.g_solver.update_lr()} + if self.cfg.loss.adversarial.loss_weight > 0: + feed_dict[self.d_solver.lr] = self.d_solver.update_lr() + return feed_dict + + def _train(self, init_step=0): + """ + Train steps. + + Args: + init_step: int, the starting step of training. + """ + _g_ops, _d_ops, loss_ops, summary_ops, adv_helper_criteria = \ + self.prepare_fetches() + train_st = time.time() + for it in range(init_step, self.g_solver.total_step): + feed_dict = self.prepare_feeds() + + # In adversarial scenario, we use a helper instance to filter + # the truly evaluated ops. + real_g_ops, real_d_ops = self.helper.filter(_g_ops, _d_ops) + + st_time = time.time() + ops_result, loss_ops_result, adv_helper_criteria_result = \ + self.session.run([{**real_g_ops, **real_d_ops}, + loss_ops, + adv_helper_criteria], + feed_dict=feed_dict) + once_time = time.time() - st_time + + if self.world.is_root_rank: + if it > init_step: + # Skip the first print_interval steps, whose values + # might be abnormal + self.step_time.update(once_time) + total_loss = loss_ops_result['loss_total'] + self.step_loss.update(total_loss) + + if (it + 1) % self.cfg.train.print_interval == 0: + self.print(it + 1, ops_result, loss_ops_result) + + if (it + 1) % self.cfg.train.checkpoint_interval == 0: + self.save(it + 1) + + # Update adversarial helper function + self.helper.update_status(adv_helper_criteria_result, it+1) + + # TODO: support tensorboard, summary and evaluation. + # For tensorboard visualization + # if self.writer is not None and (it + 1) % 100 == 0: + # summary_merge = tf.summary.merge_all() + # summary_loss_result = self.session.run(summary_merge, feed_dict=feed_dict) + # self.writer.add_summary(summary_loss_result, it + 1) + + if (self.cfg.train.dump_intermediate == 'intermediate' + and (it + 1) % self.cfg.train.dump_intermediate == 0): + summary_ops_result = self.session.run(summary_ops) + # In distributed training, we should run summary ops on all the devices in + # order to synchronize. But only the root node will dump the data. + if self.world.is_root_rank: + self.network.dump_summary(it + 1, summary_ops_result) + + train_time = time.time() - train_st + time_mi = train_time / 60 + logger.info('Training finished. Average step time:{:.2f} ms, total elapse time: {:.1f} min.' 
+ .format(np.mean(self.step_time.avg) * 1000, time_mi)) diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ed055f65e62da02cccdf6a4c4380c8a14e557770 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .act import * +from .norm import * +from .conv import * +from .dcn import * +from .linear import * +from .dropout import * diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/act.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/act.py new file mode 100644 index 0000000000000000000000000000000000000000..9bc380396e504ebd51f7d02657bdfc79bebd4483 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/act.py @@ -0,0 +1,74 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import tensorflow as tf +from .base_layer import BaseLayer + + +__all__ = ['ActLayer'] + +class ActLayer(BaseLayer): + """Activation layer class. + + Args: + cfg: dict, should specify the activation `type` and other parameters. + name: str, scope name. 
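+
+    A minimal usage sketch (hypothetical values; `cfg` only needs a dict-like
+    `.get` interface):
+        act = ActLayer(dict(type='leakyrelu', alpha=0.2), name='lrelu1')
+        y = act(x)  # x is a tf.Tensor; __call__ dispatches to forward(x)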
+ """ + def __init__(self, cfg, name=None): + super(ActLayer, self).__init__() + self.type = cfg.get('type').lower() + if self.type == 'leakyrelu': + self.alpha = cfg.get('alpha', 0.2) + elif self.type == 'prelu': + # see https://pytorch.org/docs/stable/generated/torch.nn.PReLU.html + # for explanation + self.channelwise = cfg.get('channelwise', True) + self.name = name + + def forward(self, x): + if self.type == 'relu': + return tf.nn.relu(x, name=self.name) + elif self.type == 'elu': + return tf.nn.elu(x, name=self.name) + elif self.type == 'prelu': + ndim = len(x.get_shape().as_list()) + if self.channelwise: + num_parameters = x.get_shape().as_list()[-1] + else: + num_parameters = 1 + + a = tf.get_variable( + name=self.name+'_prelu_a', + shape=(num_parameters, ), + dtype=x.dtype, + trainable=True, + initializer=tf.constant_initializer(0.25)) + + if self.channelwise: + a = tf.reshape(a, shape=tuple([1]*(ndim-1) + [num_parameters])) + neg_mask = tf.cast(tf.less(x, 0.), dtype=x.dtype) + neg_x = a * x # apply parameter `a` channel-wise + return x * (1. - neg_mask) + neg_x * neg_mask + else: + return tf.nn.leaky_relu(x, alpha=a, name=self.name) + elif self.type == 'tanh': + return tf.nn.tanh(x, name=self.name) + elif self.type == 'leakyrelu': + return tf.nn.leaky_relu(x, alpha=self.alpha, name=self.name) + elif self.type == 'softplus': + return tf.nn.softplus(x, name=self.name) + elif self.type == 'sigmoid': + return tf.nn.sigmoid(x, name=self.name) + else: + raise NotImplementedError diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/base_layer.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/base_layer.py new file mode 100644 index 0000000000000000000000000000000000000000..11e79581a651cafad04ab3b7f9a429a7ce842947 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/base_layer.py @@ -0,0 +1,33 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class BaseLayer(object): + """Base layer class for all other common layers. + We'll use `Layer(args)(feat)` to call the layer. 
+ """ + def __call__(self, *args, **kwargs): + return self.forward(*args, **kwargs) + + def forward(self, *args, **kwargs): + raise NotImplementedError + + def get_kernel(self): + raise NotImplementedError + + def get_bias(self): + raise NotImplementedError + + def get_buffer(self): + raise NotImplementedError diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/conv.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/conv.py new file mode 100644 index 0000000000000000000000000000000000000000..4916bee0a42e0f57e05f02f8009b70bd678b9522 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/conv.py @@ -0,0 +1,420 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import math +import tensorflow as tf +import numpy as np + +from .base_layer import BaseLayer + +from src.runner.initializer import get_initializer, calculate_fan +from src.utils.utils import to_pair +from src.ops.weight_regularzation import spectral_norm + +__all__ = ["Conv2D", "Conv3D", "Conv2DTranspose", "Conv3DTranspose"] + +class _ConvBaseLayer(BaseLayer): + """A base class of convolution layer. + + y = conv(x, weights) + bias + + Properties: + kernel: tensor, conv kernel. + bias: tensor, bias tensor. + """ + def __init__(self, + num_filters, + kernel_size=3, + strides=1, + dilations=1, + use_bias=True, + use_spectral_norm=False, + padding='same', + padding_mode='CONSTANT', + name='_conv_base'): + """ + Initialization function of convolution base class. + + Args: + num_filters: int, number of filters. + kernel_size: int or list[int], the kernel size. + strides: int or list[int], the stride size. + dilations: int or list[int], the kernel dilations. + use_bias: boolean, whether to use bias. Default True. + use_spectral_norm: boolean, whether to use specatral normalization. + Default False. + padding: str or list[int]. If is given list of padding size, the + padding will be 'valid'. One can also pass in str such as + ('same', 'valid'). + padding_mode: str, indicating how to pad, i.e., REFLECT or CONSTANT. + name: str, variable scope name. + """ + self.num_filters = num_filters + self.kernel_size = to_pair(kernel_size, 2) + self.strides = to_pair(strides, 2) + self.dilation = to_pair(dilations, 2) + + self.name = name + self.use_bias = use_bias + self.use_spectral_norm = use_spectral_norm + self.padding = padding + self.padding_mode = padding_mode + + def __call__(self, x): + """ + Execute function of forward. + + Args: + x: tensor, input feature. + + Returns: + tensor, convolved feature. + """ + + # Get the data type of the input. + self.dtype = x.dtype + self.in_channels = x.get_shape().as_list()[-1] + + # Get the weight and bias initializers. + self.kernel_initializer = self.get_kernel_init(x) + self.bias_initializer = self.get_bias_init(x) + + # Apply forward. 
+        with tf.variable_scope(self.name):
+            x = self.apply_padding(x)
+            x = self.forward(x)
+
+        return x
+
+    def apply_padding(self, x):
+        """
+        Apply padding before the convolution. With 'same' padding, the
+        padding is performed by the convolution operator itself.
+
+        Args:
+            x: tensor, input feature map.
+
+        Returns:
+            tensor, padded feature map or the original one.
+        """
+        # explicit padding for conv2d
+        if isinstance(self.padding, (list, tuple)):
+            if len(self.padding) != 2:
+                raise ValueError('Invalid padding size')
+            padding_h, padding_w = self.padding
+            padding_new = ((0,0),
+                           (padding_h, padding_h),
+                           (padding_w, padding_w),
+                           (0,0))
+            x = tf.pad(x, padding_new, mode=self.padding_mode.upper())
+            self.padding = 'Valid'
+        elif self.padding_mode.upper() == 'REFLECT':
+            padding_h = (self.kernel_size[0]//2, self.kernel_size[0]//2)
+            padding_w = (self.kernel_size[1]//2, self.kernel_size[1]//2)
+            padding_new = ((0,0), padding_h, padding_w, (0,0))
+            x = tf.pad(x, padding_new, mode=self.padding_mode.upper())
+            self.padding = 'Valid'
+        return x
+
+    def get_kernel_init(self, x):
+        """
+        Get the kernel initializer. This function is called after the input
+        feature has been passed in. We use the 'kaiming_uniform' initializer.
+
+        Args:
+            x: tensor, input feature map.
+
+        Returns:
+            tensorflow initializer.
+        """
+        kernel_initializer = get_initializer(
+            dict(type='kaiming_uniform', a=math.sqrt(5)),
+            self.in_channels,
+            self.num_filters,
+            self.kernel_size,
+            dtype=self.dtype)
+        return kernel_initializer
+
+    def get_bias_init(self, x):
+        """
+        Get the bias initializer. This function is called after the input
+        feature has been passed in.
+
+        Args:
+            x: tensor, input feature map.
+
+        Returns:
+            tensorflow initializer.
+        """
+        fan = calculate_fan(self.kernel_size, self.in_channels)
+        bound = 1 / math.sqrt(fan)
+        bias_initializer = tf.random_uniform_initializer(
+            -bound,
+            bound,
+            dtype=self.dtype)
+        return bias_initializer
+
+    @property
+    def kernel(self):
+        w = tf.get_variable(
+            "kernel",
+            shape=[*self.kernel_size, self.in_channels, self.num_filters],
+            initializer=self.kernel_initializer,
+            regularizer=None,
+            dtype=self.dtype)
+        if self.use_spectral_norm:
+            w = spectral_norm(w)
+        return w
+
+    @property
+    def bias(self):
+        bias = tf.get_variable(
+            "bias",
+            [self.num_filters],
+            initializer=self.bias_initializer,
+            dtype=self.dtype)
+        return bias
+
+class Conv2D(_ConvBaseLayer):
+    """A 2D convolution class.
+    """
+    def __init__(self,
+                 num_filters,
+                 kernel_size=(3, 3),
+                 strides=(1, 1),
+                 dilations=(1, 1),
+                 use_bias=True,
+                 use_spectral_norm=False,
+                 padding='same',
+                 padding_mode='CONSTANT',
+                 name='Conv2D'):
+        super().__init__(num_filters,
+                         kernel_size,
+                         strides,
+                         dilations,
+                         use_bias,
+                         use_spectral_norm,
+                         padding,
+                         padding_mode,
+                         name)
+
+    def forward(self, x):
+        """
+        Forward computation of the 2D convolution.
+
+        Args:
+            x: tensor, input feature map.
+
+        Returns:
+            tensor
+        """
+        x = tf.nn.conv2d(
+            input=x,
+            filter=self.kernel,
+            strides=[1, *self.strides, 1],
+            padding=self.padding.upper())
+        if self.use_bias:
+            x = tf.nn.bias_add(x, self.bias)
+        return x
+
+
+class Conv2DTranspose(Conv2D):
+    """A 2D transposed convolution class.
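+
+    A shape sketch (assuming NHWC input): with strides=(2, 2), an input of
+    shape [n, h, w, c] yields an output of shape [n, 2*h, 2*w, num_filters],
+    as computed in `forward` below.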
+ """ + def __init__(self, + num_filters, + kernel_size=(3, 3), + strides=(1, 1), + dilations=(1, 1), + use_bias=True, + use_spectral_norm=False, + padding='same', + padding_mode='CONSTANT', + name='Conv2DTranspose'): + super().__init__(num_filters, + kernel_size, + strides, + dilations, + use_bias, + use_spectral_norm, + padding, + padding_mode, + name) + + @property + def kernel(self): + # The kernel shape is (H_ksize, W_ksize, out_channels, in_channels), + # different from Conv2D. + w = tf.get_variable( + "kernel", + shape=[*self.kernel_size, self.num_filters, self.in_channels], + initializer=self.kernel_initializer, + regularizer=None, + dtype=self.dtype) + if self.use_spectral_norm: + w = spectral_norm(w) + return w + + def forward(self, x): + """Forward computation of the convolution transpose 2d. + + Args: + x: tensor, input feature map. + """ + n, h, w, c = x.shape.as_list() + output_shape = [n, + h * self.strides[0], + w * self.strides[1], + self.num_filters] + x = tf.nn.conv2d_transpose( + input=x, + filter=self.kernel, + output_shape=output_shape, + strides=[1, *self.strides, 1], + padding=self.padding.upper()) + if self.use_bias: + x = tf.nn.bias_add(x, self.bias) + return x + + +class Conv3D(_ConvBaseLayer): + """A convolution 3D class. + """ + def __init__(self, + num_filters, + kernel_size=(3, 3, 3), + strides=(1, 1, 1), + dilations=(1, 1, 1), + use_bias=True, + use_spectral_norm=False, + padding='same', + padding_mode='CONSTANT', + name='Conv3D'): + super().__init__(num_filters, + kernel_size, + strides, + dilations, + use_bias, + use_spectral_norm, + padding, + padding_mode, + name) + self.kernel_size = to_pair(kernel_size, 3) + self.strides = to_pair(strides, 3) + self.dilation = to_pair(dilations, 3) + + def apply_padding(self, x): + """Do padding_mode before convolution. + In 'same' padding_mode, the padding_mode will be conducted by + convolution operator itself. + + Args: + x: tensor, input feature map. + + Returns: + tensor, padded feature map or the original one. + """ + # padding_mode for conv3d + if type(self.padding) in [list, tuple]: + if len(self.padding) != 3: + raise ValueError('Invalid padding_mode') + padding_d, padding_h, padding_w = self.padding + padding_new = ((0,0), + (padding_d, padding_d), + (padding_h, padding_h), + (padding_w, padding_w), (0,0)) + self.padding = 'Valid' + x = tf.pad(x, padding_new, mode=self.padding_mode.upper()) + elif self.padding_mode.lower() == 'reflect': + padding_d = (self.kernel_size[0]//2, self.kernel_size[0]//2) + padding_h = (self.kernel_size[1]//2, self.kernel_size[1]//2) + padding_w = (self.kernel_size[2]//2, self.kernel_size[2]//2) + padding_new = ((0,0), padding_d, padding_h, padding_w, (0,0)) + x = tf.pad(x, padding_new, self.padding_mode.upper()) + self.padding = 'Valid' + return x + + def forward(self, x): + """Forward computation of the convolution 3d. + + Args: + x: tensor, input feature map. + """ + x = tf.nn.conv3d( + input=x, + filter=self.kernel, + strides=[1, *self.strides, 1], + padding=self.padding.upper()) + if self.use_bias: + x = tf.nn.bias_add(x, self.bias) + return x + + +class Conv3DTranspose(Conv3D): + """A convolution transpose 3D class. 
+ """ + def __init__(self, + num_filters, + kernel_size=(3, 3, 3), + strides=(1, 1, 1), + dilations=(1, 1, 1), + use_bias=True, + use_spectral_norm=False, + padding='same', + padding_mode='CONSTANT', + name='Conv3DTranspose'): + super().__init__(num_filters, + kernel_size, + strides, + dilations, + use_bias, + use_spectral_norm, + padding, + padding_mode, + name) + + @property + def kernel(self): + # The kernel shape is (H_ksize, W_ksize, out_channels, in_channels), + # different from Conv3D. + w = tf.get_variable( + "kernel", + shape=[*self.kernel_size, self.num_filters, self.in_channels], + initializer=self.kernel_initializer, + regularizer=None, + dtype=self.dtype) + if self.use_spectral_norm: + w = spectral_norm(w) + return w + + def forward(self, x): + """Forward computation of the convolution transpose 3d. + + Args: + x: tensor, input feature map. + """ + n, h, w, c = x.shape.as_list() + output_shape = [n, + h * self.strides[0], + w * self.strides[1], + self.num_filters] + x = tf.nn.conv3d_transpose( + input=x, + filter=self.kernel, + output_shape=output_shape, + strides=[1, *self.strides, 1], + padding=self.padding.upper()) + if self.use_bias: + x = tf.nn.bias_add(x, self.bias) + return x diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/dcn.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/dcn.py new file mode 100644 index 0000000000000000000000000000000000000000..548dced8e5289994c0e19b81e55263ad6139de41 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/dcn.py @@ -0,0 +1,376 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import math + +import tensorflow as tf +import numpy as np + +from .conv import Conv2D +from .base_layer import BaseLayer + +from src.utils.utils import to_pair +from src.utils.logger import logger + + +try: + from npu_bridge.tbe.npu_cube_ops import deformable_conv2d + OP_IMPL = 'npu' +except Exception: + logger.error('Failed to import NPU deformable_conv2d. ' + 'Please use the composed tf operator instead.' + '(This is NOT an actual error)') + OP_IMPL = 'tf' + + + +__all__ = ["DCNPack"] + +class DeformableConvLayer(BaseLayer): + """Deformable convolution layer. + + Args: + in_channels: int, number of channels of the input feature. + out_channels: int, number of channels of the output feature. + kernel_size: int or list[int] or tuple[int], kernel size of the conv + operation. + strides: int or list[int] or tuple[int], strides of the conv. + padding: str, options in ['same', 'valid']. Case insensitive. + dilations: int or list[int] or tuple[int], dilations of the conv. + use_bias: boolean, whether to add bias or not. Default True. + num_groups: int, number of convolution groups. + num_deform_groups: int, number of the groups of the offsets. + trainable: boolean, whether to train the parameters. + impl: str, which operator to use. Options in ['tf', 'npu']. 
If using + 'tf' version, the DCN will be composed of the tensorflow operators, + which may be memory and runtime inefficient. For Ascned platform, + we recommend to use npu deformable_conv2d instead. + """ + def __init__(self, + in_channels, + out_channels, + kernel_size, + strides=1, + padding='valid', + dilations=1, + use_bias=True, + num_groups=1, + num_deform_groups=1, + trainable=True, + impl='tf'): + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = to_pair(kernel_size, 2) + self.strides = to_pair(strides, 2) + self.padding = padding.lower() + self.dilations = to_pair(dilations, 2) + self.use_bias = use_bias + self.num_groups = num_groups + self.num_deform_groups = num_deform_groups + self.trainable = trainable + self.kernel_intermediate_shape = [] + self.build() + self.impl = impl + + def build(self): + """Prepare the weights and bias. + """ + n = self.in_channels + for k in self.kernel_size: + n *= k + stdv = 1. / math.sqrt(n) + initializer = tf.random_uniform_initializer(-stdv, stdv) + + self.kernel_intermediate_shape = [*self.kernel_size, self.in_channels//self.num_groups, self.out_channels//self.num_groups, self.num_groups] + + self.kernel = tf.get_variable( + "W", + [*self.kernel_size, self.in_channels//self.num_groups, self.out_channels], + initializer=initializer, + trainable=self.trainable) + if self.use_bias: + self.bias = tf.get_variable( + "b", + (self.out_channels,), + initializer=tf.constant_initializer(value=0.0), + trainable=self.trainable) + + def _cal_pads(self, ih, iw): + """Calculation padding given the input. + """ + if self.padding == 'same': + strh, strw = self.strides + kh, kw = self.kernel_size + dilh, dilw = self.dilations + tails_h = ih % strh + tails_w = iw % strw + dkh = dilh * (kh - 1) + 1 + dkw = dilw * (kw - 1) + 1 + pad_h = dkh - tails_h if tails_h > 0 else dkh - strh + pad_w = dkw - tails_w if tails_w > 0 else dkw - strw + pads = [pad_h // 2, pad_h // 2 + pad_h % 2, pad_w // 2, pad_w // 2 + pad_w % 2] + else: + pads = [0, 0, 0, 0] + return pads + + def forward(self, inputs, offset): + """Deformable Conv2d forward function. + """ + if self.impl == 'npu' and OP_IMPL == 'npu': + return self._forward_npu(inputs, offset) + else: + return self._forward_tf(inputs, offset) + + def _forward_npu(self, inputs, offset): + """Forward function of NPU deformable operator. + """ + _, ih, iw, _ = inputs.get_shape().as_list() + c = offset.get_shape().as_list()[3] + assert c == self.num_deform_groups*self.kernel_size[0]*self.kernel_size[1]*3 + offset_all = offset + + pads = self._cal_pads(ih, iw) + out = deformable_conv2d( + inputs, + self.kernel, + offset_all, + strides=[1] + list(self.strides) + [1], + pads=pads, + data_format='NHWC', + dilations=[1] + list(self.dilations) + [1], + groups=self.num_groups, + deformable_groups=self.num_deform_groups) + + if self.use_bias: + out = tf.nn.bias_add(out, self.bias) + return out + + def _forward_tf(self, inputs, offset): + """Forward function of tf composed deformable operator. + """ + def _get_in_bound_mask(x_, y_): + out_of_bound_x = tf.logical_or(tf.greater(x_, in_w-1), tf.less(x_, 0)) + out_of_bound_y = tf.logical_or(tf.greater(y_, in_h-1), tf.less(y_, 0)) + out_of_bound_mask = tf.logical_or(out_of_bound_x, out_of_bound_y) + return 1. 
- tf.to_float(out_of_bound_mask) + + inputs = self._pad_input(inputs) + bs, in_h, in_w, _ = inputs.get_shape().as_list() + bs, out_h, out_w, c = offset.get_shape().as_list() + + assert c == self.num_deform_groups*self.kernel_size[0]*self.kernel_size[1]*3 + c3 = c // 3 + + # get x, y axis offset. Swap the order to 'x,y' instead of 'y,x', align with npu dcn op + x_off = offset[:, :, :, :c3] + y_off = offset[:, :, :, c3:c3*2] + mask = offset[:, :, :, c3*2:] + + # input feature map gird coordinates + y, x = self._get_conv_indices(in_h, in_w) + y, x = [tf.to_float(i) for i in [y, x]] + y, x = [tf.tile(i, [1, 1, 1, self.num_deform_groups]) for i in [y, x]] + + # current deformable offsets + y, x = y + y_off, x + x_off + + # get four coordinates of points around (x, y) + y0, x0 = [tf.to_int32(tf.floor(i)) for i in [y, x]] + y1, x1 = y0 + 1, x0 + 1 + + # according to the strategy, prepare in_bound mask if use zero. + # In fact, gathernd NPU will take 0 if the index is out-of-bound, + # while CPU will throw an error. Therefore, do an explicit masking + m0 = _get_in_bound_mask(x0, y0) + m1 = _get_in_bound_mask(x1, y0) + m2 = _get_in_bound_mask(x0, y1) + m3 = _get_in_bound_mask(x1, y1) + + y_res = y - tf.to_float(y0) + x_res = x - tf.to_float(x0) + + w0_ori = (1. - y_res) * (1. - x_res) + w1_ori = (1. - y_res) * x_res + w2_ori = y_res * (1. - x_res) + w3_ori = y_res * x_res + + # clip the indices + y0_clip, y_clip, y1_clip = [tf.clip_by_value(i, 0, in_h - 1) for i in [y0, y, y1]] + x0_clip, x_clip, x1_clip = [tf.clip_by_value(i, 0, in_w - 1) for i in [x0, x, x1]] + + # get pixel values + indices = [[y0_clip, x0_clip], [y0_clip, x1_clip], [y1_clip, x0_clip], [y1_clip, x1_clip]] + p0, p1, p2, p3 = [self._get_pixel_values_at_point(inputs, i) for i in indices] + + # cast to float + x0_clip, x_clip, x1_clip, y0_clip, y_clip, y1_clip = [tf.to_float(i) for i in + [x0_clip, x_clip, x1_clip, y0_clip, y_clip, y1_clip]] + + # weights + w0 = m0 * w0_ori + w1 = m1 * w1_ori + w2 = m2 * w2_ori + w3 = m3 * w3_ori + + w0, w1, w2, w3 = [tf.reshape(i, [*i.get_shape()[:3], self.num_deform_groups, *self.kernel_size, 1]) + for i in [w0, w1, w2, w3]] + + # bilinear interpolation + pixels = tf.add_n([w0 * p0, w1 * p1, w2 * p2, w3 * p3]) + + if mask is not None: + pixels = tf.reshape(mask, [*mask.get_shape()[:3], self.num_deform_groups, *self.kernel_size, 1]) * pixels + + # reshape the "big" feature map + pixels = tf.transpose(pixels, [0,1,4,2,5,3,6]) + pixels = tf.reshape(pixels, [bs, out_h*self.kernel_size[0], out_w*self.kernel_size[1], -1]) + + # conv + kernel_reshaped = tf.reshape(self.kernel, self.kernel_intermediate_shape) + ich = pixels.shape[-1] // self.num_groups + out = tf.concat([tf.nn.conv2d( + pixels[:, :, :, i*ich:(i+1)*ich], + kernel_reshaped[:, :, :, :, i], + strides=self.kernel_size, + padding='VALID', + ) + for i in range(self.num_groups)], axis=-1) + + if self.use_bias: + out = tf.nn.bias_add(out, self.bias) + + return out + + def _pad_input(self, x): + """Pad the input before calculating the offsets. 
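+
+        For example (assuming kernel_size=(3, 3), strides=(1, 1),
+        dilations=(1, 1) and padding='same'), `_cal_pads` returns
+        [1, 1, 1, 1], i.e. one pixel of padding on each side.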
+ """ + if self.padding == 'same': + ih, iw = x.get_shape().as_list()[1:3] + pads = self._cal_pads(ih, iw) + + if pads[0] + pads[1] + pads[2] + pads[3] != 0: + x = tf.pad(x, [[0, 0]] + [pads[:2]] + [pads[2:]] + [[0, 0]]) + + return x + + def _get_conv_indices(self, feat_h, feat_w): + """Get the x, y coordinates in the window when a filter sliding on the + feature map + """ + + x, y = tf.meshgrid(tf.range(feat_w), tf.range(feat_h)) + x, y = [tf.reshape(i, [1, *i.get_shape(), 1]) for i in [x, y]] # shape [1, h, w, 1] + x, y = [tf.image.extract_image_patches(i, + [1, *self.kernel_size, 1], + [1, *self.strides, 1], + [1, *self.dilations, 1], + 'VALID') + for i in [x, y]] # shape [1, out_h, out_w, filter_h * filter_w] + return y, x + + def _get_pixel_values_at_point(self, inputs, indices): + """Get pixel values at the given point. + """ + y, x = indices + bs, h, w, c = y.get_shape().as_list()[0: 4] + + if c % self.num_deform_groups != 0 or inputs.shape[-1] % self.num_deform_groups != 0: + raise ValueError + + per_group_offset_ch = c // self.num_deform_groups # kh*kw + per_group_input_ch = inputs.shape[-1] // self.num_deform_groups + batch_idx = tf.reshape(tf.range(0, bs), (bs, 1, 1, 1)) + b = tf.tile(batch_idx, (1, h, w, per_group_offset_ch)) + + outs = [] + for j in range(self.num_deform_groups): + pixel_idx = tf.stack([b, y[:, :, :, j*per_group_offset_ch:(j+1)*per_group_offset_ch], + x[:, :, :, j*per_group_offset_ch:(j+1)*per_group_offset_ch]], axis=-1) # [bs, h, w, per_group_offset_ch, 3] + outs.append(tf.gather_nd(inputs[:, :, :, j*per_group_input_ch:(j+1)*per_group_input_ch], pixel_idx)) + outs = tf.concat(outs, axis=-1) # [bs, h, w, per_group_offset_ch, cin] + + # reshape and transpose the outputs in order to align with the outer axis order + outs = tf.reshape(outs, [*outs.shape[:3], *self.kernel_size, self.num_deform_groups, -1]) + return tf.transpose(outs, [0,1,2,5,3,4,6]) + + +class DCNPack: + def __init__(self, + out_channels, + kernel_size=(3, 3), + strides=(1, 1), + padding='same', + dilations=(1, 1), + use_bias=True, + num_groups=1, + num_deform_groups=1, + name='DCN', + impl='npu'): + self.out_channels = out_channels + self.kernel_size = kernel_size + self.strides = strides + self.padding = padding + self.dilations = dilations + self.use_bias = use_bias + self.num_groups = num_groups + self.num_deform_groups = num_deform_groups + self.name = name + self.impl = impl + + def __call__(self, x, extra_feat): + with tf.variable_scope(self.name): + x = tf.cast(x, tf.float32) + + n_elem = (self.num_deform_groups + * self.kernel_size[0] + * self.kernel_size[1]) + + num_offset_channels = n_elem * 3 + + conv_offset = Conv2D(num_offset_channels, + kernel_size=self.kernel_size, + strides=self.strides, + padding=self.padding, + dilations=self.dilations, + use_bias=self.use_bias, + name='conv_offset')(extra_feat) + + conv_offset = tf.cast(conv_offset, tf.float32) + + # Get the modulation + modulation = tf.nn.sigmoid(conv_offset) + offset = conv_offset + + # Prepare a masking + weight = np.ones((1, 1, 1, num_offset_channels)).astype(np.float32) + weight[..., n_elem*2:] = 0. + weight = tf.convert_to_tensor(weight) + + # Make the n_elem*2 channels the offsets, the last n_elem channels + # the modulation. + input_offset_mask = weight * offset + (1. 
- weight) * modulation + + out = DeformableConvLayer( + in_channels=int(x.shape[-1]), + out_channels=self.out_channels, + kernel_size=self.kernel_size, + strides=self.strides, + padding=self.padding, + dilations=self.dilations, + use_bias=self.use_bias, + num_groups=self.num_groups, + num_deform_groups=self.num_deform_groups, + impl=self.impl)(x, input_offset_mask) + + return out diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/dropout.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/dropout.py new file mode 100644 index 0000000000000000000000000000000000000000..4dd5c9f65866828e2ed4791d2d8624411762b47b --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/dropout.py @@ -0,0 +1,60 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import tensorflow as tf + +from src.layers.base_layer import BaseLayer +from src.utils.logger import logger + +try: + from npu_bridge.estimator import npu_ops + OP_IMPL = 'npu' +except Exception: + logger.error('Failed to import NPU dropout. Please use the composed tf operator instead.') + OP_IMPL = tf + + +__all__ = ["Dropout"] + +class Dropout(BaseLayer): + """Dropout layer. + + Use NPU high performance operator if possible. + + Args: + keep_prob: float, ranged in [0, 1], specifying the keeping probability + of the feature point. + """ + def __init__(self, keep_prob=0.1, name=None): + self.name = name + self.keep_prob = keep_prob + + def forward(self, input_tensor, training=False): + """Perform dropout. + """ + if not training: + return input_tensor + + with tf.variable_scope(self.name, reuse=tf.AUTO_REUSE): + if OP_IMPL == tf: + output = tf.nn.dropout(input_tensor, self.keep_prob) + else: + if self.keep_prob is None or self.keep_prob == 1.0: + return input_tensor + + ##################modify for npu###################### + # Modify dropout for high performance + output = npu_ops.dropout(input_tensor, self.keep_prob) + ##################npu modify end###################### + return output diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/linear.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/linear.py new file mode 100644 index 0000000000000000000000000000000000000000..e02a2ce47655be7897a2845eae571e7b8a7f5ad8 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/linear.py @@ -0,0 +1,135 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import math
+import tensorflow as tf
+
+from .base_layer import BaseLayer
+
+from src.ops.weight_regularzation import spectral_norm
+from src.runner.initializer import get_initializer, calculate_fan
+
+
+__all__ = ["Linear"]
+
+class Linear(BaseLayer):
+    """A linear layer.
+
+    y = x*weights + bias
+
+    Args:
+        num_filters: int, number of output units of the linear layer.
+        use_bias: boolean, whether to apply bias. Default True.
+        name: str, layer scope name.
+        use_spectral_norm: boolean, whether to use spectral normalization
+            on the weights. Default False.
+        trainable: boolean, whether the weights and bias are trainable.
+
+    Attributes:
+        kernel: tensor, linear kernel.
+        bias: tensor, bias tensor.
+    """
+    def __init__(self, num_filters, use_bias=True,
+                 name='Linear', use_spectral_norm=False,
+                 trainable=True):
+        self.num_filters = num_filters
+        self.use_bias = use_bias
+        self.name = name
+        self.use_spectral_norm = use_spectral_norm
+        self.trainable = trainable
+
+    def get_kernel_init(self, x):
+        """Get the kernel initializer.
+        This function is called once the input feature (and thus in_channels)
+        is known. We use the 'kaiming_uniform' initializer.
+        """
+        kernel_initializer = get_initializer(
+            dict(type='kaiming_uniform', a=math.sqrt(5)),
+            self.in_channels,
+            self.num_filters,
+            (1, ),
+            dtype=self.dtype)
+        return kernel_initializer
+
+    def get_bias_init(self, x):
+        """Get the bias initializer.
+        This function is called once the input feature is known.
+        """
+        fan = calculate_fan((1, ), self.in_channels)
+        bound = 1 / math.sqrt(fan)
+        bias_initializer = tf.random_uniform_initializer(
+            -bound,
+            bound,
+            dtype=self.dtype)
+        return bias_initializer
+
+    @property
+    def kernel(self):
+        # A dense kernel has shape [in_channels, num_filters].
+        w = tf.get_variable(
+            "kernel",
+            shape=[self.in_channels, self.num_filters],
+            initializer=self.kernel_initializer,
+            regularizer=None,
+            dtype=self.dtype)
+        if self.use_spectral_norm:
+            w = spectral_norm(w)
+        return w
+
+    @property
+    def bias(self):
+        bias = tf.get_variable(
+            "bias",
+            [self.num_filters],
+            initializer=self.bias_initializer,
+            dtype=self.dtype)
+        return bias
+
+    def __call__(self, x):
+        """Execution entry of the forward pass.
+
+        Args:
+            x: tensor, input feature.
+
+        Returns:
+            tensor, output feature.
+        """
+
+        # Get the data type of the input.
+        self.dtype = x.dtype
+        self.in_channels = x.get_shape().as_list()[-1]
+
+        # Get the weight and bias initializers.
+        self.kernel_initializer = self.get_kernel_init(x)
+        self.bias_initializer = self.get_bias_init(x)
+
+        # Apply forward.
+        with tf.variable_scope(self.name):
+            x = self.forward(x)
+
+        return x
+
+    def forward(self, x):
+        """Forward computation of the linear layer.
+ """ + x = tf.layers.dense( + x, + units=self.num_filters, + use_bias=self.use_bias, + kernel_initializer=self.kernel_initializer, + bias_initializer=self.bias_initializer, + trainable=self.trainable, + name=self.name, + ) + if self.use_bias: + x = tf.nn.bias_add(x, self.bias) + return x diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/norm.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/norm.py new file mode 100644 index 0000000000000000000000000000000000000000..cc33db42e42dfeb2248cfe0f9b331878108485c5 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/layers/norm.py @@ -0,0 +1,112 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import tensorflow as tf +import tensorflow.contrib.slim as slim + +from .base_layer import BaseLayer + + +__all__ = ['NormLayer'] + +EPSILON = 1e-5 +DECAY = 0.99 + +def batch_norm(x, center=True, scale=True, is_train=True): + """Batch normalization function. + + y = (x - \mu) / \sigma * \gamma + \beta + + Args: + x: tensor, input feature map. + center: boolean, whether to use bias, i.e. beta. + scale: bollean, whether to use affine parameters, i.e. \gamma. + is_train: boolean, whether to update the buffers (moving average and var). + + Returns: + tensor, normalized feature map whose shape is the same with input. + """ + output = slim.batch_norm(x, decay=DECAY, center=center, scale=scale, + epsilon=EPSILON, updates_collections=tf.GraphKeys.UPDATE_OPS, + fused=False, is_training=is_train) + + return output + + +def instance_norm(x, center=True, scale=True, is_train=True): + """Apply instance normalization. + + Args: + x: tensor, input feature map. + center: boolean, whether to use bias, i.e. beta. + scale: bollean, whether to use affine parameters, i.e. \gamma. + is_train: boolean, whether to update the buffers (moving average and var). + + Returns: + tensor, normalized feature map whose shape is the same with input. + """ + return slim.instance_norm(x, center=center, scale=scale, epsilon=EPSILON, + trainable=is_train) + + +def layer_norm(x, center=True, scale=True, is_train=True): + """Apply layer normalization. + + Args: + x: tensor, input feature map. + center: boolean, whether to use bias, i.e. beta. + scale: bollean, whether to use affine parameters, i.e. \gamma. + is_train: boolean, whether to update the buffers (moving average and var). + + Returns: + tensor, normalized feature map whose shape is the same with input. + """ + return slim.layer_norm(x, center=True, scale=True, trainable=is_train) + + +NORM_FUNC = { + "bn": batch_norm, + "in": instance_norm, + "ln": layer_norm, +} + +class NormLayer(BaseLayer): + """ + Normalization layer class. + + Args: + norm_type: str, specifying the type of the norm layer. Possible choices: + ('bn', 'ln', 'in'). + center: boolean, whether to use bias, i.e. beta. + scale: bollean, whether to use affine parameters, i.e. 
+class NormLayer(BaseLayer):
+    """Normalization layer class.
+
+    Args:
+        norm_type: str, specifying the type of the norm layer. Possible choices:
+            ('bn', 'ln', 'in').
+        center: boolean, whether to use bias, i.e. \beta.
+        scale: boolean, whether to use affine parameters, i.e. \gamma.
+        is_train: boolean, whether to update the buffers (moving average and var).
+
+    Raises:
+        ValueError, if norm_type is not in ('bn', 'ln', 'in').
+    """
+    def __init__(self, norm_type, center=True, scale=True, is_train=True):
+        super(NormLayer, self).__init__()
+        if norm_type not in NORM_FUNC:
+            raise ValueError(f"Supported normalization layer types: {list(NORM_FUNC.keys())}, "
+                             f"but given {norm_type}")
+        self.fn = NORM_FUNC[norm_type]
+        self.is_train = is_train
+        self.center = center
+        self.scale = scale
+
+    def forward(self, x):
+        return self.fn(x,
+                       center=self.center,
+                       scale=self.scale,
+                       is_train=self.is_train)
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/losses.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/losses.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d8dd229167a6912927c4a59909f023e1f3abf23
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/losses.py
@@ -0,0 +1,218 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import tensorflow as tf
+
+from src.utils.logger import logger
+from src.utils.klass import get_subclass_given_name
+
+
+def get_loss(loss_type, pred, groundtruth, weight_map=None, **kwargs):
+    """Get the corresponding loss tensor given the loss type and the data pair.
+
+    Args:
+        loss_type: str, type of loss class.
+        pred: tensor, the predictions.
+        groundtruth: tensor, the target tensor.
+        weight_map: tensor or None. If given, the loss will be weighted.
+
+    Returns:
+        tensor, whose shape is the same as pred and groundtruth.
+    """
+    try:
+        klass = get_subclass_given_name(BaseLoss, loss_type)
+    except IndexError:
+        raise ValueError(f'Cannot find loss type {loss_type}.')
+    # Forward the optional weight_map to the loss instance.
+    return klass()(pred, groundtruth, weight_map=weight_map, **kwargs)
+
+
+class BaseLoss(object):
+    def __call__(self, pred, groundtruth, **kwargs):
+        raise NotImplementedError
+
+
+class L1Loss(BaseLoss):
+    """Pixelwise L1-loss.
+
+    Args:
+        pred: tensor, the predictions.
+        groundtruth: tensor, the target tensor.
+        weight_map: tensor or None. If given, the loss will be weighted.
+
+    Returns:
+        tensor, whose shape is the same as pred and groundtruth.
+    """
+    def __call__(self, pred, groundtruth, weight_map=None, **kwargs):
+        loss = tf.abs(pred - groundtruth)
+        if weight_map is not None:
+            loss = loss * weight_map
+        return loss
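+
+
+# Illustrative only (an assumption, not part of the original API): the
+# pixelwise maps returned by these classes still need a reduction to become
+# a scalar training loss, e.g.:
+def _example_scalar_l1_loss(pred, groundtruth, weight_map=None):
+    """Sketch: mean-reduced, optionally weighted L1 loss."""
+    loss_map = get_loss('L1Loss', pred, groundtruth, weight_map=weight_map)
+    return tf.reduce_mean(loss_map)
+
+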
+class MarginalL1Loss(BaseLoss):
+    """Pixelwise L1-loss with margins.
+
+    Args:
+        pred: tensor, the predictions.
+        groundtruth: tensor, the target tensor.
+        eps: scalar, a small margin; absolute residuals below eps are clipped to eps.
+        weight_map: tensor or None. If given, the loss will be weighted.
+
+    Returns:
+        tensor, whose shape is the same as pred and groundtruth.
+    """
+    def __call__(self, pred, groundtruth, eps=1e-6, weight_map=None, **kwargs):
+
+        loss = tf.maximum(tf.abs(pred - groundtruth), eps)
+        if weight_map is not None:
+            loss = loss * weight_map
+        return loss
+
+
+class L2Loss(BaseLoss):
+    """Pixelwise L2-loss.
+
+    Args:
+        pred: tensor, the predictions.
+        groundtruth: tensor, the target tensor.
+        weight_map: tensor or None. If given, the loss will be weighted.
+
+    Returns:
+        tensor, whose shape is the same as pred and groundtruth.
+    """
+    def __call__(self, pred, groundtruth, weight_map=None, **kwargs):
+        loss = tf.square(pred - groundtruth)
+        if weight_map is not None:
+            loss = loss * weight_map
+        return loss
+
+
+class HuberLoss(BaseLoss):
+    """Pixelwise Huber loss, a.k.a. the smooth-l1 loss.
+
+    Args:
+        pred: tensor, the predictions.
+        groundtruth: tensor, the target tensor.
+        delta: scalar, threshold at which the loss changes from l2 to l1 behavior.
+        weight_map: tensor or None. If given, the loss will be weighted.
+
+    Returns:
+        tensor, whose shape is the same as pred and groundtruth.
+    """
+    def __call__(self, pred, groundtruth, delta=1., weight_map=None, **kwargs):
+        res = pred - groundtruth
+        # Quadratic inside [-delta, delta], linear outside.
+        mask = tf.cast(tf.less(tf.abs(res), delta), tf.float32)
+        lesser_region = 0.5 * tf.square(res)
+        greater_region = delta * tf.abs(res) - 0.5 * delta**2
+        loss = mask * lesser_region + (1. - mask) * greater_region
+        if weight_map is not None:
+            loss = loss * weight_map
+        return loss
+
+
+# Alias smooth l1 loss.
+SmoothL1Loss = HuberLoss
+
+
+class CharbonnierLoss(BaseLoss):
+    """Pixelwise Charbonnier loss. A variant of L1-loss.
+
+    Args:
+        pred: tensor, the predictions.
+        groundtruth: tensor, the target tensor.
+        eps: scalar, a small value to avoid inf or nan during sqrt.
+        weight_map: tensor or None. If given, the loss will be weighted.
+
+    Returns:
+        tensor, whose shape is the same as pred and groundtruth.
+    """
+    def __call__(self, pred, groundtruth, eps=1e-6, weight_map=None, **kwargs):
+
+        loss = tf.sqrt((pred - groundtruth) ** 2 + eps)
+        if weight_map is not None:
+            loss = loss * weight_map
+        return loss
+
+
+class MSELoss(BaseLoss):
+    """Pixelwise mse loss.
+
+    Args:
+        pred: tensor, the predictions.
+        groundtruth: tensor, the target tensor.
+        weight_map: tensor or None. If given, the loss will be weighted.
+
+    Returns:
+        tensor, whose shape is the same as pred and groundtruth.
+    """
+    def __call__(self, pred, groundtruth, weight_map=None, **kwargs):
+
+        loss = tf.square(groundtruth - pred)
+        if weight_map is not None:
+            loss = loss * weight_map
+
+        return loss
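+
+
+# For reference, the focal loss below implements
+#     FL(p_t) = -alpha * (1 - p_t)^gamma * log(p_t),
+# where p_t = p for positive targets and 1 - p otherwise. Worked example
+# with alpha=0.25, gamma=2: a well-classified point with p_t = 0.9 gives
+# FL = -0.25 * 0.1**2 * log(0.9) ~= 2.6e-4, about 100x smaller than the
+# alpha-weighted cross-entropy -0.25 * log(0.9) ~= 2.6e-2.
+
+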
+class FocalLoss(BaseLoss):
+    """Pixelwise focal loss. See https://arxiv.org/pdf/1708.02002.pdf
+
+    Args:
+        pred: tensor, the predictions.
+        groundtruth: tensor, the target tensor.
+        alpha: scalar, a small balance value. Default 0.25 as in the paper.
+        gamma: scalar, focusing parameter which is greater than 0. Default 2.
+        eps: scalar, a small value to avoid nan in tf.log.
+        weight_map: tensor or None. If given, the loss will be weighted.
+
+    Returns:
+        tensor, whose shape is the same as pred and groundtruth.
+    """
+    def __call__(self, pred, groundtruth, alpha=0.25, gamma=2, eps=1e-6,
+                 weight_map=None, **kwargs):
+
+        # `groundtruth` is expected to be a boolean tensor marking positives.
+        pt = tf.where(groundtruth, pred, 1. - pred)
+        loss = - alpha * tf.pow(1. - pt, gamma) * tf.log(tf.maximum(pt, eps))
+        if weight_map is not None:
+            loss = loss * weight_map
+        return loss
+
+
+class CosineDistanceLoss(BaseLoss):
+    """Pixelwise cosine distance loss.
+
+    Args:
+        pred: tensor, the predictions.
+        groundtruth: tensor, the target tensor.
+        axis: int, which axis to do the normalization along.
+        eps: scalar, a small value to avoid nan in tf.sqrt.
+        weight_map: tensor or None. If given, the loss will be weighted.
+
+    Returns:
+        tensor, whose shape is the same as pred and groundtruth.
+    """
+    def __call__(self, pred, groundtruth, axis=-1, eps=1e-6, weight_map=None,
+                 **kwargs):
+
+        prod = pred * groundtruth
+        prod = tf.reduce_sum(prod, axis=axis, keepdims=True)
+        pred_norm = tf.reduce_sum(tf.square(pred), axis=axis, keepdims=True)
+        gt_norm = tf.reduce_sum(tf.square(groundtruth), axis=axis, keepdims=True)
+        norm_scale = tf.sqrt(pred_norm * gt_norm + eps)
+        loss = 1. - prod / norm_scale
+        if weight_map is not None:
+            loss = loss * weight_map
+        return loss
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/adversarial.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/adversarial.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e5b60ffcf05d34070bdb8389a4a58da3b0c3306
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/adversarial.py
@@ -0,0 +1,303 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import numpy as np
+import tensorflow as tf
+
+from src.losses.modules.gan import get_gan
+from src.utils.klass import get_subclass_given_name
+from src.utils.logger import logger
+from src.runner.common import name_space
+
+
+def build_adversarial_loss(hq, gt, cfg):
+    """hq and gt are both in shape [b*num_out_frames, h, w, c].
+    """
+    num_out_frames = cfg.data.num_data_gt_frames
+    adv_loss_type = cfg.loss.adversarial.loss_type
+    try:
+        loss_model = get_subclass_given_name(_BaseAdvLoss,
+                                             adv_loss_type)(cfg)
+    except IndexError:
+        logger.error(f'Cannot find adversarial loss type {adv_loss_type}.')
+        raise ValueError()
+
+    hr_shape = gt.get_shape().as_list()
+
+    # Check whether a 3D network is used and reshape the tensors to 5D or 4D
+    # accordingly.
+    if '3D' in cfg.loss.adversarial.gan_type and len(hr_shape) == 4:
+        gt = tf.reshape(gt, [-1, num_out_frames, *hr_shape[1:]])
+        hq = tf.reshape(hq, [-1, num_out_frames, *hr_shape[1:]])
+    elif (not '3D' in cfg.loss.adversarial.gan_type) and len(hr_shape) != 4:
+        gt = tf.reshape(gt, [-1, *hr_shape[2:]])
+        hq = tf.reshape(hq, [-1, *hr_shape[2:]])
+
+    return loss_model(real=gt, fake=hq)
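+
+
+# Usage sketch (illustrative; `sr` and `gt` are the generator output and the
+# target frames, and `cfg` carries the loss.adversarial.* fields used above):
+#
+#   d_loss, g_adv_loss = build_adversarial_loss(sr, gt, cfg)
+#   # d_loss drives the discriminator update; g_adv_loss is the adversarial
+#   # term added to the generator objective.
+
+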
+class _BaseAdvLoss:
+    """Base adversarial loss class.
+    All the adversarial losses are derived from this base class.
+
+    After the real and fake data points are forwarded through the
+    discriminator, the logits are used to calculate the losses.
+    """
+    def __init__(self, cfg):
+        reduction = cfg.loss.adversarial.loss_reduction
+        self.discriminator = get_gan(cfg)
+        self.reduction = reduction
+        self.cfg = cfg
+
+    def __call__(self, real, fake):
+        """Forward the real and fake samples through the discriminator and
+        calculate the losses.
+
+        Args:
+            real: tensor, 4D or 5D tensor for real samples.
+            fake: tensor, the same shape as real. Fake generated samples.
+
+        Returns:
+            real_loss: scalar tensor, loss for the real sample.
+            fake_loss: scalar tensor, loss for the fake sample.
+        """
+
+        # Forward through the discriminator to get the logits.
+        fake_logit = self.discriminator(fake)
+        real_logit = self.discriminator(real)
+
+        # Calculate the losses.
+        real_loss, fake_loss = self.forward(real_logit, fake_logit)
+
+        # Cast to fp32 before reduction in case of precision loss on Ascend.
+        real_loss = tf.cast(real_loss, tf.float32)
+        fake_loss = tf.cast(fake_loss, tf.float32)
+
+        if self.reduction == 'mean':
+            reduction_fn = tf.reduce_mean
+        elif self.reduction == 'sum':
+            reduction_fn = tf.reduce_sum
+        else:
+            raise NotImplementedError
+
+        # Apply the loss weights before output.
+        real_loss = reduction_fn(real_loss) * self.cfg.loss.adversarial.loss_weight
+        fake_loss = reduction_fn(fake_loss) * self.cfg.loss.adversarial.loss_weight
+
+        name_space.add_to_collection(name_space.DiscriminatorLoss,
+                                     'discriminator', real_loss)
+        name_space.add_to_collection(name_space.GeneratorLoss,
+                                     'adversarial', fake_loss)
+
+        return real_loss, fake_loss
+
+    def forward(self, real_logit, fake_logit):
+        raise NotImplementedError
+
+
+class VanillaAdvLoss(_BaseAdvLoss):
+    """Vanilla adversarial loss, i.e.
+        loss_d = - E(log D) - E(log(1 - D(G)))
+        loss_g = - E(log D(G))
+    """
+    def forward(self, real_logit, fake_logit):
+        fake_loss = tf.nn.sigmoid_cross_entropy_with_logits(
+            logits=fake_logit,
+            labels=tf.ones_like(fake_logit))
+        real_loss = tf.nn.sigmoid_cross_entropy_with_logits(
+            logits=real_logit,
+            labels=tf.ones_like(real_logit)) \
+            + tf.nn.sigmoid_cross_entropy_with_logits(
+                logits=fake_logit,
+                labels=tf.zeros_like(fake_logit))
+
+        return real_loss, fake_loss
+
+
+class HingeAdvLoss(_BaseAdvLoss):
+    """Hinge adversarial loss, i.e.
+        loss_d = E(max(0, 1 - D)) + E(max(0, 1 + D(G)))
+        loss_g = - E(D(G))
+    """
+    def forward(self, real_logit, fake_logit):
+        fake_loss = - fake_logit
+        real_loss = tf.nn.relu(1.0 - real_logit) + tf.nn.relu(1.0 + fake_logit)
+        return real_loss, fake_loss
+
+
+class RSAdvLoss(VanillaAdvLoss):
+    """Relativistic standard adversarial loss, i.e.
+        loss_d = - E(log sigmoid(D - D(G)))
+        loss_g = - E(log sigmoid(D(G) - D))
+    """
+    def forward(self, real_logit, fake_logit):
+        fake_loss = tf.nn.sigmoid_cross_entropy_with_logits(
+            logits=fake_logit,
+            labels=tf.ones_like(fake_logit))
+        real_loss = tf.nn.sigmoid_cross_entropy_with_logits(
+            logits=real_logit,
+            labels=tf.ones_like(real_logit))
+
+        return real_loss, fake_loss
+
+    def __call__(self, real, fake):
+        fake_logit = self.discriminator(fake)
+        real_logit = self.discriminator(real)
+        # Forward on the logit differences.
+        real_loss, fake_loss = self.forward(real_logit - fake_logit,
+                                            fake_logit - real_logit)
+
+        real_loss = tf.cast(real_loss, tf.float32)
+        fake_loss = tf.cast(fake_loss, tf.float32)
+
+        if self.reduction == 'mean':
+            reduction_fn = tf.reduce_mean
+        elif self.reduction == 'sum':
+            reduction_fn = tf.reduce_sum
+        else:
+            raise NotImplementedError
+
+        real_loss = reduction_fn(real_loss)
+        fake_loss = reduction_fn(fake_loss) * self.cfg.loss.adversarial.loss_weight
+
+        name_space.add_to_collection(name_space.DiscriminatorLoss,
+                                     'discriminator', real_loss)
+        name_space.add_to_collection(name_space.GeneratorLoss,
+                                     'adversarial', fake_loss)
+
+        return real_loss, fake_loss
+
+
+class RaSAdvLoss(VanillaAdvLoss):
+    """Relativistic average adversarial loss, i.e.
+        loss_d = - E(log sigmoid(D - E(D(G)))) - E(log(1 - sigmoid(D(G) - E(D))))
+        loss_g = - E(log sigmoid(D(G) - E(D))) - E(log(1 - sigmoid(D - E(D(G)))))
+    """
+    def forward(self, real_logit, fake_logit):
+        fake_loss = tf.nn.sigmoid_cross_entropy_with_logits(logits=fake_logit,
+                                                            labels=tf.ones_like(fake_logit)) \
+            + tf.nn.sigmoid_cross_entropy_with_logits(logits=real_logit,
+                                                      labels=tf.zeros_like(fake_logit))
+        real_loss = tf.nn.sigmoid_cross_entropy_with_logits(logits=real_logit,
+                                                            labels=tf.ones_like(real_logit)) \
+            + tf.nn.sigmoid_cross_entropy_with_logits(logits=fake_logit,
+                                                      labels=tf.zeros_like(fake_logit))
+
+        return real_loss, fake_loss
+
+    def __call__(self, real, fake):
+        fake_logit = self.discriminator(fake)
+        real_logit = self.discriminator(real)
+        # Compare each logit against the batch average of the other side.
+        real_loss, fake_loss = self.forward(real_logit - tf.reduce_mean(fake_logit, axis=0, keepdims=True),
+                                            fake_logit - tf.reduce_mean(real_logit, axis=0, keepdims=True))
+
+        real_loss = tf.cast(real_loss, tf.float32)
+        fake_loss = tf.cast(fake_loss, tf.float32)
+
+        if self.reduction == 'mean':
+            reduction_fn = tf.reduce_mean
+        elif self.reduction == 'sum':
+            reduction_fn = tf.reduce_sum
+        else:
+            raise NotImplementedError
+
+        real_loss = reduction_fn(real_loss)
+        fake_loss = reduction_fn(fake_loss) * self.cfg.loss.adversarial.loss_weight
+
+        name_space.add_to_collection(name_space.DiscriminatorLoss,
+                                     'discriminator', real_loss)
+        name_space.add_to_collection(name_space.GeneratorLoss,
+                                     'adversarial', fake_loss)
+
+        return real_loss, fake_loss
+
+
+class LSAdvLoss(_BaseAdvLoss):
+    """Least-square adversarial loss, i.e.
+        loss_d = 0.5 * E((D - 1)**2) + 0.5 * E(D(G)**2)
+        loss_g = E((D(G) - 1)**2)
+    """
+    def forward(self, real_logit, fake_logit):
+        fake_loss = tf.square(fake_logit - 1)
+        real_loss = 0.5 * tf.square(real_logit - 1) + 0.5 * tf.square(fake_logit)
+
+        return real_loss, fake_loss
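+
+
+# Note: each loss class in this module returns the pair (real_loss, fake_loss):
+# real_loss trains the discriminator, fake_loss is the generator's adversarial
+# term. A sketch of the split (hypothetical optimizers and variable lists):
+#
+#   d_op = d_opt.minimize(real_loss, var_list=discriminator_vars)
+#   g_op = g_opt.minimize(recon_loss + fake_loss, var_list=generator_vars)
+
+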
+class WGANLoss(_BaseAdvLoss):
+    """Wasserstein adversarial loss, i.e.
+        loss_d = E(D(G)) - E(D)
+        loss_g = - E(D(G))
+    """
+    def forward(self, real_logit, fake_logit):
+        fake_loss = - fake_logit
+        real_loss = fake_logit - real_logit
+        return real_loss, fake_loss
+
+
+class WGAN_GP_Loss(WGANLoss):
+    """Wasserstein adversarial loss with gradient penalty, i.e.
+        loss_d = E(D(G)) - E(D) + GP(D)
+        loss_g = - E(D(G))
+    """
+    def gradient_penalty(self, real, fake):
+        b = real.get_shape().as_list()[0]
+        ndim = len(real.get_shape().as_list())
+        shape = (b, ) + (1, ) * (ndim - 1)
+        alpha = tf.random.uniform(shape=shape)
+        interpolates = alpha * real + (1. - alpha) * fake
+
+        interp_logit = self.discriminator(interpolates)
+        # tf.gradients returns a list of tensors; the penalty is on the
+        # deviation of the per-sample gradient L2-norm from 1.
+        grads = tf.gradients(interp_logit, xs=interpolates)[0]
+        slopes = tf.sqrt(tf.reduce_sum(tf.square(grads),
+                                       axis=list(range(1, ndim))) + 1e-12)
+        gradient_penalty = tf.reduce_mean((slopes - 1.) ** 2)
+        return gradient_penalty
+
+    def __call__(self, real, fake):
+        fake_logit = self.discriminator(fake)
+        real_logit = self.discriminator(real)
+        real_loss, fake_loss = self.forward(real_logit, fake_logit)
+        grad_penalty = self.gradient_penalty(real, fake)
+
+        real_loss = tf.cast(real_loss, tf.float32)
+        fake_loss = tf.cast(fake_loss, tf.float32)
+        grad_penalty = tf.cast(grad_penalty, tf.float32)
+
+        if self.reduction == 'mean':
+            reduction_fn = tf.reduce_mean
+        elif self.reduction == 'sum':
+            reduction_fn = tf.reduce_sum
+        else:
+            raise NotImplementedError
+
+        real_loss = reduction_fn(real_loss) \
+            + grad_penalty * self.cfg.loss.adversarial.grad_penalty_weight
+        fake_loss = reduction_fn(fake_loss) * self.cfg.loss.adversarial.loss_weight
+
+        name_space.add_to_collection(name_space.DiscriminatorLoss,
+                                     'discriminator', real_loss)
+        name_space.add_to_collection(name_space.GeneratorLoss,
+                                     'adversarial', fake_loss)
+
+        return real_loss, fake_loss
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/gan.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/gan.py
new file mode 100644
index 0000000000000000000000000000000000000000..1045cde727a69a2b3f22bc3631c38d801b05d48b
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/gan.py
@@ -0,0 +1,520 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import numpy as np
+import tensorflow as tf
+
+from src.runner.common import name_space
+from src.layers import Conv2D, Conv3D, NormLayer, ActLayer, Linear
+from src.utils.klass import get_subclass_given_name
+from src.utils.logger import logger
+
+
+def get_gan(cfg):
+    """Get a GAN discriminator instance given the configuration.
+
+    Args:
+        cfg: yacs node, config for the GAN.
+
+    Returns:
+        GAN instance.
+    """
+
+    try:
+        klass = get_subclass_given_name(BaseGAN, cfg.loss.adversarial.gan_type)
+    except IndexError:
+        logger.error(f'Cannot find GAN type {cfg.loss.adversarial.gan_type}.')
+        raise ValueError()
+
+    return klass(cfg.loss.adversarial.mid_channels, cfg.loss.adversarial.norm_type)
+
+
+class BaseGAN:
+    """Base GAN class.
+ """ + def __init__(self, scope=name_space.DiscriminatorVarScope): + self.scope = scope + + def __call__(self, input): + with tf.variable_scope(self.scope, reuse=tf.AUTO_REUSE): + return self.forward(input) + + def forward(self, input): + raise NotImplementedError + + +class VanillaGAN(BaseGAN): + """A vanilla discriminator for 4D feature map. + + Args: + mid_channels: int, multiplier of the channels in the middle layers. + norm_type: str, type of the normalization layer. + scope: str, discriminator scope name. + """ + def __init__(self, mid_channels=64, norm_type='in', scope=name_space.DiscriminatorVarScope): + super().__init__(scope) + self.norm_type = norm_type + self.mid_channels = mid_channels + self.kernel_size = (3, 3) + + def conv_norm_act(self, inputs, output_channel, kernel_size, stride, norm_type, is_train, scope): + """A conv-norm-activation sequence. + """ + with tf.variable_scope(scope): + net = Conv2D(output_channel, kernel_size, stride, + use_bias=norm_type=='in', + name='conv', + use_spectral_norm=norm_type=='sn')(inputs) + net = NormLayer(norm_type, is_train=is_train)(net) + net = ActLayer(dict(type='leakyrelu', alpha=0.2), name='lrelu')(net) + return net + + def forward(self, input): + """Forward pass through the discriminator. + """ + # no batchnorm for the first layer, output size [in_h/2, in_w/2] + net = Conv2D(self.mid_channels, + kernel_size=self.kernel_size, + strides=(1, 1), + name='conv_first')(input) + net = ActLayer(dict(type='leakyrelu', alpha=0.2))(net) + + # The discriminator block part + # block 1, output size [in_h/4, in_w/4] + net = self.conv_norm_act(net, self.mid_channels, + self.kernel_size, (2, 2), self.norm_type, + True, 'disblock_1') + # block 2, output size [in_h/8, in_w/8] + net = self.conv_norm_act(net, self.mid_channels*2, + self.kernel_size, (2, 2), self.norm_type, + True, 'disblock_2') + # block 3, output size [in_h/16, in_w/16] + net = self.conv_norm_act(net, self.mid_channels*3, + self.kernel_size, (2, 2), self.norm_type, + True, 'disblock_3') + # block_4, output size [in_h/32, in_w/32] + net = self.conv_norm_act(net, self.mid_channels*4, + self.kernel_size, (2, 2), self.norm_type, + True, 'disblock_4') + + # The dense layer 1 + b, h, w, c = net.get_shape().as_list() + net = tf.reshape(net, [b, h * w * c]) + net = Linear(256, name='linear1')(net) # channel-wise dense layer + net = ActLayer(dict(type='leakyrelu', alpha=0.2))(net) + net = Linear(1, name='linear_final')(net) # channel-wise dense layer + return net + + +class VanillaGAN3D(BaseGAN): + """A vanilla discriminator for 5D feature map. + + Args: + mid_channels: int, multiplier of the channels in the middle layers. + norm_type: str, type of the normalization layer. + scope: str, discriminator scope name. + """ + def __init__(self, mid_channels=32, norm_type='in', scope=name_space.DiscriminatorVarScope): + super().__init__(scope) + self.norm_type = norm_type + self.mid_channels = mid_channels + self.kernel_size = (3, 5, 5) + + def conv_norm_act(self, inputs, output_channel, kernel_size, stride, norm_type, is_train, scope): + """A conv-norm-activation sequence. + """ + with tf.variable_scope(scope): + net = Conv3D(output_channel, kernel_size, stride, use_bias=norm_type=='in', name='conv', + use_spectral_norm=norm_type=='sn')(inputs) + net = NormLayer(norm_type, is_train=is_train)(net) + net = ActLayer(dict(type='leakyrelu', alpha=0.2), name='lrelu')(net) + return net + + def forward(self, input): + """Forward pass through the discriminator. 
+ """ + + # no batchnorm for the first layer, output size [in_h/2, in_w/2] + net = Conv3D(self.mid_channels, kernel_size=self.kernel_size, strides=(1, 1, 1), + name='conv_first')(input) + net = ActLayer(dict(type='leakyrelu', alpha=0.2))(net) + + # The discriminator block part + # block 1, output size [in_h/4, in_w/4] + net = self.conv_norm_act(net, self.mid_channels, + self.kernel_size, (1, 2, 2), self.norm_type, + True, 'disblock_1') + # block 2, output size [in_h/8, in_w/8] + net = self.conv_norm_act(net, self.mid_channels, + self.kernel_size, (1, 2, 2), self.norm_type, + True, 'disblock_2') + # block 3, output size [in_h/16, in_w/16] + net = self.conv_norm_act(net, self.mid_channels*2, + self.kernel_size, (1, 2, 2), self.norm_type, + True, 'disblock_3') + # block_4, output size [in_h/32, in_w/32] + net = self.conv_norm_act(net, self.mid_channels*2, + self.kernel_size, (1, 2, 2), self.norm_type, + True, 'disblock_4') + # block_5, output size [in_h/64, in_w/64] + net = self.conv_norm_act(net, self.mid_channels*2, + self.kernel_size, (1, 2, 2), self.norm_type, + True, 'disblock_5') + + # The dense layer 1 + b, t, h, w, c = net.get_shape().as_list() + net = tf.reshape(net, [b, t * h * w * c]) + net = Linear(256, name='linear1')(net) # channel-wise dense layer + net = ActLayer(dict(type='leakyrelu', alpha=0.2))(net) + net = Linear(1, name='linear_final')(net) # channel-wise dense layer + return net + + +class PatchGAN(BaseGAN): + """A PatchGAN discriminator for 4D feature map. + + Args: + mid_channels: int, multiplier of the channels in the middle layers. + norm_type: str, type of the normalization layer. + scope: str, discriminator scope name. + """ + def __init__(self, mid_channels=64, norm_type='in', scope=name_space.DiscriminatorVarScope): + super().__init__(scope) + self.norm_type = norm_type + self.mid_channels = mid_channels + self.kernel_size = (3, 3) + + def conv_norm_act(self, inputs, output_channel, kernel_size, stride, norm_type, is_train, scope): + """A conv-norm-activation sequence. + """ + with tf.variable_scope(scope): + net = Conv2D(output_channel, kernel_size, stride, use_bias=norm_type=='in', name='conv', + use_spectral_norm=norm_type=='sn')(inputs) + net = NormLayer(norm_type, is_train=is_train)(net) + net = ActLayer(dict(type='leakyrelu', alpha=0.2), name='lrelu')(net) + return net + + def forward(self, input): + """Forward pass through the discriminator. 
+ """ + + # no batchnorm for the first layer, output size [in_h/2, in_w/2] + net = Conv2D(self.mid_channels, kernel_size=self.kernel_size, + strides=(1, 1), name='conv_first')(input) + net = ActLayer(dict(type='leakyrelu', alpha=0.2))(net) + + # The discriminator block part + # block 1, output size [in_h/4, in_w/4] + net = self.conv_norm_act(net, self.mid_channels, + self.kernel_size, (2, 2), self.norm_type, + True, 'disblock_1') + # block 2, output size [in_h/8, in_w/8] + net = self.conv_norm_act(net, self.mid_channels*2, + self.kernel_size, (2, 2), self.norm_type, + True, 'disblock_2') + # block 3, output size [in_h/16, in_w/16] + net = self.conv_norm_act(net, self.mid_channels*3, + self.kernel_size, (2, 2), self.norm_type, + True, 'disblock_3') + # block_4, output size [in_h/32, in_w/32] + net = self.conv_norm_act(net, self.mid_channels*4, + self.kernel_size, (2, 2), self.norm_type, + True, 'disblock_4') + + net = self.conv_norm_act(net, self.mid_channels*4, + self.kernel_size, (1, 1), self.norm_type, + True, 'disblock_5') + net = Conv2D(self.mid_channels, kernel_size=(3, 3), strides=(1, 1), name='conv_last')(net) + return net + + +class PatchGAN3D(BaseGAN): + """A PatchGAN discriminator for 5D feature map. + + Args: + mid_channels: int, multiplier of the channels in the middle layers. + norm_type: str, type of the normalization layer. + scope: str, discriminator scope name. + """ + def __init__(self, mid_channels=64, norm_type='in', scope=name_space.DiscriminatorVarScope): + super().__init__(scope) + self.norm_type = norm_type + self.mid_channels = mid_channels + self.kernel_size = (3, 5, 5) + + def conv_norm_act(self, inputs, output_channel, kernel_size, stride, norm_type, is_train, scope): + """ + A conv-norm-activation sequence. + """ + with tf.variable_scope(scope): + net = Conv3D(output_channel, kernel_size, stride, use_bias=norm_type=='in', name='conv', + use_spectral_norm=norm_type=='sn')(inputs) + net = NormLayer(norm_type, is_train=is_train)(net) + net = ActLayer(dict(type='leakyrelu', alpha=0.2), name='lrelu')(net) + return net + + def forward(self, input): + """ + Forward pass through the discriminator. + """ + # no batchnorm for the first layer, output size [in_h/2, in_w/2] + net = Conv3D(self.mid_channels, kernel_size=self.kernel_size, + strides=(1, 1, 1), name='conv_first')(input) + net = ActLayer(dict(type='leakyrelu', alpha=0.2))(net) + + # The discriminator block part + # block 1, output size [in_h/4, in_w/4] + net = self.conv_norm_act(net, self.mid_channels*2, + self.kernel_size, (1, 2, 2), self.norm_type, + True, 'disblock_1') + # block 2, output size [in_h/8, in_w/8] + net = self.conv_norm_act(net, self.mid_channels*4, + self.kernel_size, (1, 2, 2), self.norm_type, + True, 'disblock_3') + # block 3, output size [in_h/16, in_w/16] + net = self.conv_norm_act(net, self.mid_channels*4, + self.kernel_size, (1, 2, 2), self.norm_type, + True, 'disblock_5') + # block_4, output size [in_h/32, in_w/32] + net = self.conv_norm_act(net, self.mid_channels*4, + self.kernel_size, (1, 2, 2), self.norm_type, + True, 'disblock_7') + + net = Conv3D(self.mid_channels, kernel_size=self.kernel_size, + strides=(1, 1, 1), name='conv_last')(net) + # net = tf.reduce_mean(net, axis=1, keepdims=True) + return net + + +class BigGAN(BaseGAN): + """A BigGAN discriminator for 4D feature map. + + Args: + mid_channels: int, multiplier of the channels in the middle layers. + norm_type: str, type of the normalization layer. + scope: str, discriminator scope name. 
+ """ + def __init__(self, mid_channels=64, norm_type='none', scope=name_space.DiscriminatorVarScope): + """ + Initialization function of the discriminator. + + + """ + super().__init__(scope) + self.ch = mid_channels + self.sn = False + self.layer_num = 4 + + def hw_flatten(self, x): + return tf.reshape(x, shape=[x.shape[0], -1, x.shape[-1]]) + + def down_sample(self, x): + return tf.layers.average_pooling2d(x, pool_size=2, strides=2, padding='SAME') + + def init_down_resblock(self, x_init, channels, use_bias=True, sn=False, scope='resblock'): + with tf.variable_scope(scope): + with tf.variable_scope('res1'): + x = Conv2D(channels, kernel_size=(3, 3), use_bias=use_bias, use_spectral_norm=sn)(x_init) + x = ActLayer(dict(type='leakyrelu', alpha=0.2))(x) + + with tf.variable_scope('res2'): + x = Conv2D(channels, kernel_size=(3, 3), use_bias=use_bias, use_spectral_norm=sn)(x) + x = self.down_sample(x) + + with tf.variable_scope('shortcut'): + x_init = self.down_sample(x_init) + x_init = Conv2D(channels, kernel_size=(1, 1), use_bias=use_bias, use_spectral_norm=sn)(x_init) + + return x + x_init + + def down_resblock(self, x_init, channels, to_down=True, use_bias=True, sn=False, scope='resblock'): + with tf.variable_scope(scope): + init_channel = x_init.shape.as_list()[-1] + with tf.variable_scope('res1'): + x = ActLayer(dict(type='leakyrelu', alpha=0.2))(x_init) + x = Conv2D(channels, kernel_size=(3, 3), use_bias=use_bias, use_spectral_norm=sn)(x) + + with tf.variable_scope('res2'): + x = ActLayer(dict(type='leakyrelu', alpha=0.2))(x) + x = Conv2D(channels, kernel_size=(3, 3), use_bias=use_bias, use_spectral_norm=sn)(x) + if to_down: + x = self.down_sample(x) + + if to_down or init_channel != channels: + with tf.variable_scope('shortcut'): + x_init = Conv2D(channels, kernel_size=(1, 1), use_bias=use_bias, use_spectral_norm=sn)(x_init) + if to_down: + x_init = self.down_sample(x_init) + + return x + x_init + + def google_attention(self, x, channels, scope='attention'): + with tf.variable_scope(scope): + batch_size, height, width, num_channels = x.get_shape().as_list() + f = Conv2D(channels // 8, kernel_size=(1, 1), use_spectral_norm=self.sn, name='f')(x) # [bs, h, w, c'] + f = tf.layers.max_pooling2d(f, pool_size=2, strides=2, padding='SAME') + g = Conv2D(channels // 8, kernel_size=(1, 1), use_spectral_norm=self.sn, name='g')(x) # [bs, h, w, c'] + h = Conv2D(channels // 2, kernel_size=(1, 1), use_spectral_norm=self.sn, name='h')(x) # [bs, h, w, c] + h = tf.layers.max_pooling2d(h, pool_size=2, strides=2, padding='SAME') + + # N = h * w + s = tf.matmul(self.hw_flatten(g), self.hw_flatten(f), transpose_b=True) # # [bs, N, N] + + beta = tf.nn.softmax(s) # attention map + + o = tf.matmul(beta, self.hw_flatten(h)) # [bs, N, C] + gamma = tf.get_variable("gamma", [1], initializer=tf.constant_initializer(0.0)) + + o = tf.reshape(o, shape=[batch_size, height, width, num_channels // 2]) # [bs, h, w, C] + o = Conv2D(channels, kernel_size=(1, 1), use_spectral_norm=self.sn, name='c')(o) # [bs, h, w, c] + x = gamma * o + x + + return x + + def forward(self, x): + """Forward pass through the discriminator. 
+ """ + ch = self.ch + x = self.init_down_resblock(x, channels=ch, sn=self.sn, scope='resblock_0') + x = self.down_resblock(x, channels=ch * 2, sn=self.sn, scope='resblock_1') + + x = self.google_attention(x, channels=ch * 2, scope='self_attention') + + ch = ch * 2 + for i in range(self.layer_num): + if i == self.layer_num - 1: + x = self.down_resblock(x, channels=ch, sn=self.sn, to_down=False, scope='resblock_' + str(i+2)) + else: + x = self.down_resblock(x, channels=ch * 2, sn=self.sn, scope='resblock_' + str(i+2)) + ch = ch * 2 + + x = ActLayer(dict(type='leakyrelu', alpha=0.2))(x) + x = tf.reduce_sum(x, axis=[1, 2]) + x = Linear(1, name='linear')(x) + return x + +class MSPatchGAN(BaseGAN): + """ + A multi-scale PatchGAN discriminator for 4D feature map. + """ + def __init__(self, nf=64, norm_type='in', scope=name_space.DiscriminatorVarScope): + super().__init__(scope) + self.nf = nf + + def patchGAN(self, x, n_layers, d_layers): + x = Conv2D(self.nf, kernel_size=(4, 4), strides=(2, 2), name='conv_first')(x) + x = ActLayer(dict(type='leakyrelu', alpha=0.2))(x) + + for n in range(1, n_layers): + x = Conv2D(self.nf * min(2 ** n, 8), kernel_size=(4, 4), strides=(1, 1), name='conv' + str(n))(x) + x = ActLayer(dict(type='leakyrelu', alpha=0.2))(x) + if n < d_layers: + x = Conv2D(self.nf * min(2 ** n, 8), kernel_size=(4, 4), strides=(2, 2), name='conv' + str(n) + '_down')(x) + x = Conv2D(1, kernel_size=(4, 4), strides=(1, 1), name='conv_last')(x) + return x + + def forward(self, x): + n, h, w, c = x.get_shape().as_list() + x_big = tf.image.resize_bilinear(x, size=(int(2*h), int(2*w)), align_corners=False, half_pixel_centers=False) + x_mid = x + x_sml = tf.image.resize_bilinear(x, size=(int(0.5*h), int(0.5*w)), align_corners=False, half_pixel_centers=False) + with tf.variable_scope('big', reuse=tf.AUTO_REUSE): + out_big = self.patchGAN(x_big, n_layers=3, d_layers=3) + with tf.variable_scope('mid', reuse=tf.AUTO_REUSE): + out_mid = self.patchGAN(x_mid, n_layers=3, d_layers=2) + with tf.variable_scope('sml', reuse=tf.AUTO_REUSE): + out_sml = self.patchGAN(x_sml, n_layers=3, d_layers=1) + x = tf.concat([out_big, out_mid, out_sml], axis=-1) + return x + + +class MSPatchBigGAN(BaseGAN): + """ + A multi-scale PatchBigGAN discriminator for 4D feature map. 
+ """ + def __init__(self, nf=16, norm_type='in', scope=name_space.DiscriminatorVarScope): + super().__init__(scope) + self.nf = nf + self.sn = False + + def hw_flatten(self, x): + return tf.reshape(x, shape=[x.shape[0], -1, x.shape[-1]]) + + def down_sample(self, x): + return tf.layers.average_pooling2d(x, pool_size=2, strides=2, padding='SAME') + + def init_down_resblock(self, x_init, channels, use_bias=True, sn=False, scope='resblock'): + with tf.variable_scope(scope): + with tf.variable_scope('res1'): + x = Conv2D(channels, kernel_size=(3, 3), use_bias=use_bias, use_spectral_norm=sn)(x_init) + x = ActLayer(dict(type='leakyrelu', alpha=0.2))(x) + + with tf.variable_scope('res2'): + x = Conv2D(channels, kernel_size=(3, 3), use_bias=use_bias, use_spectral_norm=sn)(x) + x = self.down_sample(x) + + with tf.variable_scope('shortcut'): + x_init = self.down_sample(x_init) + x_init = Conv2D(channels, kernel_size=(1, 1), use_bias=use_bias, use_spectral_norm=sn)(x_init) + + return x + x_init + + def down_resblock(self, x_init, channels, to_down=True, use_bias=True, sn=False, scope='resblock'): + with tf.variable_scope(scope): + init_channel = x_init.shape.as_list()[-1] + with tf.variable_scope('res1'): + x = ActLayer(dict(type='leakyrelu', alpha=0.2))(x_init) + x = Conv2D(channels, kernel_size=(3, 3), use_bias=use_bias, use_spectral_norm=sn)(x) + + with tf.variable_scope('res2'): + x = ActLayer(dict(type='leakyrelu', alpha=0.2))(x) + x = Conv2D(channels, kernel_size=(3, 3), use_bias=use_bias, use_spectral_norm=sn)(x) + if to_down: + x = self.down_sample(x) + + if to_down or init_channel != channels: + with tf.variable_scope('shortcut'): + x_init = Conv2D(channels, kernel_size=(1, 1), use_bias=use_bias, use_spectral_norm=sn)(x_init) + if to_down: + x_init = self.down_sample(x_init) + + return x + x_init + + def patchGAN(self, x, n_layers, d_layers): + x = self.init_down_resblock(x, channels=self.nf, sn=self.sn, scope='resblock_0') + x = self.down_resblock(x, channels=self.nf * 2, sn=self.sn, scope='resblock_1') + for n in range(n_layers): + if n < d_layers: + x = self.down_resblock(x, channels=self.nf * min(2 ** n, 8), sn=self.sn, scope='resblock_' + str(n + 2)) + else: + x = self.down_resblock(x, channels=self.nf * min(2 ** n, 8), sn=self.sn, to_down=False, scope='resblock_' + str(n + 2)) + x = ActLayer(dict(type='leakyrelu', alpha=0.2))(x) + x = Conv2D(self.nf * 8, kernel_size=(3, 3), strides=(1, 1), use_spectral_norm=self.sn, name='conv')(x) + x = ActLayer(dict(type='leakyrelu', alpha=0.2))(x) + x = Conv2D(1, strides=(1, 1), name='conv_last')(x) + return x + + def forward(self, x): + n, h, w, c = x.get_shape().as_list() + x_big = tf.image.resize_bilinear(x, size=(int(2*h), int(2*w)), align_corners=False, half_pixel_centers=False) + x_mid = x + x_sml = tf.image.resize_bilinear(x, size=(int(0.5*h), int(0.5*w)), align_corners=False, half_pixel_centers=False) + with tf.variable_scope('big', reuse=tf.AUTO_REUSE): + out_big = self.patchGAN(x_big, n_layers=3, d_layers=3) + with tf.variable_scope('mid', reuse=tf.AUTO_REUSE): + out_mid = self.patchGAN(x_mid, n_layers=3, d_layers=2) + with tf.variable_scope('sml', reuse=tf.AUTO_REUSE): + out_sml = self.patchGAN(x_sml, n_layers=3, d_layers=1) + x = tf.concat([out_big, out_mid, out_sml], axis=-1) + return x diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/perceptual.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/perceptual.py new file 
mode 100644 index 0000000000000000000000000000000000000000..4e8f4a27bac33d526d307afa81f0d33faefe8738 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/perceptual.py @@ -0,0 +1,181 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import numpy as np +import tensorflow as tf + +from src.runner.common import name_space +from src.utils.logger import logger + +from .vgg import VGG19_slim + + +def auto_download_pretrained(module='vgg', ckpt_path='./pretrained_modules'): + """Automatically download pretrained models. + + Args: + module: str, perceptual model name. + ckpt_path: str, where to save the downloaded ckpt file. + """ + import subprocess + if module in ['vgg', 'vgg_19']: + cmd0 = "wget http://download.tensorflow.org/models/vgg_19_2016_08_28.tar.gz -O " + \ + os.path.join(ckpt_path, "vgg19.tar.gz") + cmd0 += ";tar -xvf " + os.path.join(ckpt_path, "vgg19.tar.gz") + " -C " + ckpt_path + \ + "; rm " + os.path.join(ckpt_path, "vgg19.tar.gz") + else: + raise NotImplementedError + + subprocess.call(cmd0, shell=True) + + +def load_perceptual_module(sess, module_cfg): + """Load perceptual module to the corresponding scope. + + Args: + sess: tf.Session instance. + module_cfg: yacs node, perceptual configuration. + """ + ckpt_dir = module_cfg.get('ckpt_dir', './pretrained_modules') + module = module_cfg.get('module', 'vgg_19') + if not os.path.exists(ckpt_dir): + os.makedirs(ckpt_dir, exist_ok=True) + + ckpt_file = os.path.join(ckpt_dir, f'{module}.ckpt') + if not os.path.exists(ckpt_file): + logger.info('No pretrained module. Downloading ...') + auto_download_pretrained(module, ckpt_dir) + + try: + logger.info('Loading pretrained perceptual module ...') + var_list = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=module) + restore = tf.train.Saver(var_list) + restore.restore(sess, ckpt_file) + logger.info('Load pretrained perceptual module success.') + except Exception as e: + logger.error('Failed to load pretrained perceptual model.') + logger.info(e) + + +def build_perceptual_loss(generated, targets, module_cfg): + """Calculate the perceptual loss given the configuration. + + Args: + generated: tensor, the generated results. + targets: tensor, the groundtruth. + module_cfg: yacs node, perceptual configuration. + + Returns: + scalar tensor, the perceptual loss. + """ + module = module_cfg.get('module', 'vgg_19') + + # Convert to 4D shape. + gen_shape = generated.get_shape().as_list() + if len(gen_shape) == 5: + generated = tf.reshape(generated, shape=(-1, *gen_shape[2:])) + + tar_shape = targets.get_shape().as_list() + if len(tar_shape) == 5: + targets = tf.reshape(targets, shape=(-1, *tar_shape[2:])) + + # Get the intermediate resuls given the layer names. 
+ if module == 'vgg_19': + with tf.name_scope('vgg_19'): + default_layer_labels = ['vgg_19/conv2/conv2_2', + 'vgg_19/conv3/conv3_4', + 'vgg_19/conv4/conv4_4', + 'vgg_19/conv5/conv5_4'] + default_layer_weights = [1., 1., 1., 1.] + layer_labels = module_cfg.get('layers', default_layer_labels) + layer_weights = module_cfg.get('layer_weights', default_layer_weights) + gen_fm = VGG19_slim(generated, reuse=tf.AUTO_REUSE, deep_list=layer_labels) + target_fm = VGG19_slim(targets, reuse=tf.AUTO_REUSE, deep_list=layer_labels) + else: + raise NotImplementedError + + # Compute the distance between the generated and groundtruth features. + with tf.variable_scope('perceptual_loss'): + loss = 0 + layer_n = len(layer_labels) + + for layer_i in range(layer_n): + cur_diff = tf.reduce_sum(gen_fm[layer_labels[layer_i]] * target_fm[layer_labels[layer_i]], axis=[3]) + # cosine similarity, -1~1, 1 best + cur_diff = 1.0 - tf.reduce_mean(cur_diff) # 0 ~ 2, 0 best + scaled_layer_loss = layer_weights[layer_i] * cur_diff + loss += scaled_layer_loss + + return loss + + +def build_content_style_loss(generated, targets, module_cfg): + """Calculate the perceptual style loss given the configuration. + + Args: + generated: tensor, the generated results. + targets: tensor, the groundtruth. + module_cfg: yacs node, perceptual configuration. + + Returns: + scalar tensor, the style loss. + """ + module = module_cfg.get('module', 'vgg_19') + + gen_shape = generated.get_shape().as_list() + if len(gen_shape) == 5: + generated = tf.reshape(generated, shape=(-1, *gen_shape[2:])) + + tar_shape = targets.get_shape().as_list() + if len(tar_shape) == 5: + targets = tf.reshape(targets, shape=(-1, *tar_shape[2:])) + + if module == 'vgg_19': + with tf.name_scope('vgg_19'): + default_layer_labels = ['vgg_19/conv2/conv2_2', + 'vgg_19/conv3/conv3_4', + 'vgg_19/conv4/conv4_4', + 'vgg_19/conv5/conv5_4'] + default_layer_weights = [1., 1., 1., 1.] 
+            # Note: uses the same 'layer_weights' key as build_perceptual_loss.
+            layer_labels = module_cfg.get('layers', default_layer_labels)
+            layer_weights = module_cfg.get('layer_weights', default_layer_weights)
+            gen_fm = VGG19_slim(generated, reuse=tf.AUTO_REUSE, deep_list=layer_labels, norm_flag=False)
+            target_fm = VGG19_slim(targets, reuse=tf.AUTO_REUSE, deep_list=layer_labels, norm_flag=False)
+    else:
+        raise NotImplementedError
+
+    with tf.variable_scope('perceptual_loss'):
+        loss = 0
+        layer_n = len(layer_labels)
+        content_loss = 0
+        style_loss = 0
+        layer_content_weights = [0.008, 0.001, 0.03125, 40.0]
+        layer_style_weights = [0.002, 0.000008, 0.03125, 10000.0]
+        for layer_i in range(layer_n):
+            f1 = gen_fm[layer_labels[layer_i]]
+            f2 = target_fm[layer_labels[layer_i]]
+            content_loss += layer_content_weights[layer_i] * tf.reduce_mean(tf.square(f1 / 10.0 - f2 / 10.0))
+            if layer_i > 2:
+                b, h, w, c = f1.shape
+                f1T = tf.reshape(f1, (b, h * w, c))
+                f2T = tf.reshape(f2, (b, h * w, c))
+                f1G = tf.matmul(f1T, f1T, transpose_a=True)
+                f2G = tf.matmul(f2T, f2T, transpose_a=True)
+                norm = tf.cast(100.0 * h * w, tf.float32)
+                style_loss += layer_style_weights[layer_i] * tf.reduce_mean(tf.square(f1G / norm - f2G / norm))
+        loss = content_loss * 0.2 + style_loss * 0.08
+
+    return loss
\ No newline at end of file
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/vgg.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/vgg.py
new file mode 100644
index 0000000000000000000000000000000000000000..536500a7a978c4dada5445a00590148bad5c2d44
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/losses/modules/vgg.py
@@ -0,0 +1,88 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import tensorflow as tf
+import tensorflow.contrib.slim as slim
+
+VGG_MEAN = [123.68, 116.78, 103.94]
+
+
+def vgg_19(inputs,
+           scope='vgg_19',
+           reuse=False):
+    """VGG19 model.
+    Borrowed from https://github.com/thunil/TecoGAN/blob/master/lib/ops.py#L287
+    Changed from the Oxford Net VGG 19-Layers version E example.
+    Note: only offers features from conv1 through relu5_4; the
+    classification part is removed.
+
+    Args:
+        inputs: a tensor of size [batch_size, height, width, channels].
+        scope: optional scope for the variables.
+        reuse: boolean, whether to reuse the scope variables.
+
+    Returns:
+        the last feature op and the end_points dict.
+    """
+    with tf.variable_scope(scope, 'vgg_19', [inputs], reuse=reuse) as sc:
+        end_points_collection = sc.name + '_end_points'
+        # Collect outputs for conv2d, fully_connected and max_pool2d.
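+        # The collection keys produced under this scope follow the variable
+        # scope naming, e.g. 'vgg_19/conv2/conv2_2' or 'vgg_19/conv5/conv5_4',
+        # which is why the loss modules above refer to layers by these fully
+        # qualified names.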
+ with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d], + outputs_collections=end_points_collection): + net = slim.repeat(inputs, 2, slim.conv2d, 64, 3, scope='conv1', reuse=reuse) + net = slim.max_pool2d(net, [2, 2], scope='pool1') + + net = slim.repeat(net, 2, slim.conv2d, 128, 3, scope='conv2',reuse=reuse) + net = slim.max_pool2d(net, [2, 2], scope='pool2') + + net = slim.repeat(net, 4, slim.conv2d, 256, 3, scope='conv3', reuse=reuse) + net = slim.max_pool2d(net, [2, 2], scope='pool3') + + net = slim.repeat(net, 4, slim.conv2d, 512, 3, scope='conv4',reuse=reuse) + net = slim.max_pool2d(net, [2, 2], scope='pool4') + + net = slim.repeat(net, 4, slim.conv2d, 512, 3, scope='conv5',reuse=reuse) + net = slim.max_pool2d(net, [2, 2], scope='pool5') + + # Convert end_points_collection into a end_point dict. + end_points = slim.utils.convert_collection_to_dict(end_points_collection) + + return net, end_points + + +def VGG19_slim(input_fm, reuse, deep_list=None, norm_flag=True): + """Get the VGG19 features given the fm name. + Borrowed from https://github.com/thunil/TecoGAN/blob/master/lib/Teco.py#L5 + + Args: + input_fm: tensor, input feature map. + reuse: boolean, whether to reuse the scope variables. + deep_list: list[str], which features are to extract and used for calculation. + norm_flag: boolean, whether to normalize the feature map with Frobenius-norm. + """ + # deep_list, define the feature to extract + input_img_ab = input_fm * 255.0 - tf.constant(VGG_MEAN) + # model: + _, output = vgg_19(input_img_ab, reuse=reuse) + # feature maps: + results = {} + with tf.name_scope('vgg_norm'): + for key in output: + if (deep_list is None) or (key in deep_list): + orig_deep_feature = tf.cast(output[key], tf.float32) + if norm_flag: + orig_len = tf.sqrt(tf.reduce_sum(tf.square(orig_deep_feature), axis=[3], keepdims=True)+1e-12) + results[key] = orig_deep_feature / orig_len + else: + results[key] = orig_deep_feature + return results diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/main.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/main.py new file mode 100644 index 0000000000000000000000000000000000000000..96bb0fc6d3b4d1fb271abbaa69e9494fd3ce8af6 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/main.py @@ -0,0 +1,134 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import os +import sys +import inspect +import importlib +from yacs.config import CfgNode + +import src.runner.npu_pkgs + +from src.utils.logger import logger as logger +from src.utils.defaults import cfg +from src.utils.world import world +from src.utils.utils import convert_dict_to_list +from src.networks import build_network +from src.engine import build_engine +from src.dataloaders import build_dataloader +from src.utils.constant import VALID_MODE + + +def get_args(): + """Get external arguments. 
+
+    Returns:
+        Namespace: the arguments to the whole program.
+    """
+    parser = argparse.ArgumentParser(description="HiSi Ascend Video Processing Toolkit")
+    parser.add_argument(
+        "--config-file",
+        default="",
+        metavar="FILE",
+        help="path to config file",
+        type=str,
+    )
+    parser.add_argument(
+        "opts",
+        help="Modify config options using the command-line",
+        default=None,
+        nargs=argparse.REMAINDER,
+    )
+
+    args = parser.parse_args()
+
+    return args
+
+
+def dump_cfg(_cfg):
+    """Dump config info to log file and stdout.
+
+    Args:
+        _cfg: yacs node, the configuration.
+    """
+    cfg_str = _cfg.dump()
+    if not os.path.exists(_cfg.train.output_dir):
+        os.makedirs(_cfg.train.output_dir, exist_ok=True)
+    dump_file = os.path.join(_cfg.train.output_dir, f"configure_{_cfg.mode}.yaml")
+    with open(dump_file, 'w') as f:
+        f.write(cfg_str)
+    logger.info(_cfg)
+
+
+def processing(cfg):
+    """Processing function.
+
+    This function supports the training, inference and freeze engines.
+
+    Args:
+        cfg: yacs node, global configuration.
+    """
+    world.initialize(device_type=cfg.env.device)
+
+    if not cfg.log_file.startswith('/'):
+        log_file = os.path.join(cfg.train.output_dir, cfg.log_file)
+    else:
+        log_file = cfg.log_file
+
+    # Silence all nodes other than the root node.
+    if world.is_root_rank:
+        logger.add_log_file(log_file)
+    else:
+        logger.silence = True
+
+    if world.is_root_rank:
+        dump_cfg(cfg)
+
+    # build networks
+    network = build_network(cfg)
+
+    # build dataloader
+    dataloader = build_dataloader(cfg)
+
+    # get engine
+    engine_type = build_engine(cfg)
+    engine = engine_type(dataloader, network, cfg)
+    engine.run()
+
+
+def main():
+    """Main entry function.
+    """
+    args = get_args()
+    # Support either a python config file defining a dict named ``cfg``,
+    # or a yaml file.
+    if args.config_file.endswith('.py'):
+        cfg_vars = {}
+        exec(open(args.config_file).read(), cfg_vars)
+        cfg.merge_from_other_cfg(CfgNode(cfg_vars['cfg']))
+    elif args.config_file.endswith('.yaml'):
+        cfg.merge_from_file(args.config_file)
+    else:
+        raise ValueError(f'Unsupported config file: {args.config_file}')
+
+    cfg.merge_from_list(args.opts)
+    cfg.freeze()
+
+    assert cfg.mode in VALID_MODE
+
+    processing(cfg)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/metrics/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/metrics/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/metrics/image_similarity.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/metrics/image_similarity.py
new file mode 100644
index 0000000000000000000000000000000000000000..fcb2608916c2bae58fd2e53dbdc628a69858de44
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/metrics/image_similarity.py
@@ -0,0 +1,54 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. + +import tensorflow as tf + + +def ssim(a, b, max_val): + """Calculate SSIM of two images. + + Args: + a: tensor, 4D image tensor. + b: tensor, has the same shape with a. + max_val: scalar, the max value of tensor a and b. + """ + return tf.image.ssim(a, b, max_val, filter_size=11, + filter_sigma=1.5, k1=0.01, k2=0.03) + + +def ssim_multiscale(a, b, max_val): + """Calculate multi-scale SSIM of two images. + + Args: + a: tensor, 4D image tensor. + b: tensor, has the same shape with a. + max_val: scalar, the max value of tensor a and b. + """ + return tf.image.ssim_multiscale( + a, b, max_val, filter_size=11, + filter_sigma=1.5, k1=0.01, k2=0.03 + ) + + +def psnr(a, b, max_val): + """Calculate PSNR of two images. + + Args: + a: tensor, 4D image tensor. + b: tensor, has the same shape with a. + max_val: scalar, the max value of tensor a and b. + """ + return tf.image.psnr(a, b, max_val) + + diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b3aa40061e345225b58a78fd7aedd85388405485 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/__init__.py @@ -0,0 +1,18 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .conv_module import * +from .edvr_submodules import * +from .res_block import * +from .spade import * diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/conv_module.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/conv_module.py new file mode 100644 index 0000000000000000000000000000000000000000..3f33d3f53c02f54a39e95e530c8efe2a4017987f --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/conv_module.py @@ -0,0 +1,210 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import tensorflow as tf + +from src.layers import Conv2D, Conv3D, ActLayer, NormLayer, Conv2DTranspose + + +__all__ = ['Conv2DNormAct', 'Conv3DNormAct', 'Conv2DTransposeNormAct'] + + +class Conv2DNormAct: + """A base module consists of conv2d followed by norm and activation. 
+
+    Both normalization and activation layers are optional.
+
+    Args:
+        num_filters: int, number of filters.
+        kernel_size: int or list[int], the kernel size.
+        strides: int or list[int], the stride size.
+        dilations: int or list[int], the kernel dilations.
+        padding: str or list[int]. If a list of padding sizes is given, the
+            padding mode will be 'valid'. One can also pass in a str such as
+            'same' or 'valid'.
+        padding_mode: str, indicating how to pad, i.e., REFLECT or CONSTANT.
+
+        use_bias: boolean, whether to use bias. Default True.
+        use_spectral_norm: boolean, whether to use spectral normalization.
+            Default False.
+        trainable: boolean, whether in training phase. If True, the buffers
+            will be added to UPDATE_OPS and updated.
+        act_cfg: dict, specify the activation `type` and other parameters.
+        norm_cfg: dict, specify the normalization `type` and other parameters.
+        name: str, variable scope name.
+    """
+    def __init__(self, num_filters, kernel_size=(3, 3), strides=(1, 1),
+                 dilations=(1, 1), padding='same', padding_mode='CONSTANT',
+                 use_bias=True, use_spectral_norm=False, trainable=True,
+                 act_cfg=None, norm_cfg=None,
+                 name='Conv2DModule'):
+        self.num_filters = num_filters
+        self.kernel_size = kernel_size
+        self.strides = strides
+        self.dilations = dilations
+
+        self.padding = padding
+        self.padding_mode = padding_mode
+
+        self.use_bias = use_bias
+        self.use_spectral_norm = use_spectral_norm
+        self.trainable = trainable
+
+        self.act_cfg = act_cfg
+        self.norm_cfg = norm_cfg
+
+        self.name = name
+
+    def __call__(self, x):
+        with tf.variable_scope(self.name):
+            # The bias is redundant when the conv is followed by a norm layer.
+            use_bias = self.use_bias if self.norm_cfg is None else False
+            x = Conv2D(
+                self.num_filters, kernel_size=self.kernel_size, strides=self.strides,
+                dilations=self.dilations, use_bias=use_bias,
+                use_spectral_norm=self.use_spectral_norm,
+                padding=self.padding, padding_mode=self.padding_mode,
+                name='Conv2D')(x)
+
+            if self.norm_cfg is not None:
+                x = NormLayer(self.norm_cfg, is_train=self.trainable, name='Norm')(x)
+
+            if self.act_cfg is not None:
+                x = ActLayer(self.act_cfg)(x)
+            return x
+
+
+class Conv3DNormAct:
+    """A base module consisting of conv3d followed by norm and activation.
+
+    Both normalization and activation layers are optional.
+
+    Args:
+        num_filters: int, number of filters.
+        kernel_size: int or list[int], the kernel size.
+        strides: int or list[int], the stride size.
+        dilations: int or list[int], the kernel dilations.
+        padding: str or list[int]. If a list of padding sizes is given, the
+            padding mode will be 'valid'. One can also pass in a str such as
+            'same' or 'valid'.
+        padding_mode: str, indicating how to pad, i.e., REFLECT or CONSTANT.
+
+        use_bias: boolean, whether to use bias. Default True.
+        use_spectral_norm: boolean, whether to use spectral normalization.
+            Default False.
+        trainable: boolean, whether in training phase. If True, the buffers
+            will be added to UPDATE_OPS and updated.
+        act_cfg: dict, specify the activation `type` and other parameters.
+        norm_cfg: dict, specify the normalization `type` and other parameters.
+        name: str, variable scope name.
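+
+    Example:
+        A minimal sketch (``Conv3D``/``NormLayer``/``ActLayer`` semantics are
+        assumed from ``src.layers``; the input is a 5D video tensor):
+
+            conv = Conv3DNormAct(64,
+                                 act_cfg=dict(type='LeakyRelu', alpha=0.1),
+                                 norm_cfg=None, name='demo_conv3d')
+            y = conv(x)  # x: [N, D, H, W, C]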
+ """ + def __init__(self, num_filters, kernel_size=(3, 3, 3), strides=(1, 1, 1), + dilations=(1, 1, 1), padding='same', padding_mode='CONSTANT', + use_bias=True, use_spectral_norm=False, trainable=True, + act_cfg=None, norm_cfg=None, + name='Conv3DModule'): + self.num_filters = num_filters + self.kernel_size = kernel_size + self.strides = strides + self.dilations = dilations + + self.padding = padding + self.padding_mode = padding_mode + + self.use_bias = use_bias + self.use_spectral_norm = use_spectral_norm + self.trainable = trainable + + self.act_cfg = act_cfg + self.norm_cfg = norm_cfg + + self.name = name + + def __call__(self, x): + with tf.variable_scope(self.name): + use_bias = self.use_bias if self.norm_cfg is None else False + x = Conv3D(self.num_filters, kernel_size=self.kernel_size, + strides=self.strides, dilations=self.dilations, + padding=self.padding, padding_mode=self.padding_mode, + use_bias=self.use_bias, use_spectral_norm=self.use_spectral_nor, + name='Conv3D')(x) + + if self.norm_cfg is not None: + x = NormLayer(self.norm_cfg, is_train=self.trainable, name='Norm')(x) + + if self.act_cfg is not None: + x = ActLayer(self.act_cfg)(x) + return x + + +class Conv2DTransposeNormAct: + """A base module consists of conv2d transpose followed by norm and activation. + + Both normalization and activation layers are optional. + + Args: + num_filters: int, number of filters. + kernel_size: int or list[int], the kernel size. + strides: int or list[int], the stride size. + dilations: int or list[int], the kernel dilations. + padding: str or list[int]. If is given list of padding size, the + padding will be 'valid'. One can also pass in str such as + ('same', 'valid'). + padding_mode: str, indicating how to pad, i.e., REFLECT or CONSTANT. + + use_bias: boolean, whether to use bias. Default True. + use_spectral_norm: boolean, whether to use specatral normalization. + Default False. + trainable: boolean, whether in training phase. If True, the buffers will + be add to UPDATE_OPS and update. + act_cfg: dict, specify the activation `type` and other parameters. + norm_cfg: dict, specify the normalization `type` and other parameters. + name: str, variable scope name. 
+ """ + def __init__(self, num_filters, kernel_size=(3, 3), strides=(1, 1), + dilations=(1, 1), padding='same', padding_mode='CONSTANT', + use_bias=True, use_spectral_norm=False, trainable=True, + act_cfg=None, norm_cfg=None, + name='Conv2DTransposeModule'): + self.num_filters = num_filters + self.kernel_size = kernel_size + self.strides = strides + self.dilations = dilations + + self.padding = padding + self.padding_mode = padding_mode + + self.use_bias = use_bias + self.use_spectral_norm = use_spectral_norm + self.trainable = trainable + + self.act_cfg = act_cfg + self.norm_cfg = norm_cfg + + self.name = name + + def __call__(self, x): + with tf.variable_scope(self.name): + use_bias = self.use_bias if self.norm_cfg is None else False + x = Conv2DTranspose( + self.num_filters, kernel_size=self.kernel_size, strides=self.strides, + dilations=self.dilations, use_bias=self.use_bias, + use_spectral_norm=self.use_spectral_norm, + padding=self.padding, padding_mode=self.padding_mode, + name='Conv2DTranspose')(x) + + if self.norm_cfg is not None: + x = NormLayer(self.norm_cfg, is_train=self.trainable, name='Norm')(x) + + if self.act_cfg is not None: + x = ActLayer(self.act_cfg)(x) + return x diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/edvr_submodules.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/edvr_submodules.py new file mode 100644 index 0000000000000000000000000000000000000000..df8c0e6eb3cee9fc1f7e2b6f56323dd0f023dd50 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/edvr_submodules.py @@ -0,0 +1,267 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import numpy as np +import tensorflow as tf + +from src.layers import Conv2D, Conv3D, ActLayer, DCNPack +from src.ops import resize + +from .conv_module import Conv2DNormAct + + +class PCDAlign(object): + """ + Pyramid, cascade and deformable alignment module in EDVR. + + Args: + num_feat: int, number of channels multiplier in the intermediate layers. + num_conv_groups: int, number of groups in convolution in dcn. + deformable_groups: int, number of groups in offsets in dcn. + dcn_impl: str, version of dcn operator. Possible choice in ('tf', 'npu'). + upsample_method: str, method of resize operator. Possible choice in + ('bilinear', 'bicubic'). + align_corners: boolean, used in resize. Whether to align corners during + resize. 
+ """ + def __init__(self, num_feat=64, num_conv_groups=1, deformable_groups=1, + dcn_impl='npu', upsample_method='bilinear', align_corners=True): + self.mid_channels = num_feat + self.num_deform_groups = deformable_groups + self.num_groups = num_conv_groups + self.upsample_method = upsample_method + self.dcn_impl = dcn_impl + self.align_corners = align_corners + + def __call__(self, neighbor_feats, ref_feats, + act_cfg=dict(type='LeakyRelu', alpha=0.1), + name='pcd_align'): + """Forward pass of PCD module. + + Args: + neighbor_feats: list[tensor], the multi-scale feature maps of a + single neighbor frame. + ref_feats: list[tensor], the multi-scale feature maps of the center + frame. + act_cfg: dict, specify the activation `type` and other parameters. + name: str, variable scope name. + + Returns: + tensor, aligned multi-frame features. + """ + with tf.variable_scope(name, reuse=tf.AUTO_REUSE): + # The number of pyramid levels is 3. + assert len(neighbor_feats) == 3 and len(ref_feats) == 3, ( + 'The length of neighbor_feats and ref_feats must be both 3, ' + 'but got {} and {}'.format(len(neighbor_feats), len(ref_feats))) + + # Pyramids + upsampled_offset, upsampled_feat = None, None + for i in range(3, 0, -1): + with tf.variable_scope('level{}'.format(i)): + offset = tf.concat([neighbor_feats[i - 1], ref_feats[i - 1]], axis=-1) + offset = Conv2DNormAct(self.mid_channels, act_cfg=act_cfg, name='offset_conv1')(offset) + if i == 3: + offset = Conv2DNormAct(self.mid_channels, act_cfg=act_cfg, name='offset_conv2')(offset) + else: + offset = tf.concat([offset, upsampled_offset], axis=-1) + offset = Conv2DNormAct(self.mid_channels, act_cfg=act_cfg, name='offset_conv2')(offset) + offset = Conv2DNormAct(self.mid_channels, act_cfg=act_cfg, name='offset_conv3')(offset) + + feat = DCNPack(self.mid_channels, kernel_size=[3, 3], padding='same', + num_deform_groups=self.num_deform_groups, num_groups=self.num_groups, + name='dcn_l{}'.format(i), impl=self.dcn_impl, + )(neighbor_feats[i - 1], offset) + if i == 3: + feat = ActLayer(act_cfg)(feat) + else: + feat = tf.concat([feat, upsampled_feat], axis=-1) + feat = Conv2DNormAct(self.mid_channels, act_cfg=act_cfg if i == 2 else None, + name='feat_conv')(feat) + + if i > 1: + # upsample offset and features + upsampled_offset = resize( + offset, size=[offset.shape[1] * 2, offset.shape[2] * 2], align_corners=self.align_corners, + name='upsample_offset{}'.format(i), method=self.upsample_method) + upsampled_offset = upsampled_offset * 2 + upsampled_feat = resize( + feat, size=[feat.shape[1] * 2, feat.shape[2] * 2], align_corners=self.align_corners, + name='upsample_feat{}'.format(i), method=self.upsample_method) + + # Cascading + offset = tf.concat([feat, ref_feats[0]], axis=-1) + offset = Conv2DNormAct(self.mid_channels, act_cfg=act_cfg, name='cas_offset_conv1')(offset) + offset = Conv2DNormAct(self.mid_channels, act_cfg=act_cfg, name='cas_offset_conv2')(offset) + feat = DCNPack(self.mid_channels, kernel_size=[3, 3], padding='same', + num_deform_groups=self.num_deform_groups, name='dcn_cas', + impl=self.dcn_impl)(feat, offset) + feat = ActLayer(act_cfg)(feat) + + return feat + + +class PCWoDCN(object): + """ + A verbose pyramid and cascade module. + + Args: + num_feat: int, number of channels multiplier in the intermediate layers. + upsample_method: str, method of resize operator. Possible choice in + ('bilinear', 'bicubic'). + align_corners: boolean, used in resize. Whether to align corners during + resize. 
+ """ + + def __init__(self, num_feat=64, upsample_method='bilinear', + align_corners=True): + self.mid_channels = num_feat + self.upsample_method = upsample_method + self.align_corners = align_corners + + def __call__(self, neighbor_feats, ref_feats, + act_cfg=dict(type='LeakyRelu', alpha=0.1), + name='pcd_align'): + """Forward pass of PCD module. + + Args: + neighbor_feats: list[tensor], the multi-scale feature maps of a + single neighbor frame. + ref_feats: list[tensor], the multi-scale feature maps of the center + frame. + act_cfg: dict, specify the activation `type` and other parameters. + name: str, variable scope name. + + Returns: + tensor, aligned multi-frame features. + """ + with tf.variable_scope(name, reuse=tf.AUTO_REUSE): + # The number of pyramid levels is 3. + assert len(neighbor_feats) == 3 and len(ref_feats) == 3, ( + 'The length of neighbor_feats and ref_feats must be both 3, ' + 'but got {} and {}'.format(len(neighbor_feats), len(ref_feats))) + + # Pyramids + upsampled_offset, upsampled_feat = None, None + for i in range(3, 0, -1): + with tf.variable_scope('level{}'.format(i)): + feat = Conv2DNormAct(self.mid_channels, kernel_size=[3, 3], + padding='same', name='pc_conv{}'.format(i))(neighbor_feats[i - 1]) + if i == 3: + feat = ActLayer(act_cfg)(feat) + else: + feat = tf.concat([feat, upsampled_feat], axis=-1) + feat = Conv2DNormAct(self.mid_channels, + act_cfg=act_cfg if i == 2 else None, + name='feat_conv')(feat) + + if i > 1: + upsampled_feat = resize( + feat, size=[feat.shape[1] * 2, feat.shape[2] * 2], + align_corners=self.align_corners, + name='upsample_feat{}'.format(i), + method=self.upsample_method) + + # Cascading + feat = Conv2DNormAct(self.mid_channels, kernel_size=[3, 3], + padding='same', name='dcn_cas')(feat) + feat = ActLayer(act_cfg)(feat) + + return feat + + +class TSAFusion(object): + """Fusiong of temporal and spatial attention. + + Args: + num_frames: int, number of input frames. + num_feat: int, multiplier of the filters number in the middle layers. + upsample_method: str, resize method. Possible choices in + ('bilinear', 'bicubic'). + align_corners: boolean, whether to align with corners when resize. + """ + def __init__(self, num_frames, num_feat, upsample_method='bilinear', + align_corners=True): + self.num_frames = num_frames + self.num_feat = num_feat + self.upsample_method = upsample_method + self.align_corners = align_corners + + def __call__(self, aligned_feat, act_cfg=dict(type='LeakyRelu', alpha=0.1)): + """Forward pass. + + Args: + aligned_feat: tensor + act_cfg: dict, specify the activation `type` and other parameters. + + Returns: + tensor, aggregated multi-frame features. 
+ """ + with tf.variable_scope('tsa_fusion', reuse=tf.AUTO_REUSE): + # temporal attention + embedding_ref = Conv2D(self.num_feat, name='temporal_attn1')(aligned_feat[self.num_frames//2]) + + # corr_l = [] # correlation list + aligned_feat_list = [] + for i in range(self.num_frames): + emb = Conv2D(self.num_feat, name='temporal_attn2')(aligned_feat[i]) + emb = tf.cast(emb, tf.float32) + corr = tf.reduce_sum(emb * embedding_ref, axis=-1, keep_dims=True) # (n, h, w, 1) + # corr_l.append(corr) + + corr_prob = tf.nn.sigmoid(corr) + aligned_feat_list.append(corr_prob * aligned_feat[i]) + aligned_feat = tf.concat(aligned_feat_list, axis=-1) # (n, h, w, t*c) + feat = Conv2DNormAct(self.num_feat, kernel_size=(1, 1), act_cfg=act_cfg, name='feat_fusion')(aligned_feat) + + # spatial attention + attn = Conv2DNormAct(self.num_feat, kernel_size=(1, 1), act_cfg=act_cfg, name='spatial_attn1')(aligned_feat) + attn_max = tf.nn.max_pool2d(attn, 3, 2, 'SAME') + attn_avg = tf.nn.avg_pool(attn, 3, 2, 'SAME') + attn = Conv2DNormAct(self.num_feat, kernel_size=(1, 1), + act_cfg=act_cfg, name='spatial_attn2')(tf.concat([attn_max, attn_avg], axis=-1)) + # pyramid levels + attn_level = Conv2DNormAct(self.num_feat, kernel_size=(1, 1), act_cfg=act_cfg, name='spatial_attn_l1')(attn) + attn_max = tf.nn.max_pool2d(attn_level, 3, 2, 'SAME') + attn_avg = tf.nn.avg_pool(attn_level, 3, 2, 'SAME') + attn_level = Conv2DNormAct(self.num_feat, act_cfg=act_cfg, name='spatial_attn_l2')\ + (tf.concat([attn_max, attn_avg], axis=-1)) + attn_level = Conv2DNormAct(self.num_feat, act_cfg=act_cfg, name='spatial_attn_l3')(attn_level) + + attn_level = resize( + attn_level, size=[attn_level.shape[1] * 2, attn_level.shape[2] * 2], + align_corners=self.align_corners, + name='upsample1', method=self.upsample_method) + + attn = Conv2DNormAct(self.num_feat, act_cfg=act_cfg, name='spatial_attn3')(attn) + attn_level + attn = Conv2DNormAct(self.num_feat, kernel_size=(1, 1), act_cfg=act_cfg, name='spatial_attn4')(attn) + + attn = resize( + attn, size=[attn.shape[1] * 2, attn.shape[2] * 2], + align_corners=self.align_corners, + name='upsample2', method=self.upsample_method) + attn = Conv2D(self.num_feat, name='spatial_attn5')(attn) + attn = Conv2DNormAct(self.num_feat, kernel_size=(1, 1), act_cfg=act_cfg, name='spatial_attn_add1')(attn) + attn_add = Conv2D(self.num_feat, kernel_size=(1, 1), name='spatial_attn_add2')(attn) + + attn = tf.cast(attn, tf.float32) + attn = tf.nn.sigmoid(attn) + + feat = tf.cast(feat, tf.float32) + attn_add = tf.cast(attn_add, tf.float32) + + # after initialization, * 2 makes (attn * 2) to be close to 1. + feat = feat * attn * 2 + attn_add + return feat diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/res_block.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/res_block.py new file mode 100644 index 0000000000000000000000000000000000000000..7d4870f6dc9aaf3122373bda031545f1f4ba64c6 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/res_block.py @@ -0,0 +1,145 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import math
+import tensorflow as tf
+from src.layers import Conv2D, Conv3D, ActLayer, NormLayer
+from src.utils.utils import to_pair
+
+from .conv_module import Conv2DNormAct, Conv3DNormAct
+
+
+class ResBlock(object):
+    """A ResBlock class consisting of several conv blocks with bn.
+
+    Args:
+        num_blocks: int, number of conv blocks.
+        mid_channels: int, number of the channels in the conv layers.
+        res_scale: float, a scalar that scales the residual.
+        act_cfg: dict, specify the activation `type` and other parameters.
+        norm_cfg: dict, specify the normalization `type` and other parameters.
+        use_spectral_norm: boolean, whether to use spectral normalization.
+            Default False.
+        trainable: boolean, whether in training phase. If True, the buffers
+            will be added to UPDATE_OPS and updated.
+        name: str, variable scope name.
+    """
+    def __init__(self, num_blocks, mid_channels, res_scale=1.0,
+                 act_cfg=dict(type='ReLU'),
+                 norm_cfg=dict(type='bn'),
+                 use_spectral_norm=False,
+                 trainable=True, name='ResBlock'):
+        self.num_blocks = num_blocks
+        self.output_channel = mid_channels
+        self.res_scale = res_scale
+        self.name = name
+        self.trainable = trainable
+        self.act_cfg = act_cfg
+        self.norm_cfg = norm_cfg
+        self.use_spectral_norm = use_spectral_norm
+
+    def shortcut_func(self, x):
+        """Shortcut path.
+
+        May use a conv layer to change the number of channels.
+        """
+        c_in = x.get_shape().as_list()
+        if c_in[-1] == self.output_channel:
+            return x
+        else:
+            return Conv2D(self.output_channel,
+                          name='conv_shortcut',
+                          use_spectral_norm=self.use_spectral_norm)(x)
+
+    def build_block(self, x, idx):
+        """Build a basic conv block.
+        """
+        identity = self.shortcut_func(x)
+
+        out = Conv2DNormAct(self.output_channel,
+                            act_cfg=self.act_cfg, norm_cfg=self.norm_cfg,
+                            use_spectral_norm=self.use_spectral_norm,
+                            name='conv{}a'.format(idx))(x)
+        out = Conv2DNormAct(self.output_channel,
+                            norm_cfg=self.norm_cfg,
+                            use_spectral_norm=self.use_spectral_norm,
+                            name='conv{}b'.format(idx))(out)
+
+        return identity + out * self.res_scale
+
+    def __call__(self, x):
+        with tf.variable_scope(self.name) as scope:
+            for i in range(self.num_blocks):
+                x = self.build_block(x, i + 1)
+            return x
+
+
+class ResBlockNoBN(object):
+    """A ResBlock class consisting of several conv blocks without bn.
+
+    Args:
+        num_blocks: int, number of conv blocks.
+        mid_channels: int, number of the channels in the conv layers.
+        res_scale: float, a scalar that scales the residual.
+        act_cfg: dict, specify the activation `type` and other parameters.
+        dilation: int, dilation of the conv kernels.
+        use_spectral_norm: boolean, whether to use spectral normalization.
+            Default False.
+        trainable: boolean, whether in training phase. If True, the buffers
+            will be added to UPDATE_OPS and updated.
+        name: str, variable scope name.
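+
+    Example:
+        As used for EDVR-style feature extraction (a sketch):
+
+            feat = ResBlockNoBN(num_blocks=5, mid_channels=64)(feat)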
+ """ + def __init__(self, num_blocks, mid_channels, res_scale=1.0, + act_cfg=dict(type='ReLU'), dilation=1, + use_spectral_norm=False, trainable=True, + name='ResBlockNoBN'): + self.num_blocks = num_blocks + self.mid_channels = mid_channels + self.res_scale = res_scale + self.name = name + self.trainable = trainable + self.act_cfg = act_cfg + self.dilation = (dilation, dilation) + self.use_spectral_norm = use_spectral_norm + + def shortcut_func(self, x): + """Shortcut path. May use a conv layer to change the number of channels. + """ + c_in = x.get_shape().as_list() + if c_in[-1] == self.output_channel: + return x + else: + return Conv2D(self.output_channel, + scale=self.scale, + name='conv_shortcut', + use_spectral_norm=self.use_spectral_norm)(x) + + def build_block(self, x, idx): + """Build a basic conv block. + """ + out = Conv2D(self.mid_channels, + use_spectral_norm=self.use_spectral_norm, + name='conv{}a'.format(idx))(x) + out = ActLayer(self.act_cfg)(out) + out = Conv2D(self.mid_channels, + use_spectral_norm=self.use_spectral_norm, + name='conv{}b'.format(idx))(out) + return x + out * self.res_scale + + def __call__(self, x): + with tf.variable_scope(self.name) as scope: + for i in range(self.num_blocks): + x = self.build_block(x, i + 1) + return x diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/spade.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/spade.py new file mode 100644 index 0000000000000000000000000000000000000000..6a8d90be5193954a2d339c6353523993e2f8a929 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/modules/spade.py @@ -0,0 +1,150 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import tensorflow as tf + +from src.layers import Conv2D, NormLayer, ActLayer + + +class SPADE(object): + """SPatially-Adaptive (DE)normalization. + + See https://arxiv.org/pdf/1903.07291.pdf. The forward pass is borrowed from + https://github.com/NVlabs/SPADE/blob/master/models/networks/normalization.py. + + Given the input input map \mathbf{h}, and a reference feature map \mathbf{m}, + the output activation value is : + ```math + \mathbf{y} = \gamma(\mathbf{m}) * \frac{\mathbf{h} - \mu}{\sigma} + \beta(\mathbf{m}) + ``` + where \gamma and \beta are both functions of \mathbf{m}. + + Args: + num_filters: int, number of filters of the output tensor. + kernel_size: int or list[int], kernel size of the conv layers. + num_hidden: int, number of filters in the middle layers which compute the + gamma and beta for normalization. + training: boolean, indicating whether in training phase or not. + norm_type: str, the type of normalization. 
+ """ + def __init__(self, num_filters, kernel_size=(3,3), + num_hidden = 128, training=False, norm_type='in', + return_all=False, name='spade', + ver='v2'): + self.num_filters = num_filters + self.kernel_size = kernel_size + self.training = training + self.norm_type = norm_type + + + def spade(x, ref_feat, norm_nc, kernel_size=(3,3), name='spade', + nhidden = 128, training=False, norm_type='in', vis=False, + ver='v2'): + with tf.variable_scope(name): + # Part 1. generate parameter-free normalized activations + normalized = NormLayer(norm_type=self.norm_type, center=False, + scale=False, is_train=training)(x) + + # Part 2. produce scaling and bias conditioned on reference map + shape_x = x.get_shape().as_list() + shape_label = ref_feat.get_shape().as_list() + ref_feat = tf.image.resize_images(ref_feat, (shape_x[1], shape_x[2]), + method=tf.image.ResizeMethod.BILINEAR, + align_corners=True) + + if ver == 'v1': + actv = Conv2D(nhidden, kernel_size=kernel_size, padding='SAME', + strides=(1, 1), use_bias=True, trainable=True, + name='mlp_shared')(ref_feat) + actv = tf.nn.relu(actv) + else: + x_trans = Conv2D(shape_label[-1], kernel_size=kernel_size, + padding='SAME', strides=(1, 1), use_bias=True, + trainable=True, name='mlp_trans')(x) + actv = tf.nn.relu(ref_feat * x_trans) + actv = Conv2D(nhidden, kernel_size=kernel_size, padding='SAME', + strides=(1, 1), use_bias=True, trainable=True, + name='mlp_shared')(actv) + actv = tf.nn.relu(actv) + + gamma = Conv2D(norm_nc, kernel_size=kernel_size, padding='SAME', + strides=(1, 1), use_bias=True, trainable=True, + name='mlp_gamma')(actv) + beta = Conv2D(norm_nc, kernel_size=kernel_size, padding='SAME', + strides=(1, 1), use_bias=True, trainable=True, + name='mlp_beta')(actv) + + # apply scale and bias + out = normalized * (1 + gamma) + beta + if vis: + return out, gamma, beta + return out + + +class SPADEResBlock: + """ResBlock based on SPatially-Adaptive (DE)normalization. + + See https://arxiv.org/pdf/1903.07291.pdf. The forward pass is borrowed from + https://github.com/NVlabs/SPADE/blob/master/models/networks/normalization.py. + + Given the input input map \mathbf{h}, and a reference feature map \mathbf{m}, + the output activation value is : + ```math + \mathbf{y} = \gamma(\mathbf{m}) * \frac{\mathbf{h} - \mu}{\sigma} + \beta(\mathbf{m}) + ``` + where \gamma and \beta are both functions of \mathbf{m}. + + Args: + num_filters: int, number of filters of the output tensor. + kernel_size: int or list[int], kernel size of the conv layers. + nhidden: int, number of filters in the middle layers which compute the + gamma and beta for normalization. + training: boolean, indicating whether in training phase or not. + norm_type: str, the type of normalization. 
+ """ + def __init__(self, fin, fout, trainable=True, spectral_norm=False, with_spade=True, name='spade_res_block'): + self.learned_short = fin != fout + self.fmiddle = min(fin, fout) + self.trainable = trainable + self.fin = fin + self.fout = fout + self.with_spade = with_spade + self.scope = name + + def __call__(self, x, ref): + # TODO: extend to[NTHWC] 5D input + with tf.variable_scope(self.scope, reuse=tf.AUTO_REUSE): + feat = x + if self.with_spade: + feat = spade(feat, ref, self.fin, kernel_size=(3, 3), name='spade1', training=self.trainable) + feat = Conv2D(self.fmiddle, kernel_size=(3, 3), strides=(1, 1), + padding='SAME', trainable=self.trainable, name='conv1')(feat) + feat = ActLayer(dict(type='leakyrelu', alpha=0.2))(feat) + + if self.with_spade: + feat = spade(feat, ref, self.fmiddle, kernel_size=(3, 3), name='spade2', training=self.trainable) + feat = Conv2D(self.fout, kernel_size=(3, 3), strides=(1, 1), + padding='SAME', trainable=self.trainable, name='conv2')(feat) + feat = ActLayer(dict(type='leakyrelu', alpha=0.2))(feat) + + short_cut = x + if self.learned_short: + if self.with_spade: + short_cut = spade(short_cut, ref, self.fin, kernel_size=(3, 3), + name='spade_shortcut', training=self.trainable) + short_cut = Conv2D(self.fout, kernel_size=(3, 3), strides=(1, 1), + padding='SAME', trainable=self.trainable, name='conv3')(short_cut) + + return short_cut + feat diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/networks/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/networks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9543d0650499b3d49f5b743a3200dfcda3fd7a56 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/networks/__init__.py @@ -0,0 +1,42 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import inspect +import importlib + +from .base_model import Base +from .register import registry + +model_dir = os.path.dirname(os.path.realpath(__file__)) +model_file = os.listdir(model_dir) + +# Automatically import all the defined class in the files under src.networks +__all__ = ['registry'] +for model in model_file: + module_name = model.split('.')[0] + if module_name in ['register', 'base_model', '__init__', 'VSR']: + continue + mod = importlib.import_module(f'.{module_name}', 'src.networks') + for name, obj in inspect.getmembers(mod, inspect.isclass): + if issubclass(obj, Base) and module_name not in __all__ and obj.__module__ == module_name: + __all__.append(name) + + +def build_network(cfg): + network = registry[cfg.model.name](cfg=cfg) + return network + + +__all__.append('build_network') diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/networks/base_model.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/networks/base_model.py new file mode 100644 index 0000000000000000000000000000000000000000..521eefbaa7301bd34376a3ef1eca8d73836fefbf --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/networks/base_model.py @@ -0,0 +1,416 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import glob +import os + +import numpy as np +import tensorflow as tf +from src.losses.losses import get_loss +from src.losses.modules.adversarial import build_adversarial_loss +from src.losses.modules.perceptual import build_content_style_loss +from src.losses.modules.perceptual import build_perceptual_loss +from src.ops.edge import get_edges +from src.networks.register import RegisteredModel +from src.runner.common import name_space +from src.utils.logger import logger +from src.utils.utils import convert_to_dict + + +class Base(object, metaclass=RegisteredModel): + """Base class for all the video processing models. + + Attributes: + cfg: yacs CfgNode. Global configuration. + model_name: str, model name. + scale: int, output scale w.r.t input, e.g. EDVR output is 4x the scale + of the input. + num_net_input_frames: int, the total number of input frames to the + network. For EDVR, the default is 5. See src.utils.default. + num_net_output_frames: int, the number of output result frames by the + network. Default is 1. Also see src.utils.default. + num_data_lq_frames: int, the total number of lq frames in each case + generated by the datasets. Typically it can be the same with + ``num_net_input_frames``. But if one is to use temporal supervision + and there will be multiple output frames. In this case, see the + example below. + num_data_gt_frames: int, the total number of the hq frames in each case + produced by the dataset. Typically it is the same with + ``num_net_output_frames``. + input_color_space: int, the color space of the input frames. Default to + ``rgb``. 
+ num_in_channels: int, number of the channels of the input frames. + Corresponds to ``input_color_space``. + is_train: boolean, whether the model is in training phase. Determined by + the ``cfg.mode``. + generative_model_scope: str, top scope name for the tensorflow graph. + Default value is 'G'. + output_dir: str, path to dump the summary. + + Example: + The most confusion configuration may be the ``num_**_frames``. Here is + an example of the basic scenario (multi-input frames and single center + output frame): + + Frame 1 -----> |---------| + Frame 2 -----> | | + Frame 3 -----> | network | -----> Frame 3' -----> Loss + Frame 4 -----> | | + Frame 5 -----> |---------| + + In this case, we have ``num_net_input_frames=5`` and + ``num_net_output_frames=1``. Also, since there is no temporal + supervision for the outputs, ``num_data_lq_frames=num_net_input_frames=5`` + and ``num_data_gt_frames=num_net_output_frames=1``, which is the EDVR + case. + + A second case is multi-input frames and multi-output frames: + + Frame 1 -----> |---------| -----> Frame 1' -----> |------| + Frame 2 -----> | | -----> Frame 2' -----> | | + Frame 3 -----> | network | -----> Frame 3' -----> | loss | -----> Loss + Frame 4 -----> | | -----> Frame 4' -----> | | + Frame 5 -----> |---------| -----> Frame 5' -----> |------| + + In this case, `num_data_lq_frames=num_net_input_frames=5`` and + ``num_data_gt_frames=num_net_output_frames=5``. + + Third case, multi-input frames, single center output frame and with + temporal supervision: + + Frame 1 -----> |---------| + Frame 2 -----> | | + Frame 3 -----> | | -----> Frame 3' -----> |------| + Frame 4 -----> | | -----> Frame 4' -----> | | + Frame 5 -----> | network | -----> Frame 5' -----> | loss | -----> Loss + Frame 6 -----> | | -----> Frame 6' -----> | | + Frame 7 -----> | | -----> Frame 7' -----> |------| + Frame 8 -----> | | + Frame 9 -----> |---------| + + In the 3rd case, ``num_data_lq_frames=9``, ``num_net_input_frames=5``, + ``num_data_gt_frames=5``, ``num_net_output_frames=1``, which satisfies: + + ``num_data_lq_frames = num_data_gt_frames + num_net_input_frames - num_net_output_frames`` + + During inference, the network is still multi-input frames and + single center output frame (same with the 1st case), while temporal + loss can be applied to the network during training. + + Args: + cfg: Configuration loaded from the *.yaml file. + """ + def __init__(self, cfg): + self.model_name = cfg.model.name + self.scale = cfg.model.scale + self.num_net_input_frames = cfg.model.num_net_input_frames + self.num_net_output_frames = cfg.model.num_net_output_frames + self.num_data_lq_frames = cfg.data.num_data_lq_frames + self.num_data_gt_frames = cfg.data.num_data_gt_frames + + self.input_color_space = cfg.data.color_space + self.num_in_channels = 3 + self.is_train = cfg.mode == 'train' + + self.cfg = cfg + self.lq = None # input low-quality + self.gt = None # groundtruth + self.hq = None # output high-quality + self.generative_model_scope = cfg.model.scope + self.output_dir = cfg.train.output_dir + + @property + def output_node(self): + """Obtain the default output result of the network + + Return: + A 4D [N, H, W, C] or 5D [N, T, H, W, C] tensorflow tensor. + """ + return self.hq + + @property + def input_node(self): + """Obtain the default input node of the network. + + Return: + A 4D [N, H, W, C] or 5D [N, T, H, W, C] tensor. + """ + return self.lq + + def parameters(self, scope=''): + """Obtain the trainable parameters given the scope. 
+ + Args: + scope: str, the parameter scope. If is empty, return all the + parameters in the top scope ``self.generative_model_scope``. + + Return: + A list of parameter tensor in the given scope. + """ + if scope == '': + return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, + self.generative_model_scope) + else: + return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, + scope=scope) + + def calculate_content_loss(self, gt, hq): + """Compute the pixel-wise content loss. The loss will be added to + ``name_space.GeneratorLoss``. + + Args: + gt: tensor, predictions of the network. + hq: tensor, ground-truth of the training. + + """ + eps = self.cfg.loss.content.loss_margin + reduction = self.cfg.loss.content.loss_reduction + + loss = get_loss(self.cfg.loss.content.loss_type, gt, hq, eps=eps) + # reduction strategy is adjusted to ascend platform, to keep the + # gradient neither too large (in case of overflow) nor too small + # (in case gradient vanishing because of the CUBE operator). + if reduction == 'mean': + loss = tf.reduce_sum(tf.reduce_mean(loss, axis=[1, 2])) + elif reduction == 'sum': + loss = tf.reduce_mean(tf.reduce_sum(loss, axis=[1, 2, 3])) + else: + raise NotImplementedError + name_space.add_to_collection( + name_space.GeneratorLoss, + f'content {self.cfg.loss.content.loss_type}', + loss) + return loss + + def calculate_perceptual_loss(self, gt, hq): + """Compute perceptual loss. The loss will be added to + ``name_space.GeneratorLoss``. + + Args: + gt: tensor, predictions of the network. + hq: tensor, ground-truth of the training. + """ + # perceptual loss will be weighted in build_perceptual_loss + perceptual_config = convert_to_dict(self.cfg.loss.perceptual, []) + perceptual_loss = build_perceptual_loss(gt, hq, perceptual_config) + perceptual_loss = perceptual_loss * self.cfg.loss.perceptual.loss_weight + + name_space.add_to_collection( + name_space.GeneratorLoss, + 'perceptual', + perceptual_loss) + return perceptual_loss + + def calculate_border_loss(self, gt, hq): + """Compute edge loss. The loss will be added to ``name_space.GeneratorLoss``. + + Args: + gt: tensor, predictions of the network. + hq: tensor, ground-truth of the training. + """ + + hq_edge = get_edges(gt, method=self.cfg.loss.edge.method) + gt_edge = get_edges(hq, method=self.cfg.loss.edge.method) + edge_loss = get_loss(self.cfg.loss.content.loss_type, hq_edge, gt_edge) + edge_loss = tf.reduce_sum(tf.reduce_mean(edge_loss, axis=[1, 2])) + edge_loss = edge_loss * self.cfg.loss.edge.loss_weight + + name_space.add_to_collection( + name_space.GeneratorLoss, + 'edge', + edge_loss) + + return edge_loss + + def calculate_content_style_loss(self, gt, hq): + """Compute style loss. The loss will be added to + ``name_space.GeneratorLoss``. + + Args: + gt: tensor, predictions of the network. + hq: tensor, ground-truth of the training. + """ + # perceptual loss will be weighted in build_perceptual_loss + perceptual_config = convert_to_dict(self.cfg.loss.perceptual, []) + perceptual_loss = build_content_style_loss(gt, hq, perceptual_config) + perceptual_loss = perceptual_loss * self.cfg.loss.perceptual.loss_weight + + name_space.add_to_collection( + name_space.GeneratorLoss, + 'style', + perceptual_loss) + return perceptual_loss + + def calculate_adversarial_loss(self, gt, hq): + """Compute adversarial loss. The loss will be added to + ``name_space.GeneratorLoss``. + + Args: + gt: tensor, predictions of the network. + hq: tensor, ground-truth of the training. 
+ """ + + # discriminator loss will be weighted and added to name_space in build_ + # adversarial_loss + _ = build_adversarial_loss(gt, hq, self.cfg) + + def build_losses(self, *args, **kwargs): + """Compute all the losses, including pixel-wise content loss (required), + perceptual and perceptual style loss (if loss_weight > 0), edge loss ( + if loss_weight > 0), and adversarial loss (if loss_weight > 0). + """ + # all losses should be added to name_space collections + gt = tf.cast(self.gt, tf.float32) + hq = tf.cast(self.hq, tf.float32) + + hq = tf.reshape(hq, gt.shape) + + _ = self.calculate_content_loss(gt, hq) + if self.cfg.loss.edge.loss_weight > 0: + _ = self.calculate_border_loss(gt, hq) + + if self.cfg.loss.perceptual.loss_weight > 0: + _ = self.calculate_perceptual_loss(gt, hq) + + if self.cfg.loss.adversarial.loss_weight > 0: + self.calculate_adversarial_loss(gt, hq) + + def build_metrics(self, *args, **kwargs): + # Reserved for evaluation. + pass + + def prepare_placeholder(self, size): + """Prepare placeholder for **inference** phase, given the input size. + + Args: + size: tuple/list, including [batchsize, (h, w)] + + Returns: + None + """ + # Note: this function is only for non-train mode + if self.lq is not None: + pass + b, spatial = size + + if self.cfg.model.input_format_dimension == 5: + if b is None or b < 0: + b = None + self.lq = tf.placeholder( + tf.float32, + shape=[b, + self.num_net_input_frames, + *spatial, + self.num_in_channels], + name='L_input') + elif self.cfg.model.input_format_dimension == 4: + # Mainly used for offline model inference for speeding up in the + # AIPP on Ascend 310 + if b is None or b < 0: + self.lq = tf.placeholder( + tf.float32, + shape=[None, + *spatial, + self.num_in_channels], + name='L_input') + else: + self.lq = tf.placeholder( + tf.float32, + shape=[b*self.num_net_input_frames, + *spatial, + self.num_in_channels], + name='L_input') + else: + raise ValueError(f'Input format dimension only support 4 or 5, ' + f'but got {self.cfg.model.input_format_dimension}') + + def build_graph(self, dataloader=None, input_size=None, *args, **kwargs): + """Build tensorflow graph, network building, loss calculation, metrics + calculation, etc. + + Args: + dataloader: tf.Datasets, in training or evaluation phase. + input_size: tuple or list, [b, (h, w)], for inference and freeze + phase. + + Returns: + None + """ + if self.cfg.mode in ['freeze', 'inference']: + assert input_size is not None + self.prepare_placeholder(input_size) + elif self.cfg.mode in ['train', 'eval']: + assert dataloader is not None + self.lq, self.gt = dataloader + else: + raise NotImplementedError + + # Forward propagation + self.hq = self.build_generator(self.lq) + + if self.cfg.mode == 'train': + name_space.add_to_collection(name_space.Summary, 'hq', self.gt) + self.build_losses() + elif self.cfg.mode == 'eval': + name_space.add_to_collection(name_space.Summary, 'hq', self.gt) + self.build_metrics() + + if self.cfg.mode in ['eval', 'inference', 'freeze'] and \ + self.cfg.model.convert_output_to_uint8: + self.hq = tf.cast( + tf.round( + tf.clip_by_value( + self.hq * 255., + 0., + 255.)), + tf.uint8 + ) + + # Setup the output node for inference without network file. 
+
+    def build_graph(self, dataloader=None, input_size=None, *args, **kwargs):
+        """Build the tensorflow graph: network construction, loss calculation,
+        metrics calculation, etc.
+
+        Args:
+            dataloader: tf.Datasets, in training or evaluation phase.
+            input_size: tuple or list, [b, (h, w)], for inference and freeze
+                phase.
+
+        Returns:
+            None
+        """
+        if self.cfg.mode in ['freeze', 'inference']:
+            assert input_size is not None
+            self.prepare_placeholder(input_size)
+        elif self.cfg.mode in ['train', 'eval']:
+            assert dataloader is not None
+            self.lq, self.gt = dataloader
+        else:
+            raise NotImplementedError
+
+        # Forward propagation
+        self.hq = self.build_generator(self.lq)
+
+        if self.cfg.mode == 'train':
+            name_space.add_to_collection(name_space.Summary, 'hq', self.gt)
+            self.build_losses()
+        elif self.cfg.mode == 'eval':
+            name_space.add_to_collection(name_space.Summary, 'hq', self.gt)
+            self.build_metrics()
+
+        if self.cfg.mode in ['eval', 'inference', 'freeze'] and \
+                self.cfg.model.convert_output_to_uint8:
+            self.hq = tf.cast(
+                tf.round(
+                    tf.clip_by_value(
+                        self.hq * 255.,
+                        0.,
+                        255.)),
+                tf.uint8
+            )
+
+        # Setup the output node for inference without network file.
+        self.hq = tf.identity(self.hq, name='HQ_output')
+
+        name_space.add_to_collections((name_space.Summary,
+                                       name_space.InputField),
+                                      'lq',
+                                      self.lq)
+        name_space.add_to_collections((name_space.Summary,
+                                       name_space.OutputField),
+                                      'gt',
+                                      self.hq)
+
+    def build_generator(self, lq, *args, **kwargs):
+        """Build the forward network. This is the interface every derived
+        network class should implement.
+
+        Args:
+            lq: tensor, input frames. 4D or 5D tensor.
+
+        Returns:
+            tensor, the network prediction.
+        """
+        raise NotImplementedError
+
+    def dump_summary(self, step, summary_dict):
+        """Function to visualize the intermediate training status.
+        In case tensorboard is not available, one can use this function to
+        check the intermediate training or evaluation results.
+
+        Args:
+            step: int, training step
+            summary_dict: dict, contains all the results.
+
+        Returns:
+            None
+        """
+        pass
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/networks/edvr.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/networks/edvr.py
new file mode 100644
index 0000000000000000000000000000000000000000..135d7f14ca7d1b7ee51f8c85a526a719dc128965
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/networks/edvr.py
@@ -0,0 +1,170 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import math
+import os
+
+import numpy as np
+import tensorflow as tf
+
+from src.layers import Conv2D, ActLayer
+from src.modules import Conv2DNormAct, ResBlockNoBN
+from src.ops import depth_to_space, resize, split
+from src.modules.edvr_submodules import PCDAlign, TSAFusion, PCWoDCN
+from src.networks.base_model import Base
+from src.runner.common import name_space
+from src.utils.file_io import imwrite
+
+
+class EDVR(Base):
+    """EDVR video super-resolution network.
+
+    Args:
+        cfg: yacs node, the EDVR configuration defined in edvr_config.py.
+ """ + def __init__(self, cfg): + super().__init__(cfg) + self.with_tsa = cfg.edvr.with_tsa + self.mid_channels = cfg.edvr.mid_channels + self.num_groups = cfg.edvr.num_groups + self.num_deform_groups = cfg.edvr.num_deform_groups + self.num_blocks_extraction = cfg.edvr.num_blocks_extraction + self.num_blocks_reconstruction = cfg.edvr.num_blocks_reconstruction + + if cfg.edvr.use_dcn: + self.align_module = PCDAlign(self.mid_channels, 1, self.num_deform_groups, + dcn_impl='npu', + upsample_method=self.cfg.edvr.upsampling, + align_corners=self.cfg.edvr.align_corners) + else: + self.align_module = PCWoDCN(self.mid_channels, + upsample_method=self.cfg.edvr.upsampling, + align_corners=self.cfg.edvr.align_corners) + + self.tsa_fusion_module = TSAFusion(self.num_net_input_frames, + self.mid_channels, + self.cfg.edvr.upsampling, + align_corners=self.cfg.edvr.align_corners) + + def feature_extraction(self, x, act_cfg=dict(type='LeakyRelu', alpha=0.1)): + # extract LR features + with tf.variable_scope('extraction', reuse=tf.AUTO_REUSE): + # L1 + # l1_feat = tf.reshape(x, [-1, x.shape[2], x.shape[3], x.shape[4]]) + l1_feat = Conv2D(self.mid_channels, name='conv_first')(x) + l1_feat = ActLayer(act_cfg)(l1_feat) + l1_feat = ResBlockNoBN(num_blocks=self.num_blocks_extraction, mid_channels=self.mid_channels)(l1_feat) + # L2 + l2_feat = Conv2DNormAct(self.mid_channels, strides=[2, 2], act_cfg=act_cfg, name='feat_l2_conv1')(l1_feat) + l2_feat = Conv2DNormAct(self.mid_channels, act_cfg=act_cfg, name='feat_l2_conv2')(l2_feat) + # L3 + l3_feat = Conv2DNormAct(self.mid_channels, strides=[2, 2], act_cfg=act_cfg, name='feat_l3_conv1')(l2_feat) + l3_feat = Conv2DNormAct(self.mid_channels, act_cfg=act_cfg, name='feat_l3_conv2')(l3_feat) + + return l1_feat, l2_feat, l3_feat + + def reconstruction(self, feat, x_center, act_cfg=dict(type='LeakyRelu', alpha=0.1)): + # reconstruction + out_channel = x_center.get_shape().as_list()[-1] + with tf.variable_scope('reconstruction', reuse=tf.AUTO_REUSE): + out = ResBlockNoBN(num_blocks=self.num_blocks_reconstruction, mid_channels=self.mid_channels)(feat) + level_upsample = int(math.log2(self.scale)) + for i in range(level_upsample): + out = Conv2D(self.mid_channels * 2 ** 2, name=f'upsample{i+1}')(out) + out = depth_to_space(out, 2) + out = Conv2D(self.mid_channels, name='conv_hr')(out) + out = ActLayer(act_cfg)(out) + out = Conv2D(out_channel, name='conv_last')(out) + + base = resize( + x_center, + size=[x_center.shape[1] * self.scale, x_center.shape[2] * self.scale], + align_corners=self.cfg.edvr.align_corners, + name='img_upsample', method=self.cfg.edvr.upsampling) + base = tf.cast(base, tf.float32) + out = tf.cast(out, tf.float32) + self.residual = out + out += base + + return out + + def build_generator(self, x): + # shape of x: [B,T_in,H,W,C] + with tf.variable_scope(self.generative_model_scope, reuse=tf.AUTO_REUSE): + if self.cfg.model.input_format_dimension == 4: + x_shape = x.get_shape().as_list() + x = tf.reshape(x, [-1, self.num_net_input_frames, *x_shape[1:]]) + + x_list = split(x, self.num_net_input_frames, axis=1, keep_dims=False) + x_center = x_list[self.num_net_input_frames//2] + + l1_feat_list = [] + l2_feat_list = [] + l3_feat_list = [] + for f in range(self.num_net_input_frames): + l1_feat, l2_feat, l3_feat = self.feature_extraction(x_list[f]) + l1_feat_list.append(l1_feat) + l2_feat_list.append(l2_feat) + l3_feat_list.append(l3_feat) + + ref_feats = [ + l1_feat_list[self.num_net_input_frames//2], + l2_feat_list[self.num_net_input_frames//2], + 
l3_feat_list[self.num_net_input_frames//2]
+            ]
+            aligned_feat = []
+
+            for i in range(self.num_net_input_frames):
+                neighbor_feats = [
+                    l1_feat_list[i],
+                    l2_feat_list[i],
+                    l3_feat_list[i]
+                ]
+                # aligned_feat.append(self.pcd_align(neighbor_feats, ref_feats))
+                aligned_feat.append(self.align_module(neighbor_feats, ref_feats))
+
+            if self.with_tsa:
+                # feat = self.tsa_fusion(aligned_feat)
+                feat = self.tsa_fusion_module(aligned_feat)
+            else:
+                aligned_feat = tf.stack(aligned_feat, axis=1)  # (n, t, h, w, c)
+                aligned_feat_shape = aligned_feat.get_shape().as_list()
+                last_dim = aligned_feat_shape[-1] * aligned_feat_shape[1]
+                aligned_feat = tf.transpose(aligned_feat, [0, 2, 3, 1, 4])
+                aligned_feat = tf.reshape(aligned_feat,
+                                          [-1, aligned_feat.shape[1], aligned_feat.shape[2], last_dim])
+                feat = Conv2D(self.mid_channels, kernel_size=[1, 1], name='fusion')(aligned_feat)
+
+            # reconstruction
+            out = self.reconstruction(feat, x_center)
+
+            return out
+
+    def dump_summary(self, step, summary_dict):
+        # Keys of the summary dict correspond to the keys defined in the
+        # base model's summary collection.
+        lr = summary_dict['lr']
+        sr = summary_dict['sr']
+        hr = summary_dict['hr']
+
+        os.makedirs(os.path.join(self.output_dir, 'intermediate'), exist_ok=True)
+
+        output_file = os.path.join(self.output_dir, 'intermediate', f'step{step:06d}_lr.png')
+        imwrite(output_file, np.squeeze(lr[0, self.num_net_input_frames//2]),
+                source_color_space=self.cfg.data.color_space)
+
+        output_file = os.path.join(self.output_dir, 'intermediate', f'step{step:06d}_hr.png')
+        imwrite(output_file, np.squeeze(hr[0]), source_color_space=self.cfg.data.color_space)
+
+        output_file = os.path.join(self.output_dir, 'intermediate', f'step{step:06d}_sr.png')
+        imwrite(output_file, sr[0], source_color_space=self.cfg.data.color_space)
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/networks/register.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/networks/register.py
new file mode 100644
index 0000000000000000000000000000000000000000..530a60a9c75679a07b5936b1b8a3f98e05f36f21
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/networks/register.py
@@ -0,0 +1,29 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+registry = {}
+
+
+def register(cls):
+    registry[cls.__name__] = cls
+    return cls
+
+
+class RegisteredModel(type):
+    """A metaclass for model registration.
+    """
+    def __new__(cls, clsname, bases, attrs):
+        newclass = super(RegisteredModel, cls).__new__(cls, clsname, bases, attrs)
+        register(newclass)  # register every class created with this metaclass
+        return newclass
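[Review note] A minimal sketch of how this registry is meant to be used (the
class name MyNet is illustrative, not part of the diff):

    class MyNet(metaclass=RegisteredModel):
        pass

    # Merely defining the class registers it; subclasses are registered too.
    assert registry['MyNet'] is MyNet
    model_cls = registry['MyNet']  # e.g. look up a network by its cfg name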
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d63ba3e71b68bd9b0d11278abdb5eb0808b3a8a
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/__init__.py
@@ -0,0 +1,17 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .upsample import *
+from .edge import *
+from .slicing import *
\ No newline at end of file
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/edge.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/edge.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3af3b02c5d8b9acceb04ef0ec0cb9d0a16baf01
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/edge.py
@@ -0,0 +1,110 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import numpy as np
+import tensorflow as tf
+from scipy import signal
+import random
+
+
+__all__ = ['tf_gaussian_blur', 'get_edges']
+
+
+def gaussian_kernel(kernel_size, standard_dev):
+    """Returns a 2D Gaussian kernel array with side length ``kernel_size``
+    and standard deviation ``standard_dev``.
+
+    Args:
+        kernel_size: int.
+        standard_dev: float, scalar of the kernel width.
+
+    Returns:
+        ndarray, a normalized np.ndarray of shape [kernel_size, kernel_size].
+    """
+    gkern1d = signal.gaussian(kernel_size, std=standard_dev).reshape(kernel_size, 1)
+    gkern2d = np.outer(gkern1d, gkern1d)
+    return (gkern2d/gkern2d.sum())
+
+
+def tf_gaussian_blur(x, kernel_size, standard_dev):
+    """Apply gaussian blur to a tensor using the tf interface. Only works for
+    RGB or 3-channel tensors.
+
+    Args:
+        x: tensor, 4D.
+        kernel_size: int, blur kernel size.
+        standard_dev: float.
+
+    Returns:
+        tensor, blurred version of the input.
+    """
+    gau_k = gaussian_kernel(kernel_size, standard_dev)
+    gau_0 = np.zeros_like(gau_k)
+    gau_list = np.float32([
+        [gau_k, gau_0, gau_0],
+        [gau_0, gau_k, gau_0],
+        [gau_0, gau_0, gau_k]])  # only works for RGB images!
+    gau_wei = np.transpose(gau_list, [2, 3, 0, 1])
+
+    fix_gkern = tf.constant(gau_wei, dtype=tf.float32,
+                            shape=[kernel_size, kernel_size, 3, 3],
+                            name='gauss_blurWeights')
+    # shape [batch_size, crop_h, crop_w, 3]
+    cur_data = tf.nn.conv2d(x, fix_gkern, strides=[1, 1, 1, 1], padding="SAME", name='gauss_blur')
+    return cur_data
+
+
+def get_edges(x, method='sobel', use_default=False):
+    """Get the edge map of a tensor x.
+
+    Args:
+        x: tensor, input feature map, whose number of channels can be larger than 3.
+        method: str, which edge detector is used.
+        use_default: boolean, whether to use the tensorflow default sobel edge detector.
+
+    Returns:
+        tensor, the edge map of the input tensor.
+    """
+    if method == 'sobel' and use_default:
+        edge = tf.image.sobel_edges(x)
+        output_h, output_w = tf.split(edge, 2, axis=-1)
+        output_h = tf.squeeze(output_h, axis=-1)
+        output_w = tf.squeeze(output_w, axis=-1)
+        edge_norm = tf.abs(output_h) * 0.5 + tf.abs(output_w) * 0.5
+    elif method == 'sobel':
+        # Blur before applying the sobel operator.
+        x = tf_gaussian_blur(x, 3, 1.2)
+        x = tf.reduce_mean(x, axis=-1, keep_dims=True)
+        kernel_h = [[-1, -2, -1], [0, 0, 0], [1, 2, 1]]
+        kernel_w = [[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]]
+        x_pad = tf.pad(x, [[0, 0], [1, 1], [1, 1], [0, 0]], 'REFLECT')
+        c = x.get_shape().as_list()[-1]
+        conv_k_h = tf.constant(kernel_h, dtype=tf.float32, shape=(3, 3, 1, 1))
+        conv_k_h = tf.tile(conv_k_h, (1, 1, c, 1))
+        conv_k_w = tf.constant(kernel_w, dtype=tf.float32, shape=(3, 3, 1, 1))
+        conv_k_w = tf.tile(conv_k_w, (1, 1, c, 1))
+        output_h = tf.nn.depthwise_conv2d(x_pad, conv_k_h, strides=[1, 1, 1, 1], padding='VALID')
+        output_w = tf.nn.depthwise_conv2d(x_pad, conv_k_w, strides=[1, 1, 1, 1], padding='VALID')
+        edge_norm = tf.abs(output_h) * 0.5 + tf.abs(output_w) * 0.5
+    elif method == 'laplacian':
+        # Blur before applying the laplacian operator.
+        x = tf_gaussian_blur(x, 3, 1.2)
+        x = tf.reduce_mean(x, axis=-1, keep_dims=True)
+        kernel = [[-1, -1, -1], [-1, 8, -1], [-1, -1, -1]]
+        conv_k = tf.constant(kernel, dtype=tf.float32, shape=(3, 3, 1, 1))
+        x_pad = tf.pad(x, [[0, 0], [1, 1], [1, 1], [0, 0]], 'REFLECT')
+        output = tf.nn.depthwise_conv2d(x_pad, conv_k, strides=[1, 1, 1, 1], padding='VALID')
+        edge_norm = tf.abs(output)
+    else:
+        raise NotImplementedError
+
+    return edge_norm
\ No newline at end of file
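[Review note] A minimal usage sketch for get_edges above (the input shape is
illustrative; both custom branches blur first and return a 1-channel map):

    import tensorflow as tf

    x = tf.random_uniform([1, 64, 64, 3])
    e_sobel = get_edges(x, method='sobel')          # blur + handcrafted sobel
    e_lap = get_edges(x, method='laplacian')        # blur + laplacian kernel
    e_tf = get_edges(x, method='sobel', use_default=True)  # tf.image.sobel_edges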
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/slicing.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/slicing.py
new file mode 100644
index 0000000000000000000000000000000000000000..096f9e4c585697c1981fd2c78e6367ecf239ecff
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/slicing.py
@@ -0,0 +1,42 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import tensorflow as tf
+
+__all__ = ['split']
+
+
+def split(x, num_or_size_splits, axis=0, keep_dims=False):
+    """Split the tensor with possibly reduced dimension.
+
+    Args:
+        x: tensor, the source tensor to split.
+        num_or_size_splits: int or list[int]. If given an `int`, it specifies
+            the number of splits; if given a list[int], the sum of the sizes
+            must equal the size of dimension `axis` of x.
+        axis: int, which axis to split.
+        keep_dims: boolean, whether to keep the `axis` dimension after the
+            split. Default to False.
+
+    Returns:
+        list[tensor]
+    """
+    x_list = tf.split(x, num_or_size_splits, axis)
+
+    if not keep_dims:
+        x_list2 = [tf.squeeze(x_, axis) for x_ in x_list]
+        return x_list2
+
+    return x_list
+
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/upsample.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/upsample.py
new file mode 100644
index 0000000000000000000000000000000000000000..c3b32150360cd07d20e48ad1030ba5ec5fa76a6b
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/upsample.py
@@ -0,0 +1,216 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import tensorflow as tf
+
+from src.utils.utils import to_pair
+
+__all__ = [
+    'resize', 'depth_to_space', 'space_to_depth',
+    'decimation_up', 'decimation_down'
+]
+
+
+def depth_to_space(x, scale, use_default=False, data_format='NHWC'):
+    """Rearrange data from depths to blocks of spatial data.
+
+    Given a tensor of size [N, H, W, C], this operator converts the tensor
+    to size [N, H*scale, W*scale, C/(scale*scale)].
+
+    Args:
+        x: tensor, which has the shape [N, H, W, C] or [N, C, H, W].
+        scale: int, the block size into which the depth dimension is
+            rearranged. Both the h and w dimensions will be scaled up by
+            this value.
+        use_default: boolean, use the tensorflow default implementation. If
+            False, use a composed operator instead. Default False.
+        data_format: str, possible choices in ['NHWC', 'NCHW'].
+
+    Returns:
+        tensor, which has the shape [N, H*scale, W*scale, C/(scale*scale)].
+    """
+    if use_default:
+        out = tf.nn.depth_to_space(x, scale, data_format=data_format)
+    elif data_format == 'NHWC':
+        b, h, w, c = x.get_shape().as_list()
+        c_scaled = c // (scale**2)
+        out = tf.reshape(x, [-1, h, w, scale, scale, c_scaled])
+        out = tf.transpose(out, [0, 1, 3, 2, 4, 5])
+        out = tf.reshape(out, [-1, h * scale, w * scale, c_scaled])
+    elif data_format == 'NCHW':
+        b, c, h, w = x.get_shape().as_list()
+        c_scaled = c // (scale**2)
+        out = tf.reshape(x, [-1, scale, scale, c_scaled, h, w])
+        out = tf.transpose(out, [0, 3, 4, 1, 5, 2])
+        out = tf.reshape(out, [-1, c_scaled, h * scale, w * scale])
+    else:
+        raise ValueError(f'Unknown data format `{data_format}`')
+    return out
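[Review note] The composed depth_to_space above can be sanity-checked against
the built-in op; a minimal TF1-style sketch (shapes illustrative):

    import numpy as np
    import tensorflow as tf

    x = tf.constant(np.random.rand(1, 4, 4, 8), tf.float32)
    a = depth_to_space(x, 2, use_default=True)    # tf.nn.depth_to_space
    b = depth_to_space(x, 2, use_default=False)   # reshape/transpose/reshape
    with tf.Session() as sess:
        va, vb = sess.run([a, b])
    assert va.shape == (1, 8, 8, 2)
    np.testing.assert_allclose(va, vb)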
+
+
+def space_to_depth(x, scale, use_default=False, data_format='NHWC'):
+    """Rearrange data from blocks of spatial data to depths.
+
+    Given a tensor of size [N, H, W, C], this operator converts the tensor
+    to size [N, H/scale, W/scale, C*(scale*scale)].
+
+    Args:
+        x: tensor, which has the shape [N, H, W, C] or [N, C, H, W].
+        scale: int, the block size from which the spatial data is rearranged
+            into depth. Both the h and w dimensions will be scaled down by
+            this value.
+        use_default: boolean, use the tensorflow default implementation. If
+            False, use a composed operator instead. Default False.
+        data_format: str, possible choices in ['NHWC', 'NCHW'].
+
+    Returns:
+        tensor, which has the shape [N, H/scale, W/scale, C*(scale*scale)].
+    """
+    if use_default:
+        out = tf.nn.space_to_depth(x, scale, data_format=data_format)
+    elif data_format == 'NHWC':
+        b, h, w, c = x.get_shape().as_list()
+        c_scaled = c * (scale**2)
+        out = tf.reshape(x, [-1, h//scale, scale, w//scale, scale, c])
+        out = tf.transpose(out, [0, 1, 3, 2, 4, 5])
+        out = tf.reshape(out, [-1, h//scale, w//scale, c_scaled])
+    elif data_format == 'NCHW':
+        b, c, h, w = x.get_shape().as_list()
+        c_scaled = c * (scale**2)
+        out = tf.reshape(x, [-1, c, h//scale, scale, w//scale, scale])
+        out = tf.transpose(out, [0, 3, 5, 1, 2, 4])
+        out = tf.reshape(out, [-1, c_scaled, h//scale, w//scale])
+    else:
+        raise ValueError(f'Unknown data format `{data_format}`')
+    return out
+
+
+def resize(x, size, align_corners=False, name=None, half_pixel_centers=False, method='bicubic'):
+    """Wrapper of the tensorflow resize functions.
+
+    Args:
+        x: tensor, which has the shape [N, H, W, C] or [N, C, H, W].
+        size: list[int] of length 2, indicating the target size [H_target, W_target].
+        align_corners: boolean, whether to align corners when resizing.
+        name: str, the name of the resize operation.
+        half_pixel_centers: boolean, whether to use the half pixel as the center.
+        method: str, resize method. Possible choices in ('bicubic', 'bilinear', 'area')
+
+    Return:
+        tensor, the resized version of x, which is of shape [N, H_target, W_target, C].
+    """
+    if method == 'bicubic':
+        upsampling = tf.image.resize_bicubic
+    elif method == 'bilinear':
+        upsampling = tf.image.resize_bilinear
+    elif method == 'area':
+        # tf.image.resize_area does not support half_pixel_centers, so
+        # return early without that argument.
+        upsampling = tf.image.resize_area
+        return upsampling(x, size=size, align_corners=align_corners, name=name)
+    else:
+        raise ValueError(f'Unknown resize method `{method}`')
+    return upsampling(x, size=size, align_corners=align_corners, name=name, half_pixel_centers=half_pixel_centers)
+
+
+def decimation_up(x, scale, data_format='NHWC'):
+    """Interpolate the tensor to the target scale.
+
+    Given a tensor of size [N, H, W, C], this operator converts the tensor
+    to size [N, H*scale, W*scale, C]. The interpolated pixels are filled
+    with zeros.
+
+    For example, if the entries of a 2D tensor x are:
+        [[1, 2],
+         [3, 4]]
+
+    then with scale=3 the output will be:
+        [[1, 0, 0, 2, 0, 0],
+         [0, 0, 0, 0, 0, 0],
+         [0, 0, 0, 0, 0, 0],
+         [3, 0, 0, 4, 0, 0],
+         [0, 0, 0, 0, 0, 0],
+         [0, 0, 0, 0, 0, 0]]
+
+    Args:
+        x: tensor, which has the shape [N, H, W, C] or [N, C, H, W].
+        scale: int, specifying the magnification of the h and w dimensions.
+        data_format: str, possible choices in ['NHWC', 'NCHW'].
+
+    Returns:
+        tensor, which has the shape [N, H*scale, W*scale, C].
+    """
+    x_shape = x.get_shape().as_list()
+
+    scale = to_pair(scale, 2)
+    sh, sw = scale
+
+    zeros = tf.zeros([*x_shape, sh*sw-1], dtype=x.dtype)
+    x_expand = tf.expand_dims(x, -1)
+    x_up = tf.concat([x_expand, zeros], axis=-1)
+    x_up = tf.reshape(x_up, shape=[*x_shape, sh, sw])
+    if data_format == 'NCHW':
+        n, c, h, w = x_shape
+        x_up = tf.transpose(x_up, (0, 1, 2, 4, 3, 5))
+        x_up = tf.reshape(x_up, [n, c, h*sh, w*sw])
+    elif data_format == 'NHWC':
+        n, h, w, c = x_shape
+        x_up = tf.transpose(x_up, (0, 1, 4, 2, 5, 3))
+        x_up = tf.reshape(x_up, [n, h*sh, w*sw, c])
+    else:
+        raise ValueError(f'Unknown data format `{data_format}`')
+
+    return x_up
+
+
+def decimation_down(x, scale, data_format='NCHW'):
+    """Decimate the tensor by the target scale.
+
+    Given a tensor of size [N, H, W, C], this operator converts the tensor
+    to size [N, H/scale, W/scale, C]. The retained values are the upper-left
+    corner values within each block.
+
+    For example, if the entries of a 2D tensor x are:
+        [[ 1,  2,  3,  4],
+         [ 5,  6,  7,  8],
+         [ 9, 10, 11, 12],
+         [13, 14, 15, 16]]
+
+    then with scale=2 the output will be:
+        [[1, 3],
+         [9, 11]]
+
+    Args:
+        x: tensor, which has the shape [N, H, W, C] or [N, C, H, W].
+        scale: int, specifying the down magnification of the h and w dimensions.
+        data_format: str, possible choices in ['NHWC', 'NCHW'].
+
+    Returns:
+        tensor, which has the shape [N, H/scale, W/scale, C]
+    """
+    x_shape = x.get_shape().as_list()
+
+    scale = to_pair(scale, 2)
+    sh, sw = scale
+
+    if data_format == 'NCHW':
+        b, c, h, w = x_shape
+        x_down = tf.reshape(x, [b, c, h//sh, sh, w//sw, sw])
+        # Keep the first sample of each block; size -1 keeps the whole dim.
+        x_down = tf.slice(x_down, (0, 0, 0, 0, 0, 0), (-1, -1, -1, 1, -1, 1))
+        x_down = tf.squeeze(x_down, axis=(3, 5))
+    elif data_format == 'NHWC':
+        b, h, w, c = x_shape
+        x_down = tf.reshape(x, [b, h//sh, sh, w//sw, sw, c])
+        x_down = tf.slice(x_down, (0, 0, 0, 0, 0, 0), (-1, -1, 1, -1, 1, -1))
+        x_down = tf.squeeze(x_down, axis=(2, 4))
+    else:
+        raise ValueError(f'Unknown data format `{data_format}`')
+
+    return x_down
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/weight_regularzation.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/weight_regularzation.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb902ba60c5a6fd2561298cb98993e3c6ecf9a8a
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/ops/weight_regularzation.py
@@ -0,0 +1,53 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import tensorflow as tf
+
+def spectral_norm(w, iteration=1):
+    """Spectral normalization of kernels.
+
+    Borrowed from https://github.com/taki0112/Spectral_Normalization-Tensorflow/blob/master/spectral_norm.py
+
+    Args:
+        w: tensor, conv/linear layer kernel.
+        iteration: int, number of power iterations.
+
+    Returns:
+        A normalized kernel tensor.
+ """ + w_shape = w.shape.as_list() + w = tf.reshape(w, [-1, w_shape[-1]]) + + u = tf.get_variable("spectral_norm_u", [1, w_shape[-1]], + initializer=tf.truncated_normal_initializer(), + trainable=False) + + u_hat = u + v_hat = None + for i in range(iteration): + # power iteration + # Usually iteration = 1 will be enough + v_ = tf.matmul(u_hat, tf.transpose(w)) + v_hat = tf.nn.l2_normalize(v_) + + u_ = tf.matmul(v_hat, w) + u_hat = tf.nn.l2_normalize(u_) + + sigma = tf.matmul(tf.matmul(v_hat, w), tf.transpose(u_hat)) + w_norm = w / sigma + + with tf.control_dependencies([u.assign(u_hat)]): + w_norm = tf.reshape(w_norm, w_shape) + + return w_norm \ No newline at end of file diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..909661ab8a78390bd13b6c47bf89cfe47fac00c0 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. \ No newline at end of file diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/common.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/common.py new file mode 100644 index 0000000000000000000000000000000000000000..fd65820230913346c1a20b41bc09a4b3ee61c51d --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/common.py @@ -0,0 +1,87 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from collections import OrderedDict + +from src.utils.klass import Singleton +from src.utils.logger import logger + + +class _NameSpace(metaclass=Singleton): + """A common name-space class to record, sort and retrieve the tensorflow ops. + + Attributes: + GeneratorLoss: the scope of the loss ops of the generator. + DiscriminatorLoss: the scope of the loss ops of the discriminator. + GeneratorVarScope: the scope of the variables in the generator. + DiscriminatorVarScope: the scope of the variables in the discriminator. + PerceptualVarScope: the scope of the variables in the perceptual module. + Summary: the scope of the summary ops. + GeneratorRunOp: the scope of the running ops, i.e. 
train_op, lr_update_op,
+            of the generator.
+        DiscriminatorRunOp: the scope of the running ops, i.e. train_op,
+            lr_update_op, of the discriminator.
+        InputField: the scope of the input tensor and ops.
+        OutputField: the scope of the output tensor and ops.
+
+    Example: record the losses in the GeneratorLoss scope, retrieve and add
+        them to get the final total loss for training.
+
+        >>> from src.runner.common import name_space
+        >>> l1_loss = compute_loss1(pred, gt)
+        >>> name_space.add_to_collection(name_space.GeneratorLoss, 'l1_loss', l1_loss)
+        >>> l2_loss = compute_loss2(pred, gt)
+        >>> name_space.add_to_collection(name_space.GeneratorLoss, 'l2_loss', l2_loss)
+        >>> ...
+        >>> losses_dict = name_space.get_collection(name_space.GeneratorLoss)
+        >>> total_loss = tf.add_n(list(losses_dict.values()))  # l1_loss + l2_loss
+    """
+    __scopes = dict(
+        GeneratorLoss='gen_loss',
+        DiscriminatorLoss='dis_loss',
+        GeneratorVarScope='gen_var',
+        DiscriminatorVarScope='dis_var',
+        PerceptualVarScope='percep_var',
+        Summary='summary',
+        GeneratorRunOp='gen_op',
+        DiscriminatorRunOp='dis_op',
+        InputField='input',
+        OutputField='output',
+    )
+
+    __collections = dict()
+
+    def __init__(self):
+        for key, value in self.__scopes.items():
+            setattr(self, key, value)
+            self.__collections[value] = OrderedDict()
+
+    def add_to_collection(self, namespace, key, value):
+        assert namespace in self.__scopes.values()
+        if key in self.__collections[namespace]:
+            logger.warn(f'Key "{key}" already exists in scope "{namespace}".')
+        self.__collections[namespace][key] = value
+
+    def add_to_collections(self, namespaces, key, value):
+        assert isinstance(namespaces, (list, tuple))
+        for name in namespaces:
+            self.add_to_collection(name, key, value)
+
+    def get_collection(self, namespace):
+        return self.__collections[namespace]
+
+    def get_op(self, namespace, opname):
+        return self.__collections[namespace][opname]
+
+
+name_space = _NameSpace()
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/distributed_variables_broadcast.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/distributed_variables_broadcast.py
new file mode 100644
index 0000000000000000000000000000000000000000..c8670c143b4fabd488168397d2a9fd1c523060a4
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/distributed_variables_broadcast.py
@@ -0,0 +1,90 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import tensorflow as tf
+from src.utils.logger import logger
+
+
+def broadcast_global_variables(sess, device, root_rank=0):
+    """A helper function to broadcast the variables across the devices in
+    distributed training.
+
+    Args:
+        sess: tf.Session instance.
+        device: str, currently only 'npu' is supported.
+        root_rank: int, the root node rank of the cluster. Default 0.
+
+    Raises:
+        ValueError, when device is not 'npu'.
+ """ + if device == 'npu': + npu_broadcast(sess, root_rank) + else: + raise ValueError + + +def npu_broadcast(sess, root_rank=0): + """Broadcast the variables in NPU environment. + + We use hccl interface to do the broadcast. + + Args: + sess: tf.Session instance. + root_rank: int, the root node rank of the cluster. Default 0. + """ + from npu_bridge.hccl import hccl_ops + logger.info(f'Broadcast variables from root_rank {root_rank} ...') + op_list = [] + for var in tf.global_variables(): + if "float" in var.dtype.name: + outputs = hccl_ops.broadcast(tensor=[var], root_rank=root_rank) + if outputs is not None: + op_list.append(outputs[0].op) + op_list.append(tf.assign(var, outputs[0])) + bcast = tf.group(op_list) + sess.run(bcast) + + +def allreduce_avg(tensor, device, ranksize): + """A helper function to perform the reduce mean across the devices in + distributed engine. + + Args: + tensor: tensor to reduce average. + device: str, possible choices in ('npu'). + ranksize: int, the number of the nodes in the cluster. + + Raises: + ValueError, when device is not in ('npu'). + """ + if device == 'npu': + return npu_allreduce_avg(tensor, ranksize) + else: + raise NotImplementedError + + +def npu_allreduce_avg(tensor, ranksize): + """Reduce mean across the devices in NPU environment. + + Args: + tensor: tensor to reduce average. + ranksize: int, the number of the nodes in the cluster. + + Returns: + tensor, reduced average tensor. + """ + from npu_bridge.hccl import hccl_ops + # There is no 'mean' reduction in allreduce ops. Use 'sum' instead. + # See https://support.huaweicloud.com/mprtg-A800_9000_9010/atlasprtg_13_0024.html + tensor_reduced = hccl_ops.allreduce(tensor / ranksize, "sum") + return tensor_reduced diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/helper.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/helper.py new file mode 100644 index 0000000000000000000000000000000000000000..1677934b9e8fb3b711fd93db8e8a5f3faeddcf24 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/helper.py @@ -0,0 +1,213 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy +import os + +import tensorflow as tf +from src.utils.world import world + + +class _AdversarialTrainHelper: + """A helper for adversarial training. + + In each step, the generator and discriminator will produce losses. + The helper determines whether to update the G and D in the next + iteration. For example, in step `i`, if the D loss (already evaluated) + is found very small, which means that D might be too strong and should + halt for some steps, the helper will filter out the discriminator ops in + step `i+1` to keep the discriminator, then evaluate again in step `i+1` + to determine whether to update G and D in step `i+2`. 
+
+    Example:
+        >>> # Define the tf ops
+        >>> helper = _AdversarialTrainHelper()
+        >>> g_train_op, d_train_op = define_train()
+        >>> d_train_op_dict = {'d_train': d_train_op}
+        >>> g_train_op_dict = {'g_train': g_train_op}
+        >>> d_loss = compute_d_loss(fake, real)
+        >>> ...
+        >>> # in step i=0
+        >>> i = 0
+        >>> g_train_op_dict_real, d_train_op_dict_real = helper.filter(g_train_op_dict, d_train_op_dict)
+        >>> _, _, d_loss_eval = sess.run([g_train_op_dict_real, d_train_op_dict_real, d_loss])
+        >>> helper.update_status(d_loss_eval, i+1)
+        >>> ...
+        >>> # in step i=1
+        >>> i = 1
+        >>> # decide whether to update G and D in step 1 according to the result in step 0.
+        >>> g_train_op_dict_real, d_train_op_dict_real = helper.filter(g_train_op_dict, d_train_op_dict)
+        >>> _, _, d_loss_eval = sess.run([g_train_op_dict_real, d_train_op_dict_real, d_loss])
+        >>> # update status with the d_loss and step index.
+        >>> helper.update_status(d_loss_eval, i+1)
+    """
+    def __init__(self):
+        self._called_once = False
+        self._info = dict()
+
+    @property
+    def info(self):
+        return self._info
+
+    def filter(self, g_ops_in, d_ops_in, *args, **kwargs):
+        if not self._called_once:
+            # For the first time, we must run all the operations on NPU
+            # to construct the whole graph, regardless of the update
+            # strategy.
+            g_update, d_update = True, True
+            self._called_once = True
+        else:
+            # Once called and initialized, use the configured strategy to
+            # check whether to update G and D.
+            g_update, d_update = self.check_state()
+
+        # Post validation to make sure that at least one of G and D is
+        # updated.
+        g_update, d_update = self.post_validation(g_update, d_update)
+
+        # Save the decision
+        self._info = dict(
+            g_update=g_update,
+            d_update=d_update
+        )
+
+        g_ops = dict(**g_ops_in)
+        if not g_update:
+            g_ops.pop('g_train')
+
+        d_ops = dict(**d_ops_in)
+        if not d_update and 'd_train' in d_ops_in:
+            d_ops.pop('d_train')
+
+        return g_ops, d_ops
+
+    def post_validation(self, g_update, d_update):
+        # Abnormal state when both g_update and d_update are false
+        if (not g_update) and (not d_update):
+            g_update = True
+            d_update = False
+        return g_update, d_update
+
+    def check_state(self):
+        # This is where a specific strategy should implement how to decide
+        # whether to update G and D.
+        raise NotImplementedError
+
+    def update_status(self, *args, **kwargs):
+        # Record the step and the criteria value.
+        raise NotImplementedError
+
+    def not_initialized(self):
+        raise ValueError(f'Helper has not been initialized.')
+
+
+class ByPassTrainHelper(_AdversarialTrainHelper):
+    """A bypass train helper.
+
+    The ops will not be filtered at all.
+    """
+    def __init__(self, use_adv=False):
+        super().__init__()
+        self.use_adv = use_adv
+
+    def check_state(self):
+        # g_update always True
+        # d_update according to self.use_adv
+        return True, self.use_adv
+
+    def update_status(self, *args, **kwargs):
+        pass
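[Review note] A minimal sketch of how the adaptive helper below gates updates
across steps (threshold and loss values are illustrative):

    helper = AdaptiveTrainHelper(d_threshold=0.4)
    g_ops = {'g_train': 'g_train_op'}
    d_ops = {'d_train': 'd_train_op'}

    # Step 0 always keeps every op so the whole graph gets built.
    g0, d0 = helper.filter(g_ops, d_ops)    # both 'g_train' and 'd_train' kept
    helper.update_status(loss=0.9, step=0)  # D loss above threshold: D is weak

    # Step 1: D keeps training, G stays frozen until D has been strong once.
    g1, d1 = helper.filter(g_ops, d_ops)    # 'g_train' popped, 'd_train' kept
    helper.update_status(loss=0.2, step=1)  # now D is strong: warm start done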
+
+
+class AdaptiveTrainHelper(_AdversarialTrainHelper):
+    """An adaptive train helper that gates updates given the loss values.
+    """
+    def __init__(self, d_threshold, g_threshold=None):
+        super().__init__()
+        self.d_threshold = d_threshold
+        self.g_threshold = g_threshold
+        self.previous_d_loss = None
+        self.previous_step = None
+        self.d_warmstarted = False
+
+    def update_status(self, loss=None, step=None):
+        self.previous_d_loss = loss
+        self.previous_step = step
+
+    def check_state(self):
+        if self.previous_d_loss is None:
+            self.not_initialized()
+
+        d_update = self.previous_d_loss > self.d_threshold
+        if not self.d_warmstarted:
+            # Do not update the generator until the discriminator has been
+            # strong enough at least once. After that, apply the dynamic
+            # update strategy.
+            g_update = False
+            if not d_update:
+                self.d_warmstarted = True
+        elif self.g_threshold is None:
+            g_update = True
+        else:
+            g_update = self.previous_d_loss < self.g_threshold
+
+        return g_update, d_update
+
+
+class FixedStepTrainHelper(_AdversarialTrainHelper):
+    """A train helper with fixed update intervals.
+    """
+    def __init__(self, g_update_interval=-1, d_update_interval=-1):
+        super().__init__()
+        self.g_update_interval = g_update_interval
+        self.d_update_interval = d_update_interval
+        self.previous_step = None
+
+    def update_status(self, loss=None, step=None):
+        self.previous_step = step
+
+    def check_state(self):
+        if self.previous_step is None:
+            self.not_initialized()
+
+        g_update = (self.previous_step + 1) % self.g_update_interval == 0
+        d_update = (self.previous_step + 1) % self.d_update_interval == 0
+
+        return g_update, d_update
+
+
+def build_adversarial_train_helper(cfg):
+    """Build the corresponding train helper given the configuration.
+
+    Args:
+        cfg: yacs node, global configuration.
+
+    Returns:
+        helper instance.
+    """
+    if cfg.loss.adversarial.loss_weight > 0.:
+        if cfg.loss.adversarial.adaptive_strategy:
+            helper = AdaptiveTrainHelper(cfg.loss.adversarial.d_balance)
+        elif cfg.loss.adversarial.g_update_interval > 1 or cfg.loss.adversarial.d_update_interval > 1:
+            if cfg.loss.adversarial.g_update_interval > 1 and cfg.loss.adversarial.d_update_interval > 1:
+                raise ValueError('Either g update interval or d update interval should be 1.')
+            helper = FixedStepTrainHelper(cfg.loss.adversarial.g_update_interval,
+                                          cfg.loss.adversarial.d_update_interval)
+        else:
+            # no filtering needed
+            helper = ByPassTrainHelper(use_adv=True)
+    else:
+        # no filtering needed
+        helper = ByPassTrainHelper(use_adv=False)
+
+    return helper
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/initializer.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/initializer.py
new file mode 100644
index 0000000000000000000000000000000000000000..1fd24bc4825998a1df1f3283d012cd9417db2e13
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/initializer.py
@@ -0,0 +1,106 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import math
+
+import tensorflow as tf
+
+
+def calculate_gain(nonlinearity, param=None):
+    """Calculate the gain for initialization.
+    """
+    if nonlinearity == 'sigmoid':
+        return 1
+    elif nonlinearity == 'tanh':
+        return 5.0 / 3
+    elif nonlinearity == 'relu':
+        return math.sqrt(2.0)
+    elif nonlinearity == 'elu':
+        return math.sqrt(1.55)
+    elif nonlinearity == 'leakyrelu':
+        if param is None:
+            negative_slope = 0.01
+        elif not isinstance(param, bool) and isinstance(param, (int, float)):
+            # True/False are instances of int, hence the bool check above
+            negative_slope = param
+        else:
+            raise ValueError("negative_slope {} not a valid number".format(param))
+        return math.sqrt(2.0 / (1 + negative_slope ** 2))
+    else:
+        raise ValueError("Unsupported nonlinearity {}".format(nonlinearity))
+
+
+def calculate_fan(kernel_size, in_channels, out_channels=None, mode='fan_in'):
+    """Calculate the fan for initialization.
+    """
+    if mode == 'fan_in':
+        fan = in_channels
+    elif mode == 'fan_out':
+        fan = out_channels
+    else:
+        raise KeyError
+    for k in kernel_size:
+        fan *= k
+    return fan
+
+
+def get_initializer(init_cfg, in_channels, out_channels, kernel_size, dtype=tf.dtypes.float32):
+    """Get an initializer given the input/output channels and kernel_size.
+
+    Args:
+        init_cfg: dict, specifying the initialization type and mode.
+        in_channels: int, specifying the number of the input channels.
+        out_channels: int, specifying the number of the output channels.
+        kernel_size: list[int], containing the kernel size of each dimension.
+        dtype: enum, specifying the data type of the initializer.
+
+    Returns:
+        initializer instance.
+    """
+    type = init_cfg.pop('type')
+
+    if type == 'kaiming_uniform':
+        a = init_cfg.pop('a', 0)
+        mode = init_cfg.pop('mode', 'fan_in')
+        nonlinearity = init_cfg.pop('nonlinearity', 'leakyrelu')
+        fan = calculate_fan(kernel_size, in_channels, out_channels, mode)
+        gain = calculate_gain(nonlinearity, a)
+        std = gain / math.sqrt(fan)
+        bound = math.sqrt(3.0) * std
+        initializer = tf.random_uniform_initializer(-bound, bound, dtype=dtype)
+    elif type == 'kaiming_normal':
+        a = init_cfg.pop('a', 0)
+        mode = init_cfg.pop('mode', 'fan_in')
+        nonlinearity = init_cfg.pop('nonlinearity', 'leakyrelu')
+        fan = calculate_fan(kernel_size, in_channels, out_channels, mode)
+        gain = calculate_gain(nonlinearity, a)
+        std = gain / math.sqrt(fan)
+        initializer = tf.random_normal_initializer(0.0, std, dtype=dtype)
+    elif type == 'xavier_uniform':
+        gain = init_cfg.pop('gain', 1.)
+        fan_in = calculate_fan(kernel_size, in_channels, out_channels, 'fan_in')
+        fan_out = calculate_fan(kernel_size, in_channels, out_channels, 'fan_out')
+        std = gain * math.sqrt(2.0 / float(fan_in + fan_out))
+        a = math.sqrt(3.0) * std  # Calculate uniform bounds from standard deviation
+        initializer = tf.random_uniform_initializer(-a, a, dtype=dtype)
+    elif type == 'xavier_normal':
+        gain = init_cfg.pop('gain', 1.)
+        fan_in = calculate_fan(kernel_size, in_channels, out_channels, 'fan_in')
+        fan_out = calculate_fan(kernel_size, in_channels, out_channels, 'fan_out')
+        std = gain * math.sqrt(2.0 / float(fan_in + fan_out))
+        initializer = tf.random_normal_initializer(0.0, std, dtype=dtype)
+    else:
+        raise NotImplementedError
+
+    return initializer
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/loss_scaling.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/loss_scaling.py
new file mode 100644
index 0000000000000000000000000000000000000000..1ae2fd5a1ef10314624867a63acfaaffc55a32a1
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/loss_scaling.py
@@ -0,0 +1,50 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import tensorflow as tf
+
+
+def npu_loss_scale_optimizer(opt, loss_scale, is_distributed=False):
+    """A wrapper function of the loss scaling optimizer for NPU tensorflow.
+
+    Args:
+        opt: optimizer instance.
+        loss_scale: str, specifying the strategy to apply loss scaling.
+            Possible choices could be `off`: do not use loss scaling;
+            `d`: dynamic loss scaling; and `f*`: fixed loss scaling,
+            where `*` can be converted to an integer that specifies the
+            scale factor, `2^(int(*))`.
+        is_distributed: boolean, whether in distributed training.
+
+    Returns:
+        a wrapped optimizer with loss scaling.
+    """
+    from npu_bridge.estimator.npu.npu_loss_scale_optimizer import NPULossScaleOptimizer
+    from npu_bridge.estimator.npu.npu_loss_scale_manager import FixedLossScaleManager
+    from npu_bridge.estimator.npu.npu_loss_scale_manager import ExponentialUpdateLossScaleManager
+    if loss_scale == 'off':
+        pass
+    else:
+        if loss_scale.startswith('d'):
+            loss_scale_manager = \
+                ExponentialUpdateLossScaleManager(init_loss_scale=2 ** 32, incr_every_n_steps=1000,
+                                                  decr_every_n_nan_or_inf=2, decr_ratio=0.5)
+        elif loss_scale.startswith('f'):
+            # Docstring format is `f*`, e.g. 'f16' -> fixed scale 2**16.
+            scale_factor = int(loss_scale[1:])
+            loss_scale_manager = FixedLossScaleManager(loss_scale=2 ** scale_factor)
+        else:
+            raise ValueError(f'Unknown loss scale strategy `{loss_scale}`')
+        opt = NPULossScaleOptimizer(opt, loss_scale_manager, is_distributed=is_distributed)
+
+    return opt
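[Review note] A minimal sketch of the loss_scale string convention assumed by
the wrapper above ('f16' is an illustrative value):

    def parse_loss_scale(loss_scale):
        if loss_scale == 'off':
            return None                       # no loss scaling
        if loss_scale.startswith('d'):
            return 'dynamic'                  # exponential-update manager
        if loss_scale.startswith('f'):
            return 2 ** int(loss_scale[1:])   # fixed: 'f16' -> 2**16
        raise ValueError(loss_scale)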
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/lr_schedule.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/lr_schedule.py
new file mode 100644
index 0000000000000000000000000000000000000000..57960ee012b623bddf3abfc598d00bf2c5279789
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/lr_schedule.py
@@ -0,0 +1,117 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import math
+
+import tensorflow as tf
+
+
+class BaseSchedule(object):
+    """Base class of learning rate schedules.
+
+    Args:
+        base_lr: float, base learning rate at the beginning.
+        recover_step: int, recover step to continue training.
+    """
+    def __init__(self, base_lr, recover_step=0):
+        self.lr = tf.placeholder(tf.float32, shape=[], name='learning_rate')
+        self.cur_step = recover_step
+        self.base_lr = base_lr
+        self.cur_lr = base_lr
+
+    def __call__(self):
+        self.cur_step += 1
+        return self.cur_lr
+
+
+class CosineSchedule(BaseSchedule):
+    """A cosine learning rate schedule.
+
+    Args:
+        base_lr: float, base learning rate at the beginning.
+        total_steps: list[int], the phased steps at which the learning rate
+            is adjusted.
+        min_lr: float, minimum learning rate.
+        recover_step: int, recover step to continue training.
+    """
+    def __init__(self, base_lr, total_steps, min_lr, recover_step=0):
+        super().__init__(base_lr, recover_step)
+        self.total_steps = total_steps[0]
+        self.min_lr = min_lr
+        self.alpha = min_lr / base_lr
+
+    def __call__(self):
+        cosine_decay = 0.5 * (1 + math.cos(math.pi * self.cur_step / self.total_steps))
+        decayed = (1 - self.alpha) * cosine_decay + self.alpha
+        self.cur_lr = self.base_lr * decayed
+        return super(CosineSchedule, self).__call__()
+
+
+class RestartCosineSchedule(BaseSchedule):
+    """A cosine restart learning rate schedule.
+
+    Args:
+        base_lr: float, base learning rate at the beginning.
+        total_steps: list[int], the phased steps at which the learning rate
+            is adjusted.
+        restart_weights: list[float], the phased weights to which the base
+            learning rate is scaled at each restart.
+        min_lr: float, minimum learning rate.
+        recover_step: int, recover step to continue training.
+    """
+    def __init__(self, base_lr, total_steps, restart_weights, min_lr, recover_step=0):
+        super(RestartCosineSchedule, self).__init__(base_lr, recover_step)
+        self.total_steps = total_steps
+        self.restart_weights = restart_weights
+        self.min_lr = min_lr
+        self.alpha = min_lr / base_lr
+
+    def _match_stage(self):
+        cur_step = self.cur_step
+        for total_steps, restart_weight in zip(self.total_steps, self.restart_weights):
+            if cur_step < total_steps:
+                return cur_step, total_steps, self.base_lr * restart_weight
+            else:
+                cur_step -= total_steps
+        raise ValueError('Should have stopped')
+
+    def __call__(self):
+        cur_step, total_steps, base_lr = self._match_stage()
+        cosine_decay = 0.5 * (1 + math.cos(math.pi * cur_step / total_steps))
+        decayed = (1 - self.alpha) * cosine_decay + self.alpha
+        self.cur_lr = base_lr * decayed
+        return super(RestartCosineSchedule, self).__call__()
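[Review note] A worked example of the stage matching above (all numbers are
illustrative): with total_steps=[100, 200], restart_weights=[1.0, 0.5] and
global step 150, _match_stage returns local step 50 in the second stage with
stage base lr base_lr * 0.5, so:

    import math
    base_lr, alpha = 4e-4, 0.01  # illustrative values
    cosine_decay = 0.5 * (1 + math.cos(math.pi * 50 / 200))  # ~0.854
    lr = (base_lr * 0.5) * ((1 - alpha) * cosine_decay + alpha)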
+
+
+def build_schedule(lr_cfg, recover_step=0):
+    """Build a learning rate schedule.
+
+    Args:
+        lr_cfg: dict, specifying the learning rate schedule type and its configuration.
+        recover_step: int, recover step to continue training.
+
+    Returns:
+        A learning rate schedule instance.
+    """
+    lr_type = lr_cfg.type.lower()
+    base_lr = lr_cfg.base_lr
+    total_steps = lr_cfg.total_steps
+
+    if lr_type == 'cosine':
+        min_lr = lr_cfg.min_lr
+        return CosineSchedule(base_lr, total_steps, min_lr, recover_step)
+    elif lr_type == 'cosinerestart':
+        min_lr = lr_cfg.min_lr
+        restart_weights = lr_cfg.restart_weights
+        return RestartCosineSchedule(base_lr, total_steps, restart_weights, min_lr, recover_step)
+    elif lr_type == 'step':
+        raise NotImplementedError
+    else:
+        raise KeyError('Unknown type {}'.format(lr_type))
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/npu_pkgs.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/npu_pkgs.py
new file mode 100644
index 0000000000000000000000000000000000000000..b9f54ad3cfe03efc0fe2d9d38ebe997cde355866
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/npu_pkgs.py
@@ -0,0 +1,19 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.estimator import npu_ops
+from npu_bridge.estimator.npu.npu_config import NPURunConfig
+from npu_bridge.estimator.npu.npu_estimator import NPUEstimator
+from npu_bridge.estimator.npu.npu_optimizer import NPUDistributedOptimizer
+from npu_bridge.estimator.npu.npu_optimizer import allreduce
+from npu_bridge.hccl import hccl_ops
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/optimizer.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/optimizer.py
new file mode 100644
index 0000000000000000000000000000000000000000..584ec0c8f6098a1e666ca5a9e89401ee11938a6a
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/optimizer.py
@@ -0,0 +1,76 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import tensorflow as tf
+
+
+def build_optimizer(lr, opt_cfg, device, is_distributed, mix_precision, loss_scale):
+    """Build optimizer.
+
+    Args:
+        lr: learning rate schedule instance.
+        opt_cfg: dict, specifying the optimizer configuration.
+        device: str, specifying the device type. Possible choices in ('npu', 'cpu').
+        is_distributed: boolean, whether in distributed learning.
+        mix_precision: boolean, whether to use mix precision.
+        loss_scale: str, specifying the strategy to apply loss scaling.
+            Possible choices could be `off`: do not use loss scaling;
+            `d`: dynamic loss scaling; and `f*`: fixed loss scaling,
+            where `*` can be converted to an integer that specifies the
+            scale factor, `2^(int(*))`.
+
+    Returns:
+        An optimizer instance.
+    """
+    opt_type = opt_cfg.type.lower()
+
+    if opt_type == 'adam':
+        beta1 = opt_cfg.get('beta1', 0.9)
+        beta2 = opt_cfg.get('beta2', 0.999)
+        epsilon = opt_cfg.get('epsilon', 1e-08)
+        opt = tf.train.AdamOptimizer(lr, beta1=beta1, beta2=beta2, epsilon=epsilon)
+    elif opt_type == 'momentum':
+        momentum = opt_cfg.get('momentum', 0.9)
+        opt = tf.train.MomentumOptimizer(lr, momentum=momentum)
+    else:
+        raise KeyError('Unknown type {}'.format(opt_type))
+
+    if device == 'npu':
+        return npu_optimizer_wrapper(opt, mix_precision, loss_scale, is_distributed)
+    else:
+        return opt
+
+
+def npu_optimizer_wrapper(opt, mix_precision, loss_scale, is_distributed=False):
+    """A wrapper function for the optimizer on NPU.
+
+    Args:
+        opt: optimizer instance.
+        is_distributed: boolean, whether in distributed learning.
+        mix_precision: boolean, whether to use mix precision.
+        loss_scale: str, specifying the strategy to apply loss scaling.
+            Possible choices could be `off`: do not use loss scaling;
+            `d`: dynamic loss scaling; and `f*`: fixed loss scaling,
+            where `*` can be converted to an integer that specifies the
+            scale factor, `2^(int(*))`.
+
+    Returns:
+        An optimizer instance.
+    """
+    from npu_bridge.estimator.npu.npu_optimizer import NPUDistributedOptimizer
+    from .loss_scaling import npu_loss_scale_optimizer
+    if is_distributed:
+        opt = NPUDistributedOptimizer(opt)
+    if mix_precision:
+        opt = npu_loss_scale_optimizer(opt, loss_scale, is_distributed)
+    return opt
\ No newline at end of file
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/saver.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/saver.py
new file mode 100644
index 0000000000000000000000000000000000000000..f75eb0e18fe74a839c13361097ece36c0b4accd5
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/saver.py
@@ -0,0 +1,137 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import re
+
+import tensorflow as tf
+from tensorflow.python import pywrap_tensorflow
+
+from src.utils.logger import logger
+
+
+def get_variables_in_checkpoint_file(file_name):
+    """Get all the variables given the checkpoint file.
+
+    Args:
+        file_name: str, ckpt file.
+
+    Returns:
+        Dict of tensor name to tensor.
+ """ + try: + reader = pywrap_tensorflow.NewCheckpointReader(file_name) + var_to_shape_map = reader.get_variable_to_shape_map() + return var_to_shape_map + except Exception as e: # pylint: disable=broad-except + logger.error(str(e)) + if "corrupted compressed block contents" in str(e): + logger.error("It's likely that your checkpoint file has been compressed " + "with SNAPPY.") + + +def loading_variables(sess, variables, checkpoint, strict=False): + """Loading specific variables given session and checkpoint. + """ + if not strict: + var_dic = get_variables_in_checkpoint_file(checkpoint) + var_missing = [] + var_restore = [] + + for v in variables: + if v.name.split(':')[0] in var_dic: + var_restore.append(v) + logger.info('Match: {} {} {}/{}'.format( + v.name, + v.dtype, + v.shape, + var_dic[v.name.split(':')[0]])) + else: + logger.info('Miss: {} {}'.format(v.name, v.shape)) + var_missing.append(v.name) + assert len(variables) == len(var_restore) + len(var_missing) + + saver = tf.train.Saver(var_list=var_restore) + saver.restore(sess, checkpoint) + else: + saver = tf.train.Saver(var_list=variables) + saver.restore(sess, checkpoint) + logger.info("Loading checkpoints...{} Success".format(checkpoint)) + + # Get the step information in ckpt file, may be used for continual training. + recover_step = 0 + regex = re.compile('[A-Za-z.]*-([0-9]*).?[A-Za-z0-9]*$') + try: + b, = regex.search(checkpoint).groups() + if b is not None and b != '': + recover_step = int(b) + 1 + except: + pass + return recover_step + + +def restore(sess, var_list, directory, checkpoint, strict=False): + """Restore variables from ckpt. + """ + if os.path.exists(checkpoint + '.meta'): + logger.info(f'Found checkpoint {checkpoint}.') + ckpt_name = checkpoint + else: + logger.info(f'Cannot find checkpoint {checkpoint}. Searching in {directory} ...') + ckpt = tf.train.get_checkpoint_state(directory) + if ckpt and ckpt.model_checkpoint_path: + ckpt_name = os.path.basename(ckpt.model_checkpoint_path) + ckpt_name = os.path.join(directory, ckpt_name) + logger.info(f'Found checkpoint {ckpt_name}.') + else: + logger.error("Reading checkpoints... ERROR") + raise ValueError(f'Cannot find checkpoint in {directory}') + return loading_variables(sess, var_list, ckpt_name, strict=strict) + + +def strict_loading(sess, scope, directory, checkpoint): + """Strict loading **every single variable** in the scope. + """ + if scope == '': + logger.info(f"Reading checkpoints (no given scope) ...") + variables = tf.get_collection(tf.GraphKeys.VARIABLES) + else: + logger.info(f"Reading checkpoints for scope '{scope}' ...") + variables = tf.get_collection(tf.GraphKeys.VARIABLES, scope=scope) + return restore(sess, variables, directory, checkpoint, strict=True) + + +def loose_loading(sess, scope, directory, checkpoint): + """Loading variables in the scope, but allow missing keys or variables. 
+ """ + if scope == '': + logger.info(f"Reading checkpoints (no given scope) ...") + variables = tf.get_collection(tf.GraphKeys.VARIABLES) + else: + logger.info(f"Reading checkpoints for scope '{scope}' ...") + variables = tf.get_collection(tf.GraphKeys.VARIABLES, scope=scope) + var_dic = get_variables_in_checkpoint_file(checkpoint) + var_missing = [] + var_restore = [] + + for v in variables: + loading_cond = v.name.split(':')[0] in var_dic and (scope in v.name.split(':')[0]) + if loading_cond: + var_restore.append(v) + logger.info('Match: {} {} {}/{}'.format(v.name, v.dtype, v.shape, var_dic[v.name.split(':')[0]])) + else: + logger.info('Miss: {} {}'.format(v.name, v.shape)) + var_missing.append(v.name) + assert len(variables) == len(var_restore) + len(var_missing) + return restore(sess, var_restore, directory, checkpoint, strict=True) diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/sess_config.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/sess_config.py new file mode 100644 index 0000000000000000000000000000000000000000..7eeb1da8f6a0783303317374ecbb9a8c98b2659c --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/sess_config.py @@ -0,0 +1,87 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import os + +import tensorflow as tf + + +def _npu_config(mix_precision, is_distributed): + """Prepare NPU tf.Session config + + Args: + mix_precision: boolean, whether to use mix precision. + is_distributed: boolean, whether in distributed scenario. + + Returns: + A tf.ConfigProto instance. + """ + config = tf.ConfigProto() + custom_op = config.graph_options.rewrite_options.custom_optimizers.add() + custom_op.name = "NpuOptimizer" + custom_op.parameter_map["enable_data_pre_proc"].b = False + custom_op.parameter_map["mix_compile_mode"].b = False + custom_op.parameter_map["use_off_line"].b = True + custom_op.parameter_map["graph_memory_max_size"].s = \ + tf.compat.as_bytes(str(28*1024 * 1024 * 1024)) + custom_op.parameter_map["variable_memory_max_size"].s = \ + tf.compat.as_bytes(str(3*1024 * 1024 * 1024)) + + if mix_precision: + custom_op.parameter_map["precision_mode"].s = \ + tf.compat.as_bytes("allow_mix_precision") + if is_distributed: + config.graph_options.rewrite_options.optimizers.extend( + ["pruning", + "function", + "constfold", + "shape", + "arithmetic", + "loop", + "dependency", + "layout", + "memory", + "GradFusionOptimizer"]) + + from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig + config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + return config + + +def _cpu_config(): + """Prepare CPU tf.Session config + + Returns: + A tf.ConfigProto instance. + """ + return tf.ConfigProto() + + +def get_sess_config(device='npu', xla=False, mix_precision=True, is_distributed=False): + """Build session config. 
+
+    Args:
+        device: str, what type of hardware to use.
+        xla: boolean, whether to use xla.
+        mix_precision: boolean, whether to use mix precision.
+        is_distributed: boolean, whether in distributed scenario.
+
+    Returns:
+        A tf.ConfigProto instance.
+    """
+    if device == 'npu':
+        return _npu_config(mix_precision, is_distributed)
+    elif device == 'cpu':
+        return _cpu_config()
+    else:
+        raise KeyError('Unsupported device: {}'.format(device))
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/solver.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/solver.py
new file mode 100644
index 0000000000000000000000000000000000000000..1ea18c091c6514424522dffdf52b8b61eb5ca9a7
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/runner/solver.py
@@ -0,0 +1,86 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# Copyright (c) 2022 Huawei Technologies Co., Ltd
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from .lr_schedule import build_schedule
+from .optimizer import build_optimizer
+
+
+class Solver(object):
+    """Solver class including optimizer and learning-rate schedule.
+
+    Args:
+        lr_cfg: yacs node, learning-rate schedule config.
+        opt_cfg: yacs node, optimizer config.
+        device: str, valid options: ['npu', 'cpu'].
+        is_distributed: boolean, whether to run distributed training.
+        mix_precision: boolean, whether to use mix precision during training.
+        loss_scale: str, strategy to apply loss scaling, which compensates the
+            precision loss during dtype conversion (see build_optimizer).
+    """
+    def __init__(self, lr_cfg, opt_cfg, device, is_distributed, mix_precision,
+                 loss_scale):
+        self.lr_schedule = build_schedule(lr_cfg)
+        self.opt = build_optimizer(self.lr_schedule.lr, opt_cfg,
+                                   device,
+                                   is_distributed,
+                                   mix_precision,
+                                   loss_scale)
+        self.total_step = sum(lr_cfg.total_steps)
+
+    def update_lr(self):
+        """Update learning rate based on schedule and step.
+        """
+        return self.lr_schedule()
+
+    @property
+    def lr(self):
+        """Returns learning rate placeholder.
+        """
+        return self.lr_schedule.lr
+
+    @property
+    def cur_lr(self):
+        """Returns current learning rate.
+        """
+        return self.lr_schedule.cur_lr
+
+
+def build_solver(lr_cfg, optimizer_cfg, mix_precision, loss_scale, device,
+                 is_distributed):
+    """Build solver for training.
+
+    Args:
+        lr_cfg: yacs node, learning-rate schedule config.
+        optimizer_cfg: yacs node, optimizer config.
+        device: str, valid options: ['npu', 'cpu'].
+        is_distributed: boolean, whether to run distributed training.
+        mix_precision: boolean, whether to use mix precision during training.
+        loss_scale: str, strategy to apply loss scaling, which compensates the
+            precision loss during dtype conversion (see build_optimizer).
+
+    Returns:
+        A solver instance.
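+
+    Example (a sketch; assumes the yacs defaults defined in src/utils/defaults.py):
+        >>> solver = build_solver(cfg.train.generator.lr_schedule,
+        ...                       cfg.train.optimizer,
+        ...                       cfg.session.mix_precision,
+        ...                       cfg.train.loss_scale,
+        ...                       cfg.env.device,
+        ...                       cfg.env.rank_size > 1)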
+ """ + assert device in ['npu', 'cpu'] + + return Solver(lr_cfg, + optimizer_cfg, + device, + is_distributed, + mix_precision, + loss_scale) diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1b6d1d7edd8e369641588e8a7721b78f402a0b63 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from src.utils.constant import * \ No newline at end of file diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/adapter.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/adapter.py new file mode 100644 index 0000000000000000000000000000000000000000..22f24d40471465ef1378cbf8a3c037e43cbe35fa --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/adapter.py @@ -0,0 +1,393 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +from itertools import product + +import numpy as np + +from .exceptions import * +from .logger import logger + + +def factor_ceiling(x, factor): + """Get the ceiling that is divisible by the factor. + """ + remain = x % factor + pad = 0 + if remain: + pad = factor - remain + x = x + pad + return x, pad + + +class NetworkIOAdapter: + """A network io adapter to process the input images when inference. + + Because of the memory limitation, we may not be able to process the whole + frame into a model in a single session.run. In this scenario, we use a + dispatch-process-stitching strategy to process the input frames. The + NetworkIOAdapter class is used to automatically make patches from the source + input frames, and stitching them together to get the whole result, during + which each patches can be overlapped. + + There are two basic modes in this adapter when using ckpt to inference: + 1. Inferece as a whole, where the model is able to process the whole image. + In this scenario, the only thing the adapter will do is to pad the input + frames to satisfy the network smallest feature map. 
+       If the smallest feature map of the model is 1/N the size of the original
+       input, then the size of the input frames should be divisible by N.
+       Therefore, if we find the original frame size does not satisfy this
+       condition, the adapter will pad the frames. After inference, the result
+       will be trimmed to the expected size.
+
+       Example:
+           >>> adapter = NetworkIOAdapter(cfg)
+           >>> input_frames = get_data()  # [N, T, H, W, C]
+           >>> adapter.register_raw_size(input_frames.shape[2:4])
+           >>> print(adapter.patch_mode)
+           False
+           >>> padded_input = adapter.adapt_input(input_frames)
+           >>> result = sess.run(output_tensor, feed_dict={input_node: padded_input})
+           >>> final_result = adapter.reverse_adapt(result)
+
+    2. Inference using patches. In this mode, we have to process the original
+       input frames patch by patch (divide and conquer) and then stitch the
+       results into the expected output. The patch size can be configured by
+       cfg.data.inference.best_patch_size for efficient inference. We first pad
+       the input frames to the factor ceiling of the best_patch_size, so that
+       the padded original image can be split into several pieces. Then each
+       patch is additionally padded with the overlap size to avoid the
+       discontinuity between two patch results. The pad size of each patch
+       should cover the size of the receptive field of the network. The session
+       will run on each padded patch, followed by a stitching step that
+       aggregates the patch results into a complete one. The padding of each
+       patch will first be trimmed off, and the patches will be concatenated
+       together. Finally, the corresponding padding of the whole image will be
+       trimmed to get the final result.
+
+       Example:
+           >>> adapter = NetworkIOAdapter(cfg)
+           >>> input_frames = get_data()  # [N, T, H, W, C]
+           >>> adapter.register_raw_size(input_frames.shape[2:4])
+           >>> print(adapter.patch_mode)
+           True
+           >>> patches = adapter.extract_image_patches(input_frames)
+           >>> num_patches = len(patches)
+           >>> patch_per_step = cfg.data.inference.batch_size
+           >>> result_patches = []
+           >>> for i in range(num_patches//patch_per_step):
+           ...     _patch = sess.run(output_tensor, feed_dict={input_node: patches[i:i+patch_per_step]})
+           ...     result_patches.extend(_patch)
+           >>> final_result = adapter.stitching_patches_to_image(result_patches)
+
+    **A special scenario is to infer with a pb file**, where the graph is
+    already frozen. In this scenario, the input size is also fixed, and we use
+    the adapter to automatically determine how to inference. **One must be
+    aware that the actual size of the input patch is**:
+        cfg.data.inference.input_size + cfg.data.inference.patch_pad_size * 2
+    Therefore, one must ensure that the value above is divisible by the network
+    feature map scale factor.
+
+    Args:
+        cfg: yacs node, global configuration.
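+
+    Example (pb/fixed-size mode; a sketch using the methods defined below):
+        >>> adapter = NetworkIOAdapter(cfg)
+        >>> adapter.fix_eval_in_size()  # freeze the input size for the pb graph
+        >>> adapter.register_raw_size(input_frames.shape[2:4])
+        >>> patches = adapter.extract_image_patches(input_frames)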
+ """ + def __init__(self, cfg): + self.cfg = cfg + + # network input settings + self.limited_in_size = cfg.data.inference.max_input_size + self.best_in_size = cfg.data.inference.best_patch_size + self.scale = cfg.model.scale + self.factor_for_adapt_input = cfg.model.factor_for_adapt_input + self.auto_mode = cfg.data.inference.auto_adapt_input + + if self.auto_mode and not self.limited_in_size: + raise ValueError('Max input size is required when in auto mode.') + if self.auto_mode == True: + self.mode = 'auto' + else: + self.mode = None + + if not self.best_in_size: + self.best_in_size = self.limited_in_size + + self.num_output_frames = cfg.model.num_net_output_frames + + # patch evaluation settings + self.eval_in_patch = cfg.data.inference.eval_using_patch + self.eval_pad_size = cfg.data.inference.patch_pad_size + # size of the input image, before adapted + self.eval_raw_size = [100000, 100000] + # size of the input patch when in patch mode, or the image size when in + # whole mode, before adapted + self.eval_in_size = cfg.data.inference.input_size + + self.fixed_in_size_flag = False + + # saved data for output, w.r.t. eval in patches strategy + self._network_direct_outsize = [] + self._stitching_mode_padsize = [] + self._patch_batch_pad = 0 + self._vtip_stitching_method = False + self._num_split = (0, 0) + + # saved data for reverse adapt, w.r.t. network input constrains + self._input_adapt_padsize = [0, 0] + + @property + def input_size(self): + # In patch mode, it should be padded size + if self.eval_in_patch: + pads_h, pads_w = self.eval_pad_size[0]*2, self.eval_pad_size[1]*2 + else: + # to keep inline with the original code, we should set + # eval_in_patch = eval_raw_size = raw image size + # when inference the whole image + pads_h, pads_w = self._input_adapt_padsize + + h = self.eval_in_size[0] + pads_h + w = self.eval_in_size[1] + pads_w + + return (h, w) + + @property + def patch_mode(self): + return self.eval_in_patch + + def adapt_input(self, lr): + #Used in whole image mode. + pads_h, pads_w = self._input_adapt_padsize + + if len(lr.shape) == 4: + pads = [[0, 0], + [pads_h//2, pads_h-pads_h//2], + [pads_w//2, pads_w-pads_w//2], + [0,0]] + else: + pads = [[0, 0], + [0, 0], + [pads_h//2, pads_h-pads_h//2], + [pads_w//2, pads_w-pads_w//2], + [0,0]] + + lr_pads = np.pad(lr, pads, mode='symmetric') + return lr_pads + + def reverse_adapt(self, data): + # Used in whole image mode + pads_h, pads_w = self._input_adapt_padsize + if data.ndim == 3: + h, w, c = data.shape + pads_t, pads_b = pads_h//2, pads_h-pads_h//2 + pads_l, pads_r = pads_w//2, pads_w-pads_w//2 + return data[pads_t*self.scale:h-pads_b*self.scale, + pads_l*self.scale:w-pads_r*self.scale] + elif data.ndim == 4: + _, h, w, c = data.shape + pads_t, pads_b = pads_h//2, pads_h-pads_h//2 + pads_l, pads_r = pads_w//2, pads_w-pads_w//2 + return data[:, + pads_t*self.scale:h-pads_b*self.scale, + pads_l*self.scale:w-pads_r*self.scale] + else: + raise ArrayDimensionError(f'Expect input data to have 3 or 4 ' + f'dimensions, but got {data.ndim}.') + + def fix_eval_in_size(self): + # Used for inference with PB file and the input size is fixed. + fixed_input_size = [self.best_in_size[0] + self.eval_pad_size[0] * 2, + self.best_in_size[1] + self.eval_pad_size[1] * 2] + pad_h, pad_w = self.cal_adapted_size(fixed_input_size) + assert pad_h == 0 and pad_w == 0, \ + f"Expect to have an input size that is divisible " \ + f"by {self.factor_for_adapt_input} when using a fixed input size, " \ + f"but got {fixed_input_size}. 
+            f"`model.best_in_size + data.eval_padsize*2` is divisible by the factor."
+        self.eval_in_size = self.best_in_size
+        self.limited_in_size = fixed_input_size  # real input size
+        self.fixed_in_size_flag = True
+
+    def register_raw_size(self, raw_size):
+        # Override the configured raw_size
+        self.eval_raw_size = raw_size
+
+        logger.info('Automatically determine inference mode (patch or whole).')
+        if self.mode == 'auto':
+            self.eval_in_size = raw_size
+            logger.info('auto inference mode.')
+        # In auto mode, the adapter will automatically define the input size
+        h, w = raw_size
+        limited_h, limited_w = self.limited_in_size
+
+        if self.fixed_in_size_flag:
+            # Remember that in this case:
+            #   self.limited_in_size = self.best_in_size + self.eval_padsize * 2
+            # **We have also made sure that self.limited_in_size is divisible
+            # by the factor**. See self.fix_eval_in_size().
+            # Shall use a different logic to determine whether to eval in
+            # patch or not.
+            if h <= limited_h and w <= limited_w:
+                # If the raw input size fits within the fixed size
+                # (self.limited_in_size), eval as a whole.
+                # self.limited_in_size = self.best_in_size + self.eval_padsize * 2
+                # automatically ensures that self._input_adapt_padsize will be zero.
+                self.eval_in_patch = False
+            else:
+                # Otherwise, use patch mode no matter whether the raw size is
+                # larger or smaller. To keep the self.input_size interface
+                # consistent, set self.eval_in_size to best_in_size, hence ensuring:
+                #   self.limited_in_size = self.best_in_size + self.eval_padsize * 2
+                #                        = self.eval_in_size + self.eval_padsize * 2
+                self.eval_in_patch = True
+                self.eval_in_size = self.best_in_size
+        else:
+            if h * w > limited_w * limited_h:
+                self.eval_in_patch = True
+                self.eval_in_size = (
+                    factor_ceiling(min(h, self.best_in_size[0]), self.factor_for_adapt_input)[0],
+                    factor_ceiling(min(w, self.best_in_size[1]), self.factor_for_adapt_input)[0],
+                )
+            else:
+                self.eval_in_patch = False
+
+        if self.eval_in_patch:
+            # Follow the config or the automatic setting
+            pass
+        else:
+            # Adapt the image input to fit the network requirements
+            if self.fixed_in_size_flag:
+                self._input_adapt_padsize = (
+                    (self.limited_in_size[0] - raw_size[0]),
+                    (self.limited_in_size[1] - raw_size[1]),
+                )
+            else:
+                # For whole image inference
+                self._input_adapt_padsize = self.cal_adapted_size(raw_size)
+
+        if self._input_adapt_padsize[0]:
+            logger.info(f'Input height {raw_size[0]} is not divisible by {self.factor_for_adapt_input}'
+                        f', will be padded to {raw_size[0]+self._input_adapt_padsize[0]}')
+
+        if self._input_adapt_padsize[1]:
+            logger.info(f'Input width {raw_size[1]} is not divisible by {self.factor_for_adapt_input}'
+                        f', will be padded to {raw_size[1]+self._input_adapt_padsize[1]}')
+
+        logger.info('Inference adapter: ')
+        logger.info(f'\t Use patch: {f"{self.eval_in_patch}":>5}')
+        logger.info(f'\t Image Raw size: {self.eval_raw_size}')
+        logger.info(f'\tOriginal patch size: {self.eval_in_size}')
+        logger.info(f'\t Adapted input size: {self.input_size}')
+
+    def cal_adapted_size(self, raw_size):
+        h, w = raw_size
+        # In case the input is not divisible by the factor
+        _, pad_h = factor_ceiling(h, self.factor_for_adapt_input)
+        _, pad_w = factor_ceiling(w, self.factor_for_adapt_input)
+
+        return pad_h, pad_w
+
+    def extract_image_patches(self, data, num_patches_per_step=1):
+        # This function is used in patch mode
+        return self._extract_image_patches_canonical(data, num_patches_per_step)
+
+    def stitching_patches_to_image(self, data):
+        # This function is used in patch mode
+        return self._merge_patches_to_images_canonical(data)
+
+    def _extract_image_patches_canonical(self, data, num_patches_per_step=1):
+        if data.ndim != 4:
+            raise ArrayDimensionError(f'Expect input data to have 4 dimensions, but got {data.ndim}.')
+
+        _, h, w, _ = data.shape
+        ph, pw = self.eval_in_size
+        # image padding size
+        image_pad_right = int(float(w)/pw + 1) * pw - w
+        image_pad_bottom = int(float(h)/ph + 1) * ph - h
+        image_pad_right = 0 if image_pad_right == pw else image_pad_right
+        image_pad_bottom = 0 if image_pad_bottom == ph else image_pad_bottom
+        # patch padding size
+        patch_pad_top = patch_pad_bottom = self.eval_pad_size[0]
+        patch_pad_left = patch_pad_right = self.eval_pad_size[1]
+
+        # pad image
+        pad_t = patch_pad_top
+        pad_b = patch_pad_bottom + image_pad_bottom
+        pad_l = patch_pad_left
+        pad_r = patch_pad_right + image_pad_right
+        img_padded = np.pad(data, ((0, 0),
+                                   (pad_t, pad_b),
+                                   (pad_l, pad_r),
+                                   (0, 0)), mode='symmetric')
+
+        new_h, new_w = img_padded.shape[1:3]
+        self._network_direct_outsize = (self.num_output_frames, new_h*self.scale, new_w*self.scale, 3)
+
+        # number of patches
+        num_split_y = (h + image_pad_bottom) // ph
+        num_split_x = (w + image_pad_right) // pw
+        self._num_split = (num_split_y, num_split_x)
+
+        img_patches = []
+        for split_j, split_i in product(range(num_split_y), range(num_split_x)):
+            # extract patches with extra pad size
+            patch_start_y = split_j * ph
+            patch_end_y = patch_start_y + ph + patch_pad_top + patch_pad_bottom
+            patch_start_x = split_i * pw
+            patch_end_x = patch_start_x + pw + patch_pad_left + patch_pad_right
+            img_patches.append(img_padded[:, patch_start_y:patch_end_y, patch_start_x:patch_end_x, :])
+
+        img_patches = np.array(img_patches)
+        num_patches = img_patches.shape[0]
+        batch_pad = (num_patches // num_patches_per_step + 1) * num_patches_per_step - num_patches
+        batch_pad = 0 if batch_pad == num_patches_per_step else batch_pad
+        self._patch_batch_pad = batch_pad
+
+        # Concatenate all the patches in order.
+        if batch_pad > 0:
+            img_patches_padded = np.concatenate([
+                img_patches,
+                np.zeros([batch_pad, *img_patches.shape[1:]], dtype=np.float32),
+            ], axis=0)
+        else:
+            img_patches_padded = img_patches
+        return img_patches_padded
+
+    def _merge_patches_to_images_canonical(self, data):
+        # This is the reverse of the dispatching process.
+        ph, pw = self.eval_in_size
+        num_split_y, num_split_x = self._num_split
+        sr_all = np.zeros(self._network_direct_outsize, dtype=np.float32)  # [num_output_frames, h, w, c]
+
+        h, w = self.eval_raw_size
+        patch_pad_top, patch_pad_left = self.eval_pad_size
+        patch_sr = np.array(data)  # should be [num_patches, num_output_frames, h, w, c]
+        if patch_sr.ndim == 4 and self.num_output_frames == 1 and patch_sr.shape[1] != 1:
+            patch_sr = np.expand_dims(patch_sr, axis=1)
+
+        patch_s_y = patch_pad_top * self.scale
+        patch_e_y = (patch_pad_top + ph) * self.scale
+        patch_s_x = patch_pad_left * self.scale
+        patch_e_x = (patch_pad_left + pw) * self.scale
+        patch_id = 0
+        for split_j, split_i in product(range(num_split_y), range(num_split_x)):
+            im_s_y = split_j * ph * self.scale
+            im_e_y = im_s_y + ph * self.scale
+            im_s_x = split_i * pw * self.scale
+            im_e_x = im_s_x + pw * self.scale
+            sr_all[:, im_s_y:im_e_y, im_s_x:im_e_x] = patch_sr[patch_id, :, patch_s_y:patch_e_y, patch_s_x:patch_e_x]
+            patch_id += 1
+
+        # Trim the output to the expected size.
+        sr_all = sr_all[:, :h*self.scale, :w*self.scale]
+        return sr_all.squeeze()
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/constant.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/constant.py
new file mode 100644
index 0000000000000000000000000000000000000000..9156812f7c59e08658a735e6161eaaf8dc8ce34e
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/constant.py
@@ -0,0 +1,47 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import Enum
+
+VALID_COLORSPACE = {'rgb', 'bgr', 'lab', 'yuv', 'ycrcb', 'gray3d', 'gray', 'y'}
+VALID_MODE = {'train', 'eval', 'inference', 'freeze'}
+VALID_PARADIGM = {'dni'}
+VALID_DEBUG_MODE = {'zeroin', 'intermediate'}
+VALID_TASK = {'vsr', 'denoise', 'face', 'hdr', 'vfi'}
+
+# HDR
+HDR_CODEC_PIX_FMT = 'gbrpf32le'
+HDR_FILE_SUPPORTED_EXT = 'exr'
+
+SDR_CODEC_PIX_FMT = 'bgr24'
+SDR_FILE_SUPPORTED_EXT = 'png'
+
+RESOURCE_FILE = r'src/resource.json'
+
+FILE_EXT_TO_PIX_FMT = {
+    HDR_FILE_SUPPORTED_EXT: HDR_CODEC_PIX_FMT,
+    SDR_FILE_SUPPORTED_EXT: SDR_CODEC_PIX_FMT,
+}
+VALID_FILE_EXT = FILE_EXT_TO_PIX_FMT.keys()
+
+
+# io backend
+class IO_BACKEND:
+    DISK = 'disk'
+    FFMPEG = 'ffmpeg'
+
+    @classmethod
+    def CHECK_VALID(cls, io_backend):
+        assert io_backend in {cls.DISK, cls.FFMPEG}, \
+            f'Invalid io backend {io_backend}'
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/defaults.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/defaults.py
new file mode 100644
index 0000000000000000000000000000000000000000..30d51d8892845ca19a4aa9719c43e40820341423
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/defaults.py
@@ -0,0 +1,269 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
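+
+# Usage sketch (illustration only, not part of the original file): these
+# defaults are typically cloned and merged with an experiment yaml, e.g.
+#     from src.utils.defaults import cfg
+#     exp_cfg = cfg.clone()
+#     exp_cfg.merge_from_file('configs/my_exp.yaml')  # hypothetical path
+#     exp_cfg.freeze()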
+
+from yacs.config import CfgNode as CN
+
+
+cfg = CN(new_allowed=True)
+
+cfg.mode = 'train'
+cfg.task = 'vsr'  # 'denoise', 'vsr'
+
+# ---------------------------------------------------------------------------- #
+# Model (common)
+# ---------------------------------------------------------------------------- #
+cfg.model = CN(new_allowed=True)
+cfg.model.name = 'EDVR'  # Key for model
+cfg.model.scope = 'G'  # Scope key for parameters
+
+# for vfi
+cfg.model.frame_rate = 2
+
+# for vsr
+cfg.model.scale = 4
+
+# The input size as well as the placeholder will be adapted automatically.
+# See base_model.py `_cal_input_size` function, and inferencer.py `adapt_input` function.
+# The output will be reverse-adapted by the inferencer.
+cfg.model.factor_for_adapt_input = 4
+
+# The following numbers of frames are defined separately so that the model
+# configuration generalizes, e.g., extends in the temporal dimension or to
+# cascades of models:
+#   num_net_input_frames: the number of LQ frames the model takes at inference
+#   num_net_output_frames: the number of SR frames the model outputs at inference
+#   num_data_lq_frames: the number of LQ frames the dataloader yields in training
+#   num_data_gt_frames: the number of GT frames the dataloader yields in training for supervision
+cfg.model.num_net_input_frames = 5
+cfg.model.num_net_output_frames = 1
+
+# Options for the input dimension
+# 4: 4D tensor, with shape [b*frames, h, w, c], used when the model is frozen
+# 5: 5D tensor, with shape [b, frames, h, w, c]
+cfg.model.input_format_dimension = 5
+cfg.model.convert_output_to_uint8 = False
+
+# ---------------------------------------------------------------------------- #
+# Loss (common)
+# ---------------------------------------------------------------------------- #
+cfg.loss = CN(new_allowed=True)
+cfg.loss.content = CN(new_allowed=True)
+cfg.loss.content.loss_type = 'L1Loss'
+cfg.loss.content.loss_reduction = 'mean'
+cfg.loss.content.loss_margin = 1e-6
+
+# Loss (edge)
+cfg.loss.edge = CN(new_allowed=True)
+cfg.loss.edge.loss_weight = 0.
+cfg.loss.edge.method = 'sobel'  # ['sobel', 'laplacian']
+
+# Loss (perceptual)
+# perceptual module
+cfg.loss.perceptual = CN(new_allowed=True)
+cfg.loss.perceptual.loss_weight = 0.
+cfg.loss.perceptual.module = 'vgg_19'
+cfg.loss.perceptual.layers = ['vgg_19/conv2/conv2_2',
+                              'vgg_19/conv3/conv3_4',
+                              'vgg_19/conv4/conv4_4',
+                              'vgg_19/conv5/conv5_4']
+cfg.loss.perceptual.layers_weights = [1.0, 1.0, 1.0, 1.0]
+# full ckpt file should be '${ckpt_dir}/${module}.ckpt'
+cfg.loss.perceptual.ckpt_dir = './pretrained_modules'
+
+# Loss (adv)
+cfg.loss.adversarial = CN(new_allowed=True)
+cfg.loss.adversarial.loss_weight = 0.
+cfg.loss.adversarial.adaptive_strategy = False
+cfg.loss.adversarial.d_balance = 0.4
+cfg.loss.adversarial.gan_type = 'VanillaGAN'
+cfg.loss.adversarial.grad_penalty_weight = 0.1
+cfg.loss.adversarial.g_update_interval = 1
+cfg.loss.adversarial.d_update_interval = 1
+cfg.loss.adversarial.loss_type = 'VanillaAdvLoss'
+cfg.loss.adversarial.loss_reduction = 'mean'
+cfg.loss.adversarial.norm_type = 'in'
+cfg.loss.adversarial.mid_channels = 64
+cfg.loss.adversarial.parameter_clip = False
+cfg.loss.adversarial.parameter_clip_range = [-0.01, 0.01]
+
+# ---------------------------------------------------------------------------- #
+# Data (common)
+# ---------------------------------------------------------------------------- #
+cfg.data = CN(new_allowed=True)
+# For mixed datasets, the directories should be separated with ':'
+cfg.data.data_dir = 'data/reds'
+
+cfg.data.num_data_lq_frames = 5
+cfg.data.num_data_gt_frames = 1
+# File extension. For HDR, it should be 'exr'. For others, it would be 'png'.
+# Note: it is only used in the inference dataset for now.
+cfg.data.extension = 'png'
+# ['bgr', 'rgb', 'lab'], default to `rgb`
+cfg.data.color_space = 'rgb'
+cfg.data.normalized = True
+
+# training
+cfg.data.train = CN(new_allowed=True)
+cfg.data.train.degradation = CN(new_allowed=True)
+cfg.data.train.degradation.online = False
+cfg.data.train.degradation.options = \
+"""
+GaussianNoise:
+    input_dim: 4
+    noise_level: 20
+IsotropicGaussianBlur2D:
+    input_dim: 4
+    kernel_size: 15
+    sigma: 10
+BicubicDownsampling:
+    input_dim: 4
+    scale: 4
+batch_apply: False
+"""
+cfg.data.train.gt_enhancement = False
+cfg.data.train.set_file = 'train.json'
+cfg.data.train.batch_size = 4
+cfg.data.train.input_size = [64, 64]
+
+cfg.data.train.augmentation = CN(new_allowed=True)
+cfg.data.train.augmentation.apply = True
+cfg.data.train.augmentation.interval_list = [1, 2]
+# Augmentation options should be a doc-string (yaml formatted), for example
+# the following. Note that in 'RandomCrop', 'crop_size' and 'scale' will be
+# provided by the _TrainDataset class based on other configurations, so there
+# is no need for users to provide these two parameters explicitly. This design
+# avoids configuring the same two parameters twice.
+cfg.data.train.augmentation.options = \
+"""
+RandomCrop:
+    input_dim: 4
+RandomTemporalReverse:
+    input_dim: 4
+RandomFlipLeftRight:
+    input_dim: 4
+RandomFlipUpDown:
+    input_dim: 4
+shuffle_transformers_order: False
+"""
+
+# inference
+cfg.data.inference = CN()
+cfg.data.inference.auto_adapt_input = True
+cfg.data.inference.batch_size = 1
+cfg.data.inference.input_size = [180, 320]
+cfg.data.inference.eval_using_patch = False
+cfg.data.inference.patch_pad_size = [32, 32]
+
+# Specify the max size of the input supported by the network.
+# At release time, the program adaptively chooses whether to run inference
+# on the whole image at once or with patch stitching.
+cfg.data.inference.max_input_size = [540, 960]
+cfg.data.inference.best_patch_size = [540, 640]
+
+# A subset of the given dataset for inference: (min_index, max_index).
+# One should set the index **in** the file name, instead of the actual index
+# in the file order. For example, the files are:
+# - samples
+#   |- 0001.png (file list index 0)
+#   |- 0002.png (file list index 1)
+#   |- 0003.png (file list index 2)
+#   |- 0004.png (file list index 3)
+#   `- 0005.png (file list index 4)
+# and the frames 0002.png - 0004.png are about to be inferred.
+# Then the value of the following key should be [2, 4] (indices **in** the
+# file name), rather than [1, 3] (indices of the file list).
+cfg.data.inference.subset_range = []
+cfg.data.inference.subset_list = []
+
+# ---------------------------------------------------------------------------- #
+# Training (common)
+# ---------------------------------------------------------------------------- #
+cfg.train = CN(new_allowed=True)
+cfg.train.training_scope = ''
+cfg.train.pretrained_scope_list = []
+cfg.train.pretrained_scope_ckpt = []
+
+cfg.train.optimizer = CN(new_allowed=True)
+cfg.train.optimizer.type = 'Adam'
+
+# TODO: add options for optimizer
+cfg.train.generator = CN(new_allowed=True)
+cfg.train.generator.lr_schedule = CN(new_allowed=True)
+cfg.train.generator.lr_schedule.type = 'CosineRestart'
+cfg.train.generator.lr_schedule.base_lr = 4e-4
+cfg.train.generator.lr_schedule.total_steps = [10000]
+cfg.train.generator.lr_schedule.restart_weights = [1, 0.5, 0.5, 0.5]
+cfg.train.generator.lr_schedule.min_lr = 1e-7
+
+# Discriminator lr schedule
+cfg.train.discriminator = CN(new_allowed=True)
+cfg.train.discriminator.lr_schedule = CN(new_allowed=True)
+cfg.train.discriminator.lr_schedule.type = 'CosineRestart'
+cfg.train.discriminator.lr_schedule.base_lr = 4e-4
+cfg.train.discriminator.lr_schedule.total_steps = [150000, 150000, 150000, 150000]
+cfg.train.discriminator.lr_schedule.restart_weights = [1, 0.5, 0.5, 0.5]
+cfg.train.discriminator.lr_schedule.min_lr = 1e-7
+
+cfg.train.checkpoint_interval = 5000
+cfg.train.print_interval = 20
+cfg.train.loss_scale = 'off'
+
+cfg.train.use_tensorboard = False
+cfg.train.dump_intermediate = False
+cfg.train.dump_intermediate_interval = 2000
+cfg.train.continue_training = False
+
+cfg.train.output_dir = 'outputs/edvr'
+
+# ---------------------------------------------------------------------------- #
+# Session
+# ---------------------------------------------------------------------------- #
+cfg.session = CN()
+cfg.session.mix_precision = False
+cfg.session.xla = False
+
+# ---------------------------------------------------------------------------- #
+# Env
+# ---------------------------------------------------------------------------- #
+cfg.env = CN(new_allowed=True)
+cfg.env.device = 'npu'
+cfg.env.device_ids = [0]
+cfg.env.rank_size = 1
+cfg.env.root_rank = 0
+
+# ---------------------------------------------------------------------------- #
+# Misc
+# ---------------------------------------------------------------------------- #
+cfg.debug_mode = False
+cfg.log_file = ''
+cfg.checkpoint = ''
+
+# ---------------------------------------------------------------------------- #
+# Inference
+# ---------------------------------------------------------------------------- #
+cfg.inference = CN(new_allowed=True)
+cfg.inference.write_out = True
+cfg.inference.io_backend = 'disk'
+
+# disk scenario
+cfg.inference.result_dir = ''
+cfg.inference.writer_num_threads = 8
+cfg.inference.writer_queue_size = 64  # used in both disk and ffmpeg scenarios
+
+# ffmpeg stream scenario
+cfg.inference.ffmpeg = CN(new_allowed=True)
+cfg.inference.ffmpeg.video_filename = 'test'
+cfg.inference.ffmpeg.fps = 25.
+cfg.inference.ffmpeg.codec_file = './config/codecs/default_x264.json'
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..493b218edcdb3f046f2416ffbee16b6694939ade
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/__init__.py
@@ -0,0 +1,65 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from .noise import *
+from .scaling import *
+from .blur import *
+
+from src.utils.klass import get_subclass_given_name
+
+
+class Degradation(object):
+    """Composes several degradations together.
+
+    Args:
+        degradations: list[Base], list of degradation operators to compose.
+    """
+    def __init__(self, degradations=None):
+        self.degradations = degradations
+
+    @classmethod
+    def from_cfgs(cls, options, **kwargs):
+        """Construct the degradation pipeline from a cfg dict.
+
+        Args:
+            options: dict, pairs of {Degradation_class_type: kwargs}.
+            kwargs: dict, additional kwargs.
+
+        Returns:
+            A composed degradation instance.
+        """
+
+        t = []
+        for k, v in options.items():
+            if k == 'RandomCrop':
+                # crop_size and scales are required terms
+                v['crop_size'] = kwargs['crop_size']
+                v['scales'] = kwargs['scales']
+            elif k == 'Scaling':
+                v['scales'] = kwargs['scales']
+            _filter = get_subclass_given_name(Base, k)
+            t.append(_filter(**v))
+        return cls(t)
+
+    def __call__(self, *img):
+        for t in self.degradations:
+            img = t(*img)
+        return img
+
+    def __repr__(self):
+        format_string = self.__class__.__name__ + '('
+        for t in self.degradations:
+            format_string += '\n'
+            format_string += '    {0}'.format(t)
+        format_string += '\n)'
+        return format_string
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/base.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..c0886c68c2b5858c7355e0d8bb75b474678d9f10
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/base.py
@@ -0,0 +1,45 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
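+
+# Usage sketch (an illustration, not part of the original file): subclasses of
+# Base implement apply(im) and are composed via Degradation.from_cfgs in
+# src/utils/degradation/__init__.py, e.g.
+#     from src.utils.degradation import Degradation
+#     deg = Degradation.from_cfgs(
+#         {'ChannelIndependentGaussianNoise': {'mean': 0., 'std': 10.}})
+#     noisy = deg(clean)  # clean: HxWxC ndarray in [0, 255]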
+import numpy as np
+import random
+import time
+
+
+class Base:
+    """Base class for degradation.
+    """
+    def set_numpy_random_seed(self):
+        # Numpy random states are the same across all the multi-processing
+        # workers. In order to maintain the randomness, use the system
+        # timestamp to manually set the numpy seed every time this function
+        # is called.
+        np.random.seed(int(time.time() + random.random() * 1000000))
+
+    def check_input(self, x):
+        return isinstance(x, np.ndarray) and x.ndim == 3
+
+    def __call__(self, im):
+        self.set_numpy_random_seed()
+        if self.check_input(im):
+            return self.apply(im)
+        else:
+            raise ValueError(f'Expect input image to be a 3D-array (HWC), but got {im.ndim}D-array.')
+
+    def apply(self, im):
+        raise NotImplementedError
+
+    def __repr__(self):
+        return f'{self.__class__.__name__}@{id(self)}'
+
+    def __str__(self):
+        return self.__repr__()
\ No newline at end of file
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/blur.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/blur.py
new file mode 100644
index 0000000000000000000000000000000000000000..b2328b29e3a82374b30640310e64c3c2f162dddf
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/blur.py
@@ -0,0 +1,163 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import numpy as np
+import cv2
+import time
+import random
+from scipy import special
+
+from src.utils.degradation.base import Base
+
+
+class BlurKernel2D(Base):
+    """A common base class for blurring.
+    """
+    def check_kernel(self):
+        if self.ksize[0] % 2 == 0 or self.ksize[1] % 2 == 0:
+            raise ValueError(f'Expect kernel size to be odd (1, 3, 5, etc.), but got {self.ksize}')
+
+
+class AvgBlur2D(BlurKernel2D):
+    """A 2D average blur operator.
+
+    Args:
+        k_size: int, blur kernel size, expected to be odd.
+    """
+    def __init__(self, k_size):
+        assert k_size % 2 == 1
+        k_size = (k_size, k_size)
+        self.k_size = k_size
+
+    def apply(self, im):
+        return cv2.blur(im, self.k_size, borderType=cv2.BORDER_REFLECT_101)
+
+
+class IsotropicGaussianBlur2D(BlurKernel2D):
+    """An isotropic Gaussian blur operator.
+
+    Args:
+        kernel_size: int, blur kernel size, expected to be odd.
+        std: float, width of the kernel.
+    """
+    def __init__(self, kernel_size, std):
+        assert kernel_size % 2 == 1
+        self.kernel_size = (kernel_size, kernel_size)
+        self.std = std
+
+    def apply(self, im):
+        if self.check_input(im):
+            return cv2.GaussianBlur(im, self.kernel_size, self.std, self.std, borderType=cv2.BORDER_REFLECT_101)
+        else:
+            raise ValueError
+
+
+def gaussian(x, k, s):
+    return np.exp(-(x-(k-1)/2)**2/(2*s**2))
+
+
+
+class AnisotropicGaussianBlur2D(BlurKernel2D):
+    """An anisotropic Gaussian blur operator.
+
+    Reference to
+    https://github.com/cszn/USRNet/blob/4fb56deb80d655abb722ff83750ad3df163ef833/utils/utils_sisr.py#L129
+
+    Args:
+        kernel_size: int, blur kernel size, expected to be odd.
+        var: pair of floats, eigenvalues (lambda_1, lambda_2) of the kernel
+            covariance, controlling the width along each principal axis.
+
+    """
+    def __init__(self, kernel_size, var, angle, scale=1, noise_level=0):
+        """
+        # modified version of https://github.com/assafshocher/BlindSR_dataset_generator
+        # Kai Zhang
+        # min_var = 0.175 * sf # variance of the gaussian kernel will be sampled between min_var and max_var
+        # max_var = 2.5 * sf
+        """
+        assert kernel_size % 2 == 1
+        k_size = np.array([kernel_size, kernel_size])
+
+        # Set random eigen-vals (lambdas) and angle (theta) for COV matrix
+        lambda_1, lambda_2 = var
+        # noise = -noise_level + np.random.rand(*k_size) * noise_level * 2
+
+        # Set COV matrix using Lambdas and Theta
+        LAMBDA = np.diag([lambda_1, lambda_2])
+        Q = np.array([[np.cos(angle), -np.sin(angle)],
+                      [np.sin(angle), np.cos(angle)]])
+        SIGMA = Q @ LAMBDA @ Q.T
+        INV_SIGMA = np.linalg.inv(SIGMA)[None, None, :, :]
+
+        # Set expectation position (shifting kernel for aligned image)
+        MU = k_size // 2 - 0.5*(scale - 1)
+        MU = MU[None, None, :, None]
+
+        # Create meshgrid for Gaussian
+        [X, Y] = np.meshgrid(range(k_size[0]), range(k_size[1]))
+        Z = np.stack([X, Y], 2)[:, :, :, None]
+
+        # Calculate Gaussian for every pixel of the kernel
+        ZZ = Z-MU
+        ZZ_t = ZZ.transpose(0, 1, 3, 2)
+        raw_kernel = np.exp(-0.5 * np.squeeze(ZZ_t @ INV_SIGMA @ ZZ))
+
+        # Normalize the kernel and return
+        self.kernel = raw_kernel / np.sum(raw_kernel)
+
+    def apply(self, im):
+        return cv2.filter2D(im, -1, self.kernel)
+
+
+def circular_lowpass_kernel(cutoff, kernel_size, pad_to=0):
+    """2D sinc filter.
+
+    Borrowed from https://github.com/xinntao/BasicSR/blob/master/basicsr/data/degradations.py#L392
+    Ref: https://dsp.stackexchange.com/questions/58301/2-d-circularly-symmetric-low-pass-filter
+
+    Args:
+        cutoff: float, cutoff frequency in radians (pi is max).
+        kernel_size: int, horizontal and vertical size, must be odd.
+        pad_to: int, pad kernel size to desired size, must be odd or zero.
+
+    Returns:
+        ndarray of [kernel_size, kernel_size], the sinc kernel.
+    """
+    assert kernel_size % 2 == 1, 'Kernel size must be an odd number.'
+    kernel = np.fromfunction(
+        lambda x, y: cutoff * special.j1(cutoff * np.sqrt(
+            (x - (kernel_size - 1) / 2)**2 + (y - (kernel_size - 1) / 2)**2)) / (2 * np.pi * np.sqrt(
+            (x - (kernel_size - 1) / 2)**2 + (y - (kernel_size - 1) / 2)**2)), [kernel_size, kernel_size])
+    kernel[(kernel_size - 1) // 2, (kernel_size - 1) // 2] = cutoff**2 / (4 * np.pi)
+    kernel = kernel / np.sum(kernel)
+    if pad_to > kernel_size:
+        pad_size = (pad_to - kernel_size) // 2
+        kernel = np.pad(kernel, ((pad_size, pad_size), (pad_size, pad_size)))
+    return kernel
+
+
+class SincFilter(BlurKernel2D):
+    """A sinc filter.
+
+    Args:
+        kernel_size: int, blur kernel size, expected to be odd.
+        omega_c: float, cutoff frequency in radians (pi is max).
+    """
+    def __init__(self, kernel_size, omega_c):
+        self.kernel_size = kernel_size
+        self.omega_c = omega_c
+        self.kernel = circular_lowpass_kernel(self.omega_c, self.kernel_size, pad_to=0)
+
+    def apply(self, im):
+        return cv2.filter2D(im, -1, self.kernel)
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/noise.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/noise.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef24f2c6debb036496d052cfa7a6bde23a25d60d
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/noise.py
@@ -0,0 +1,195 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import numpy as np
+import cv2
+import random
+import time
+
+from src.utils.degradation.base import Base
+
+
+class NoiseAugmentation(Base):
+    """Noise addition class.
+    """
+    def __init__(self, **kwargs):
+        self.max_value = 255.
+        self.min_value = 0.  # input data should be in range [min_value, max_value]
+        self._data_format = 'hwc'  # ['hwc', 'thwc']
+
+    def get_height(self, im):
+        return im.shape[self._data_format.index('h')]
+
+    def get_width(self, im):
+        return im.shape[self._data_format.index('w')]
+
+    def get_temp(self, im):
+        return im.shape[self._data_format.index('t')]
+
+    @property
+    def data_format(self):
+        return self._data_format
+
+    @data_format.setter
+    def data_format(self, target_format):
+        self._data_format = target_format.lower()
+
+    def __call__(self, im, **kwargs):
+        # Input im should be in range [0, 255], either with np.uint8 or np.float32 dtype.
+        if self.check_input(im):
+            # Numpy random states are the same across all the multi-processing
+            # workers. In order to maintain the randomness, use the system
+            # timestamp to manually set the numpy seed every time this
+            # function is called.
+            self.set_numpy_random_seed()
+            im = self.apply(im)
+            im = np.clip(im.astype(np.float32), a_min=self.min_value, a_max=self.max_value)
+            return im
+        else:
+            raise ValueError(f'Expect input image to be [3D, 4D]-array, but got {im.ndim}D-array.')
+
+
+class MultivarGaussianNoise(NoiseAugmentation):
+    """Multi-variate Gaussian noise.
+
+    The noise in the channels is dependent.
+
+    Args:
+        mean: float, the mean of the noise in each channel.
+        covar: ndarray, a 3x3 covariance matrix.
+    """
+    def __init__(self, mean=0., covar=None):
+        super().__init__()
+        assert covar is not None
+        self.mean = np.array([mean, mean, mean])
+        self.cor_var = np.array(covar)
+
+    def apply(self, clean_data, **kwargs):
+        shape = clean_data.shape
+        noise = np.random.multivariate_normal(self.mean, self.cor_var, size=shape[:-1])
+        return clean_data + noise
+
+
+class ChannelIndependentGaussianNoise(NoiseAugmentation):
+    """Channel-independent Gaussian noise.
+
+    The noise in the channels is independent.
+
+    Args:
+        mean: float, the mean of the noise in each channel.
+        std: float, standard deviation of the noise.
+    """
+    def __init__(self, mean=0., std=0.01):
+        super(ChannelIndependentGaussianNoise, self).__init__()
+        self.mean = mean
+        self.std = std
+
+    def apply(self, clean_data, **kwargs):
+        shape = clean_data.shape
+        noise = self.std * np.random.randn(*shape) + self.mean
+        return clean_data + noise
+
+
+class GrayscaleGaussianNoise(NoiseAugmentation):
+    """Single channel Gaussian noise.
+
+    The same noise is broadcast to all the channels.
+
+    Args:
+        mean: float, the mean of the noise in each channel.
+        std: float, standard deviation of the noise.
+    """
+    def __init__(self, mean=0., std=0.01):
+        super(GrayscaleGaussianNoise, self).__init__()
+        self.mean = mean
+        self.std = std
+
+    def apply(self, clean_data, **kwargs):
+        shape = list(clean_data.shape)
+        shape[-1] = 1
+        noise = self.std * np.random.randn(*shape) + self.mean
+        return clean_data + noise
+
+
+class SaltPepperNoise(NoiseAugmentation):
+    """Salt and pepper noise.
+
+    Args:
+        amount: float, total proportion of the noise pixels in the image.
+        salt_ratio: float, the proportion of the salt (white) in the noisy pixels.
+    """
+    def __init__(self, amount=0.005, salt_ratio=0.5):
+        super().__init__()
+        self.amount = amount
+        self.salt_noise_ratio = salt_ratio
+
+    def apply(self, clean_data, **kwargs):
+        h, w = clean_data.shape[:2]
+        # make a copy
+        noisy = np.array(clean_data)
+
+        num_salt = np.ceil(self.amount * h * w * self.salt_noise_ratio)
+        coord = [np.random.randint(0, i - 1, int(num_salt)) for i in [h, w]]
+        noisy[tuple(coord)] = self.max_value
+
+        num_pepper = np.ceil(self.amount * h * w * (1. - self.salt_noise_ratio))
+        coord = [np.random.randint(0, i - 1, int(num_pepper)) for i in [h, w]]
+        noisy[tuple(coord)] = self.min_value
+        return noisy
+
+
+class SpeckleNoise(NoiseAugmentation):
+    """Speckle noise.
+    """
+    def apply(self, clean_data, **kwargs):
+        shape = clean_data.shape
+        gauss = np.random.randn(*shape)
+        noisy = clean_data + clean_data * gauss
+        return noisy
+
+
+class JPEGCompressionNoise(NoiseAugmentation):
+    """JPEG compression noise.
+
+    Args:
+        quality: int, ranged in [0, 100], controls the quality of the compressed
+            image. The lower the quality is, the worse the image looks.
+    """
+    def __init__(self, quality):
+        super(JPEGCompressionNoise, self).__init__()
+        self.quality = int(quality)
+
+    def apply(self, clean_data, **kwargs):
+        clean_data = np.clip(clean_data, self.min_value, self.max_value).astype(np.uint8)
+        encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), self.quality]
+        result, encimg = cv2.imencode('.jpg', clean_data, encode_param)
+        noisy = cv2.imdecode(encimg, cv2.IMREAD_COLOR)
+        return noisy.astype(np.float32)
+
+
+class PoissonNoise(NoiseAugmentation):
+    """Poisson noise.
+
+    Reference https://stackoverflow.com/questions/19289470/adding-poisson-noise-to-an-image
+    https://github.com/xinntao/BasicSR/blob/master/basicsr/data/degradations.py#L587
+    """
+    def apply(self, clean_data, **kwargs):
+        # round and clip image for counting vals correctly
+        vals = len(np.unique(clean_data.astype(np.uint8)))
+        vals = 2**np.ceil(np.log2(vals))
+
+        img = np.clip(clean_data, self.min_value, self.max_value) / self.max_value
+        out = np.float32(np.random.poisson(img * vals) / float(vals))
+        noise = (out - img) * self.max_value
+
+        return clean_data + noise
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/scaling.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/scaling.py
new file mode 100644
index 0000000000000000000000000000000000000000..febce3002eab445f9807c0ad7e058168bc6e04af
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/degradation/scaling.py
@@ -0,0 +1,96 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import numpy as np
+import cv2
+import time
+import random
+from scipy.interpolate import interp2d
+
+from src.utils.degradation.base import Base
+
+
+class SpatialScaling(Base):
+    """Up- and down-sampling degradation.
+
+    Args:
+        target_size: tuple[int], (W, H) specifying the size after resize.
+        scales: list[float], [Sw, Sh] specifying the scales of each dimension.
+    """
+    def __init__(self, target_size, scales=None):
+        # Note: target_size and scales should be in [x, y] or [w, h] order
+        self.scales = scales
+        self.target_size = target_size
+        assert isinstance(self.target_size, tuple)
+
+
+class NearestScaling(SpatialScaling):
+    """Nearest scaling up and down.
+
+    Args:
+        target_size: tuple[int], (W, H) specifying the size after resize.
+        scales: array-like of 2 floats, scales of each dimension.
+        kernel_width: float, the Gaussian sigma of the pre-filter kernel used
+            to remedy the misalignment of nearest sampling.
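+
+    Example (a sketch; downscale a 1920x1080 frame to 960x540):
+        >>> import numpy as np
+        >>> op = NearestScaling(target_size=(960, 540),
+        ...                     scales=np.array([2., 2.]), kernel_width=1.5)
+        >>> lq = op(hq)  # hq: HxWx3 float ndarray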
+ """ + def __init__(self, target_size, scales, kernel_width): + super(NearestScaling, self).__init__(target_size, scales) + kernel = cv2.getGaussianKernel(21, kernel_width) + self.kernel = kernel @ np.transpose(kernel) + self.kernel = self.shift_pixels(self.kernel, scales) + + def shift_pixels(self, x, scales, upper_left=False): + """shift pixel for super-resolution with different scale factors + + Args: + x: WxHxC or WxH, image or kernel + sf: scale factor + upper_left: shift direction + """ + h, w = x.shape[:2] + shift = (scales-1)*0.5 + xv, yv = np.arange(0, w, 1.0), np.arange(0, h, 1.0) + if upper_left: + x1 = xv + shift[0] + y1 = yv + shift[1] + else: + x1 = xv - shift[0] + y1 = yv - shift[1] + + x1 = np.clip(x1, 0, w-1) + y1 = np.clip(y1, 0, h-1) + + if x.ndim == 2: + x = interp2d(xv, yv, x)(x1, y1) + if x.ndim == 3: + for i in range(x.shape[-1]): + x[:, :, i] = interp2d(xv, yv, x[:, :, i])(x1, y1) + + return x + + def apply(self, im): + im = cv2.filter2D(im, -1, self.kernel) + return cv2.resize(im, dsize=self.target_size, fx=0., fy=0., interpolation=cv2.INTER_NEAREST) + + +class BicubicScaling(SpatialScaling): + """Bicubic sampling. + """ + def apply(self, im): + return cv2.resize(im, dsize=self.target_size, fx=0., fy=0., interpolation=cv2.INTER_LINEAR) + + +class BilinearScaling(SpatialScaling): + """Bilinear sampling. + """ + def apply(self, im): + return cv2.resize(im, dsize=self.target_size, fx=0., fy=0., interpolation=cv2.INTER_CUBIC) diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/exceptions.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..b0e3c0404ca0e94b5cdec8267ba1d3bf47df5a54 --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/exceptions.py @@ -0,0 +1,34 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class Error(Exception): + """Customize Exception class. + """ + pass + + +class WorldUninitializedError(Error): + pass + + +class SessionUndefinedError(Error): + pass + + +class ArrayDimensionError(Error): + pass + + +class DirectoryNotFoundError(Error): + pass diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/file_io.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/file_io.py new file mode 100644 index 0000000000000000000000000000000000000000..a9b8af3b5f04aa3b12c7ac850c249364ef2c0b6f --- /dev/null +++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/file_io.py @@ -0,0 +1,557 @@ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
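A hedged usage sketch for the SpatialScaling subclasses in scaling.py above (sizes and the kernel sigma are illustrative; note that target_size must be a (W, H) tuple and scales is ordered [Sw, Sh]):

    import numpy as np

    im = np.random.uniform(0., 1., size=(128, 128, 3)).astype(np.float32)
    down = BilinearScaling(target_size=(64, 64))                   # scales is optional here
    up = NearestScaling(target_size=(128, 128), scales=[2., 2.], kernel_width=0.5)
    lr = down.apply(im)                                            # 128x128 -> 64x64
    restored = up.apply(lr)                                        # 64x64 -> 128x128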
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/file_io.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/file_io.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9b8af3b5f04aa3b12c7ac850c249364ef2c0b6f
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/file_io.py
@@ -0,0 +1,557 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import multiprocessing as mp
+import os
+import queue
+import random
+import shutil
+import threading
+import time
+from functools import partial
+from subprocess import Popen, PIPE
+
+import cv2
+import imageio
+import numpy as np
+from PIL import Image
+from src.utils.klass import Singleton
+from src.utils.logger import logger
+from src.utils.constant import VALID_COLORSPACE, IO_BACKEND
+from src.utils.utils import convert_to_dict
+
+
+def imread(x, target_color_space='rgb'):
+    """Wrapped image read function.
+
+    Supports normal SDR png images as well as HDR exr images.
+
+    Args:
+        x: str, image file name.
+        target_color_space: str, the color space the output image should be in.
+
+    Returns:
+        ndarray, an image in the target_color_space.
+    """
+    target_color_space = target_color_space.lower()
+    assert target_color_space in VALID_COLORSPACE
+
+    if x.endswith('.exr'):
+        # read hdr
+        im = cv2.imread(x, cv2.IMREAD_UNCHANGED)
+    else:
+        im = cv2.imread(x)
+
+    # convert to grayscale if required.
+    if target_color_space == 'gray3d':
+        im = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
+        im = cv2.cvtColor(im, cv2.COLOR_GRAY2BGR)
+    elif target_color_space == 'gray':
+        im = im[:,:,0:1]
+
+    # data_format convert
+    if target_color_space in ['bgr', 'gray', 'gray3d']:
+        out = im
+    elif target_color_space == 'rgb':
+        out = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
+    elif target_color_space == 'lab':
+        out = cv2.cvtColor(im, cv2.COLOR_BGR2LAB)
+    elif target_color_space == 'ycrcb':
+        out = cv2.cvtColor(im, cv2.COLOR_BGR2YCrCb)
+    elif target_color_space == 'yuv':
+        out = cv2.cvtColor(im, cv2.COLOR_BGR2YUV)
+    elif target_color_space == 'y':
+        out = cv2.cvtColor(im, cv2.COLOR_BGR2YUV)
+        out = out[:,:,0:1]
+    else:
+        raise ValueError("Unknown target color space: {}.".format(target_color_space))
+
+    return out
+
+
+def imwrite(name, x, source_color_space='rgb', benormalized=True):
+    """Wrapped image write function.
+
+    Supports normal SDR png images as well as HDR exr images.
+
+    Args:
+        name: str, output image file name.
+        x: ndarray, with shape [H, W, C].
+        source_color_space: str, in what color space the source image is.
+        benormalized: boolean, whether the image is normalized.
+    """
+    source_color_space = source_color_space.lower()
+    assert source_color_space in VALID_COLORSPACE
+
+    hdr = name.endswith('.exr')
+    out = image_deprocess(x, source_color_space, benormalized, hdr)
+    if hdr:
+        hdr_image_write(name, out)
+    else:
+        sdr_image_write(name, out)
+
+
+def image_deprocess(x, source_color_space='rgb', benormalized=True, hdr=False):
+    """Image deprocess function.
+
+    Converts the normalized ndarray to an image writable by opencv.
+    """
+    if hdr:
+        return hdr_image_deprocess(x, source_color_space)
+    else:
+        return sdr_image_deprocess(x, source_color_space, benormalized)
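A short round-trip sketch for imread/imwrite above (the file names are illustrative):

    im = imread('frame_0001.png', target_color_space='rgb')   # HWC uint8, decoded BGR then converted
    im_norm = im.astype(np.float32) / 255.                    # normalize to [0, 1]
    imwrite('frame_0001_out.png', im_norm, source_color_space='rgb', benormalized=True)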
+def hdr_image_deprocess(x, source_color_space='rgb'):
+    """Image deprocess function of HDR image.
+
+    HDR images are always normalized; the only processing needed is conversion
+    to the target color space.
+    """
+    if source_color_space == 'rgb':
+        x = x[..., ::-1]
+    elif source_color_space == 'bgr':
+        pass
+    else:
+        raise NotImplementedError(f'HDR output does not support color-spaces other than RGB and BGR.')
+    return x
+
+
+def sdr_image_deprocess(x, source_color_space='rgb', benormalized=True):
+    """Image deprocess function of SDR image.
+
+    Converts the color space to 'bgr' for opencv to write out; denormalizes the
+    data to uint8.
+    """
+    source_color_space = source_color_space.lower()
+    assert source_color_space in VALID_COLORSPACE
+
+    if benormalized and source_color_space not in ['ycrcb', 'yuv', 'y']:
+        x[...] = x[...] * 255
+        x = np.clip(x, 0., 255.)
+
+    if source_color_space in ['bgr', 'gray']:
+        out = x
+    elif source_color_space == 'rgb':
+        out = cv2.cvtColor(x, cv2.COLOR_RGB2BGR, cv2.CV_32F)
+    elif source_color_space in ['lab', 'gray3d']:
+        x[:, :, 0:1] = x[:, :, 0:1] / 2.55
+        x[:, :, 1:3] = x[:, :, 1:3] - 128.
+        out = cv2.cvtColor(x, cv2.COLOR_LAB2BGR, cv2.CV_32F)
+        out[...] = out[...] * 255.
+    elif source_color_space == 'ycrcb':
+        out = cv2.cvtColor(x, cv2.COLOR_YCrCb2BGR, cv2.CV_32F)
+        if benormalized:
+            out = np.clip(out * 255., 0., 255.)
+    elif source_color_space == 'yuv':
+        out = cv2.cvtColor(x, cv2.COLOR_YUV2BGR, cv2.CV_32F)
+        out = np.clip(out * 255, 0., 255.)
+    elif source_color_space == 'y':
+        out = np.clip(x * 255, 0., 255.)
+    else:
+        raise ValueError(f'Unknown source color space: {source_color_space}.')
+
+    out = out.astype(np.uint8)
+    return out
+
+
+def sdr_image_write(name, out):
+    # Just a wrapper.
+    cv2.imwrite(name, out)
+
+
+def hdr_image_write(name, out):
+    # Save as half precision for a smaller file.
+    out = np.maximum(out, 0.)
+    cv2.imwrite(name, out, [cv2.IMWRITE_EXR_TYPE, cv2.IMWRITE_EXR_TYPE_HALF])
+
+class HardDiskImageWriter:
+    """An image writer which saves the image data to files on the hard disk.
+
+    We use a queue and multiple threads to write the images to the hard disk
+    in the background.
+
+    Args:
+        max_num_threads: int, maximum number of the threads to use.
+        max_queue_size: int, maximum size of the queue that holds the data in memory.
+    """
+    def __init__(self, max_num_threads=1, max_queue_size=64):
+        self.queue = queue.Queue(max_queue_size)
+        self.threads_pool = []
+        self.sentinel = (None, None)
+        self.max_num_threads = max_num_threads
+        self.notified = False
+
+    def worker(self):
+        # Thread worker that writes out the images.
+        while True:
+            try:
+                elem = self.queue.get(True)
+                if id(elem) == id(self.sentinel):
+                    self.end()
+                    break
+                target_path, im_data = elem
+                hdr = target_path.endswith('.exr')
+                if hdr:
+                    hdr_image_write(target_path, im_data)
+                else:
+                    sdr_image_write(target_path, im_data)
+            except Exception as e:
+                if not self.notified:
+                    self.notified = True
+                    logger.error(f'Error when writing out images, {e}.')
+                pass
+
+    def __del__(self):
+        # Wait for all the threads to join.
+        for t in self.threads_pool:
+            try:
+                t.join()
+            except:
+                pass
+        logger.info('Processing remaining elements')
+
+        # Post-check whether there are unwritten elements.
+        while True:
+            try:
+                elem = self.queue.get(False)
+                assert id(elem) == id(self.sentinel), '[Warning] Remaining elements in writing queue'
+            except queue.Empty:
+                break
+
+    def put_to_queue(self, target_path, im_data):
+        # Put the target file name and the image data in the queue.
+        self.queue.put((target_path, im_data))
+        # Spawn a new worker thread until the pool reaches max_num_threads.
+        if len(self.threads_pool) < self.max_num_threads:
+            t = threading.Thread(target=self.worker, args=())
+            t.start()
+            self.threads_pool.append(t)
+
+    def end(self):
+        # Put the sentinel in the queue to signal exit.
+        self.queue.put(self.sentinel)
+
+
+# https://github.com/imageio/imageio-ffmpeg/blob/f27b6cb31d4ed3fd436f3a22871b2b63d2384c80/imageio_ffmpeg/_utils.py#L55
+def _popen_kwargs(prevent_sigint=False):
+    startupinfo = None
+    preexec_fn = None
+    creationflags = 0
+    if prevent_sigint:
+        # Prevent propagation of sigint (see #4)
+        # https://stackoverflow.com/questions/5045771
+        preexec_fn = os.setpgrp  # the _pre_exec does not seem to work
+
+    falsy = ("", "0", "false", "no")
+    if os.getenv("IMAGEIO_FFMPEG_NO_PREVENT_SIGINT", "").lower() not in falsy:
+        # Unset preexec_fn to work around a strange hang on fork() (see #58)
+        preexec_fn = None
+
+    return {
+        "startupinfo": startupinfo,
+        "creationflags": creationflags,
+        "preexec_fn": preexec_fn,
+    }
+
+
+class FFMPEGStreamWriter:
+    """An image writer which streams the image data through ffmpeg to a video
+    file.
+
+    Args:
+        video_filename: str, output target video file.
+        fps: str, video fps for encoding.
+        output_param_file: str, codec config file for encoding.
+        output_resolution: list[int], the resolution [H, W] of the output video.
+        source_pix_fmt: str, the pixel format of the input to ffmpeg. When
+            encoding SDR video, it should be `bgr24`; for HDR it should be
+            'gbrpf32le'.
+        ffmpeg_timeout: int, time limit in seconds after which a stuck ffmpeg
+            process is killed.
+    """
+    def __init__(self, video_filename, fps='25',
+                 output_param_file='./configs/codecs/default_x264.json',
+                 output_resolution=None,
+                 source_pix_fmt='bgr24',
+                 ffmpeg_timeout=60):
+
+        if output_resolution is None:
+            raise ValueError('Expect the output resolution, but got None.')
+        else:
+            assert len(output_resolution) == 2
+            # W x H
+            s = f"{output_resolution[1]}x{output_resolution[0]}"
+
+        with open(output_param_file, 'r') as fid:
+            output_params_dict = json.load(fid)
+
+        # Input information.
+        vinput_opts = [
+            '-r', str(fps),
+            '-f', 'rawvideo',
+            '-s', s,
+            '-pix_fmt', source_pix_fmt,
+            '-analyzeduration', str(2147483647),
+            '-probesize', str(2147483647),
+        ]
+        vinput_src = ['-i', '-']
+
+        # Output encoding information.
+        output_params = []
+        vcodec = None
+        bitrate = None
+        pix_fmt = "yuv420p"
+        for k, v in output_params_dict["codec"].items():
+            if k in ["-c:v", "-vcodec"]:
+                vcodec = v
+            elif k == '-bitrate':
+                bitrate = v
+            elif k == '-pix_fmt':
+                pix_fmt = v
+            else:
+                output_params += [k, v]
+        ext = output_params_dict.get("format", 'mp4')
+
+        if bitrate is not None:
+            output_params += ['-bitrate', bitrate]
+        if vcodec is not None:
+            output_params += ['-c:v', vcodec]
+        output_params += ['-pix_fmt', pix_fmt]
+
+        if not video_filename.endswith(ext):
+            video_filename = f'{video_filename}.{ext}'
+
+        self.ffmpeg_timeout = ffmpeg_timeout
+
+        self._basic_cmd = ['ffmpeg', '-y',
+                           *vinput_opts,
+                           *vinput_src,
+                           *output_params,
+                           video_filename,
+                           ]
+
+    def initialize(self):
+        # Use a generator to accept the image data without blocking the main
+        # processing.
+        self.write_gen = self._initialize_gen()
+        assert self.write_gen is not None
+        self.write_gen.send(None)
+        logger.info("Codec command:")
+        logger.info(self._basic_cmd)
+
+    def _initialize_gen(self):
+        # Borrowed from imageio-ffmpeg
+        # https://github.com/imageio/imageio-ffmpeg/blob/master/imageio_ffmpeg/_io.py#L478
+        stop_policy = 'timeout'
+        p = Popen(
+            self._basic_cmd,
+            stdin=PIPE,
+            stdout=None,
+            stderr=None,
+            **_popen_kwargs(prevent_sigint=True)
+        )
+
+        try:
+            while True:
+                frame = yield
+                try:
+                    p.stdin.write(frame)
+                except Exception as err:
+                    msg = (
+                        "{0:}\n\nFFMPEG COMMAND:\n{1:}\n\nFFMPEG STDERR "
+                        "OUTPUT:\n".format(err, self._basic_cmd)
+                    )
+                    stop_policy = "kill"
+                    raise IOError(msg)
+        except GeneratorExit:
+            # Note that GeneratorExit does not inherit from Exception but BaseException
+            # Detect premature closing
+            raise
+        except Exception:
+            # Normal exceptions fall through
+            raise
+        except BaseException:
+            # Detect KeyboardInterrupt / SystemExit: don't wait for ffmpeg to quit
+            stop_policy = "kill"
+            raise
+        finally:
+            if p.poll() is None:
+                try:
+                    p.stdin.close()
+                except Exception as err:  # pragma: no cover
+                    logger.warning("Error while attempting to stop ffmpeg (w): " + str(err))
+
+                if stop_policy == "timeout":
+                    # Wait until timeout; produce a warning and kill if it still exists
+                    try:
+                        etime = time.time() + self.ffmpeg_timeout
+                        while (time.time() < etime) and p.poll() is None:
+                            time.sleep(0.01)
+                    finally:
+                        if p.poll() is None:  # pragma: no cover
+                            logger.warn(
+                                "We had to kill ffmpeg to stop it. "
+                                + "Consider increasing ffmpeg_timeout, "
+                                + "or setting it to zero (no timeout)."
+                            )
+                            p.kill()
+
+                elif stop_policy == "wait":
+                    # Wait forever; kill it if we're interrupted
+                    try:
+                        while p.poll() is None:
+                            time.sleep(0.01)
+                    finally:  # the above can raise e.g. by ctrl-c or systemexit
+                        if p.poll() is None:  # pragma: no cover
+                            p.kill()
+
+                else:  # stop_policy == "kill"
+                    # Just kill it
+                    p.kill()
+            # Just to be safe, wrap in try/except
+            try:
+                p.stdout.close()
+            except Exception:
+                pass
+
+    def put_to_queue(self, target_path, im_data):
+        # target_path won't matter here
+        if im_data.dtype == np.float32:
+            # After the deprocess everything is in bgr color space. HDR data
+            # uses the gbrpf32le pix_fmt: make it [C, H, W] data format and
+            # shift the channels to [g, b, r].
+            im_data = np.transpose(im_data[..., [1,0,2]], [2,0,1])
+        # else it is normal uint8 data: use the bgr24 pix_fmt and do nothing.
+        img_str = im_data.tobytes()
+        self.write_gen.send(img_str)
+
+    def end(self):
+        self.write_gen.close()
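A hedged sketch of driving FFMPEGStreamWriter directly, assuming an ffmpeg binary on PATH and a codec JSON of the form {"codec": {"-c:v": "libx264"}, "format": "mp4"}; the paths, fps and frame data are illustrative:

    import numpy as np

    frames = [np.zeros((720, 1280, 3), dtype=np.uint8) for _ in range(10)]  # BGR frames (assumed)
    writer = FFMPEGStreamWriter('out_video', fps='25',
                                output_param_file='./configs/codecs/default_x264.json',
                                output_resolution=[720, 1280],   # [H, W]
                                source_pix_fmt='bgr24')
    writer.initialize()                    # spawns the ffmpeg subprocess
    for frame in frames:
        writer.put_to_queue('', frame)     # target_path is ignored by this writer
    writer.end()                           # closes stdin and lets ffmpeg finalize the file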
+class ImageWriter:
+    """A top-level class that handles image writing.
+
+    Multiple image writers are supported when writing out. This class contains
+    all the concrete writer instances; it deprocesses the results passed by
+    the engine and feeds them to the writers. Multiple writers can be
+    configured using cfg.inference.io_backend, where the backends are
+    concatenated with ':'. For example, setting:
+
+        cfg.inference.io_backend = 'disk:ffmpeg'
+
+    will use two image writer instances, one HardDiskImageWriter and the other
+    FFMPEGStreamWriter.
+
+    Args:
+        output_dir: str, output top folder.
+        cfg: yacs node, global configuration.
+        source_color_space: str, in what color space the source image is.
+        benormalized: boolean, whether the image is normalized.
+        output_resolution: list[int], the resolution [H, W] of the output video.
+        pix_fmt: str, the pixel format of the input to ffmpeg. When encoding
+            SDR video, it should be `bgr24`; for HDR it should be 'gbrpf32le'.
+    """
+    def __init__(self, output_dir, cfg, benormalized=True,
+                 source_color_space='bgr', output_resolution=None,
+                 pix_fmt='bgr24'
+                 ):
+        io_backends = cfg.inference.io_backend.split(':')
+        for ib in io_backends:
+            IO_BACKEND.CHECK_VALID(ib)
+
+        self.io_backend = io_backends
+        self.cfg = cfg
+
+        self.image_deprocess = partial(image_deprocess,
+                                       source_color_space=source_color_space,
+                                       benormalized=benormalized)
+        self.pix_fmt = pix_fmt
+        self.output_resolution = output_resolution
+        self.root_output_dir = output_dir
+        self.writers = []
+
+        for ib in io_backends:
+            self.add_writers(ib, self.root_output_dir)
+
+    def add_writers(self, io_backend, root_dir=None):
+        # Add specific writers.
+        if root_dir is None:
+            root_dir = self.root_output_dir
+
+        if io_backend == IO_BACKEND.DISK:
+            writer = HardDiskImageWriter(max_num_threads=self.cfg.inference.writer_num_threads,
+                                         max_queue_size=self.cfg.inference.writer_queue_size)
+            output_folder = root_dir
+        elif io_backend == IO_BACKEND.FFMPEG:
+            video_filename = os.path.join(f'{root_dir}_videos', self.cfg.inference.ffmpeg.video_filename)
+            writer = FFMPEGStreamWriter(video_filename=video_filename,
+                                        fps=self.cfg.inference.ffmpeg.fps,
+                                        output_param_file=self.cfg.inference.ffmpeg.codec_file,
+                                        source_pix_fmt=self.pix_fmt,
+                                        output_resolution=self.output_resolution,
+                                        )
+            output_folder = f'{root_dir}_videos'
+        else:
+            raise NotImplementedError(f'{io_backend}')
+
+        # Record both the writer instance and the output folder.
+        self.writers.append([writer, output_folder])
+
+    def initialize(self):
+        # Initialize and create folders if necessary.
+        logger.info(f'Using {self.io_backend} as the io backend.')
+        for writer_id, ib in enumerate(self.io_backend):
+            if ib in [IO_BACKEND.DISK, IO_BACKEND.FFMPEG]:
+                output_folder = self.writers[writer_id][1]
+                logger.info(f'For {ib} backend, the results will be written to {output_folder}')
+                os.makedirs(output_folder, exist_ok=True)
+
+                if ib == IO_BACKEND.FFMPEG:
+                    self.writers[writer_id][0].initialize()
+
+    def finalize(self):
+        # Close the writers.
+        for writer, output_folder in self.writers:
+            writer.end()
+
+    def write_out(self, output_data_dict):
+        output_data_dict = dict(sorted(output_data_dict.items(), key=lambda item: item[0]))
+        # Append image data to the writers after inference.
+        for target_file, data in output_data_dict.items():
+            # without file copy: {output_file_name: ndarray}
+            # with file copy: {output_file_name: [source_file_name, ndarray]}
+            for backend_id, ib in enumerate(self.io_backend):
+                writer, output_folder = self.writers[backend_id]
+                target_file = os.path.join(output_folder, target_file)
+                if isinstance(data, np.ndarray):
+                    # This scenario is only for multi-in single-out models, not including vfi.
+                    writer.put_to_queue(target_file, data)
+                elif isinstance(data, (list, tuple)):
+                    # Mainly used in the vfi or pipeline scenario.
+                    if ib == IO_BACKEND.DISK:
+                        # In the single vfi processing, we use shutil to copy
+                        # the original data instead of writing out from memory
+                        # to disk. We have not yet benchmarked the `writing
+                        # out` strategy.
+                        assert isinstance(data[0], str)
+                        shutil.copy(data[0], target_file)
+                    elif ib == IO_BACKEND.FFMPEG:
+                        # This is used in the ffmpeg stream; the first element
+                        # is the source file name, the second the image data.
+                        assert isinstance(data[1], np.ndarray)
+                        writer.put_to_queue(target_file, data[1])
+                    else:
+                        raise NotImplementedError
+                else:
+                    raise TypeError(f'Expect value `data` to be np.ndarray, or a list of [str, np.ndarray]. '
+                                    f'But given {type(data)}')
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/klass.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/klass.py
new file mode 100644
index 0000000000000000000000000000000000000000..dd15f29284d818583f3d18d2f6ab5a9d7b47396d
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/klass.py
@@ -0,0 +1,45 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def get_subclass_given_name(proto_type, sub_type_name):
+    """Get the subclass type given the name.
+
+    Look through all subclasses and select by type name.
+
+    Args:
+        proto_type: base class.
+        sub_type_name: str, derived class name.
+
+    Returns:
+        derived class type.
+    """
+    subtype = [
+        stp for stp in proto_type.__subclasses__()
+        if stp.__name__ == sub_type_name
+    ]
+    # Raises IndexError if no subclass matches; callers rely on this.
+    return subtype[0]
+
+
+class Singleton(type):
+    """Singleton class type.
+
+    A singleton class will only have one instance.
+    """
+    _instances = {}
+
+    def __call__(cls, *args, **kwargs):
+        if cls not in cls._instances:
+            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
+        return cls._instances[cls]
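The Singleton metaclass above guarantees one instance per class; a small self-contained demonstration:

    class Config(metaclass=Singleton):
        def __init__(self):
            self.values = {}

    a = Config()
    b = Config()
    assert a is b   # the second call returns the cached instance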
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/logger.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/logger.py
new file mode 100644
index 0000000000000000000000000000000000000000..c8cfd4bdb709cb34fbb40ba20a3bd6812ce729ae
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/logger.py
@@ -0,0 +1,91 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import logging
+from src.utils.klass import Singleton
+
+
+class Logger(metaclass=Singleton):
+    """A wrapper class around logging.
+
+    Args:
+        level: enum, logging level. Defaults to logging.INFO.
+
+    Properties:
+        silence: boolean, whether to mute the logger. Typically used in
+            distributed training and inference to silence duplicated messages.
+    """
+    def __init__(self, level=logging.INFO):
+        self.log_formatter = logging.Formatter(
+            "%(asctime)s [%(levelname)-5.5s] %(message)s"
+        )
+        self._logger = logging.getLogger()
+        self._logger.setLevel(level)
+        self._logger.handlers.clear()
+        console_handler = logging.StreamHandler()
+        console_handler.setFormatter(self.log_formatter)
+        self._logger.addHandler(console_handler)
+        # A flag to control whether to output to stdout.
+        # Mainly used for distributed training, where only the root node will
+        # record the message.
+        self._silence = False
+
+    def add_log_file(self, log_file):
+        """Add a file handler.
+
+        Args:
+            log_file: str, external log file to write out.
+        """
+        if log_file is not None and log_file != '':
+            real_path = os.path.split(os.path.realpath(log_file))[0]
+            os.makedirs(real_path, exist_ok=True)
+            file_handler = logging.FileHandler(log_file)
+            file_handler.setFormatter(self.log_formatter)
+            self._logger.addHandler(file_handler)
+            self._logger.info(f'Log file: {log_file}')
+
+    @property
+    def silence(self):
+        return self._silence
+
+    @silence.setter
+    def silence(self, switch):
+        self._silence = switch
+
+    def info(self, message, force=False):
+        if not self._silence or force:
+            self._logger.info(message)
+
+    def warn(self, message, force=False):
+        if not self._silence or force:
+            self._logger.warning(message)
+
+    def error(self, message, force=False):
+        if not self._silence or force:
+            self._logger.error(message)
+
+    def fatal(self, message, force=False):
+        if not self._silence or force:
+            self._logger.fatal(message)
+
+    def debug(self, message, force=False):
+        if not self._silence or force:
+            self._logger.debug(message)
+
+    def warning(self, message, force=False):
+        self.warn(message, force)
+
+
+logger = Logger()
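Typical use of the singleton logger above (the log path is illustrative):

    logger.add_log_file('./log/train.log')       # log to a file in addition to stdout
    logger.silence = True                        # mute duplicated messages on non-root ranks
    logger.info('this is dropped')               # suppressed while silenced
    logger.info('this is printed', force=True)   # force bypasses the silence flag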
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/moving_avg.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/moving_avg.py
new file mode 100644
index 0000000000000000000000000000000000000000..0884c7f7a972369a2c9ef5615fe0f340df27c5c4
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/moving_avg.py
@@ -0,0 +1,57 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+class MovingAvg:
+    """Class to record buffered running statistics.
+
+    Args:
+        smooth: float, a scalar in [0, 1] to smooth the statistics.
+
+    Attributes:
+        sum: summation of historical data.
+        avg: average of historical data.
+        smooth_avg: smoothed average of historical data.
+        count: total number of historical data records.
+        cur_val: current data.
+
+    Raises:
+        ValueError, when smooth is not in [0, 1].
+    """
+    def __init__(self, smooth=0.9):
+        if not (0. <= smooth <= 1.):
+            raise ValueError(f'Smooth value should be in [0, 1], '
+                             f'but is given {smooth}.')
+        self.smooth = smooth
+        self.clear()
+
+    def update(self, val):
+        """Update statistics.
+        """
+        self.cur_val = val
+        self.count += 1
+        self.sum += val
+        self.avg = self.sum / self.count
+        if self.count == 1:
+            self.smooth_avg = val
+        else:
+            self.smooth_avg = self.smooth * self.smooth_avg + (1. - self.smooth) * val
+
+    def clear(self):
+        """Clear all historical data.
+        """
+        self.sum = 0.
+        self.avg = 0.
+        self.smooth_avg = 0.
+        self.count = 0
+        self.cur_val = 0
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/transform/__init__.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/transform/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..66f09226024d6b39187234c1231a83d1dc8a427e
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/transform/__init__.py
@@ -0,0 +1,69 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .joint_transforms import *
+from src.utils.klass import get_subclass_given_name
+from src.utils.logger import logger
+
+
+class Compose(object):
+    """Composes several joint transforms together.
+
+    Args:
+        transforms: list[Transform], list of joint transforms to compose.
+    """
+    def __init__(self, transforms=None):
+        self.transforms = transforms
+
+    @classmethod
+    def from_cfgs(cls, options, **kwargs):
+        """Construct the augmentation pipeline from a cfg dict.
+
+        Args:
+            options: dict, pairs of {Transform_class_type: kwargs}.
+            kwargs: dict, additional kwargs.
+
+        Returns:
+            A composed transform instance.
+        """
+
+        t = []
+        for k, v in options.items():
+            if k == 'RandomCrop':
+                # crop_size and scales are required terms
+                v['crop_size'] = kwargs['crop_size']
+                v['scales'] = kwargs['scales']
+            elif k == 'Scaling':
+                v['scales'] = kwargs['scales']
+
+            try:
+                _filter = get_subclass_given_name(_Transform, k)
+            except IndexError:
+                logger.error(f'Cannot find transform type {k}.')
+                raise ValueError(f'Cannot find transform type {k}.')
+
+            t.append(_filter(**v))
+        return cls(t)
+
+    def __call__(self, *img):
+        for t in self.transforms:
+            img = t(*img)
+        return img
+
+    def __repr__(self):
+        format_string = self.__class__.__name__ + '('
+        for t in self.transforms:
+            format_string += '\n'
+            format_string += '    {0}'.format(t)
+        format_string += '\n)'
+        return format_string
\ No newline at end of file
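A hedged sketch of building a pipeline with Compose.from_cfgs; the keys must match class names in joint_transforms.py below, and the paired inputs are assumed to be DHWC ndarrays:

    options = {
        'RandomFlipLeftRight': {'input_dim': 4},
        'RandomTemporalReverse': {},
        'RandomCrop': {'input_dim': 4},   # crop_size/scales are injected by from_cfgs
    }
    pipeline = Compose.from_cfgs(options, crop_size=(64, 64), scales=(1, 4))
    lr_aug, gt_aug = pipeline(lr, gt)     # lr/gt: paired DHWC arrays (assumed)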
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/transform/joint_transforms.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/transform/joint_transforms.py
new file mode 100644
index 0000000000000000000000000000000000000000..4589a01316ca80c42f28ddd23c17c4d2785410ad
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/transform/joint_transforms.py
@@ -0,0 +1,362 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import cv2
+import numpy as np
+import random
+import collections
+
+
+__all__ = ['_Transform', 'RandomTemporalReverse', 'RandomFlipUpDown',
+           'RandomFlipLeftRight', 'Scale', 'Resize', 'RandomCrop',
+           'RandomDropChrominanceChannel', 'TempDistCrop', 'RandomSized',
+           'RandomReverseColorChannel'
+]
+
+
+class _Transform(object):
+    """Base transform class.
+    """
+    def __init__(self, input_dim=4):
+        self.input_dim = input_dim
+
+
+class RandomTemporalReverse(_Transform):
+    """Random temporal reverse transform.
+
+    This transform will reverse the multi-frame order.
+    """
+    def __call__(self, *imgs):
+        if random.random() < 0.5:
+            imgs = [item[::-1] for item in imgs]
+        return imgs
+
+
+class RandomFlipUpDown(_Transform):
+    """Random up-down flip transform.
+
+    This transform will randomly flip the frames upside-down.
+    """
+    def __init__(self, input_dim=3):
+        super().__init__(input_dim)
+        if input_dim == 3:
+            # HWC
+            self.fn = lambda x: x[::-1]
+        elif input_dim == 4:
+            # BHWC or DHWC
+            self.fn = lambda x: x[:, ::-1]
+        elif input_dim == 5:
+            # BDHWC
+            self.fn = lambda x: x[:, :, ::-1]
+        else:
+            raise NotImplementedError
+
+    def __call__(self, *imgs):
+        if random.random() < 0.5:
+            imgs = [self.fn(item) for item in imgs]
+        return imgs
+
+
+class RandomFlipLeftRight(_Transform):
+    """Random left-right flip transform.
+
+    This transform will randomly flip the frames left-right.
+    """
+    def __init__(self, input_dim=3):
+        super().__init__(input_dim)
+        if input_dim == 3:
+            # HWC
+            self.fn = lambda x: x[:, ::-1]
+        elif input_dim == 4:
+            # BHWC or DHWC
+            self.fn = lambda x: x[:, :, ::-1]
+        elif input_dim == 5:
+            # BDHWC
+            self.fn = lambda x: x[:, :, :, ::-1]
+        else:
+            raise NotImplementedError
+
+    def __call__(self, *imgs):
+        if random.random() < 0.5:
+            imgs = [self.fn(item) for item in imgs]
+        return imgs
+
+
+def _resize(img, new_size, input_dim, interpolation=cv2.INTER_LINEAR):
+    """Basic resize function.
+    """
+    if img.shape[-1] == 1:
+        expand = True
+    else:
+        expand = False
+    if input_dim == 3:
+        img = cv2.resize(img, new_size, interpolation=interpolation)
+    elif input_dim == 4:
+        img = [cv2.resize(item, new_size, interpolation=interpolation) for item in img]
+        img = np.stack(img, axis=0)
+    else:
+        raise ValueError('Resize: image dimension must be in [3, 4]')
+
+    if expand:
+        img = np.expand_dims(img, -1)
+
+    return img
+
+
+class Scale(_Transform):
+    """Resize the inputs given the scale.
+    """
+    def __init__(self, input_dim, scales, interpolations=None):
+        super().__init__(input_dim)
+        self.scale = scales
+        self.interpolations = interpolations
+
+    def __call__(self, *imgs):
+        h, w = imgs[0].shape[-3:-1]
+        ow = int(self.scale * w)
+        oh = int(self.scale * h)
+        if self.interpolations is None:
+            imgs = [_resize(item, (ow, oh), self.input_dim)
+                    for item in imgs]
+        else:
+            imgs = [_resize(item, (ow, oh), self.input_dim, interpolation=interpolation)
+                    for item, interpolation in zip(imgs, self.interpolations)]
+        return imgs
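The flip transforms above apply one coin toss jointly to all inputs, so paired data stays aligned; for instance (shapes illustrative):

    flip = RandomFlipLeftRight(input_dim=4)   # DHWC inputs
    lr, gt = flip(np.zeros((5, 32, 32, 3)), np.zeros((5, 128, 128, 3)))
    # Either both lr and gt are flipped, or neither is.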
+class Resize(_Transform):
+    """Resize the inputs given the target size.
+    """
+    def __init__(self, input_dim, size, interpolations=None):
+        super().__init__(input_dim)
+        self.size = size
+        self.interpolations = interpolations
+
+    def __call__(self, *imgs):
+        new_size = tuple([self.size, self.size])
+        if self.interpolations is None:
+            imgs = [_resize(item, new_size, self.input_dim)
+                    for item in imgs]
+        else:
+            imgs = [_resize(item, new_size, self.input_dim, interpolation=interpolation)
+                    for item, interpolation in zip(imgs, self.interpolations)]
+        return imgs
+
+
+class RandomCrop(_Transform):
+    """Randomly crop the images into patches.
+
+    This transform handles multiple input arrays, i.e. [lr_array, lr2_array,
+    ..., hd_array], and produces the corresponding crop of each input.
+    Therefore, it accepts a base crop_size together with per-input scales of
+    that base size, which determine the expected output patch size of each
+    input array.
+
+    For example, in 4x super resolution we may want the training input to have
+    size [64, 64] and the output size [256, 256]. When cropping the paired lr
+    and gt data, the corresponding regions of lr and gt have to be cropped.
+    We can use a RandomCrop transform with:
+
+    Example:
+        >>> lr, gt = get_data()  # suppose 4D tensors of data format DHWC
+        >>> tr = RandomCrop(input_dim=4, crop_size=(64, 64), scales=(1, 4))
+        >>> lr_crop, gt_crop = tr(lr, gt)
+
+    Args:
+        input_dim: int, dimension of each input.
+        crop_size: list[int], the base size [H, W] of the patch.
+        scales: list[int], the scales of the crop for each input.
+        bbox: list[int], the region [H_start, H_end, W_start, W_end] within
+            which the crop origin is sampled; defaults to the full image.
+    """
+    def __init__(self, input_dim, crop_size, scales, bbox=None):
+        super().__init__(input_dim)
+        # Notice, this transformation is always based on the first element of the images
+        self._h = crop_size[0]
+        self._w = crop_size[1]
+        self.scales = scales
+        self.bbox = bbox
+
+    def crop(self, im, ymin, xmin, ymax, xmax):
+        if self.input_dim == 3:
+            # HWC
+            return im[ymin:ymax, xmin:xmax]
+        elif self.input_dim == 4:
+            # DHWC or BHWC
+            return im[:, ymin:ymax, xmin:xmax]
+        elif self.input_dim == 5:
+            # BDHWC
+            return im[:, :, ymin:ymax, xmin:xmax]
+
+    def __call__(self, *imgs):
+        assert len(self.scales) == len(imgs)
+        h, w = imgs[0].shape[-3:-1]
+        if self.bbox is None:
+            h_st, h_ed, w_st, w_ed = 0, h, 0, w
+        else:
+            h_st, h_ed, w_st, w_ed = self.bbox
+        xmin = random.randint(w_st, w_ed - self._w * self.scales[0])
+        ymin = random.randint(h_st, h_ed - self._h * self.scales[0])
+
+        augs = []
+        for scale, im in zip(self.scales, imgs):
+            y_st = ymin * scale // self.scales[0]
+            y_ed = y_st + self._h * scale
+            x_st = xmin * scale // self.scales[0]
+            x_ed = x_st + self._w * scale
+
+            patch = self.crop(im, y_st, x_st, y_ed, x_ed)
+            assert patch.shape[-3:-1] == (self._h * scale, self._w * scale), \
+                f'Expect cropped patch to have size {(self._h * scale, self._w * scale)},' \
+                f' but got {patch.shape[-3:-1]} (might be out of range). ' \
+                f'For information, im has shape {im.shape}, crop range y: {y_st}-{y_ed}, x: {x_st}-{x_ed}.'
+            augs.append(patch)
+        return augs
+class RandomDropChrominanceChannel(_Transform):
+    """Randomly drop the chrominance channels. The luminance channel is replicated.
+    """
+    def _to_grayscale_3channel(self, x):
+        single_x = x[..., 0:1]
+        return np.concatenate([single_x, single_x, single_x], axis=-1)
+
+    def __call__(self, *imgs):
+        if random.random() < 0.5:
+            imgs = [self._to_grayscale_3channel(item) for item in imgs]
+        return imgs
+
+
+class TempDistCrop(_Transform):
+    """Crop video frames with disturbed bboxes along the temporal dimension.
+    """
+    def __init__(self, input_dim, crop_size, scales, dist=0.01, no_padding=True, crop_range=None):
+        super(TempDistCrop, self).__init__(input_dim)
+        self._h = crop_size[0]
+        self._w = crop_size[1]
+        self.dist = dist
+        self.scales = scales
+        self.no_padding = no_padding
+        self.crop_range = crop_range
+
+    def crop(self, imgs, T, ymins, xmins, ymaxs, xmaxs):
+        assert self.input_dim == 4
+        if imgs.shape[0] == 1:
+            # Single frame: use the bbox of the temporal center frame.
+            c_idx = T//2
+            return imgs[:, ymins[c_idx]:ymaxs[c_idx], xmins[c_idx]:xmaxs[c_idx]]
+        else:
+            res = []
+            for i in range(T):
+                res.append(imgs[i, ymins[i]:ymaxs[i], xmins[i]:xmaxs[i]])
+
+            return np.stack(res, axis=0)
+
+    def pos_disturbe(self, T, ymin, xmin, H, W):
+        # Jitter the crop origin per frame within +/- dist of the patch size.
+        xmins, ymins = [], []
+        x_bias, y_bias = np.random.random(size=T), np.random.random(size=T)
+        x_bias = ((x_bias*2-1)*self.dist * self._w).astype(int)
+        y_bias = ((y_bias*2-1)*self.dist * self._h).astype(int)
+        for i in range(T):
+            xmins.append(np.clip(xmin+x_bias[i], 0, W - self._w))
+            ymins.append(np.clip(ymin+y_bias[i], 0, H - self._h))
+
+        return np.array(xmins), np.array(ymins)
+
+    def _pad(self, imgs):
+        h, w = imgs.shape[-3:-1]
+        if self.no_padding:
+            if w < self._w or h < self._h:
+                if w < h:
+                    ow = self._w
+                    oh = int(self._w * h / w)
+                else:
+                    oh = self._h
+                    ow = int(self._h * w / h)
+                if self.input_dim == 3:
+                    imgs = cv2.resize(imgs, (ow, oh))
+                elif self.input_dim == 4:
+                    imgs = [cv2.resize(item, (ow, oh)) for item in imgs]
+                else:
+                    raise ValueError('TempDistCrop: image dimension must be in [3, 4]')
+        else:
+            pad_h = max(self._h - h, 0)
+            pad_w = max(self._w - w, 0)
+            if self.input_dim == 3:
+                imgs = np.pad(imgs, ((0, pad_h), (0, pad_w), (0, 0)), 'constant')
+            elif self.input_dim == 4:
+                imgs = [np.pad(item, ((0, pad_h), (0, pad_w), (0, 0)), 'constant')
+                        for item in imgs]
+            else:
+                raise ValueError('TempDistCrop: image dimension must be in [3, 4]')
+        return imgs
+
+    def __call__(self, *imgs):
+        assert len(self.scales) == len(imgs)
+        # imgs = [self._pad(item) for item in imgs]
+        h, w = imgs[0].shape[-3:-1]
+        # Keep a fixed border at the top and bottom when sampling the crop.
+        bedge = 140
+        if self.crop_range is None:
+            # h_st, h_ed, w_st, w_ed = 0, h, 0, w
+            h_st, h_ed, w_st, w_ed = bedge, h-bedge, 0, w
+        else:
+            h_st, h_ed, w_st, w_ed = self.crop_range
+        xmin = random.randint(w_st, w_ed - self._w * self.scales[0])
+        ymin = random.randint(h_st, h_ed - self._h * self.scales[0])
+        T = imgs[0].shape[0]
+        xmins, ymins = self.pos_disturbe(T, ymin, xmin, h, w)
+
+        augs = []
+        for scale, im in zip(self.scales, imgs):
+            y_st = ymins * scale
+            y_ed = (ymins + self._h) * scale
+            x_st = xmins * scale
+            x_ed = (xmins + self._w) * scale
+
+            augs.append(self.crop(im, T, y_st, x_st, y_ed, x_ed))
+        return augs
+class RandomSized(_Transform):
+    """Randomly resize the input with a scale sampled from a range.
+    """
+    def __init__(self, input_dim=3, scale=(0.8, 1.2), interpolations=None):
+        super().__init__(input_dim)
+        self.scale = scale
+        self.interpolations = interpolations
+
+    def __call__(self, *imgs):
+        org_h, org_w = imgs[0].shape[-3:-1]
+        r = random.uniform(self.scale[0], self.scale[1])
+        w = int(r * org_w)
+        h = int(r * org_h)
+
+        new_size = tuple([w, h])
+        if self.interpolations is None:
+            imgs = [_resize(item, new_size, self.input_dim, interpolation=cv2.INTER_LINEAR)
+                    for item in imgs]
+        else:
+            imgs = [_resize(item, new_size, self.input_dim, interpolation=interpolation)
+                    for item, interpolation in zip(imgs, self.interpolations)]
+        return imgs
+
+
+class RandomReverseColorChannel(_Transform):
+    """Randomly reverse the order of the color channels.
+    """
+    def __call__(self, *imgs):
+        if random.random() < 0.5:
+            imgs = [item[..., ::-1] for item in imgs]
+        return imgs
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/utils.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..d96e6be70424cbcfdc8ff812f41f6abe95779202
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/utils.py
@@ -0,0 +1,90 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from itertools import chain
+import numpy as np
+from yacs.config import CfgNode, _VALID_TYPES, _assert_with_logging, _valid_type
+
+
+def to_pair(x, num_reps):
+    """Replicate an int into a list of copies.
+
+    Args:
+        x: int, or a list/tuple already of length num_reps.
+        num_reps: int, number of replicates of the input x.
+
+    Return:
+        list[int], where each value is a copy of the input x.
+    """
+    if isinstance(x, list) or isinstance(x, tuple):
+        assert len(x) == num_reps
+    elif isinstance(x, int):
+        x = [x] * num_reps
+    else:
+        raise ValueError
+    return x
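A brief example of to_pair:

    to_pair(3, 2)       # -> [3, 3]
    to_pair([3, 5], 2)  # -> [3, 5] (the length is only validated)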
+def convert_to_dict(cfg_node, key_list):
+    """Convert a yacs node to a dict.
+
+    Usage:
+        # a is a yacs node
+        a_as_dict = convert_to_dict(a, [])
+
+    Args:
+        cfg_node: a yacs node.
+        key_list: list[str], the accumulated key path; pass [] at the top level.
+
+    Return:
+        dict, a dict version of the config node.
+    """
+    if not isinstance(cfg_node, CfgNode):
+        _assert_with_logging(
+            _valid_type(cfg_node),
+            "Key {} with value {} is not a valid type; valid types: {}".format(
+                ".".join(key_list), type(cfg_node), _VALID_TYPES
+            ),
+        )
+        return cfg_node
+    else:
+        cfg_dict = dict(cfg_node)
+        for k, v in cfg_dict.items():
+            cfg_dict[k] = convert_to_dict(v, key_list + [k])
+        return cfg_dict
+
+
+def convert_dict_to_list(cfg_dict, prefix_key=None):
+    """Convert a nested dict to a flat [key, value, key, value, ...] list.
+    """
+    # Treat an empty-string prefix the same as no prefix.
+    if prefix_key == '':
+        prefix_key = None
+
+    cfg_list = []
+
+    for k, v in cfg_dict.items():
+        cur_key = f'{prefix_key}.{k}' if prefix_key is not None else k
+        if isinstance(v, dict):
+            cfg_list_sub = convert_dict_to_list(v, prefix_key=cur_key)
+            cfg_list.extend(cfg_list_sub)
+            # new_keys = [f'{k}.{sub_k}' for sub_k in cfg_list_sub[::2]]
+            # values = cfg_list_sub[1::2]
+            # cfg_list.extend(list(chain(*zip(new_keys, values))))
+        elif isinstance(v, (list, tuple)):
+            cfg_list.extend([cur_key, f'{v}'])
+        else:
+            cfg_list.extend([cur_key, v])
+            # cfg_list.extend([cur_key, f'{v}'])
+    return cfg_list
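The two converters above are typically chained to turn a yacs node into a flat merge list (a hedged sketch; the keys are illustrative):

    cfg_dict = {'train': {'lr': 0.001, 'steps': [100, 200]}, 'seed': 1}
    opts = convert_dict_to_list(cfg_dict)
    # -> ['train.lr', 0.001, 'train.steps', '[100, 200]', 'seed', 1]
    # The result can then be fed to yacs, e.g. cfg.merge_from_list(opts).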
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/world.py b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/world.py
new file mode 100644
index 0000000000000000000000000000000000000000..73b92950595022dc24273752f1f83c0a7b5ce502
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/src/utils/world.py
@@ -0,0 +1,174 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import inspect
+import os
+import shutil
+
+from .exceptions import WorldUninitializedError
+from .logger import logger
+
+
+
+def _setup_npu_env(remove_kernel_meta=True, device_id=None, rank_id=None, rank_size=None):
+    """Setup NPU environment variables.
+    """
+    os.environ['FUSION_TENSOR_SIZE'] = '20000000'
+    os.environ['JOB_ID'] = '12345678'
+    os.environ['MOX_USE_NPU'] = '1'
+    os.environ['MOX_USE_TDT'] = '1'
+    os.environ['MOX_USE_TF_ESTIMATOR'] = '0'
+    os.environ['HEARTBEAT'] = '1'
+    os.environ['CONTINUE_TRAIN'] = 'true'
+    os.environ['LOG_DIR'] = './log'
+    os.environ['ASCEND_GLOBAL_EVENT_LEVEL'] = '0'
+    os.environ['ASCEND_GLOBAL_EVENT_ENABLE'] = '0'
+    os.environ['ASCEND_GLOBAL_LOG_LEVEL'] = '3'
+    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
+
+    if device_id is not None:
+        os.environ['DEVICE_ID'] = str(device_id)
+        os.environ['ASCEND_DEVICE_ID'] = str(device_id)
+    if rank_id is not None:
+        os.environ['RANK_ID'] = str(rank_id)
+    if rank_size is not None:
+        os.environ['RANK_SIZE'] = str(rank_size)
+
+
+class World:
+    """A class to keep all the cluster information.
+
+    This class controls how distributed training and inference are organized
+    when running on multiple devices.
+
+    Args:
+        root_rank_id: int, the root node of the cluster.
+
+    Properties:
+        is_initialized: a boolean flag to indicate whether the cluster
+            information is initialized.
+        device_type: the type of the devices in the cluster.
+        device_id: the physical index of the device used.
+        rank_size: the number of the devices used in the cluster.
+        rank_id: the index of the device used in the cluster, in the range
+            [0, rank_size).
+        is_root_rank: a boolean value indicating whether this device is the
+            root node. Only the root node prints messages and saves
+            checkpoints during training.
+    """
+    def __init__(self, root_rank_id=0):
+        self.root_rank_id = root_rank_id
+        self._device_id = None
+        self._rank_id = None
+        self._rank_size = None
+        self._device_type = None
+
+        self._initialized = False
+
+    def initialize(self, device_type, device_id=None,
+                   rank_id=None, rank_size=None, setup_npu_env=True):
+        """Initialize cluster information by environment variables or the input.
+        """
+        if device_id is None or rank_id is None or rank_size is None:
+            self.init_by_environ()
+        else:
+            self._device_id = int(device_id)
+            self._rank_id = int(rank_id)
+            self._rank_size = int(rank_size)
+
+        # initialize some other env
+        if setup_npu_env:
+            _setup_npu_env(remove_kernel_meta=True,
+                           device_id=device_id,
+                           rank_id=rank_id,
+                           rank_size=rank_size)
+
+        self._device_type = device_type
+
+        if self._rank_size == 1:
+            # Force the single device as root_rank
+            self._rank_id = 0
+
+        self._initialized = True
+
+    @property
+    def is_initialized(self):
+        return self._initialized
+
+    def init_by_environ(self):
+        """Initialize cluster using environment variables.
+        """
+        try:
+            self._device_id = int(os.environ['DEVICE_ID'])
+        except KeyError:
+            logger.error("Environ 'DEVICE_ID' not defined. Use default value DEVICE_ID=0.")
+            self._device_id = 0
+        except ValueError:
+            logger.error(f"Environ 'DEVICE_ID' {os.environ['DEVICE_ID']} cannot be converted to int. "
+                         "Use default value DEVICE_ID=0.")
+            self._device_id = 0
+
+        try:
+            self._rank_id = int(os.environ['RANK_ID'])
+        except KeyError:
+            logger.error("Environ 'RANK_ID' not defined. Use default value RANK_ID=0.")
+            self._rank_id = 0
+        except ValueError:
+            logger.error(f"Environ 'RANK_ID' {os.environ['RANK_ID']} cannot be converted to int. "
+                         "Use default value RANK_ID=0.")
+            self._rank_id = 0
+
+        try:
+            self._rank_size = int(os.environ['RANK_SIZE'])
+        except KeyError:
+            logger.error("Environ 'RANK_SIZE' not defined. Use default value RANK_SIZE=1.")
+            self._rank_size = 1
+        except ValueError:
+            logger.error(f"Environ 'RANK_SIZE' {os.environ['RANK_SIZE']} cannot be converted to int. "
+                         "Use default value RANK_SIZE=1.")
+            self._rank_size = 1
+
+        self._initialized = True
+
+    @property
+    def device_type(self):
+        if self._device_type is None:
+            raise WorldUninitializedError('World not initialized.')
+        return self._device_type
+
+    @property
+    def device_id(self):
+        if self._device_id is None:
+            raise WorldUninitializedError('World not initialized.')
+        return self._device_id
+
+    @property
+    def rank_id(self):
+        if self._rank_id is None:
+            raise WorldUninitializedError('World not initialized.')
+        return self._rank_id
+
+    @property
+    def rank_size(self):
+        if self._rank_size is None:
+            raise WorldUninitializedError('World not initialized.')
+        return self._rank_size
+
+    @property
+    def is_root_rank(self):
+        if self._rank_id is None:
+            raise WorldUninitializedError('World not initialized.')
+        return self._rank_id == self.root_rank_id
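A hedged initialization sketch for World (the values are illustrative; on a real cluster they normally come from the DEVICE_ID/RANK_ID/RANK_SIZE environment variables):

    w = World()
    w.initialize('npu', device_id=0, rank_id=0, rank_size=1)
    if w.is_root_rank:
        logger.info(f'Running on {w.device_type}, rank {w.rank_id}/{w.rank_size}')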
+# Global instance.
+world = World()
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/test/train_full_1p.sh b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/test/train_full_1p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..39f41a129d5b4d5e1a6a69b99fe67714ad7639d9
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/test/train_full_1p.sh
@@ -0,0 +1,129 @@
+#!/bin/bash
+# source env.sh
+# Current path, no modification needed
+cur_path=`pwd`
+export ASCEND_SLOG_PRINT_TO_STDOUT=1
+# Collective communication parameters, no modification needed
+
+export RANK_SIZE=1
+export JOB_ID=10087
+export DEVICE_ID=$ASCEND_DEVICE_ID
+RANK_ID_START=0
+
+
+# Dataset path, keep empty, no modification needed
+data_path=""
+
+# Set the default log level, no modification needed
+# export ASCEND_GLOBAL_LOG_LEVEL=3
+
+# Basic parameters, modify after model review
+# Network name, same as the directory name
+Network="AscendVideo_EDVR_ID3085_for_Tensorflow"
+# Training batch_size
+batch_size=16
+# Output directory
+output_dir='test/output'
+
+# Help message, no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_full_1p.sh "
+    echo " "
+    echo "parameter explain:
+    --data_path            source data of training
+    -h/--help              show help message
+    "
+    exit 1
+fi
+
+# Parameter validation, no modification needed
+for para in $*
+do
+    if [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    fi
+done
+
+# Check that data_path has been provided, no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be configured"
+    exit 1
+fi
+
+# Training start time, no modification needed
+start_time=$(date +%s)
+cd $cur_path/../
+# Enter the training script directory, modify after model review
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables, no modification needed
+    echo "Device ID: $ASCEND_DEVICE_ID"
+    export RANK_ID=$RANK_ID
+
+    # Create the DeviceID output directory, no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    fi
+
+    # Run the training script; the following arguments need no modification, others require model review
+    #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune
+    nohup python3.7 $cur_path/../src/main.py \
+        --config-file $cur_path/../configs/models/edvr_config.py \
+        train.checkpoint_interval 5000 \
+        train.print_interval 100 \
+        data.data_dir ${data_path} \
+        env.rank_size 1 \
+        train.output_dir ${output_dir} \
+        train.generator.lr_schedule.total_steps 150000,150000,150000,150000 > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+
+done
+wait
+
+# Training end time, no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Print results, no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS, modify after model review
+FPS=`grep 'fps:' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log| awk -F "fps:" '{print $2}' | awk -F "," '{print $1}' | tail -n +2| awk '{sum+=$1} END {print sum/NR}'`
+# Print, no modification needed
+echo "Final Performance images/sec : $FPS"
+
+# Output training accuracy, modify after model review
+#train_accuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'`
+# Print, no modification needed
+#echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Summary of stability/accuracy monitoring results
+# Training case information, no modification needed
+BatchSize=${batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+## Collect performance data
+# Throughput, no modification needed
+ActualFPS=${FPS}
+# Training time per iteration, no modification needed
+TrainingTime=`echo "scale=2;${batch_size} * ${RANK_SIZE} * 1000 / ${FPS}"|bc`
+
+# Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt, review per model
+grep 'Step:' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F "loss_total:" '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss of the last iteration, no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information into ${CaseName}.log, no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
\ No newline at end of file
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/test/train_performance_1p.sh b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/test/train_performance_1p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..915c060786af2837b076225062858beaeaf16b6f
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/test/train_performance_1p.sh
@@ -0,0 +1,128 @@
+#!/bin/bash
+# source env.sh
+# Current path, no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters, no modification needed
+
+export RANK_SIZE=1
+export JOB_ID=10087
+export DEVICE_ID=$ASCEND_DEVICE_ID
+RANK_ID_START=0
+
+
+# Dataset path, keep empty, no modification needed
+data_path=""
+
+# Set the default log level, no modification needed
+export ASCEND_GLOBAL_LOG_LEVEL=3
+
+# Basic parameters, modify after model review
+# Network name, same as the directory name
+Network="AscendVideo_EDVR_ID3085_for_Tensorflow"
+# Training batch_size
+batch_size=16
+# Output directory
+output_dir='test/output'
+
+# Help message, no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_performance_1p.sh "
+    echo " "
+    echo "parameter explain:
+    --data_path            source data of training
+    -h/--help              show help message
+    "
+    exit 1
+fi
+
+# Parameter validation, no modification needed
+for para in $*
+do
+    if [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    fi
+done
+
+# Check that data_path has been provided, no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be configured"
+    exit 1
+fi
+
+# Training start time, no modification needed
+start_time=$(date +%s)
+cd $cur_path/../
+# Enter the training script directory, modify after model review
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables, no modification needed
+    echo "Device ID: $ASCEND_DEVICE_ID"
+    export RANK_ID=$RANK_ID
+
+    # Create the DeviceID output directory, no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    fi
+
+    # Run the training script; the following arguments need no modification, others require model review
+    #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune
+    nohup python3.7 $cur_path/../src/main.py \
+        --config-file $cur_path/../configs/models/edvr_config.py \
+        train.checkpoint_interval 5000 \
+        train.print_interval 10 \
+        data.data_dir ${data_path} \
+        env.rank_size 1 \
+        train.output_dir ${output_dir} \
+        train.generator.lr_schedule.total_steps 10000, > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
diff --git a/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/test/train_performance_1p.sh b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/test/train_performance_1p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..915c060786af2837b076225062858beaeaf16b6f
--- /dev/null
+++ b/TensorFlow/built-in/cv/Video_enhancement/AscendVideo_EDVR_ID3085_for_Tensorflow/test/train_performance_1p.sh
@@ -0,0 +1,128 @@
+#!/bin/bash
+# source env.sh
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+
+export RANK_SIZE=1
+export JOB_ID=10087
+export DEVICE_ID=$ASCEND_DEVICE_ID
+RANK_ID_START=0
+
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Set the default log level; no modification needed
+export ASCEND_GLOBAL_LOG_LEVEL=3
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="AscendVideo_EDVR_ID3085_for_Tensorflow"
+# Training batch_size
+batch_size=16
+# Training output directory
+output_dir='test/output'
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_performance_1p.sh --data_path=<path>"
+    echo " "
+    echo "parameter explain:
+    --data_path            source data of training
+    -h/--help              show help message
+    "
+    exit 1
+fi
+
+# Argument parsing; no modification needed
+for para in $*
+do
+    if [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    fi
+done
+
+# Check that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be configured"
+    exit 1
+fi
+
+# Training start time; no modification needed
+start_time=$(date +%s)
+cd $cur_path/../
+# Enter the training script directory; review and modify per model
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $ASCEND_DEVICE_ID"
+    export RANK_ID=$RANK_ID
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    fi
+
+    # Run the training script; the arguments below need no modification, others should be reviewed per model
+    #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune
+    nohup python3.7 $cur_path/../src/main.py \
+        --config-file $cur_path/../configs/models/edvr_config.py \
+        train.checkpoint_interval 5000 \
+        train.print_interval 10 \
+        data.data_dir ${data_path} \
+        env.rank_size 1 \
+        train.output_dir ${output_dir} \
+        train.generator.lr_schedule.total_steps 10000, > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Print results; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review and modify per model
+FPS=`grep 'fps:' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log| awk -F "fps:" '{print $2}' | awk -F "," '{print $1}' | tail -n +2| awk '{sum+=$1} END {print sum/NR}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+
+# Output training accuracy; review and modify per model
+#train_accuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'`
+# Print; no modification needed
+#echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Summary of stability/accuracy monitoring results
+# Training case information; no modification needed
+BatchSize=${batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+## Collect performance data
+# Throughput; no modification needed
+ActualFPS=${FPS}
+# Training time per iteration (ms); no modification needed
+TrainingTime=`echo "scale=2;${batch_size} * ${RANK_SIZE} * 1000 / ${FPS}"|bc`
+
+# Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep 'Step:' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F "loss_total:" '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss value of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information into ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
\ No newline at end of file
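
Note: both scripts above pick up `--data_path=<path>` with shell parameter expansion: `${para#*=}` strips the shortest prefix matching `*=`, i.e. everything up to and including the first `=`. A minimal sketch of that expansion, using a hypothetical path:

    para="--data_path=/data/vimeo90k"    # hypothetical argument
    data_path=${para#*=}                 # yields /data/vimeo90k
    echo "data_path = ${data_path}"
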
diff --git a/TensorFlow/built-in/cv/Video_enhancement/EDVR_ID0056_for_TensorFlow/ascendcv/runner/sess_config.py b/TensorFlow/built-in/cv/Video_enhancement/EDVR_ID0056_for_TensorFlow/ascendcv/runner/sess_config.py
index 9325b851d35389a0c9c4eccd3ab7f4f37654d60c..ed8c1fbc0eb7a7093194560e613745eda79846fd 100644
--- a/TensorFlow/built-in/cv/Video_enhancement/EDVR_ID0056_for_TensorFlow/ascendcv/runner/sess_config.py
+++ b/TensorFlow/built-in/cv/Video_enhancement/EDVR_ID0056_for_TensorFlow/ascendcv/runner/sess_config.py
@@ -12,18 +12,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import tensorflow as tf
+from npu_bridge.npu_init import *
 
 
 def _npu_config(mix_precision, is_distributed):
     config = tf.ConfigProto()
     custom_op = config.graph_options.rewrite_options.custom_optimizers.add()
     custom_op.name = "NpuOptimizer"
-    custom_op.parameter_map["enable_data_pre_proc"].b = False
     custom_op.parameter_map["mix_compile_mode"].b = False
     custom_op.parameter_map["use_off_line"].b = True
     custom_op.parameter_map["graph_memory_max_size"].s = tf.compat.as_bytes(str(28*1024 * 1024 * 1024))
     custom_op.parameter_map["variable_memory_max_size"].s = tf.compat.as_bytes(str(3*1024 * 1024 * 1024))
     custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
+    custom_op.parameter_map["enable_data_pre_proc"].b = True
+    custom_op.parameter_map["iterations_per_loop"].i = 10
+    config = npu_config_proto(config_proto=config)
     #if mix_precision:
     #    custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
     if is_distributed:
diff --git a/TensorFlow/built-in/cv/Video_enhancement/EDVR_ID0056_for_TensorFlow/ascendvsr/models/base_model.py b/TensorFlow/built-in/cv/Video_enhancement/EDVR_ID0056_for_TensorFlow/ascendvsr/models/base_model.py
index bff76f3153038f634f833b187d4ff3daefb885a2..cef442af6b656f53fd1e067ec65fd5b8399f4cec 100644
--- a/TensorFlow/built-in/cv/Video_enhancement/EDVR_ID0056_for_TensorFlow/ascendvsr/models/base_model.py
+++ b/TensorFlow/built-in/cv/Video_enhancement/EDVR_ID0056_for_TensorFlow/ascendvsr/models/base_model.py
@@ -7,6 +7,7 @@ import json
 import re
 import tensorflow as tf
 from tqdm import trange
+from npu_bridge.npu_init import *
 
 from ascendcv.runner.solver import build_solver
 from ascendcv.utils.writer import ImageWriter
@@ -168,6 +169,7 @@ class VSR(object):
         self.saver = tf.train.Saver(max_to_keep=100, keep_checkpoint_every_n_hours=1)
 
         ave_loss = None
+        train_op = util.set_iteration_per_loop(sess, train_op, 10)
         st_time = time.time()
         for it in range(recover_step, solver.total_step):
             if self.read_mode == 'python':
@@ -188,7 +190,7 @@
 
             if (it + 1) % self.solver.print_interval == 0 and \
                     not (npu_distributed and int(os.environ['DEVICE_ID']) != self.cfg.root_rank):
-                ave_time = once_time / self.solver.print_interval
+                ave_time = once_time / self.solver.print_interval / 10
                 fps = self.batch_size / ave_time * self.cfg.rank_size
                 print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), 'Step:{}, lr:{:.8f}, loss:{:.08f}, session time:{:.2f}ms, session fps:{:.2f}, device_id: {}'.format(
diff --git a/TensorFlow/built-in/cv/Video_enhancement/EDVR_ID0056_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/built-in/cv/Video_enhancement/EDVR_ID0056_for_TensorFlow/test/train_performance_1p.sh
index 7a1e537fc34eb6f1b7a817b42623cd2db0f2ec53..c64f63e125329b1d51d0779527eb6f5d62210114 100644
--- a/TensorFlow/built-in/cv/Video_enhancement/EDVR_ID0056_for_TensorFlow/test/train_performance_1p.sh
+++ b/TensorFlow/built-in/cv/Video_enhancement/EDVR_ID0056_for_TensorFlow/test/train_performance_1p.sh
@@ -2,7 +2,7 @@
 # source env.sh
 # Current path; no modification needed
 cur_path=`pwd`
-export ASCEND_SLOG_PRINT_TO_STDOUT=1
+#export ASCEND_SLOG_PRINT_TO_STDOUT=1
 # Collective communication parameters; no modification needed
 
 export RANK_SIZE=1
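
Note: the sess_config.py and base_model.py hunks above enable NPU loop sinking: with `iterations_per_loop` set to 10 and the training op wrapped by `util.set_iteration_per_loop(sess, train_op, 10)`, each sess.run is expected to execute 10 device-side iterations before returning to the host, which is why `ave_time` gains the extra division by 10. A worked sketch of that timing arithmetic, where the numeric inputs are assumed examples:

    once_time=20; print_interval=100; iterations_per_loop=10      # assumed: 20 s measured over one print interval
    ave_time=$(echo "scale=6; ${once_time} / ${print_interval} / ${iterations_per_loop}" | bc)   # 0.02 s per iteration
    fps=$(echo "scale=2; 16 / ${ave_time} * 1" | bc)              # batch_size=16, rank_size=1 -> 800.00 images/sec
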
diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/README.txt b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/README.txt
deleted file mode 100644
index 5f176019472ffe937b0110dd6b6bd15886a05021..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/README.txt
+++ /dev/null
@@ -1,97 +0,0 @@
-**************************************************************************
-AVA Dataset - June 2012 Release
-
-For detailed information, please refer to:
-
-“AVA: A Large-Scale Database for Aesthetic Visual Analysis”. Naila Murray,
- Luca Marchesotti, Florent Perronnin, CVPR 2012.
-
-Contacts:
-Naila Murray (nmurray [at] cvc [dot] uab [dot] es)
-Luca Marchesotti (Luca [dot] Marchesotti [at] xrce [dot] xerox [dot] com)
-**************************************************************************
-
-This package contains:
-
-1. AVA.txt
-2. tags.txt
-3. challenges.txt
-4. aesthetic image lists: lists of train and test images used for aesthetics
-experiments.
-5. style image lists: lists of train and test images used for style
-experiments.
-
-**************************************************************************
-Content of AVA.txt
-**************************************************************************
-
-Column 1: Index
-
-Column 2: Image ID
-
-Columns 3 - 12: Counts of aesthetics ratings on a scale of 1-10. Column 3
-has counts of ratings of 1 and column 12 has counts of ratings of 10.
-
-Columns 13 - 14: Semantic tag IDs. There are 66 IDs ranging from 1 to 66.
-The file tags.txt contains the textual tag corresponding to the numerical
-id. Each image has between 0 and 2 tags. Images with less than 2 tags have
-a "0" in place of the missing tag(s).
-
-Column 15: Challenge ID. The file challenges.txt contains the name of
-the challenge corresponding to each ID.
-
-**************************************************************************
-Aesthetics image Lists
-**************************************************************************
-
-The aesthetics_image_lists directory contains files with the IDs of images
-used for training and testing generic aesthetics classifiers. There were:
-
-1. small scale (ss) experiments with few training images.
-2. large scale (ls) experiments with many training images.
-
-The directory also contains lists of training and testing images used for
-content (or category)-dependent classifiers. The categories are: animal,
-architecture, cityscape, floral, food/drink, landscape, portrait, and
-still-life.
-
-**************************************************************************
-Style image Lists
-**************************************************************************
-
-The style_image_lists directory contains files with the IDs of images
-used for training and testing photographic style classifiers. The files are:
-
-1. train.jpgl - list of IDs of training images
-2. test.jpgl - list of IDs of testing images
-3. styles.txt - numeric style IDs and their associated photographic styles.
-4. train.lab - annotations for images in train.jpgl consisting of numeric
-style IDs.
-5. test.multilab - binary annotations for images in test.jpgl. There are 14
-columns corresponding to the 14 possible styles so that, for example, a 1
-in column 3 means that the image has been labeled with the 3rd style listed
-in styles.txt
-
-Note that the training images are single-labeled, but the test images are
-multilabeled.
-
-**************************************************************************
-How to obtain the images?
-************************************************************************** - -The URLs for the images are constructed as: - - http://www.dpchallenge.com/image.php?IMAGE_ID= - -e.g., - - http://www.dpchallenge.com/image.php?IMAGE_ID=359334 - - -************************************************************************** -Copyright Considerations -************************************************************************** - -Rights to all images are retained by the photographers/dpchallenge. This -is why the image files are not included in the database. Please respect -the copyright and refrain from redistributing images or data. diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/animal_test.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/animal_test.jpgl deleted file mode 100644 index d6e67437be7b6b5fd394f3667cf0a7a385e6ffb2..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/animal_test.jpgl +++ /dev/null @@ -1,2500 +0,0 @@ -629797 -902498 -588000 -448695 -237907 -314147 -306944 -486521 -545526 -126116 -28784 -415178 -713715 -234538 -53043 -497944 -722101 -843320 -334499 -156437 -185173 -413104 -639709 -384014 -438926 -758327 -349734 -722028 -822511 -731886 -534103 -538205 -715540 -120710 -598172 -555465 -427476 -864374 -40090 -200880 -926003 -232781 -596783 -778425 -605838 -645466 -323224 -743468 -799937 -445875 -649970 -941842 -810654 -764043 -405168 -862398 -836686 -347442 -185816 -505443 -310126 -38438 -67623 -80262 -935264 -85677 -579418 -72404 -761062 -432913 -223792 -389200 -518425 -756104 -172870 -204567 -638076 -133732 -72258 -597421 -85307 -303329 -635274 -429957 -723452 -95588 -669016 -317214 -79248 -747440 -25645 -119338 -425795 -463587 -637259 -595614 -234344 -206123 -640600 -846701 -118107 -922662 -162495 -545603 -857235 -477300 -26104 -683854 -434023 -879540 -500240 -748030 -662727 -365779 -709765 -472435 -792286 -288937 -537735 -764910 -475190 -786890 -954237 -43796 -155610 -857567 -938559 -338256 -13187 -812351 -453967 -203847 -1725 -359641 -35130 -763123 -778244 -637105 -81547 -168295 -577571 -893860 -82934 -157450 -620561 -31123 -698884 -650874 -245914 -119748 -71291 -11784 -942310 -405031 -948865 -256253 -518267 -285412 -60763 -276529 -906838 -327870 -174717 -38879 -160330 -946335 -859209 -679529 -118569 -526400 -614649 -901650 -652065 -310975 -109518 -580946 -751287 -11714 -365894 -172684 -611758 -12690 -449659 -437896 -669473 -817520 -284718 -525715 -120583 -376709 -354089 -638876 -36760 -251242 -505607 -919989 -917285 -560360 -671299 -437423 -526769 -684288 -317686 -430415 -430271 -773755 -902731 -456598 -284011 -302435 -482793 -204016 -77645 -196207 -90419 -951490 -366004 -281124 -880040 -546220 -578645 -938255 -810816 -759106 -527102 -785764 -887370 -93710 -277239 -386679 -852700 -581586 -595105 -809425 -913724 -456610 -569993 -863873 -486446 -291593 -809781 -225302 -728157 -422321 -287162 -840176 -597278 -577515 -905240 -263806 -506026 -884169 -676135 -512490 -864271 -576563 -244555 -845011 -676734 -48265 -141159 -557450 -763200 -926128 -511441 -706313 -340487 -434635 -783853 -524469 -433438 -561425 -349710 -300913 -650641 -759948 -140491 -954726 -637535 -626729 -693402 -234053 -725983 -476889 -854555 -514524 -367474 -263282 -243152 -94941 -437831 -425811 -220949 -843295 -231032 -908713 -442852 -187400 -425684 -278334 -445194 -570689 -182755 -706768 -494200 -505288 
-257612 -626174 -639331 -848380 -298152 -413963 -577423 -248043 -206242 -425744 -752789 -133776 -189281 -339468 -907683 -287540 -824599 -759709 -122088 -665824 -426457 -816662 -343092 -894518 -321810 -261307 -847461 -30639 -24455 -102737 -241742 -855903 -258105 -405872 -737940 -310602 -597036 -627049 -203767 -82333 -546201 -894777 -613033 -18801 -458443 -227016 -160230 -713028 -255603 -796157 -778350 -184594 -778282 -373190 -579792 -95277 -853907 -136863 -382668 -254354 -719442 -19105 -305495 -477226 -134918 -120284 -475673 -875881 -899459 -384881 -640779 -866304 -162883 -481000 -864267 -879002 -638777 -935131 -285455 -265613 -247522 -305975 -723200 -852474 -894371 -836053 -579073 -186312 -263452 -21972 -595351 -897115 -936283 -402652 -910184 -637603 -233352 -650443 -204141 -204953 -186619 -163051 -637844 -637019 -818695 -321301 -495930 -611456 -916812 -283828 -124606 -650705 -318398 -3726 -309879 -55737 -456061 -287187 -627883 -826202 -110369 -181253 -21070 -757238 -886834 -520125 -880110 -884778 -286400 -776003 -916944 -847425 -67372 -161679 -771629 -879774 -204904 -794961 -34005 -540005 -716724 -429412 -848132 -517887 -338321 -608292 -784901 -263569 -735485 -755373 -939940 -189068 -340121 -350936 -329186 -463872 -62708 -443850 -344499 -168275 -677828 -851088 -286554 -770231 -578715 -245940 -106921 -32034 -251267 -357305 -11832 -757370 -343478 -84869 -285123 -23077 -716867 -357022 -908876 -283301 -405234 -458988 -307940 -881627 -163019 -944228 -516271 -67745 -693817 -582495 -909646 -650756 -937529 -19141 -339448 -446921 -176593 -189462 -778541 -847751 -416262 -39001 -756635 -521783 -402356 -759926 -738337 -713771 -305742 -163109 -68622 -46603 -679826 -376530 -109751 -189410 -477484 -764555 -737978 -902389 -589578 -714589 -751966 -120248 -650544 -777630 -681609 -78701 -140763 -49715 -689836 -517009 -427422 -417659 -767008 -536775 -743446 -75636 -751286 -574243 -614495 -948269 -672156 -187217 -638916 -829423 -649900 -738903 -129580 -615219 -420011 -337929 -78845 -293662 -802833 -577437 -36032 -714326 -81143 -578559 -342293 -487750 -577637 -18528 -372471 -138841 -201807 -38975 -331433 -756688 -564438 -716944 -455972 -586207 -25324 -480441 -388868 -284413 -185807 -78700 -287694 -9132 -188651 -601727 -764119 -570597 -628481 -430838 -580878 -595697 -135275 -847200 -43955 -938409 -459087 -40934 -11589 -656131 -188756 -771258 -467709 -851005 -416306 -455358 -489215 -36802 -741358 -376715 -756483 -70411 -356500 -388030 -170473 -476580 -598189 -911439 -106309 -831296 -25956 -456725 -67339 -698631 -577945 -790097 -437963 -795109 -830273 -155567 -878989 -84456 -546128 -81826 -949949 -625015 -287838 -505358 -732140 -898794 -772231 -303080 -136160 -738603 -772297 -188585 -865530 -288957 -492664 -278700 -564021 -11791 -49133 -802065 -840177 -122030 -65648 -611561 -795545 -589160 -956261 -117931 -369246 -55696 -612613 -263938 -287886 -36535 -830001 -545435 -468780 -188740 -198143 -433521 -828732 -189052 -455833 -513385 -526508 -257172 -121259 -785480 -75181 -71265 -857549 -395422 -632806 -774340 -96558 -504544 -19193 -504561 -316853 -499136 -570100 -712356 -581183 -573058 -649901 -894316 -577770 -825020 -772044 -578633 -159412 -408648 -40045 -615725 -804837 -763108 -526683 -581714 -747877 -763437 -427372 -78816 -429141 -149960 -497744 -933842 -637389 -56752 -254972 -154072 -355896 -599265 -818383 -251456 -648640 -655787 -661765 -55121 -814456 -358235 -923140 -737772 -383721 -614691 -502196 -638769 -205109 -405438 -109951 -674551 -315788 -718865 -831438 -585469 -273074 -85328 -742601 -342529 -446168 
-464987 -651439 -124491 -598038 -241900 -776490 -552802 -55209 -454137 -464790 -732113 -755310 -333757 -40080 -391135 -120158 -781615 -277183 -707435 -851111 -691274 -237879 -163514 -30999 -735246 -927162 -480720 -622827 -296345 -272463 -500132 -934123 -936539 -109462 -123341 -383113 -458544 -204976 -638964 -390831 -421992 -409830 -893480 -720022 -230799 -188801 -569795 -748336 -116887 -343190 -485165 -66913 -67545 -263384 -835354 -188789 -427460 -662792 -903920 -603973 -158916 -127052 -368952 -656272 -187043 -369653 -666403 -917187 -535079 -532059 -391083 -731126 -925800 -784602 -287863 -453777 -870740 -658562 -917470 -767865 -55078 -306554 -383932 -641341 -924186 -222004 -729260 -786848 -79829 -26065 -577021 -575582 -235435 -190175 -653110 -330259 -505094 -643185 -452572 -906005 -53753 -253340 -527388 -243071 -441109 -720152 -796739 -782400 -188680 -775583 -18982 -667369 -370563 -716482 -759090 -204559 -902462 -772507 -781548 -371896 -802744 -24258 -63879 -345707 -115735 -19164 -667335 -596037 -53737 -578634 -161617 -22653 -593572 -862373 -484004 -56495 -151318 -637594 -765938 -192478 -429489 -168063 -885073 -597006 -109380 -841272 -455539 -279774 -232789 -551587 -650659 -204948 -894111 -769212 -152605 -704652 -712283 -90250 -629339 -772021 -419349 -637086 -854722 -595928 -727870 -394701 -522686 -828820 -488982 -163412 -255879 -678582 -932831 -69523 -5143 -1779 -188467 -61034 -664511 -337530 -530230 -898552 -71906 -764349 -776901 -650580 -638904 -26013 -161407 -409610 -451780 -650238 -797411 -5282 -332737 -436014 -27683 -134560 -757247 -782263 -63400 -651497 -661016 -574125 -155369 -706641 -772291 -727039 -544275 -158903 -625533 -395812 -625601 -369832 -779268 -362505 -436367 -945549 -264167 -732248 -630157 -686685 -569936 -763856 -656029 -754228 -625008 -386438 -109643 -680598 -579734 -947137 -560222 -445337 -385250 -761463 -693741 -502356 -284696 -514729 -830511 -705390 -592949 -638903 -676771 -780481 -770007 -37747 -902443 -331349 -617846 -564192 -902348 -595743 -483124 -133728 -777907 -519101 -154725 -143762 -802501 -69234 -797073 -710077 -486608 -276118 -754773 -155415 -168250 -19206 -116779 -828638 -441813 -756611 -564832 -40032 -528900 -158812 -665110 -816584 -391085 -121383 -477474 -893827 -197846 -437723 -943953 -358767 -927408 -205098 -748899 -393683 -268979 -894314 -177967 -136602 -770679 -896076 -790764 -600184 -593523 -230928 -319006 -45265 -150307 -448632 -637838 -40069 -629941 -455391 -842613 -284898 -924739 -402990 -756213 -665557 -827213 -671387 -733903 -759353 -777307 -160643 -562175 -296421 -67418 -563986 -210946 -708928 -638862 -690863 -436098 -342022 -753595 -387696 -246339 -148762 -44856 -362537 -666887 -33975 -40885 -292957 -121021 -538870 -598839 -287798 -911404 -712539 -761939 -617487 -38955 -324984 -638661 -227590 -577329 -451799 -256738 -497834 -950627 -344498 -726334 -278199 -464811 -902411 -328127 -957645 -43275 -939952 -66787 -607110 -541567 -106060 -445421 -777685 -403737 -71860 -182756 -370910 -611437 -898926 -954045 -400195 -609317 -300664 -160815 -436915 -300627 -412613 -78114 -38120 -449191 -289088 -63379 -162165 -86519 -109384 -894516 -948760 -269956 -778489 -816537 -83274 -731619 -305094 -249393 -60320 -158790 -829138 -912066 -508119 -342779 -947175 -657474 -754392 -27688 -15856 -124930 -361603 -831378 -756743 -24780 -150674 -204476 -838574 -782296 -943949 -713583 -162564 -524819 -109621 -687316 -515559 -233242 -723429 -799936 -643045 -211550 -493940 -136657 -626660 -225774 -523688 -713700 -289241 -437674 -81731 -278566 -106331 -917447 -948467 -609788 
-135165 -608659 -460249 -129640 -42821 -434556 -259730 -79889 -686574 -730894 -768403 -326336 -914298 -12784 -808832 -654755 -299460 -247384 -411313 -318992 -461667 -667793 -13445 -667802 -30676 -132193 -578468 -515914 -204513 -545948 -287440 -457108 -802903 -646356 -223206 -694086 -483875 -642904 -359298 -771792 -771569 -946856 -796464 -228203 -175662 -455467 -294727 -3136 -842990 -281854 -366067 -56493 -911363 -477254 -19098 -69783 -118105 -343675 -106293 -75176 -842309 -10848 -948797 -654206 -764485 -797394 -185184 -598412 -24174 -475360 -328667 -599653 -855308 -344878 -384956 -763486 -24997 -504551 -120129 -907881 -666607 -902495 -727146 -532987 -756485 -908135 -162807 -9730 -839881 -27614 -458150 -403684 -514249 -271317 -11772 -181522 -552679 -751952 -287167 -917417 -317276 -728813 -318856 -85452 -174270 -512521 -272061 -456728 -646418 -875985 -899091 -315434 -82177 -661745 -74361 -438030 -734654 -140536 -22934 -498421 -19199 -772353 -234368 -791997 -189067 -460865 -185733 -37722 -282785 -938299 -609613 -607156 -117218 -4370 -60760 -444654 -734388 -899646 -356443 -571397 -109955 -395713 -742119 -437969 -754483 -668155 -762796 -232685 -386580 -373524 -867805 -327370 -538807 -451593 -159016 -943874 -755927 -129286 -300166 -633811 -26015 -734716 -665341 -34983 -534799 -893961 -462224 -274694 -395639 -948284 -117359 -241418 -564553 -730681 -183947 -200238 -587991 -105836 -625672 -782935 -502970 -129033 -388750 -75449 -729931 -384714 -247866 -764580 -579151 -18365 -656231 -907966 -837011 -717008 -528822 -327796 -796012 -813212 -284371 -109640 -63224 -786843 -841524 -831411 -445735 -349499 -710228 -724499 -828731 -46165 -593994 -396045 -54338 -63535 -764400 -608174 -635048 -7301 -582430 -765153 -129926 -369657 -292552 -388681 -95447 -205253 -201925 -858118 -505658 -15611 -607272 -732121 -598311 -777466 -770221 -590542 -816742 -518732 -930396 -18804 -188737 -919860 -289314 -456432 -274835 -664779 -367973 -43038 -810910 -17579 -919916 -779992 -543637 -283673 -954535 -390619 -638850 -432150 -451722 -285121 -764635 -784491 -259979 -715527 -669553 -72015 -639023 -61107 -30518 -86634 -498218 -18347 -156853 -300725 -284675 -567871 -573040 -851296 -661493 -227143 -7112 -495356 -10575 -808609 -456306 -391395 -591976 -284238 -284598 -164687 -516651 -713649 -500494 -253792 -317890 -487593 -118098 -917395 -436828 -84185 -572622 -730271 -324624 -786499 -661899 -437502 -203884 -595502 -821824 -550401 -300791 -295808 -787928 -336891 -656218 -764266 -358662 -162232 -764550 -359936 -771892 -65856 -327399 -851471 -391611 -713655 -375068 -778333 -230358 -611213 -920036 -239279 -39223 -204875 -911250 -85061 -850786 -109308 -293849 -617746 -468302 -466815 -459303 -45995 -18127 -579753 -221985 -906881 -802790 -226905 -553849 -118031 -570094 -196353 -300753 -731153 -745890 -188240 -474159 -350229 -803511 -17108 -38072 -588278 -163087 -188356 -446653 -756421 -345888 -405780 -284771 -404626 -555762 -240242 -272528 -117136 -468598 -315736 -614055 -403075 -44052 -714573 -514788 -859673 -115137 -327964 -650283 -161565 -607358 -101847 -719452 -674517 -828771 -156520 -344338 -421037 -836616 -324645 -58736 -649079 -596091 -666715 -39378 -227918 -777309 -930047 -511450 -43257 -768433 -884366 -256655 -640887 -56737 -644255 -672132 -528570 -279948 -436430 -650808 -731935 -648453 -456115 -314785 -712649 -134012 -328412 -684456 -601252 -593760 -382465 -679377 -36745 -698279 -317413 -81110 -303113 -675046 -650373 -342600 -320858 -134704 -501162 -456926 -496794 -606875 -907785 -18219 -484230 -38343 -116756 -818369 -188140 
-786931 -501400 -948667 -105108 -504713 -596650 -643017 -795604 -503434 -361973 -248956 -343735 -799417 -704615 -848234 -564482 -119702 -483668 -588064 -547060 -768127 -900920 -26374 -449072 -303574 -77158 -369415 -878419 -546194 -688596 -391531 -713569 -592400 -109591 -716939 -285391 -902291 -302986 -655835 -116806 -202661 -810690 -591682 -830725 -633124 -898547 -709117 -423048 -115490 -853234 -540674 -617548 -593468 -899650 -743309 -732276 -786908 -950102 -253651 -665529 -276218 -251824 -842853 -827596 -660059 -9436 -170824 -590433 -13050 -167862 -132368 -163074 -433484 -282968 -803215 -87033 -803559 -153260 -866017 -551124 -523504 -39989 -790743 -536632 -375205 -802523 -477390 -502662 -769050 -934274 -748628 -514171 -19063 -909879 -726946 -132847 -597329 -477562 -897139 -592985 -791505 -106307 -947508 -284926 -566002 -223452 -640957 -733917 -543698 -412028 -788854 -544646 -603498 -708606 -84365 -938596 -529325 -446734 -716934 -527403 -264597 -65455 -734663 -884982 -638722 -420004 -132245 -18800 -342083 -640531 -45247 -705104 -377536 -246249 -199354 -331538 -40916 -648566 -457900 -477225 -842273 -30592 -24092 -854304 -552480 -244325 -880447 -679288 -457866 -524494 -692743 -205052 -597631 -171235 -848021 -823849 -91044 -19159 -154024 -953289 -276453 -398202 -505656 -78782 -206580 -84124 -573748 -140116 -942248 -597371 -204139 -349219 -868642 -284884 -263562 -119997 -26850 -554578 -66835 -524104 -220455 -16516 -121226 -140784 -265870 -676194 -136591 -360176 -109778 -22659 -357629 -894221 -797405 -640528 -942004 -632803 -109356 -455226 -888106 -222973 -792580 -610560 -237290 -365975 -699744 -237531 -456767 -662691 -77494 -204786 -437245 -459509 -186841 -733616 -13274 -456748 -601148 -802404 -704345 -364504 -233901 -516167 -455661 -510990 -102281 -18503 -67548 -405423 -616761 -825068 -601942 -118951 -857532 -790204 -182727 -435006 -451719 -916647 -570234 -235310 -9488 -716176 -117437 -287969 -684189 -437648 -260331 -817523 -162981 -938523 -393412 -944829 -775691 -760788 -456311 -899255 -256836 -189008 -162610 -205257 -86572 -367848 -387266 -446504 -517033 -577711 -510475 -456487 -61744 -45667 -955317 -189069 -487476 -938289 -199447 -84169 -770037 -188474 -913802 -425971 -8118 -106350 -388533 -215210 -564388 -183533 -36235 -348393 -285746 -162977 -248009 -857121 -386623 -18384 -109439 -902464 -683298 -519824 -278065 -12061 -297638 -682125 -109547 -69607 -796222 -456720 -133887 -559408 -525139 -36446 -500828 -600895 -19022 -865008 -639014 -598321 -204176 -431354 -755204 -188233 -462023 -733554 -504362 -778439 -129795 -69943 -650768 -309538 -716586 -698183 -881685 -437775 -698541 -522675 -879729 -292367 -283216 -421094 -186022 -233817 -686710 -50909 -538130 -684760 -18455 -730308 -162609 -825585 -923251 -80395 -693758 -238996 -543100 -796937 -360859 -669454 -67268 -123119 -342449 -75601 -661504 -18969 -385452 -885355 -516298 -333677 -59217 -42532 -867498 -491037 -731987 -21089 -755362 -284646 -764467 -461313 -444966 -661561 -932838 -795140 -398163 -65105 -680501 -525765 -271319 -522062 -64270 -26271 -257775 -328700 -226506 -161747 -656223 -533198 -223380 -513430 -86955 -12880 -303403 -449299 -938541 -444911 -734564 -285666 -684291 -631255 -367682 -37733 -626466 -471154 -151130 -776920 -312801 -593947 -803632 -300377 -587184 -615595 -598417 -205423 -724115 -671692 -456383 -902297 -651420 -187682 -439173 -651440 -202298 -157282 -176452 -941432 -809930 -297494 -438043 -670787 -697254 -94655 -258053 -129309 -781390 -51387 -453816 -435986 -57222 -327533 -94734 -102872 -830563 -670047 -638038 -790724 
-284905 -135697 -792836 -894478 -284349 -539707 -156133 -625536 -518597 -698471 -109454 -660045 -395730 -429266 -762206 -530933 -480626 -456702 -26651 -637542 -639034 -536288 -525215 -148738 -413278 -61974 -19111 -83686 -952935 -776642 -281435 -638998 -405331 -549644 -25522 -285333 -409204 -333909 -504163 -53039 -650895 -947084 -447682 -189056 -455890 -157028 -693775 -576704 -122086 -44041 -390017 -597258 -109484 -716203 -324429 -362195 -457512 -316772 -680962 -330196 -479058 -778556 -154717 -773818 -597538 -323550 -763809 -454254 -323321 -825252 -67317 -686401 -602492 -204072 -216952 -106232 -475125 -446424 -778321 -705340 -86958 -434805 -458705 -456264 -638097 -705410 -72326 -750597 -598044 -570299 -568283 -825267 -56536 -491039 -652676 -721334 -785519 -344508 -411154 -941507 -777408 -449964 -284762 -74297 -663488 -290739 -97934 -777905 -908404 -529132 -229088 -298815 -651552 -796258 -800038 -67412 -14885 -433090 -56136 -690768 -18623 -804727 -614465 -279060 -463581 -719485 -312551 -515549 -638666 -582906 -586008 -563708 -564491 -463939 -158006 -157499 -571297 -476287 -21294 -634899 -60182 -763113 -712677 -109421 -373883 -748861 -462027 -769540 -285380 -438108 -57306 -524262 -94681 -356151 -124086 -18824 -777507 -158495 -577110 -778308 -67527 -785557 -577181 -328189 -864918 -723952 -350221 -712536 -440450 -905100 -723605 -763490 -338250 -457127 -756893 -453758 -902409 -917073 -19018 -754772 -39114 -311194 -777488 -102166 -883474 -516545 -882385 -600254 -320512 -57257 -109478 -148519 -935551 -434945 -626695 -234564 -44286 -114687 -56561 -648056 -132134 -948217 -434742 -403961 -397555 -66953 -271322 -227663 -258166 -150658 -716898 -419947 -18937 -674005 -80185 -848030 -181521 -129031 -656676 -539248 -511782 -574063 -846521 -54333 -893972 -354340 -751706 -188772 -230154 -297481 -901296 -652064 -931460 -638826 -667113 -761853 -598150 -784678 -750615 -638225 -48283 -249022 -760859 -317618 -30143 -36364 -85910 -846664 -121032 -755382 -653912 -178044 -18677 -472331 -168070 -504280 -808071 -271803 -704032 -282536 -736704 -723392 -761542 -438134 -357041 -94386 -716666 -69342 -292240 -754983 -846651 -755377 -329839 -603836 -236157 -538035 -44309 -388836 -797153 -109748 -406600 -158720 -883094 -13236 -416829 -713678 -667298 -116876 -57284 -881515 -850723 -518410 -77162 -182357 -916959 -812567 -456437 -618452 -785579 -287473 -940152 -419155 -943231 -418201 -76969 -420975 -775544 -561014 -598320 -445149 -122600 -599136 -456145 -455882 -600721 -450741 -614710 -224053 -893692 -649573 -473888 -261926 -743489 -653591 -951260 -286586 -288320 -74669 -18962 -831289 -79953 -614059 -604106 -516059 -56738 -426039 -172618 -767006 -505618 -851084 -863652 -799289 -55757 -57275 -120887 -438083 -291037 -93106 -518448 -644776 -144357 -638327 -266867 -664823 -566172 -596219 -698380 -693109 -325080 -772765 -864216 -754277 -189062 -189047 -387327 -599438 -675491 -713065 -327999 -339576 -536110 -623371 -800164 -574113 -614054 -222038 -296772 -534397 -582194 -857519 -221147 -481913 -453991 -59935 -13057 -771404 -427423 -357324 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/animal_train.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/animal_train.jpgl deleted file mode 100644 index 9c9c09eb2d2ad8decdf2773c13e5caef36f5e594..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/animal_train.jpgl +++ /dev/null @@ -1,2500 +0,0 
@@ -753815 -285698 -730630 -886571 -599878 -909513 -129561 -756712 -742959 -129903 -901890 -250396 -865543 -736531 -601119 -279264 -94478 -923121 -262909 -276055 -777090 -805495 -343483 -464021 -675056 -496817 -751921 -905033 -173000 -18490 -827638 -551820 -713250 -638943 -777682 -336332 -261274 -696856 -163052 -303246 -736686 -171182 -257230 -440346 -434524 -894798 -199536 -118069 -54860 -161810 -651159 -525112 -12515 -36843 -402617 -685748 -427529 -518802 -767829 -830735 -579285 -328674 -32236 -336198 -584710 -767454 -436869 -525523 -432945 -183798 -383250 -329163 -188432 -314927 -667390 -281053 -733118 -213869 -119332 -677907 -512675 -451520 -814925 -187924 -354610 -650048 -523212 -732150 -754661 -257902 -472232 -604059 -377347 -560372 -51333 -30408 -141353 -498124 -201754 -369862 -176385 -638816 -432924 -650634 -327272 -303153 -285292 -347240 -771253 -314938 -38664 -11733 -455963 -776624 -638881 -65588 -649647 -350928 -818403 -303242 -86474 -249180 -73259 -128540 -792928 -292627 -732277 -527641 -8253 -43926 -591515 -56564 -9958 -638234 -280860 -435760 -382874 -257184 -156271 -598225 -168059 -256311 -368260 -767881 -75827 -455658 -744066 -313713 -132858 -284831 -534640 -460996 -276978 -129021 -432559 -890325 -289381 -134342 -311888 -440115 -117838 -328809 -937407 -403725 -803233 -573880 -206561 -206534 -690025 -375568 -215020 -603712 -893369 -29453 -527474 -612929 -460569 -205675 -182177 -786233 -402163 -935523 -759859 -817903 -395935 -117967 -911371 -147463 -703122 -62065 -328877 -915645 -818731 -710122 -680406 -375249 -205085 -358411 -704207 -809774 -838407 -154817 -778068 -431895 -668652 -250046 -294595 -109191 -733553 -627984 -69518 -648276 -24166 -596106 -686554 -82300 -154105 -939231 -61714 -645724 -56621 -818329 -527864 -134559 -663802 -437848 -117830 -252402 -30710 -387452 -70320 -436976 -516231 -446335 -423420 -158129 -148446 -109438 -163114 -368944 -488841 -132388 -298255 -579812 -340019 -575543 -298762 -574207 -921919 -448514 -67314 -618251 -274605 -566983 -188901 -278283 -70087 -527595 -85448 -351674 -437356 -773800 -923011 -838201 -437416 -389737 -13052 -750933 -541242 -224839 -48177 -886196 -830266 -14838 -86531 -590816 -751720 -84826 -809764 -713705 -65209 -717001 -105489 -814863 -74115 -436874 -161663 -67766 -92179 -827592 -401135 -39563 -894528 -30625 -162112 -223125 -597763 -191282 -678212 -935121 -463701 -372540 -293910 -109477 -576532 -158465 -12029 -429224 -753547 -372827 -650353 -945215 -205363 -272452 -703174 -771674 -850951 -720931 -926574 -796127 -698783 -673165 -370286 -176220 -654788 -94935 -743346 -797451 -139201 -676090 -838096 -283805 -18865 -795796 -533095 -55640 -455930 -82348 -534778 -940396 -224248 -729361 -163094 -235684 -257380 -795573 -75857 -729143 -886159 -161503 -90108 -567939 -809532 -538175 -39959 -77169 -314680 -298416 -732134 -187357 -435810 -589702 -932570 -101524 -539638 -87024 -75403 -310784 -752979 -440491 -574463 -648902 -61183 -442911 -278584 -162909 -413776 -364301 -162614 -764586 -371465 -575336 -270723 -834036 -592536 -598201 -232664 -158814 -713174 -472138 -904498 -679460 -255443 -162929 -356238 -830119 -133855 -365957 -401872 -505733 -913065 -84766 -506550 -650413 -158666 -627531 -818505 -843273 -448002 -590559 -845020 -815023 -21095 -84651 -163090 -685774 -937120 -802407 -171277 -86554 -913811 -552081 -162835 -415606 -46576 -437813 -435582 -68316 -24511 -284362 -314286 -1327 -108946 -619808 -673953 -795080 -930280 -623816 -271104 -32833 -587566 -456436 -461371 -795582 -339944 -563646 -593489 -880430 -186625 -163413 -901887 -397090 
-593904 -18442 -954750 -230640 -652267 -720814 -485562 -848001 -847620 -824987 -686617 -19404 -847784 -919701 -23988 -681577 -638648 -824703 -832828 -516378 -859697 -640857 -579344 -931055 -902115 -594079 -744388 -92065 -650625 -636282 -337375 -289390 -430159 -777521 -809739 -44713 -46325 -61864 -26731 -205108 -770514 -937186 -406605 -592299 -505566 -821386 -836427 -26714 -785559 -227850 -119959 -342295 -56608 -264132 -163078 -857512 -650738 -356793 -927204 -174367 -467274 -754283 -829811 -468404 -337996 -778341 -743297 -456552 -333485 -75894 -954466 -602688 -784871 -38721 -461447 -285442 -587976 -375837 -81691 -18946 -59085 -452588 -35078 -596493 -9527 -52589 -270123 -712877 -432379 -674909 -848859 -158976 -110152 -385663 -419965 -767295 -688032 -161971 -925871 -311661 -917348 -838337 -95010 -251856 -270915 -470328 -388661 -482294 -741400 -426703 -274290 -390995 -71317 -886696 -647562 -274810 -350071 -39090 -573829 -68155 -446438 -617991 -553297 -764466 -765585 -399541 -75603 -764244 -554885 -957055 -904179 -638673 -362115 -902480 -161746 -75437 -763180 -866957 -472421 -776816 -790750 -485329 -345408 -279406 -932278 -161614 -63409 -189526 -716469 -786193 -82270 -373303 -636821 -575978 -488775 -658838 -289359 -716705 -588179 -924475 -38483 -489625 -518690 -445002 -434818 -157327 -930145 -664599 -203438 -238968 -271072 -247649 -444997 -156235 -644380 -129645 -691450 -206257 -223958 -374011 -55715 -205373 -383257 -803485 -634196 -7411 -579788 -500280 -350603 -372911 -284136 -271282 -664889 -347928 -136759 -376396 -95074 -396072 -658679 -764258 -69141 -95570 -457961 -476969 -707386 -941980 -118304 -851219 -396825 -161438 -598147 -518593 -13275 -657634 -955605 -284873 -74721 -182675 -938292 -654756 -51642 -82167 -364838 -284981 -650888 -844565 -444761 -36407 -260333 -172891 -290158 -802290 -439741 -157381 -866944 -723297 -763495 -205450 -202536 -285603 -284507 -670978 -287050 -430423 -274791 -756749 -664268 -199466 -22849 -522872 -536006 -156135 -703659 -636020 -403347 -767949 -55128 -461963 -630798 -833789 -285316 -685066 -880733 -524644 -741854 -415105 -901139 -48234 -249577 -17434 -228039 -689648 -109043 -420993 -933342 -287932 -598276 -752239 -57221 -902620 -482781 -778093 -561433 -525201 -375851 -441732 -371991 -894509 -533398 -297938 -693762 -914156 -225640 -898517 -721775 -125077 -642471 -741209 -327530 -9371 -790345 -705280 -932828 -19866 -132066 -72499 -288001 -919935 -782257 -617898 -265027 -863866 -755252 -706965 -188990 -737832 -204949 -579859 -623616 -284806 -71114 -252329 -606762 -134476 -244401 -686898 -163086 -552834 -319227 -356126 -735270 -778390 -18942 -654056 -763918 -594029 -490649 -141564 -46449 -428089 -375891 -944246 -880483 -338419 -417014 -576400 -191454 -11774 -162415 -162920 -431346 -461826 -19131 -698527 -764423 -506292 -32757 -31805 -384089 -800737 -926666 -517883 -519225 -489725 -40017 -713012 -188371 -635854 -916016 -37540 -919962 -12154 -419954 -204537 -42617 -770038 -109870 -826140 -30815 -653762 -596632 -443238 -542062 -710771 -803091 -908240 -638312 -540593 -288705 -646485 -438089 -772088 -691200 -345815 -720298 -85330 -9833 -696311 -348382 -637397 -74399 -698267 -129344 -885543 -897626 -809601 -593798 -805325 -920020 -281270 -340697 -868928 -653116 -600464 -86866 -327386 -390103 -390470 -639027 -822546 -11699 -368926 -117207 -26213 -686340 -768747 -277303 -763014 -557045 -539121 -55779 -648355 -608446 -810448 -409834 -901860 -839254 -280448 -219536 -21389 -545859 -435920 -528432 -501721 -536559 -579942 -916455 -926558 -37798 -25013 -120000 -655778 -18368 
-716968 -589743 -318577 -883532 -600550 -412676 -786555 -900461 -602911 -374632 -109163 -591692 -53152 -772367 -772523 -357020 -127625 -438049 -912238 -451793 -571722 -787000 -937642 -796217 -875575 -245008 -276601 -50839 -775433 -713637 -766047 -851142 -180258 -375504 -388364 -403477 -256734 -771049 -598008 -87083 -726744 -292218 -677724 -328932 -942850 -289089 -338983 -426723 -109369 -676299 -157351 -303263 -677174 -294560 -456473 -638610 -38167 -574260 -646340 -437472 -35856 -820343 -36635 -263572 -316682 -897048 -33311 -637042 -696688 -546661 -609179 -489726 -335392 -461559 -545833 -140274 -411137 -245380 -878567 -494210 -535243 -563397 -280577 -719132 -731838 -451157 -173616 -204073 -520615 -851135 -588320 -749525 -353569 -71275 -511055 -349367 -770435 -949209 -456553 -285413 -949055 -337945 -773937 -622638 -416413 -831102 -298043 -172475 -230917 -796095 -38866 -598753 -346535 -795114 -817774 -278744 -784383 -203753 -869701 -22313 -625785 -501388 -188615 -543132 -649870 -456314 -627063 -916683 -879659 -591453 -646704 -732117 -716540 -897767 -840088 -625267 -792851 -328187 -749485 -119060 -864256 -904625 -770288 -203787 -454143 -198429 -941837 -262396 -269326 -550710 -910721 -118189 -517631 -725902 -566674 -894350 -438788 -415697 -775442 -412877 -913827 -407899 -942219 -927197 -553486 -824654 -564589 -436587 -855825 -843281 -864823 -81548 -17027 -689606 -833994 -186035 -149302 -743765 -167602 -820642 -274693 -511211 -761495 -56699 -522876 -896158 -425816 -558870 -462534 -834209 -306790 -66950 -437019 -426659 -946407 -597407 -343999 -56602 -364270 -527674 -902517 -277515 -650584 -280786 -436485 -883380 -317053 -348572 -598023 -635578 -343980 -75704 -59237 -52998 -924251 -174695 -442499 -551281 -285522 -201717 -414844 -902697 -575320 -437976 -577597 -311192 -229514 -78618 -287751 -863366 -674726 -821124 -784825 -391089 -327761 -813175 -424279 -309699 -826658 -235208 -680657 -461298 -110506 -732273 -704328 -830974 -913787 -531261 -404023 -39245 -554833 -828795 -593940 -83773 -440244 -260214 -678447 -295676 -66882 -157500 -654244 -883402 -2547 -445335 -841319 -526817 -636081 -716729 -812879 -571814 -902192 -938096 -564369 -596735 -836755 -793667 -614589 -132684 -545273 -13460 -300803 -284857 -574049 -473212 -942006 -731309 -236273 -9790 -536868 -911185 -19276 -597042 -910057 -797429 -46666 -437954 -821686 -724440 -634462 -906014 -581008 -810170 -63630 -476455 -693560 -187039 -884458 -635159 -258003 -892491 -597364 -284703 -771884 -440510 -825726 -109002 -379289 -909656 -188659 -257191 -763032 -902911 -161361 -327584 -55005 -779137 -497894 -660042 -906607 -783120 -256819 -570429 -13780 -183242 -162862 -775358 -770412 -55098 -763166 -451442 -764506 -114678 -785457 -914303 -518603 -667183 -721925 -764035 -187879 -204491 -627375 -703697 -437809 -14995 -650099 -370997 -797342 -926509 -384054 -763931 -302676 -39197 -811902 -106724 -735782 -330466 -638979 -797510 -652041 -913908 -581579 -294753 -292877 -18684 -342250 -398674 -19067 -61929 -859735 -596680 -106334 -416760 -785586 -495749 -158585 -504714 -340665 -712815 -451609 -291370 -34185 -249450 -151303 -249511 -78611 -438019 -478303 -850933 -366011 -529025 -533883 -55771 -504565 -556876 -580488 -597929 -762757 -830790 -562364 -852716 -424073 -382742 -373912 -465561 -639044 -677213 -106253 -63672 -24325 -77515 -364913 -371022 -202481 -163553 -788596 -208961 -652568 -523470 -578753 -136226 -765595 -575451 -116726 -162907 -258195 -274476 -371038 -656136 -777901 -579856 -109469 -99334 -649621 -75486 -272324 -940441 -236269 -732268 -346166 -526331 
-916456 -629425 -324573 -124458 -269526 -626257 -936247 -136787 -770987 -902212 -950094 -663319 -18833 -24175 -911078 -204469 -436099 -83723 -535043 -78037 -325964 -43717 -778055 -192917 -764257 -222119 -785466 -90460 -708937 -835235 -791835 -883717 -898078 -684200 -716746 -162145 -514725 -288199 -949088 -380913 -300304 -108890 -144643 -673189 -411266 -356164 -542293 -902164 -11762 -464899 -764599 -693818 -855755 -946963 -784770 -189077 -80147 -913693 -346283 -52625 -259360 -834025 -188532 -635699 -944381 -434141 -605686 -7403 -778297 -55705 -56722 -454647 -303903 -831311 -204688 -315561 -596744 -593715 -796113 -838169 -330613 -169355 -369803 -501263 -48641 -735985 -927084 -788726 -626091 -528412 -777301 -582325 -767695 -746907 -136593 -86607 -569917 -651795 -236185 -13102 -482198 -22993 -69673 -78712 -635923 -260270 -388541 -494615 -474078 -946594 -640482 -432008 -437696 -483795 -474859 -592754 -106187 -206390 -617015 -605678 -284113 -514673 -662399 -619113 -204093 -830372 -491873 -771484 -956611 -431886 -762843 -374970 -499106 -600022 -651547 -529020 -133969 -30671 -133176 -650713 -181372 -73933 -98458 -517896 -850563 -205352 -221516 -570571 -55818 -647962 -596741 -836064 -632900 -527154 -2101 -836060 -635319 -941523 -81534 -18765 -366149 -618097 -937831 -604126 -763441 -321997 -908012 -618212 -568189 -437348 -383994 -9757 -148502 -326780 -375882 -953933 -483964 -491068 -297774 -666975 -121228 -443597 -452335 -588040 -841997 -188372 -70897 -491403 -236027 -416662 -470716 -716513 -248373 -204827 -694372 -570369 -105996 -770824 -205756 -446433 -517708 -798880 -839080 -521973 -585400 -56557 -782186 -504833 -911846 -18607 -28063 -428565 -650640 -779793 -55772 -637385 -340538 -287203 -455817 -594004 -659689 -36223 -865474 -943608 -18950 -117968 -857449 -426775 -913668 -599962 -456309 -735272 -685328 -886827 -105729 -804842 -388309 -534471 -550873 -599326 -461503 -261364 -680723 -785534 -836661 -726407 -810070 -268883 -348103 -462098 -205343 -285070 -31755 -505697 -876576 -59016 -462499 -706364 -800886 -317361 -309677 -763415 -200517 -633933 -773900 -163935 -390253 -855455 -433432 -713650 -656148 -538347 -205254 -69677 -411407 -341669 -833932 -436944 -106359 -790638 -693719 -170544 -65122 -875738 -205456 -596203 -142334 -927494 -904134 -288637 -256368 -117202 -598013 -641055 -474288 -901255 -650475 -864100 -755126 -942339 -768001 -385257 -135393 -285642 -920010 -698933 -803280 -905324 -821883 -456499 -204502 -348584 -445327 -606804 -65881 -579487 -924090 -579432 -234302 -362375 -777290 -763185 -252178 -837979 -578758 -641408 -528292 -382039 -750261 -670682 -74635 -314631 -35003 -162939 -391800 -472074 -304338 -261015 -778054 -377306 -506723 -850820 -911542 -828698 -552705 -10324 -810801 -158000 -428183 -184008 -658264 -140239 -540265 -642779 -538964 -725442 -226898 -151070 -105569 -289120 -188792 -418818 -405064 -108985 -734326 -25561 -340284 -110327 -82352 -156471 -917404 -941519 -272857 -278897 -349962 -570470 -638756 -772470 -707699 -44450 -732013 -684104 -23968 -32498 -598415 -109294 -438065 -78335 -8242 -759789 -452480 -887286 -205446 -885541 -616493 -650225 -537074 -411291 -686329 -788603 -598317 -905395 -545839 -600366 -943737 -76854 -852140 -362208 -395592 -454930 -84513 -658767 -203388 -797430 -162459 -723719 -12433 -390521 -770687 -783888 -858032 -47296 -772414 -551859 -715255 -402043 -483412 -779773 -769833 -581883 -516584 -167310 -66555 -865892 -161791 -870468 -792619 -681204 -667307 -183314 -116056 -369234 -777525 -765527 -497308 -811826 -486804 -285065 -243113 -698913 -546068 
-189011 -902119 -642994 -693536 -18565 -831462 -353240 -822656 -778195 -174643 -564583 -297708 -327942 -260509 -472333 -447981 -285464 -600188 -472574 -516187 -938221 -259803 -52037 -456435 -686211 -805392 -761275 -739550 -343996 -215969 -444623 -415221 -682301 -199537 -906721 -243728 -537975 -950754 -173913 -437547 -956701 -20525 -275640 -70583 -573516 -16223 -936887 -850261 -10507 -446241 -856187 -344292 -763199 -117986 -652618 -476514 -313303 -164674 -60418 -611646 -247033 -775654 -67407 -59086 -308035 -188104 -245453 -503298 -900135 -901440 -8456 -116713 -800789 -340215 -832589 -10276 -426056 -755171 -558797 -280908 -275288 -633107 -722314 -754058 -864253 -235895 -5217 -1384 -284945 -423588 -17650 -528363 -726995 -671342 -133991 -415227 -564487 -369856 -771476 -344333 -572254 -415826 -204084 -511755 -476672 -496561 -275330 -314808 -465021 -817721 -809676 -731714 -851037 -136409 -713657 -208304 -927072 -674964 -464339 -822468 -834248 -810337 -597429 -909922 -917335 -12844 -38878 -205458 -437570 -49322 -901857 -372479 -624243 -756407 -640435 -258461 -129112 -356749 -386053 -769154 -723771 -16403 -809652 -588677 -36529 -626674 -728501 -432457 -822182 -453603 -563154 -109386 -916004 -684269 -429152 -650661 -571678 -632461 -117190 -581674 -748335 -368286 -142436 -293985 -785313 -331561 -88876 -422741 -162801 -462131 -659212 -635872 -874747 -763012 -110350 -257423 -498259 -132307 -52215 -284849 -667301 -249440 -80174 -906632 -878491 -651083 -256083 -797452 -879270 -663932 -778017 -874937 -435236 -523653 -186937 -287744 -850332 -19175 -929046 -678010 -258174 -579693 -922708 -136718 -204799 -899053 -343109 -894306 -271787 -673988 -598177 -639814 -315723 -188441 -109333 -606107 -227200 -625828 -369261 -204443 -70495 -678000 -129709 -313350 -419104 -124553 -564218 -665890 -636118 -638142 -894175 -294720 -342902 -875994 -82145 -910142 -188547 -271116 -244318 -745175 -637538 -856930 -465041 -885285 -436323 -19212 -188114 -221989 -830350 -84752 -810926 -574314 -432214 -327821 -850066 -404375 -109589 -786994 -831429 -409591 -39113 -32775 -369339 -454522 -703714 -715430 -745932 -800028 -14520 -206041 -828158 -198798 -306700 -755508 -726578 -635617 -428448 -934787 -640611 -285430 -841044 -536747 -626141 -265622 -664902 -446420 -941773 -182772 -322951 -43164 -425635 -396474 -900963 -72514 -469232 -637114 -701188 -19214 -75115 -638587 -444239 -920464 -54165 -328847 -470565 -387902 -345202 -155073 -312762 -278748 -848533 -285449 -524061 -256772 -329244 -640911 -517707 -215244 -766475 -785498 -950998 -72123 -344314 -929215 -420731 -39512 -187953 -794600 -462540 -574217 -750323 -241683 -857502 -790554 -306200 -650470 -129398 -763885 -247595 -843160 -271752 -158988 -66223 -796713 -537862 -19747 -901681 -78709 -567346 -658548 -75398 -200017 -69054 -38690 -809485 -204942 -23059 -185827 -480166 -285187 -650572 -648724 -596881 -230701 -425706 -255871 -487472 -677619 -923094 -705385 -436355 -901299 -646699 -562980 -815860 -458422 -310380 -580211 -923147 -777366 -516756 -605792 -445067 -815814 -468358 -699639 -435878 -733322 -276359 -678848 -577712 -386272 -285232 -803276 -232775 -663912 -887179 -684721 -11996 -803054 -477179 -436444 -433199 -336985 -498017 -78785 -56502 -956419 -387126 -110468 -643015 -391764 -268882 -538259 -881670 -588251 -894201 -43636 -21490 -764196 -564469 -828987 -364135 -950964 -109142 -916627 -571843 -38481 -499352 -461323 -485517 -272529 -799297 -110128 -725159 -109641 -806024 -769136 -128849 -278562 -436564 -527333 -412403 -18840 -62493 -552925 -843209 -329173 -902194 -769033 -763248 
-150933 -756343 -412386 -650613 -25848 -367622 -426949 -754672 -51567 -785499 -189045 -585359 -784078 -571731 -162570 -833794 -455371 -116793 -256710 -43176 -403938 -507577 -38268 -38172 -25936 -76932 -70783 -864791 -90245 -754575 -794623 -456290 -879090 -170270 -599684 -204843 -885521 -694100 -18940 -165173 -206167 -129662 -180093 -56654 -244925 -931425 -593990 -432528 -673048 -204146 -550651 -803366 -56683 -680600 -391551 -642935 -887427 -730872 -42692 -544418 -452525 -257142 -471588 -246052 -927229 -436788 -406384 -84800 -155482 -390140 -292724 -886789 -186068 -322336 -640974 -656039 -865317 -795729 -743643 -533983 -810263 -666937 -564991 -101315 -298851 -165745 -777984 -99931 -618196 -416992 -688155 -243078 -232302 -236115 -595052 -931358 -887334 -577443 -795391 -369126 -797402 -390887 -205082 -473004 -481394 -148341 -668986 -883322 -59573 -354158 -927332 -123223 -612931 -623383 -273140 -822323 -948665 -438111 -649284 -50198 -573878 -204432 -522312 -464274 -732039 -205173 -650803 -281904 -492655 -327692 -224276 -807316 -375809 -546719 -755304 -171197 -569374 -731880 -382198 -95147 -897570 -945803 -342756 -8161 -406761 -588788 -504594 -570403 -713255 -858057 -72402 -827967 -245775 -275248 -188574 -597482 -658651 -698854 -698746 -901318 -371990 -559139 -793666 -902500 -39956 -889463 -848915 -534631 -245858 -814145 -129432 -731020 -938557 -689349 -199170 -384252 -714106 -719529 -322579 -447808 -501987 -771295 -96485 -423256 -161827 -188929 -765606 -701720 -456099 -810608 -933073 -817285 -828407 -339886 -898681 -501435 -109309 -757312 -784938 -9913 -139845 -630153 -20446 -461013 -403344 -935858 -563449 -57181 -783914 -671341 -404834 -912662 -455077 -343036 -598142 -633697 -459414 -294159 -17383 -530736 -693497 -790387 -897539 -301767 -227939 -305852 -740334 -221610 -67319 -454561 -518649 -165775 -18803 -561159 -783865 -822136 -954004 -433309 -178245 -170322 -323172 -875485 -893684 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/architecture_test.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/architecture_test.jpgl deleted file mode 100644 index bed7092f418a772f8e4794fc90dde2341e211e59..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/architecture_test.jpgl +++ /dev/null @@ -1,2500 +0,0 @@ -690013 -903246 -211486 -258940 -35368 -526797 -98257 -151795 -929140 -440632 -11485 -306325 -896929 -147556 -46652 -428881 -542874 -303602 -256814 -43949 -338393 -376385 -749070 -113588 -853238 -928489 -904166 -855801 -167030 -935352 -263513 -849509 -230850 -175625 -631294 -738487 -231003 -879864 -879161 -831424 -593920 -889597 -838006 -257328 -191582 -894565 -638408 -482713 -159172 -622649 -68648 -938056 -472389 -121222 -856884 -626544 -226999 -603448 -68217 -674118 -606536 -721353 -566662 -34916 -331031 -541676 -324647 -824551 -868062 -804988 -34336 -819748 -735712 -749509 -102123 -32682 -660403 -795134 -147140 -883607 -422398 -272428 -652691 -151251 -808509 -853103 -85262 -226503 -291190 -132337 -174771 -916809 -32050 -730556 -634165 -176323 -30616 -489791 -278819 -112927 -212665 -546055 -934387 -186392 -208494 -64597 -337287 -631750 -361159 -43733 -772726 -443396 -393421 -338205 -137120 -236921 -424352 -849899 -757476 -32054 -533644 -812751 -430349 -798036 -137089 -278573 -338463 -930695 -904069 -863857 -848457 -368417 -639376 -130812 -771825 -44355 -767830 -375854 -136460 -809215 -472452 -211697 
-230973 -186448 -215565 -34881 -743207 -803991 -717002 -765810 -20540 -27926 -75947 -263603 -903885 -147472 -808494 -105054 -735723 -849499 -163686 -738115 -248274 -812579 -701772 -176556 -255596 -892753 -297038 -913783 -17957 -779466 -275508 -606351 -517019 -263503 -526513 -137251 -878477 -921151 -230882 -903220 -91703 -482011 -9787 -231165 -356382 -342150 -505881 -276169 -482638 -778253 -684060 -282874 -926547 -494465 -368633 -12227 -355421 -64510 -927651 -75595 -170168 -13529 -574011 -139213 -847492 -803596 -770650 -16657 -930648 -177956 -501447 -723554 -280732 -771470 -192635 -173217 -230459 -35900 -423724 -71479 -502526 -34127 -627887 -92760 -341499 -339568 -247491 -623705 -802896 -113191 -360169 -238483 -339725 -744179 -12632 -64865 -136949 -696396 -664297 -229913 -904334 -903906 -453689 -169365 -137186 -203436 -663923 -331412 -260112 -485880 -928037 -96190 -347474 -210910 -192748 -638798 -666885 -661991 -857098 -749339 -10113 -448308 -90198 -933069 -377446 -96881 -64047 -309848 -449502 -34869 -210606 -165352 -248062 -393234 -196330 -246141 -166080 -365913 -828604 -131154 -64982 -263484 -166010 -11723 -865066 -86415 -244226 -257179 -842203 -217970 -387468 -64572 -126715 -155284 -164583 -801397 -769324 -897134 -898244 -857160 -817490 -693935 -893768 -406002 -623487 -85558 -41412 -814746 -863909 -317524 -147402 -315088 -186666 -638066 -760598 -893461 -324793 -78046 -773265 -196453 -206135 -821563 -388180 -639382 -49110 -165683 -613445 -926963 -451796 -6220 -126185 -106818 -236656 -798011 -137127 -420172 -462034 -751505 -677609 -911545 -45268 -903449 -141753 -104815 -136838 -74287 -926504 -54703 -137237 -698878 -320620 -848018 -791582 -339253 -289373 -362080 -924072 -388327 -743794 -227594 -502211 -506001 -930194 -563818 -43903 -208518 -345384 -786788 -192557 -606400 -343056 -132400 -90861 -850127 -690695 -653920 -396708 -935297 -669408 -228900 -779128 -230793 -676740 -164902 -394938 -339387 -639891 -210703 -500073 -71500 -429251 -171190 -680679 -154319 -43822 -260511 -879233 -177623 -438343 -667513 -65951 -40676 -56459 -244521 -435040 -802535 -167445 -926973 -163829 -616030 -376067 -224911 -836157 -930021 -695016 -96802 -899742 -72642 -231336 -11488 -639652 -146317 -329013 -602541 -9804 -11659 -824606 -151567 -210559 -821564 -706553 -325132 -648820 -705260 -342718 -303595 -303619 -403626 -157453 -447676 -805320 -185310 -338554 -521302 -314415 -235082 -173737 -289033 -3353 -20238 -237078 -332488 -350884 -639996 -627419 -12988 -558477 -54553 -113433 -634897 -778298 -631448 -781206 -283450 -33739 -797145 -171945 -770240 -797829 -383934 -102892 -177510 -439202 -233909 -486950 -70719 -179921 -677864 -824704 -606548 -391513 -794070 -931203 -801712 -246251 -544016 -34861 -429413 -856622 -409283 -303585 -863361 -933852 -413083 -331858 -895816 -229777 -413199 -453882 -917342 -137204 -24833 -852507 -794539 -260611 -464143 -230792 -164236 -122205 -484972 -360620 -880229 -732430 -490926 -280885 -424154 -502184 -105135 -741970 -298424 -429426 -797744 -65838 -339847 -11502 -276278 -888090 -660461 -174668 -538025 -912146 -592149 -153914 -173894 -196379 -2141 -431175 -165358 -454521 -710830 -13217 -20437 -719470 -88828 -13804 -47468 -470991 -684166 -272920 -230789 -305714 -233096 -234410 -697059 -31548 -11496 -326284 -939859 -889225 -680621 -332015 -624316 -639969 -924638 -375876 -770166 -286487 -72758 -363211 -524868 -16656 -945572 -237340 -888313 -339370 -157356 -514094 -20337 -230950 -165667 -653003 -770684 -116021 -769987 -683938 -645422 -930497 -771552 -575392 -914026 -774043 -174646 -422979 
-903595 -146587 -164010 -33550 -881889 -323190 -495999 -245928 -74283 -653968 -772188 -771867 -273071 -458811 -772024 -349930 -43223 -918768 -21323 -281552 -139475 -428708 -32033 -193920 -59054 -298839 -365232 -49514 -136697 -215132 -661907 -426558 -163821 -563555 -518641 -820410 -185256 -244998 -944823 -811221 -166047 -244170 -768400 -155354 -735836 -859492 -881111 -463989 -328865 -339624 -857521 -535026 -819566 -230470 -456700 -55685 -135696 -802715 -304115 -10137 -593946 -431164 -193937 -475833 -94766 -66174 -13763 -262543 -64431 -712198 -147251 -693597 -13549 -176573 -248246 -276206 -763945 -175147 -899072 -80188 -394207 -761037 -860247 -449142 -455009 -210752 -524084 -32144 -114875 -639790 -882299 -518430 -118778 -657235 -238257 -929684 -901886 -560054 -322366 -921902 -127664 -561740 -324926 -897149 -693432 -677674 -861099 -233100 -245862 -288135 -914321 -738074 -858212 -398334 -84217 -837362 -432709 -86835 -236644 -397700 -362523 -837349 -230980 -542652 -798161 -40976 -175373 -899433 -489250 -349564 -522712 -819444 -443073 -55501 -215640 -246290 -571465 -208447 -164820 -63655 -28441 -926652 -888081 -68688 -847863 -165859 -165285 -346048 -811847 -331911 -234684 -283455 -654594 -164851 -847726 -900901 -136324 -71097 -11642 -332453 -180997 -456847 -186856 -943031 -296628 -22365 -388825 -817472 -252016 -770538 -917354 -159924 -799260 -317134 -457312 -926812 -785463 -934580 -328972 -34875 -78120 -838402 -639458 -137238 -339841 -147560 -256131 -897266 -787006 -187419 -146412 -19712 -899379 -943951 -59290 -12139 -119333 -827143 -63599 -339581 -236094 -659579 -923127 -888932 -308644 -12206 -798058 -799217 -254741 -416445 -573547 -662359 -174901 -49948 -671331 -817257 -625540 -361671 -824495 -805190 -356247 -617849 -818824 -799984 -421505 -137150 -160887 -457302 -95608 -828517 -772500 -339737 -11580 -339391 -65857 -31945 -6671 -539639 -604736 -192708 -694430 -188470 -147585 -360994 -661774 -424466 -615755 -676452 -517348 -903560 -419282 -495932 -3962 -503866 -657342 -68137 -339843 -116629 -20389 -924606 -227103 -405410 -185135 -853219 -635558 -475881 -827297 -164854 -314682 -227931 -136678 -45665 -934140 -4164 -291528 -660681 -136821 -693016 -602920 -840369 -951589 -625103 -384717 -517368 -15536 -19887 -136957 -338297 -416492 -42383 -933580 -840929 -744376 -60960 -78135 -842319 -509212 -758051 -78771 -84656 -34111 -905001 -232055 -799082 -308117 -155770 -902597 -190550 -829854 -224944 -81711 -711754 -943714 -101268 -617732 -559214 -347784 -416435 -794195 -633776 -633645 -165378 -132270 -732806 -731105 -657390 -523061 -748478 -579689 -27830 -154093 -215869 -339006 -296515 -889554 -908191 -949624 -308852 -488211 -321760 -771466 -412791 -15609 -258607 -481413 -913599 -75744 -432514 -113581 -50223 -185656 -37138 -40983 -477128 -240229 -803207 -732879 -331186 -286093 -33446 -854536 -602441 -137116 -801421 -505593 -475109 -534352 -13695 -130571 -456255 -606267 -195769 -164174 -40861 -924777 -545770 -121303 -445498 -132758 -32211 -263621 -433535 -506675 -347531 -593389 -339545 -663208 -818815 -165973 -430295 -101746 -498009 -267576 -46910 -429996 -32321 -574059 -101851 -176180 -124924 -576359 -276053 -939552 -277382 -197057 -824302 -924610 -442615 -819953 -820844 -166024 -432638 -11631 -132264 -621600 -276303 -364840 -330440 -628961 -153084 -818199 -816637 -491273 -924219 -426371 -31628 -832444 -67482 -33685 -633876 -47443 -831444 -660547 -63530 -124932 -339583 -165780 -662234 -165386 -812280 -669365 -930757 -631980 -85258 -165575 -64265 -20573 -663581 -315801 -698842 -799369 -136700 -638880 -173922 
-512638 -694024 -853081 -46008 -395212 -191347 -828386 -860453 -41143 -954190 -338794 -338493 -788348 -65028 -860920 -473811 -13841 -176029 -263277 -80847 -929051 -946106 -502646 -294517 -903089 -742984 -230822 -13716 -779790 -28562 -131208 -324712 -136778 -244691 -196231 -350177 -6111 -49959 -166051 -741033 -339633 -848253 -637021 -70580 -374458 -782334 -429308 -313900 -743458 -798162 -639981 -482434 -673170 -574831 -765737 -76016 -14434 -303268 -386819 -915631 -163316 -667068 -652600 -917344 -593124 -457689 -835491 -456415 -206206 -282892 -288718 -164609 -934218 -123164 -949431 -270199 -113879 -780411 -180313 -345879 -26815 -524518 -693682 -281766 -339425 -86725 -175439 -650163 -46831 -904092 -81271 -245055 -570377 -919913 -495570 -276075 -490821 -633881 -677702 -866368 -95335 -802664 -13059 -322540 -484750 -79292 -339513 -164984 -729330 -651860 -892803 -192954 -69028 -875506 -419013 -800020 -639974 -46230 -13809 -501866 -363282 -339875 -864532 -955445 -450800 -512482 -546047 -179704 -101769 -712222 -890727 -832790 -207849 -633334 -321587 -383910 -474180 -325056 -668195 -14836 -364061 -799306 -444249 -299906 -768970 -695729 -212972 -609795 -931520 -85680 -640040 -565871 -819713 -639858 -933013 -904007 -339361 -501173 -328340 -865111 -112138 -11486 -77871 -273827 -772391 -89095 -140582 -272975 -132761 -275324 -245061 -908149 -786536 -924698 -451430 -43727 -228154 -604050 -477010 -228469 -165883 -391392 -840760 -288558 -368328 -273129 -737102 -676169 -72131 -154854 -115267 -305718 -793949 -810730 -847227 -797295 -186480 -537587 -339701 -779766 -274677 -12288 -421929 -137184 -742530 -22985 -51881 -930904 -263566 -728976 -846066 -176230 -147630 -339856 -814212 -90403 -101725 -798061 -393191 -527508 -639074 -312196 -125862 -121144 -639801 -435538 -799447 -165807 -503572 -291028 -783548 -816542 -586498 -893061 -271088 -547793 -632111 -276447 -371273 -165598 -394716 -246235 -570792 -64710 -276466 -9014 -606409 -118944 -52667 -432911 -32080 -931913 -581474 -339747 -99724 -625163 -889563 -761806 -67067 -403187 -654447 -170906 -163409 -943853 -800022 -70665 -61091 -828683 -15942 -30754 -621269 -511293 -196109 -120656 -867086 -624711 -892837 -633804 -552838 -369792 -522458 -339887 -446489 -398505 -342320 -491824 -588422 -137137 -854772 -661385 -147541 -605830 -330174 -339282 -428745 -939635 -64820 -797867 -204738 -536778 -760484 -551955 -278348 -160508 -514188 -362238 -632194 -953630 -644025 -121341 -101911 -517411 -364479 -33222 -603558 -512874 -945650 -137223 -364361 -638758 -477131 -711945 -279896 -268443 -917105 -451917 -823327 -482516 -568913 -638602 -101355 -1330 -934643 -771936 -457404 -237345 -200337 -787372 -65911 -447666 -714844 -904929 -89165 -938226 -117002 -648824 -231339 -907543 -265787 -653417 -298417 -200460 -750253 -165051 -332352 -37408 -457788 -22488 -339426 -272833 -801736 -165732 -30536 -576320 -920386 -894757 -155599 -45939 -555563 -908189 -650367 -445121 -49788 -257963 -339094 -288989 -215635 -550499 -165998 -133545 -516569 -236101 -347282 -630647 -109257 -527356 -294789 -192454 -831220 -335139 -212792 -950614 -723496 -72537 -797772 -174816 -461247 -172821 -562503 -635174 -245937 -92113 -913315 -165270 -904053 -77845 -593102 -244338 -413222 -506201 -126227 -680143 -424455 -23710 -716488 -929810 -330470 -763554 -356367 -73921 -11623 -117272 -501095 -683263 -361692 -658487 -571340 -361628 -372000 -6015 -853882 -175065 -263596 -491457 -879309 -448482 -626006 -798166 -491118 -365178 -367833 -248435 -837382 -526934 -190875 -624375 -624767 -888845 -883559 -164286 -387781 -13677 
-836819 -239199 -793072 -245020 -889478 -310477 -226842 -809794 -163812 -772983 -397992 -133716 -455961 -102884 -607169 -75540 -839592 -110402 -952929 -832971 -246392 -778116 -657792 -51337 -931917 -15866 -904056 -125336 -253403 -102556 -600839 -616316 -675479 -797965 -945691 -903570 -323602 -395272 -322810 -935187 -150834 -949379 -614885 -113665 -181011 -812427 -13822 -151662 -667207 -356167 -122491 -618466 -772220 -44102 -54513 -22774 -639922 -117161 -15851 -355890 -431699 -424855 -13954 -137108 -32465 -387348 -422139 -863906 -190556 -260620 -381870 -571483 -744593 -652587 -585009 -230970 -369890 -237872 -264967 -263395 -324459 -784211 -903377 -292047 -824780 -74156 -863591 -26930 -625555 -528470 -13746 -395078 -462043 -224024 -60537 -632089 -856923 -230764 -482477 -907615 -823882 -860425 -132277 -169761 -164805 -34646 -803582 -544629 -186574 -298433 -165237 -102642 -809432 -431166 -146856 -295776 -633826 -60957 -539571 -431971 -834381 -64725 -133250 -29690 -30720 -16757 -129118 -822815 -206709 -78488 -692619 -445239 -13657 -811815 -196071 -771438 -659424 -623828 -406012 -369347 -419135 -116605 -314510 -618247 -243934 -659504 -162210 -259718 -703221 -242545 -287608 -660447 -155374 -837285 -114978 -633083 -518326 -458955 -4121 -15779 -112046 -483684 -92157 -32928 -510093 -625361 -309839 -321215 -261020 -273803 -278902 -691559 -186662 -926244 -57261 -377490 -633416 -490293 -899102 -635283 -369888 -31627 -291487 -228327 -662890 -54223 -322359 -842836 -237398 -912498 -600142 -662818 -63621 -777028 -164074 -888214 -362517 -864053 -237160 -136289 -931498 -772704 -861043 -101949 -79322 -326464 -173381 -146562 -687004 -126152 -72679 -231255 -1124 -70916 -243979 -263532 -165409 -732709 -115723 -364233 -829536 -332022 -433861 -221082 -818005 -954614 -603848 -235242 -771650 -698588 -639851 -310129 -165498 -264021 -123030 -74060 -146907 -633362 -875959 -377360 -684785 -639218 -428912 -255314 -396615 -669353 -114080 -277252 -170200 -231158 -22811 -454696 -87105 -634234 -778358 -449239 -113349 -934017 -896259 -418822 -31998 -110296 -592728 -235265 -70188 -173815 -667177 -185697 -330966 -155365 -229658 -791625 -832284 -180927 -377078 -894571 -165359 -20397 -920315 -197518 -580777 -726922 -848824 -721154 -897652 -443920 -244881 -908055 -161969 -442359 -638284 -382378 -744297 -11461 -948919 -623338 -531502 -371663 -446526 -390925 -147552 -639881 -633025 -235906 -831158 -51484 -905791 -46416 -442983 -255518 -457382 -15644 -210906 -180894 -365508 -139403 -136889 -338851 -428789 -76761 -636323 -904073 -727050 -173269 -576 -32242 -275008 -810833 -444967 -693627 -927515 -935689 -832786 -674276 -248244 -71264 -173820 -832597 -343896 -67480 -339739 -824205 -276311 -362000 -735428 -934304 -850864 -284406 -512525 -20546 -164832 -176246 -637455 -863183 -932735 -606521 -342632 -575034 -803536 -432633 -136292 -856937 -12296 -163801 -31968 -640007 -662252 -818230 -853000 -529175 -22616 -948963 -523894 -189952 -480993 -43383 -186846 -11362 -354358 -801668 -444071 -523211 -160418 -103710 -888285 -859183 -383988 -858197 -518255 -145241 -474444 -797828 -294873 -339237 -872559 -647657 -429277 -747985 -675917 -122470 -412979 -287295 -237547 -652480 -339645 -331130 -684742 -765681 -677252 -564354 -661955 -762442 -40463 -904162 -338959 -165971 -41358 -516610 -205801 -46028 -559216 -171118 -316960 -818385 -867022 -84751 -315721 -31615 -639982 -318750 -888792 -396932 -281139 -633709 -512447 -41409 -29141 -907534 -112059 -377222 -567704 -413008 -192166 -163677 -8843 -690440 -906829 -334178 -286946 -702987 -330775 -179779 
-947123 -216064 -362471 -633562 -927879 -797917 -268442 -187539 -317819 -900161 -68366 -92137 -564010 -262623 -17342 -443072 -459339 -159629 -427985 -756249 -291699 -717539 -73343 -155623 -238715 -552931 -639741 -209134 -339118 -637574 -365207 -735999 -10075 -773306 -126014 -639988 -504293 -31950 -263478 -878604 -889526 -141649 -262841 -292236 -750026 -947108 -632919 -339890 -735266 -132258 -635790 -322880 -835103 -606189 -863197 -279102 -712077 -444387 -173035 -54488 -34900 -638006 -861059 -517730 -9316 -348464 -754559 -292723 -632578 -26234 -81187 -363717 -303115 -396378 -235749 -110427 -63475 -625012 -394806 -245281 -51369 -422045 -126255 -886490 -388028 -935364 -276859 -503368 -133268 -136836 -863520 -253721 -6146 -735034 -732762 -101460 -744246 -253833 -824647 -824610 -230983 -339899 -454619 -391222 -879483 -314287 -465073 -661756 -135618 -399605 -524008 -633720 -14699 -771082 -430411 -635173 -914115 -165803 -76343 -11524 -42753 -846608 -773764 -397329 -158867 -215637 -676136 -114175 -900877 -46261 -288477 -362139 -889649 -483742 -12185 -415214 -195557 -613021 -353366 -243835 -534687 -714607 -663874 -711700 -927086 -229376 -511080 -627240 -788537 -339889 -695873 -339310 -930324 -763164 -164942 -863075 -238138 -270993 -648555 -420174 -757833 -832825 -204728 -135751 -890892 -72478 -837925 -441886 -796852 -909678 -638221 -417029 -215758 -185243 -40344 -577898 -561521 -237124 -85245 -732814 -930717 -663863 -136870 -34492 -588283 -62760 -861278 -332288 -948959 -847490 -767699 -382992 -326442 -165513 -710984 -46575 -896138 -827074 -635576 -225529 -322288 -82298 -564172 -430342 -237171 -705391 -505912 -55820 -824482 -667083 -691636 -165418 -950725 -756472 -136035 -443578 -743456 -794911 -50201 -34113 -137158 -239292 -382294 -72776 -844384 -793505 -644391 -667117 -332153 -547143 -339524 -315668 -192817 -575180 -771602 -504225 -63142 -175265 -125693 -200725 -954737 -882827 -811958 -904091 -856585 -126699 -173898 -850946 -834496 -797939 -279761 -291797 -556433 -303328 -836874 -338621 -168123 -239237 -797063 -164730 -288138 -6027 -416928 -340778 -812021 -175187 -339116 -375303 -925863 -477200 -403643 -388358 -69979 -64680 -210837 -432883 -24952 -662921 -136953 -483179 -185870 -60301 -56761 -913050 -638689 -33176 -166090 -835290 -81723 -801717 -377363 -70829 -475308 -762158 -633204 -176486 -339721 -174291 -63913 -147465 -113965 -663698 -822828 -839451 -215706 -914240 -182844 -610917 -276960 -68507 -165053 -695947 -832080 -45309 -600994 -172378 -339868 -101796 -276401 -176570 -516278 -866912 -906651 -303630 -122990 -518817 -714446 -32966 -339641 -713877 -397921 -653618 -912495 -245683 -738397 -339872 -231312 -504215 -2595 -416298 -908129 -26937 -54636 -141699 -715782 -648751 -924756 -892830 -730762 -7639 -133429 -861343 -839472 -797623 -854659 -32209 -916436 -365482 -852868 -303580 -835377 -505962 -210832 -436309 -771005 -286276 -368630 -339170 -926180 -152916 -54670 -121206 -25239 -361123 -273153 -94915 -388914 -906430 -402864 -305808 -941495 -74699 -137212 -244015 -213371 -113621 -315321 -920101 -593935 -136223 -438726 -744401 -860170 -863828 -577633 -835401 -457707 -811876 -165614 -627631 -51775 -73796 -65985 -41349 -888037 -75740 -157452 -364831 -640023 -101543 -136760 -275223 -569905 -618661 -710927 -325151 -236005 -110487 -859240 -173786 -331857 -730146 -61035 -432500 -632204 -544422 -175814 -96393 -824066 -888238 -339673 -234541 -802730 -734489 -803259 -422622 -711752 -795194 -214987 -732683 -137020 -49924 -757044 -573124 -163854 -904137 -920018 -457534 -772202 -165705 -505471 -16404 
-342918 -840202 -834978 -56135 -432549 -904144 -113532 -84769 -602698 -634597 -446763 -523064 -536731 -756510 -245496 -905166 -339703 -818640 -470211 -306849 -332199 -510775 -906731 -456464 -481138 -331954 -660000 -137098 -147449 -71911 -854429 -653051 -929536 -239384 -603704 -35539 -695377 -374751 -126249 -54931 -123053 -395196 -362033 -914711 -237319 -230208 -364933 -521496 -46090 -540 -841628 -933355 -54460 -953722 -573182 -232443 -816857 -761224 -326083 -363805 -301471 -136032 -34893 -914231 -40973 -50145 -696687 -476090 -504456 -526123 -722416 -889534 -63644 -696443 -124279 -541800 -339506 -712020 -542456 -176500 -602116 -786776 -797653 -78285 -63456 -487481 -907953 -904103 -81708 -423835 -242229 -339279 -34184 -222140 -766133 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/architecture_train.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/architecture_train.jpgl deleted file mode 100644 index 0b4bf52ffe91a8d6dcef8157a4edccdde60a1366..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/architecture_train.jpgl +++ /dev/null @@ -1,2500 +0,0 @@ -368079 -290819 -49210 -210836 -639307 -102300 -176159 -904194 -34015 -854623 -64966 -954496 -929000 -31553 -433557 -95278 -178804 -291272 -68306 -627147 -857229 -734891 -840924 -404505 -136754 -240548 -33457 -600557 -444557 -353993 -633149 -32665 -772031 -55861 -402075 -12290 -170221 -332324 -914159 -324703 -361607 -262611 -432430 -676821 -79297 -100431 -659494 -160406 -73969 -606151 -797285 -884283 -185413 -366025 -342339 -12660 -137123 -195053 -636995 -912837 -488743 -122957 -638800 -398265 -739866 -339882 -345926 -847238 -820925 -482221 -251863 -262953 -711304 -751944 -13787 -366124 -165802 -758363 -23031 -606347 -126664 -316419 -383404 -377520 -712006 -693737 -126168 -190970 -63678 -200443 -191558 -112486 -394224 -561588 -587547 -879257 -85021 -914083 -777052 -107471 -321349 -755681 -669548 -930435 -634320 -934371 -577298 -336248 -12121 -274834 -482723 -44173 -91783 -633316 -280544 -407711 -861042 -65624 -111579 -35757 -821040 -483929 -852729 -849734 -647998 -362731 -503219 -744124 -109815 -96688 -743360 -638811 -943971 -443762 -268939 -624840 -14721 -427683 -99703 -605891 -325094 -499204 -124609 -828199 -22646 -769126 -368679 -567972 -815358 -587511 -212464 -611619 -564409 -125381 -102427 -64444 -365093 -639821 -649019 -136968 -330435 -638493 -289114 -165739 -290555 -328592 -12244 -861316 -41403 -668500 -393445 -676646 -798053 -297751 -83646 -428608 -442516 -273254 -856926 -477494 -934607 -109188 -265497 -897267 -11518 -183677 -150706 -314561 -903948 -941966 -657785 -760300 -195781 -633587 -485527 -638616 -860647 -46000 -771440 -428857 -94604 -383252 -635566 -274565 -930293 -226728 -893051 -754361 -856949 -765762 -231092 -695986 -31100 -356688 -500 -839513 -139756 -822464 -496378 -35857 -180283 -35523 -860287 -834975 -205554 -400470 -103984 -647304 -743876 -369869 -186608 -810628 -138706 -929062 -770510 -244955 -711341 -834546 -315683 -396937 -164350 -12183 -225945 -633810 -167993 -129397 -449596 -147619 -602439 -808261 -136365 -518376 -940726 -229379 -135752 -394765 -136378 -895214 -648411 -610498 -766154 -835404 -32036 -462700 -121980 -759924 -835296 -742723 -639341 -164054 -605605 -296205 -771249 -23971 -339825 -196370 -62051 -771047 -893258 -431973 -432771 -847598 -228264 -632143 -197495 -688035 -563534 -853155 -903573 -164340 -945901 
-442682 -339609 -930456 -857278 -262492 -661623 -232358 -840417 -488379 -524264 -761520 -30174 -339642 -896415 -930674 -21752 -233324 -146906 -73840 -504973 -663628 -664205 -289337 -592964 -130816 -548969 -816770 -696920 -914320 -411270 -778349 -395359 -626992 -606578 -423265 -332123 -736524 -362350 -155356 -155025 -27059 -711252 -711411 -61699 -165587 -70750 -639845 -798656 -67786 -950234 -155399 -940069 -623512 -31399 -331177 -362225 -50203 -46341 -331997 -633638 -126302 -505497 -852593 -574791 -773762 -121493 -943705 -396754 -823349 -542230 -516735 -625052 -237389 -136969 -639978 -45678 -332249 -164966 -96702 -442660 -836307 -72082 -817498 -136656 -587771 -103156 -456897 -450525 -339866 -588727 -13800 -645536 -126143 -14978 -857957 -857946 -31996 -522977 -669847 -285182 -185674 -828062 -879102 -137210 -696666 -883347 -183030 -10042 -90393 -626879 -9607 -537394 -432579 -180759 -136732 -442535 -30238 -797603 -29495 -161125 -501055 -147298 -332062 -507455 -455767 -403169 -122174 -880423 -639317 -794839 -950774 -414488 -30106 -137135 -160251 -441810 -625122 -365787 -199018 -834053 -907816 -343248 -588210 -85916 -442306 -12170 -195262 -771163 -891530 -803126 -70808 -33244 -943765 -362286 -13559 -789137 -756427 -122776 -926269 -105046 -771026 -121531 -278384 -817550 -40622 -703513 -176458 -863654 -72109 -13039 -373863 -944156 -426246 -28561 -137193 -423257 -505888 -930799 -328879 -662121 -324450 -653227 -510882 -817707 -173648 -477446 -892974 -711218 -711468 -188116 -557118 -395539 -242753 -454070 -396521 -298131 -155526 -640963 -121734 -564305 -248531 -163950 -809898 -797680 -298898 -338697 -146226 -81151 -165934 -654775 -287168 -485108 -228254 -338738 -101348 -661019 -578630 -331852 -821129 -603619 -146396 -319916 -365626 -728667 -541719 -771294 -28008 -950736 -416446 -1328 -444786 -857501 -395077 -771585 -703194 -343732 -101922 -163981 -846689 -613064 -338491 -623277 -86947 -915636 -315286 -775258 -917520 -75985 -571402 -913121 -849813 -359918 -926987 -857896 -795474 -639833 -10290 -339497 -640651 -172695 -362711 -129414 -897258 -20112 -906414 -617056 -207836 -282770 -155485 -29444 -61746 -759755 -662238 -135207 -571741 -904182 -938150 -456184 -442678 -586713 -694339 -773962 -12143 -861253 -798378 -363899 -612780 -298429 -543924 -146258 -805867 -388560 -928367 -31796 -244541 -575723 -574039 -765316 -602802 -938747 -230474 -236901 -559087 -458301 -17413 -810409 -113167 -264160 -16460 -904185 -888284 -783938 -239214 -441856 -69845 -340968 -20723 -523738 -766271 -694780 -103881 -235316 -761618 -332013 -482790 -271643 -303380 -888325 -147509 -722793 -136917 -707248 -35187 -423582 -639957 -669733 -322847 -889669 -426578 -13636 -120713 -85234 -166012 -104027 -663531 -183559 -513634 -802804 -638080 -462008 -853002 -631661 -833022 -17397 -491102 -26453 -86714 -129129 -165943 -772222 -163274 -113465 -799240 -210855 -945745 -376488 -950811 -564513 -34318 -191557 -940408 -98270 -923688 -422066 -41232 -44724 -638275 -137077 -195590 -328224 -206221 -799690 -42320 -78588 -175499 -403678 -95001 -69159 -85220 -536438 -906792 -749593 -571102 -27611 -245499 -114160 -40953 -865653 -332103 -892740 -84541 -26835 -164658 -494594 -274921 -927059 -295769 -664064 -35695 -81313 -516842 -638785 -674233 -16751 -771254 -765082 -231270 -246189 -801424 -246039 -16578 -753470 -849151 -610523 -929085 -930499 -674488 -41900 -121914 -360736 -262377 -136869 -3387 -63565 -916976 -450885 -332281 -191319 -824633 -679888 -947146 -192157 -704159 -137040 -848808 -137168 -886562 -648036 -825972 -902746 -11180 -903997 -443997 
-518454 -749452 -165969 -328007 -103991 -87475 -1052 -712152 -10332 -542818 -758939 -880462 -116139 -623681 -822598 -346153 -339723 -82161 -41378 -817057 -13824 -95005 -462522 -322967 -184227 -833907 -64745 -851136 -483858 -766854 -820717 -331143 -791953 -620034 -687862 -234408 -80572 -236959 -506197 -41332 -832441 -589371 -231071 -903936 -743919 -644451 -186848 -11505 -660405 -232544 -474663 -34137 -938645 -288393 -585717 -828712 -426595 -474316 -46087 -71327 -771355 -147466 -913960 -295538 -184895 -818632 -435221 -503192 -420712 -886897 -814826 -523787 -823904 -861049 -446654 -673607 -40416 -729092 -249361 -238048 -361456 -856795 -505381 -11904 -208597 -738149 -605869 -114206 -802645 -874199 -676418 -40649 -797967 -861248 -402762 -687084 -742875 -659145 -903424 -494483 -820093 -64808 -232497 -157538 -49194 -926501 -339283 -426748 -15163 -320872 -178543 -828658 -782459 -896486 -889524 -362104 -115257 -930201 -790028 -260362 -134562 -395370 -273212 -451856 -716695 -605079 -607368 -195404 -632240 -903918 -34365 -55370 -779591 -42903 -575505 -818655 -932953 -251849 -610084 -165206 -593909 -918495 -113647 -953692 -307883 -414428 -364340 -339608 -175074 -137222 -652976 -210874 -633942 -861028 -684192 -96951 -394698 -567966 -72233 -121557 -281225 -883176 -321776 -79082 -481207 -458148 -147538 -325076 -276035 -40796 -800623 -672165 -863804 -609404 -633032 -779745 -65023 -120512 -703833 -569904 -735761 -412822 -158102 -188591 -795025 -237115 -523651 -730984 -41102 -770638 -339891 -125073 -520238 -897296 -426746 -185120 -889668 -242887 -846272 -63227 -861151 -365057 -789505 -366205 -298257 -317780 -527026 -165929 -601731 -635327 -164949 -146427 -734493 -638164 -165933 -325092 -802614 -230632 -824641 -888808 -603994 -777068 -664858 -210868 -636181 -145704 -386848 -338471 -897237 -103215 -271491 -337932 -811913 -539547 -787451 -941810 -474947 -244395 -67483 -800851 -850570 -633899 -426123 -897242 -479211 -741248 -123027 -160581 -273198 -881857 -637875 -166077 -778386 -83828 -55116 -586828 -776372 -899808 -34687 -560073 -903814 -777556 -247696 -637413 -397100 -163745 -854624 -948735 -64759 -134548 -236889 -888142 -854357 -849528 -136707 -559175 -50548 -342312 -771973 -165253 -900013 -714545 -377201 -361984 -339412 -654541 -912529 -366509 -676433 -688045 -342806 -10504 -577130 -31329 -120904 -712252 -481064 -11559 -377429 -114819 -164385 -235008 -339386 -539395 -208079 -64795 -828748 -644757 -35661 -148631 -939668 -471924 -663498 -742876 -496085 -416753 -13388 -13357 -606620 -136749 -374758 -81927 -591983 -516181 -673755 -623836 -135949 -22633 -600689 -297882 -339529 -344497 -631218 -376700 -944407 -801412 -46272 -568826 -514818 -360586 -96789 -14942 -892399 -447164 -339269 -853866 -232842 -196268 -137164 -946909 -801270 -425609 -853953 -13589 -91119 -797022 -231268 -72126 -223781 -797750 -504434 -672803 -13686 -902805 -211611 -136938 -74239 -332462 -67315 -338990 -72115 -50625 -72787 -446099 -711548 -442882 -298490 -362147 -762029 -65849 -913989 -49982 -851427 -648562 -366180 -504458 -944356 -54645 -611553 -32813 -410904 -863821 -542608 -54772 -103065 -380694 -1314 -339544 -861155 -54532 -863987 -297855 -12559 -948748 -103898 -339617 -283628 -24463 -344344 -853149 -55085 -738319 -904068 -195127 -251052 -941563 -766319 -275182 -10094 -602602 -13551 -324862 -332293 -927864 -4041 -41311 -360641 -813912 -950799 -229755 -342942 -429311 -880437 -770977 -405422 -886850 -489219 -339822 -597443 -938391 -42297 -896214 -166134 -23012 -797018 -283648 -632148 -83378 -750361 -570105 -192654 -291846 -103683 -403999 
-779500 -785286 -606192 -674709 -863854 -892646 -864069 -176130 -696289 -197592 -98083 -630325 -375101 -310582 -374774 -633685 -164034 -164739 -302533 -348357 -790030 -12235 -79838 -49053 -232141 -234658 -164948 -46291 -605322 -11653 -12126 -289001 -533553 -503223 -236797 -263712 -272996 -30667 -691562 -860466 -639880 -100801 -230546 -729442 -362513 -797746 -11636 -386055 -652551 -351012 -602267 -750829 -578698 -239236 -24925 -826147 -147595 -286947 -12416 -749518 -880953 -691218 -661422 -32518 -570128 -673924 -69968 -224030 -518054 -29367 -289199 -307780 -360211 -857854 -73715 -773222 -84928 -142268 -770991 -773098 -632738 -251389 -339653 -462167 -660129 -176518 -889206 -639835 -82799 -456201 -244223 -122763 -424188 -626478 -236758 -173608 -853990 -102799 -10876 -922275 -835284 -779038 -34496 -542785 -889857 -786964 -155735 -364664 -298468 -857119 -136699 -712312 -570400 -861245 -314556 -296866 -924480 -41134 -164796 -4375 -338242 -606795 -797933 -771530 -11662 -223276 -504729 -440173 -282756 -606407 -566817 -31927 -268985 -325953 -21911 -632505 -46336 -13571 -460145 -587430 -604117 -174078 -23079 -16291 -166138 -920412 -797911 -451666 -798108 -278533 -539385 -879811 -625077 -444702 -850760 -262063 -624940 -16596 -849185 -660125 -790128 -500773 -517500 -42610 -366178 -639607 -796873 -574793 -160377 -293289 -331750 -96404 -11571 -495160 -810364 -229958 -889022 -32989 -339873 -798000 -668597 -591623 -322568 -480009 -632924 -339693 -137083 -643624 -929897 -640074 -668273 -490590 -901814 -210871 -863980 -737643 -430039 -211844 -624331 -160494 -246439 -896388 -165415 -147318 -802966 -779742 -891010 -797962 -315220 -40772 -879331 -639518 -165011 -430703 -817560 -744428 -930727 -347283 -165671 -136316 -184619 -20497 -211820 -446706 -390860 -61649 -137209 -904093 -349495 -280793 -338374 -426769 -37221 -42014 -852601 -653028 -505383 -139214 -173023 -428903 -172521 -698890 -146620 -140911 -634862 -331521 -899592 -828645 -610719 -186935 -14678 -303438 -181001 -291537 -429384 -229960 -139253 -879673 -800749 -48881 -841625 -664416 -749590 -749611 -63587 -648952 -456989 -857911 -446604 -797399 -857054 -185448 -908145 -78461 -611182 -63079 -320877 -550385 -297337 -229846 -146308 -558204 -123108 -863924 -706407 -438051 -797841 -323225 -903890 -542198 -954456 -696939 -599868 -113652 -339041 -257058 -427275 -703554 -41002 -502994 -510106 -635713 -489135 -164680 -694616 -832471 -212837 -599849 -564473 -391220 -812013 -838354 -206451 -711996 -591072 -425895 -136711 -770426 -194838 -347280 -186232 -639832 -32802 -687345 -843297 -338904 -298089 -637948 -778156 -490973 -781751 -135850 -246192 -908194 -32201 -744381 -196710 -10095 -856588 -566707 -136880 -331964 -428836 -141305 -797714 -949217 -750493 -278624 -164700 -423029 -506054 -668802 -147443 -771399 -713180 -905364 -680383 -771233 -165901 -231018 -186601 -72822 -900712 -861117 -124666 -40469 -326422 -322504 -176178 -104343 -624709 -123062 -126159 -190365 -27982 -107893 -448477 -49312 -638672 -635835 -922473 -73883 -818649 -238725 -45455 -328212 -230914 -298735 -165166 -339698 -216052 -704999 -761397 -660322 -231375 -12107 -799085 -15359 -426037 -771571 -347106 -251463 -669430 -835097 -840403 -606595 -40736 -866801 -776231 -10576 -176013 -382815 -126228 -516260 -271127 -33053 -543259 -41128 -838033 -888806 -54527 -200444 -418370 -54807 -846199 -164970 -200434 -41345 -277388 -16565 -818293 -912977 -892391 -204561 -451190 -939746 -903567 -524994 -633785 -40968 -852819 -96538 -330346 -68877 -296324 -930657 -225597 -445706 -431366 -495594 -736260 -801279 
-296739 -816655 -820837 -638166 -946037 -385119 -293334 -176225 -126202 -22073 -905066 -210878 -1218 -908556 -164938 -256906 -40985 -70718 -294617 -215831 -323746 -878961 -141759 -500242 -820036 -136159 -298261 -828619 -359314 -13570 -638932 -727010 -323970 -225955 -606360 -674627 -216736 -147294 -339735 -854446 -192832 -848539 -638708 -896892 -132015 -639361 -10486 -236465 -192357 -929058 -904075 -398608 -186789 -547317 -500182 -306857 -365143 -46017 -523022 -863707 -677327 -669975 -840224 -236132 -295038 -330584 -357181 -856777 -196438 -861325 -712169 -766141 -331752 -32909 -904190 -362426 -441080 -495577 -147026 -908253 -674265 -726621 -147280 -345272 -779350 -125425 -638339 -135428 -146704 -37401 -20280 -332202 -135849 -104237 -64671 -647732 -696988 -570384 -419539 -383364 -605315 -64984 -918847 -276314 -505835 -722898 -193166 -170775 -147343 -292314 -751461 -145323 -331590 -741433 -603220 -602037 -662239 -364406 -16304 -569894 -632698 -9684 -749285 -843127 -725576 -287515 -931948 -533414 -245250 -858038 -577623 -863888 -348505 -797770 -135917 -92075 -887876 -648 -489787 -362368 -136327 -889683 -854753 -750662 -362795 -271492 -854716 -449153 -285689 -152444 -34727 -106707 -731511 -237380 -746856 -399450 -69132 -235869 -857545 -13645 -244682 -147604 -845876 -711954 -732885 -329685 -839614 -195774 -331711 -645303 -394942 -368620 -145782 -667175 -11267 -94905 -327935 -215987 -85946 -355927 -941110 -203 -136782 -682515 -648894 -491133 -526115 -950083 -534865 -331770 -761937 -325091 -62216 -206465 -64666 -511995 -54747 -290577 -366031 -303670 -831239 -121963 -153299 -440644 -432202 -903954 -136467 -176604 -931754 -694028 -231191 -54075 -429221 -63634 -147030 -465024 -732511 -324616 -315036 -195901 -66228 -10374 -278254 -170711 -423013 -808764 -331514 -338411 -775209 -667028 -484058 -861114 -339878 -276264 -361791 -494937 -13335 -629236 -771618 -861348 -13783 -387455 -353750 -424688 -950077 -36860 -164869 -339792 -518208 -320613 -888241 -606549 -811851 -455942 -230639 -832957 -826237 -896764 -801489 -155144 -70570 -325090 -361064 -365706 -933012 -18959 -902169 -514938 -511983 -332185 -23073 -226742 -96372 -703113 -108872 -513506 -627334 -168889 -225008 -101501 -13576 -429250 -331607 -64754 -604291 -314778 -297547 -639914 -927379 -941014 -815926 -358674 -70785 -918277 -281808 -364932 -84740 -78064 -339154 -602758 -363883 -800037 -638609 -173257 -195269 -121950 -463713 -301595 -164877 -577686 -49037 -623626 -495984 -455475 -633366 -164605 -831809 -757578 -41343 -478186 -113518 -953777 -443935 -471129 -124656 -441321 -639598 -395211 -914209 -621626 -479477 -165512 -68276 -606821 -166117 -458083 -113672 -934909 -639471 -523356 -9107 -824891 -387371 -115214 -638331 -362360 -103238 -886831 -844736 -492367 -303310 -692871 -455725 -120521 -474012 -376679 -424213 -137148 -676642 -413364 -119977 -146833 -836139 -372157 -105647 -328195 -554717 -468369 -50192 -874184 -128249 -712269 -784919 -135926 -238797 -639959 -31802 -632645 -223643 -863759 -225178 -496068 -415115 -281379 -541736 -33735 -237541 -339877 -239550 -937783 -44302 -179778 -904487 -903510 -843228 -824587 -863826 -896048 -206266 -362168 -836724 -125951 -606713 -904052 -235765 -506189 -411774 -713831 -633168 -639447 -369275 -228155 -779806 -146640 -863997 -722140 -137093 -639558 -29323 -383905 -539440 -67450 -39082 -30085 -40878 -387017 -606144 -910951 -671805 -22935 -875697 -112058 -268694 -173746 -355553 -479220 -430450 -124621 -117083 -11387 -126191 -392805 -730175 -4141 -13835 -278980 -39122 -42693 -831275 -825865 -49127 -624946 -345697 
-639504 -568568 -289339 -833632 -750537 -147609 -292849 -236922 -354689 -180292 -551814 -897571 -913731 -784209 -925733 -735085 -524784 -712012 -164733 -215709 -133041 -22082 -802661 -398415 -757378 -97666 -749338 -579695 -228171 -528162 -324789 -892022 -449372 -403438 -27310 -13370 -491086 -184575 -101914 -186572 -789801 -743722 -837938 -711728 -542071 -186015 -857950 -428319 -631262 -233360 -916740 -185969 -683250 -867269 -580822 -502997 -693030 -305683 -886391 -185410 -714501 -797718 -339840 -348918 -897550 -457736 -769181 -125827 -648752 -235304 -192505 -674136 -64228 -927552 -22087 -911023 -687777 -281192 -533954 -165342 -186941 -906181 -166037 -361536 -136989 -555332 -386090 -743695 -691828 -308326 -99013 -768022 -738060 -738136 -136247 -176469 -165781 -227356 -903878 -324865 -238659 -797279 -248529 -870474 -828066 -11497 -11491 -32065 -703455 -256508 -429567 -206523 -286873 -805309 -73918 -307726 -338749 -208651 -147142 -777499 -343324 -934233 -524083 -827274 -423551 -442978 -740519 -281315 -711964 -607283 -403161 -74440 -915638 -1341 -948810 -40758 -888368 -72316 -332299 -410430 -75289 -634965 -843246 -24460 -46153 -768517 -212662 -779102 -164220 -693554 -832668 -110396 -230704 -518799 -674382 -505541 -432710 -77384 -429954 -45864 -431855 -49964 -125797 -115495 -445058 -339848 -96785 -330417 -674147 -166102 -158171 -172584 -230117 -523427 -63792 -930694 -637894 -622228 -72793 -80939 -209454 -506029 -632088 -153443 -385960 -366163 -22789 -443255 -290076 -848186 -770875 -196329 -587789 -907542 -332358 -771245 -818792 -339678 -917236 -524685 -339852 -323979 -398429 -168450 -204608 -211842 -13650 -797074 -90163 -339540 -649012 -60792 -743995 -723910 -298414 -639342 -376384 -368407 -424006 -88313 -853067 -559986 -915748 -211851 -654296 -208669 -904926 -639625 -376604 -606768 -831216 -566873 -721649 -788723 -518289 -465065 -137231 -212876 -695390 -357404 -859148 -147176 -13974 -760058 -292176 -861285 -673869 -676683 -938488 -332483 -482331 -924734 -514959 -331493 -791806 -654499 -717204 -330782 -724058 -365566 -338495 -482153 -770300 -96533 -106823 -747228 -516389 -146634 -430337 -151871 -415485 -861339 -244247 -457293 -414738 -119303 -32055 -673696 -836054 -80206 -482767 -286481 -551337 -26363 -835203 -779224 -132089 -332206 -930462 -328576 -165210 -797634 -662133 -738357 -797699 -12431 -30226 -114305 -159898 -444436 -34867 -11402 -561083 -516405 -921526 -339580 -445876 -165818 -950245 -405965 -13770 -186418 -903028 -585580 -9414 -682580 -640071 -483181 -236562 -289918 -810326 -890200 -121830 -230880 -603607 -638753 -863809 -920432 -164437 -545504 -5214 -523924 -911879 -362131 -72714 -638791 -30731 -146661 -632519 -534117 -567375 -364986 -537804 -802853 -932781 -147577 -814885 -257696 -356462 -449038 -829047 -83929 -695470 -888120 -52075 -908226 -72274 -70299 -722559 -424394 -6428 -825896 -625028 -136902 -424351 -175127 -397480 -323724 -777125 -896903 -590743 -231086 -22385 -267185 -854575 -164622 -639993 -277834 -951613 -159304 -115186 -208644 -293929 -126875 -165681 -861238 -391681 -481374 -176524 -11523 -29153 -248239 -604122 -626336 -839458 -905011 -294834 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/cityscape_test.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/cityscape_test.jpgl deleted file mode 100644 index 9afa56e01639d2344ea00d4d8a9f3da9eee1fb24..0000000000000000000000000000000000000000 --- 
a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/cityscape_test.jpgl +++ /dev/null @@ -1,2500 +0,0 @@ -518612 -601116 -228156 -39829 -17868 -636226 -138515 -822507 -64885 -19250 -653610 -46215 -132479 -802978 -282206 -343994 -940061 -339257 -491203 -634871 -130108 -680221 -477371 -805803 -779600 -377212 -332343 -439904 -168299 -742619 -140642 -695359 -665918 -72323 -303435 -141881 -470920 -424783 -934514 -766115 -405001 -236602 -933840 -354480 -180714 -424203 -771174 -510347 -274769 -41313 -33634 -692680 -132468 -674799 -364297 -806044 -55729 -852019 -943842 -125933 -632233 -95011 -350891 -494546 -846059 -949668 -693738 -331397 -289910 -722010 -493499 -258239 -306461 -41389 -924184 -112973 -339322 -304002 -128661 -330839 -275063 -139985 -112380 -20269 -124978 -196886 -442456 -495681 -303591 -101684 -511920 -706060 -63398 -807440 -786059 -78949 -620325 -935236 -98177 -389558 -40778 -398346 -113587 -706103 -329376 -31866 -525387 -723136 -692133 -738279 -208634 -101614 -82222 -275328 -593889 -879126 -824293 -489268 -577741 -65813 -512002 -262972 -131537 -816575 -127982 -41399 -278868 -20503 -72211 -851350 -71940 -571382 -56983 -640768 -762290 -861244 -10933 -141521 -714234 -471481 -24197 -574792 -324838 -770770 -80748 -495829 -40792 -173157 -271812 -367630 -11327 -696526 -228330 -569444 -41094 -899324 -785927 -828817 -72614 -328696 -818771 -426419 -369417 -246521 -273892 -242691 -432340 -912608 -857527 -246123 -849841 -809765 -896178 -921289 -274611 -860235 -276157 -245015 -146164 -768482 -449035 -795537 -413251 -569804 -894833 -347308 -192412 -538198 -775208 -426402 -801564 -413096 -476548 -147015 -559375 -484416 -231058 -422472 -576962 -494776 -635964 -415564 -359265 -563967 -423166 -262511 -914050 -97663 -853788 -524881 -799218 -914251 -850540 -90810 -231318 -6120 -10667 -735031 -20434 -348820 -497193 -895375 -346183 -75684 -40645 -923566 -678045 -423280 -639561 -838041 -429040 -802502 -101772 -112114 -765396 -230512 -886920 -756461 -41333 -941532 -332475 -821066 -441825 -528618 -211827 -813403 -17521 -369579 -414439 -56083 -11562 -773226 -898274 -706405 -112695 -665718 -621106 -519804 -113220 -505525 -113495 -145374 -876005 -765982 -941852 -23058 -55928 -20439 -242779 -137078 -545161 -841751 -646219 -103254 -639579 -55533 -137006 -86399 -186394 -41265 -797723 -751508 -886550 -914143 -365680 -83066 -321574 -820889 -788346 -388374 -807172 -276098 -387559 -891405 -492644 -571921 -537912 -480284 -889549 -26922 -72293 -801441 -194925 -22777 -155201 -88058 -324103 -51415 -395771 -275286 -756499 -495896 -586590 -251005 -735924 -132184 -836878 -51030 -180724 -782954 -430750 -322569 -832637 -715786 -76844 -413249 -186850 -52064 -772389 -72013 -588441 -954013 -722923 -849372 -661775 -72774 -141902 -375422 -855334 -276099 -640773 -694315 -581110 -186928 -67570 -239131 -266724 -298863 -738263 -35304 -827225 -690893 -426212 -232232 -10522 -904057 -81128 -770608 -230433 -476365 -504455 -571953 -423359 -571308 -624301 -51388 -899766 -127344 -503355 -696956 -388720 -521548 -256381 -241818 -274651 -74355 -663907 -308917 -443277 -146538 -27459 -102645 -571842 -761249 -723708 -220584 -742889 -96357 -860274 -433064 -262788 -170843 -185086 -42995 -563141 -647569 -782132 -41350 -377196 -923482 -178565 -102258 -752211 -445792 -62093 -324815 -9891 -19359 -495812 -136762 -27898 -815667 -447819 -765091 -809061 -893101 -803601 -123051 -78092 -445777 -413123 -12242 -173399 -252227 -500988 -445774 -358776 -70857 -486306 -849748 -20009 -627221 -641356 -444720 -712307 
-172872 -250650 -377443 -754440 -186429 -72068 -176206 -165616 -91028 -827892 -859989 -244976 -495576 -245518 -445661 -778651 -50193 -372194 -422184 -850012 -287828 -890860 -525425 -600551 -61685 -63615 -782623 -758069 -333446 -925649 -185100 -879942 -233340 -260952 -817885 -804850 -429257 -752154 -80271 -693029 -280861 -512591 -103106 -86657 -46278 -861337 -763364 -639594 -129047 -818828 -177528 -74434 -72492 -903521 -307444 -316647 -24741 -827432 -186278 -783421 -711543 -779497 -723517 -55744 -729339 -842985 -72143 -141140 -97677 -436156 -262991 -780994 -186645 -60934 -184947 -839535 -818161 -415149 -49273 -424330 -262520 -229775 -456027 -595 -55854 -424393 -235027 -275895 -363580 -562407 -835993 -107747 -128148 -337383 -579483 -636852 -141658 -252813 -857651 -900116 -756709 -433555 -495595 -510238 -68310 -72256 -709968 -72072 -571303 -486090 -801724 -72252 -68603 -412889 -20419 -639359 -105651 -727065 -12922 -13747 -514002 -459778 -624662 -330785 -102518 -823096 -741398 -806938 -224757 -13565 -252034 -244279 -889017 -451167 -867015 -28936 -349961 -871550 -274333 -262431 -463536 -547233 -559662 -139832 -254922 -147373 -10675 -861080 -230141 -731069 -142005 -606717 -308751 -938526 -954261 -934774 -79173 -126299 -189314 -602687 -696999 -445289 -81480 -938363 -588911 -839732 -429233 -907696 -755467 -63580 -10992 -153981 -942889 -632656 -276545 -180716 -693924 -828699 -44426 -861338 -41003 -886858 -568940 -801635 -652575 -710661 -894596 -952559 -374988 -638647 -866962 -236791 -856220 -900682 -230934 -797743 -74058 -624955 -64693 -415945 -850128 -335327 -19964 -338235 -542658 -125989 -916492 -272698 -781148 -362463 -250579 -53840 -334172 -843793 -230267 -661094 -765724 -791842 -857097 -899779 -276367 -255058 -297298 -67200 -756451 -925693 -593478 -489202 -242907 -120720 -904998 -217838 -339663 -622260 -77525 -72336 -16522 -330391 -323122 -801796 -470531 -941367 -4246 -27727 -423992 -429678 -97979 -779616 -107830 -577213 -423259 -255392 -257129 -648452 -332640 -934068 -377311 -634436 -23521 -288829 -694359 -338326 -876119 -195229 -91105 -403805 -921535 -475980 -424469 -300179 -887060 -27872 -27350 -101495 -27938 -387679 -602900 -672970 -349875 -97377 -33262 -386417 -823735 -660462 -921266 -800414 -850944 -288685 -924263 -386016 -13638 -118626 -185935 -802902 -97446 -142221 -36297 -63519 -542916 -571200 -571704 -375448 -164968 -202928 -340187 -45349 -282882 -263493 -765293 -927718 -66956 -105722 -442257 -847866 -54324 -81710 -669078 -231330 -164243 -734253 -100397 -765684 -186488 -834865 -532103 -12306 -488318 -264031 -825722 -857056 -848896 -929671 -72173 -16176 -18189 -866287 -611824 -352821 -388026 -431695 -45629 -614396 -852617 -335399 -834633 -134146 -624444 -755477 -398425 -812703 -362357 -918850 -569893 -37142 -446485 -572812 -6201 -362371 -743187 -445174 -782105 -173635 -173598 -331041 -515209 -927580 -940093 -258551 -445037 -562562 -165794 -217013 -893063 -16510 -21842 -52838 -136650 -671661 -129466 -839689 -779472 -65993 -15862 -889011 -693384 -663551 -101371 -46100 -140478 -410713 -231935 -855571 -177468 -6527 -846247 -695254 -602629 -727816 -263072 -696154 -34807 -520051 -327325 -422728 -669632 -232585 -348302 -429303 -324856 -398626 -29358 -102093 -254521 -910914 -570783 -26612 -358374 -283626 -126184 -503964 -388253 -115756 -118631 -457800 -145672 -706453 -759620 -482388 -849414 -495606 -266868 -569035 -429160 -211875 -749545 -314761 -921634 -931201 -377307 -2732 -11131 -59272 -693715 -735289 -116282 -98430 -924562 -330826 -32137 -402355 -330376 -730488 -173259 -102797 -13969 
-11170 -330915 -351382 -936791 -28766 -461352 -27892 -128473 -543579 -18271 -133453 -368528 -64536 -78097 -325121 -818949 -96064 -339316 -922686 -735521 -605811 -41410 -824575 -838374 -924455 -656267 -761896 -496134 -40864 -490283 -338689 -494085 -775103 -239035 -593262 -63371 -775680 -690888 -255197 -743422 -34678 -24490 -564448 -40665 -684205 -214403 -54352 -94886 -663953 -275933 -556411 -692759 -741337 -45693 -934840 -413358 -421705 -163676 -816953 -164997 -569173 -277444 -75094 -648597 -443890 -669138 -798763 -331204 -838633 -446422 -760905 -174214 -37502 -356751 -207011 -34806 -648271 -945729 -899311 -394227 -781831 -41287 -435771 -280923 -282736 -13819 -327498 -592846 -48929 -339675 -768843 -250128 -428677 -804582 -394338 -368905 -668574 -536752 -18233 -948564 -255308 -412067 -903138 -428833 -85000 -461728 -439477 -685321 -929503 -571779 -68068 -470687 -20428 -331693 -303420 -41138 -123505 -282357 -11326 -500588 -72067 -20138 -445501 -954546 -208828 -24190 -12709 -393972 -860325 -539001 -837324 -714599 -925634 -147247 -460902 -102808 -660880 -823783 -279667 -70818 -526759 -35651 -324614 -350616 -726445 -110063 -233069 -165693 -532506 -238516 -624977 -102267 -10336 -101820 -66108 -647650 -266129 -764970 -753802 -638270 -121621 -238056 -427608 -767144 -2626 -282784 -697725 -71299 -44313 -173435 -146893 -41354 -20270 -237229 -429374 -387435 -231180 -10832 -104258 -252033 -5613 -132124 -886685 -83832 -308843 -41367 -570721 -174809 -13766 -400629 -497812 -70300 -945884 -805404 -72369 -831100 -340695 -182575 -332376 -424349 -726089 -586480 -421558 -820203 -383749 -648929 -396741 -133152 -890693 -402239 -663914 -704942 -282777 -72144 -844891 -65955 -488807 -494458 -890755 -275359 -2347 -806930 -102447 -26945 -113666 -361157 -514933 -429080 -429326 -230371 -166483 -244612 -763803 -838025 -494945 -276290 -692706 -233075 -169138 -91093 -125708 -834744 -744562 -502368 -465671 -147163 -246706 -829744 -63495 -276836 -835503 -591527 -839423 -839494 -232750 -168319 -950781 -486500 -113330 -471378 -328744 -113654 -71883 -233337 -85687 -385559 -432232 -81818 -388864 -425716 -582466 -477204 -897517 -33414 -13396 -603539 -362288 -533316 -361705 -735525 -811429 -158047 -668452 -270237 -174958 -891306 -892919 -469759 -134804 -147264 -27534 -375784 -276279 -41069 -781155 -332210 -903468 -449730 -227085 -904175 -893939 -12275 -490846 -75538 -886931 -901866 -360747 -395638 -22665 -398368 -706355 -290327 -96846 -423291 -165643 -107598 -664681 -339865 -779244 -279600 -812401 -339087 -570546 -932076 -12663 -490700 -113604 -404997 -648813 -246286 -777690 -239515 -458816 -330739 -22305 -276438 -892861 -562557 -250659 -807435 -428537 -381742 -843224 -716525 -273556 -330542 -886582 -839404 -647014 -474667 -67447 -373396 -799386 -941826 -24522 -19974 -339380 -761911 -643271 -687228 -49421 -495899 -176058 -760170 -731749 -810950 -211440 -6324 -146045 -102897 -331612 -235433 -802616 -147567 -566342 -591358 -765835 -824075 -164269 -458163 -420871 -80833 -771426 -353447 -71897 -164647 -837767 -919574 -349595 -238484 -390695 -771118 -85047 -824684 -360130 -30979 -861323 -571879 -696917 -165993 -465308 -362476 -623428 -839489 -768172 -633182 -5960 -604087 -95919 -706987 -145798 -11179 -284433 -800804 -71314 -768732 -352255 -254897 -142625 -428814 -505977 -761534 -860243 -184942 -405538 -86448 -233904 -172324 -54956 -21476 -67057 -186282 -92674 -873277 -186903 -67708 -97429 -371543 -587959 -812986 -261657 -81472 -395188 -27906 -98467 -432446 -13038 -833628 -832680 -770571 -861290 -427657 -136595 -326115 -99922 -357505 
-273652 -948183 -891417 -664065 -40760 -663560 -139817 -239436 -481333 -432840 -438024 -428629 -643166 -571456 -198867 -747784 -693486 -114892 -251810 -112988 -2766 -721523 -693930 -68237 -61711 -245799 -250973 -298630 -512746 -803380 -839630 -195933 -56701 -674955 -272731 -80106 -412043 -765490 -767859 -229364 -672086 -437462 -54713 -445634 -786866 -656810 -752048 -275361 -6222 -637787 -489306 -140567 -216763 -199777 -472176 -72330 -754680 -255041 -190292 -513561 -276121 -276219 -578488 -412309 -333726 -812481 -941530 -494459 -41256 -50693 -481969 -931073 -603600 -188394 -693176 -41212 -72272 -278872 -303582 -749536 -377427 -388786 -71944 -231274 -500527 -830387 -533113 -2209 -426142 -275998 -54201 -477347 -147583 -66102 -168375 -736126 -60370 -68414 -159452 -702039 -525890 -147395 -96956 -273760 -303044 -252396 -72096 -147451 -126289 -790106 -680483 -361663 -359184 -192054 -734947 -656561 -94863 -200573 -786112 -184973 -746888 -482449 -640858 -418485 -539202 -34109 -420938 -832366 -331063 -953857 -69407 -897108 -201850 -500007 -339883 -944900 -736008 -928364 -102364 -10241 -238258 -632035 -13558 -281265 -837664 -861218 -68397 -196476 -237138 -72338 -377539 -263170 -197156 -441839 -796135 -238765 -483130 -570381 -250632 -11308 -398238 -331100 -429393 -835543 -358681 -824073 -5690 -934103 -495095 -147473 -332433 -165690 -838335 -649420 -734792 -511244 -323522 -841829 -172365 -215720 -307203 -25859 -56150 -892005 -41213 -188727 -34538 -41340 -282942 -835374 -924396 -772055 -163440 -672870 -212749 -373275 -692139 -857766 -205918 -924889 -422659 -489371 -841313 -63445 -52574 -695417 -337768 -822284 -824249 -274756 -950666 -17056 -828803 -385186 -819455 -54561 -215375 -750943 -600707 -707367 -190988 -163668 -49294 -693034 -791782 -398562 -282495 -766119 -136950 -893113 -694368 -834569 -926904 -23036 -98315 -571106 -42593 -383240 -231253 -134932 -308134 -861040 -95833 -932899 -136285 -211628 -274736 -186826 -817533 -244962 -766283 -887058 -49396 -431533 -893034 -463780 -230913 -11098 -799349 -664768 -693610 -850176 -892969 -521271 -25386 -176614 -101652 -775302 -677169 -330589 -243473 -693473 -481850 -173374 -341751 -879854 -330055 -861075 -385288 -274160 -932579 -571290 -892845 -10912 -83833 -627494 -486303 -574402 -623511 -497903 -831599 -236223 -591651 -435093 -423050 -175445 -177478 -423803 -722919 -30342 -733797 -112275 -591335 -12273 -861045 -409569 -679725 -147450 -495872 -857219 -12176 -481721 -13912 -280910 -429844 -166309 -113373 -481654 -468708 -32241 -1129 -704480 -693708 -32396 -444805 -211714 -873231 -103011 -186999 -40682 -476996 -799761 -20771 -854784 -101622 -875752 -50877 -779577 -20482 -56124 -39185 -27896 -19646 -682282 -417472 -287717 -776604 -428201 -235315 -666357 -306943 -801253 -429931 -782241 -56346 -664294 -828327 -354747 -838008 -703618 -891510 -331923 -284194 -67342 -41185 -43288 -282635 -26933 -766301 -460538 -478630 -775248 -104691 -856984 -462101 -540777 -398619 -51971 -444709 -558818 -667660 -540553 -165668 -245155 -850157 -542452 -728190 -27343 -248530 -889438 -232525 -836264 -649884 -272131 -532932 -638443 -20536 -437594 -486808 -13111 -772320 -246277 -168147 -40679 -165515 -196074 -477165 -797218 -569872 -784296 -602764 -34823 -439471 -497943 -706334 -831601 -9404 -33737 -254973 -741797 -41133 -279757 -427706 -63138 -319193 -456431 -494180 -278078 -769013 -779351 -562634 -20063 -277921 -231261 -493 -903181 -495835 -717167 -68551 -46078 -276120 -287914 -12464 -513656 -153907 -562971 -186275 -4394 -839316 -623270 -562318 -788444 -548046 -778684 -347815 -83042 
-801903 -275645 -41150 -339704 -626239 -687510 -140459 -121932 -428977 -67983 -571956 -196443 -244514 -225411 -186576 -454764 -638859 -847046 -696085 -599263 -32762 -19716 -759318 -788564 -278577 -41338 -186884 -744305 -165281 -725149 -470052 -175170 -633791 -13413 -47689 -857145 -887071 -243697 -954990 -231166 -930455 -255543 -287495 -311402 -711938 -811891 -697012 -67196 -516988 -573436 -563280 -165965 -423098 -173284 -185136 -192880 -314749 -277826 -29760 -716582 -97135 -113429 -839414 -204684 -49379 -26699 -838607 -835578 -365413 -13861 -694399 -262308 -398288 -278493 -165453 -693012 -157143 -486723 -112175 -624445 -142706 -425734 -154148 -275420 -571319 -807188 -423129 -496109 -677438 -186592 -72339 -492975 -175526 -730690 -751215 -439536 -671761 -325114 -390534 -12578 -444363 -230940 -291953 -428743 -791486 -233258 -635751 -43921 -255380 -45441 -872994 -9548 -606078 -332370 -137249 -173649 -848969 -12664 -439815 -516681 -694116 -775607 -274114 -616391 -446234 -939331 -230171 -41008 -173146 -234364 -54598 -831238 -703616 -830149 -356236 -302860 -245850 -873448 -816404 -72169 -396698 -798708 -503970 -566236 -428520 -167332 -803197 -365843 -10810 -330772 -332226 -864074 -636421 -696451 -146528 -95341 -749697 -765132 -549463 -41261 -27882 -812908 -184731 -820368 -186715 -857459 -29591 -672583 -56048 -846017 -35653 -636940 -279278 -811949 -636144 -591187 -134153 -539574 -637843 -909034 -562860 -41351 -865134 -68690 -495574 -456083 -197212 -155429 -505167 -429358 -505421 -239643 -886573 -552087 -547148 -349854 -799808 -375022 -186853 -193919 -503954 -703537 -687334 -668403 -471062 -667001 -907600 -376662 -75126 -402199 -745475 -828939 -279025 -50697 -386403 -593622 -208624 -820085 -173509 -524386 -838655 -81606 -44307 -831109 -78341 -516036 -64833 -146520 -165738 -90584 -303725 -60154 -185303 -919650 -695745 -652869 -593774 -185594 -173348 -154102 -317399 -935418 -113360 -142204 -275948 -22730 -136029 -834864 -144422 -54477 -365299 -428889 -386612 -528547 -308921 -432327 -761752 -623549 -288613 -928983 -40759 -741864 -618423 -778275 -431683 -103666 -649730 -858085 -127962 -223834 -911565 -308791 -395603 -255467 -54973 -520497 -277057 -636339 -387792 -624789 -856421 -732507 -375027 -173863 -66377 -401233 -41404 -567296 -887446 -376039 -621474 -343924 -935470 -373279 -627940 -162784 -424211 -747528 -126262 -29056 -939653 -704026 -10274 -858034 -241586 -505603 -797982 -949092 -368695 -778696 -570920 -331376 -338854 -32128 -282556 -656842 -721451 -494867 -272466 -40540 -569900 -739730 -496126 -330298 -387862 -832151 -704604 -536122 -8728 -600561 -541103 -173159 -800879 -650063 -230682 -292709 -664104 -176251 -375954 -78502 -263361 -690204 -343697 -859696 -313776 -41049 -67768 -897883 -57756 -645399 -232344 -142708 -266135 -775441 -519032 -858852 -593053 -272618 -324193 -850027 -186808 -232973 -146996 -134702 -932140 -703723 -837284 -274868 -402345 -648836 -22884 -71124 -65890 -141883 -462059 -400695 -6725 -569337 -622191 -281221 -288006 -394440 -250787 -823028 -99791 -760040 -181604 -422002 -136030 -691639 -84838 -76087 -41331 -725789 -927729 -779149 -285827 -146986 -10944 -854305 -29126 -774103 -606552 -56094 -58179 -331895 -133974 -826835 -437094 -19455 -254389 -253791 -98327 -721273 -533652 -72093 -308380 -288151 -12689 -349742 -132004 -128293 -208756 -388670 -146043 -332313 -432722 -188287 -41762 -147559 -146542 -410693 -398457 -694115 -852614 -639734 -331098 -276289 -403349 -232526 -469671 -277985 -64124 -755257 -824557 -9531 -743440 -276325 -733873 -173763 -524068 -86096 -74352 -291453 
-850057 -768417 -231069 -904486 -103086 -658602 -244853 -35741 -632554 -527340 -479963 -355687 -231236 -66014 -468000 -74402 -133061 -259393 -904205 -462239 -479097 -659182 -350408 -46595 -687599 -129027 -262291 -623323 -591782 -287655 -829297 -783391 -263093 -438517 -147304 -466062 -125728 -783555 -322555 -775786 -403598 -707084 -505868 -694422 -424400 -458820 -639715 -429072 -377492 -96725 -811573 -694426 -387820 -115830 -368427 -824341 -675927 -276088 -62615 -451750 -453760 -184886 -195857 -635284 -41372 -96994 -674171 -483762 -78316 -190922 -131445 -602737 -185871 -736005 -528102 -339837 -67472 -844407 -811941 -931888 -168303 -40579 -818642 -146554 -350466 -736519 -785258 -518089 -119221 -339124 -603750 -344976 -252120 -27802 -754748 -326147 -358296 -658482 -79590 -408322 -270798 -779635 -245685 -282583 -641314 -244049 -41302 -669893 -303840 -824236 -308315 -429435 -676236 -279084 -432307 -797220 -392304 -442133 -12167 -129115 -8803 -358308 -64778 -236736 -41155 -665982 -779533 -450894 -659687 -62970 -90632 -259827 -494188 -604138 -799677 -591690 -123530 -347362 -311813 -432666 -277966 -70699 -121890 -790733 -645546 -12117 -909774 -103335 -339226 -41346 -847546 -14711 -18083 -878506 -625588 -331370 -20322 -347354 -43785 -150309 -369568 -632291 -246256 -10906 -285473 -799273 -623863 -440951 -297582 -240683 -640959 -102161 -422762 -691998 -920341 -276065 -281218 -138827 -524048 -84847 -419384 -263256 -674790 -418626 -819527 -186870 -691722 -244078 -722463 -338956 -808195 -768623 -133558 -675970 -45926 -347541 -518872 -601047 -102285 -929379 -41161 -54458 -852426 -287482 -265838 -433416 -839426 -737893 -395494 -176114 -395607 -640466 -494865 -182902 -40952 -571416 -571104 -551560 -490733 -710914 -802531 -716317 -927448 -308570 -205948 -172325 -588073 -208527 -16648 -377418 -51838 -27884 -740557 -350991 -78426 -703075 -600482 -331969 -476616 -738686 -13574 -332110 -234426 -254042 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/cityscape_train.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/cityscape_train.jpgl deleted file mode 100644 index 58312058b404c4eac108d6ff033003a344bd2f2f..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/cityscape_train.jpgl +++ /dev/null @@ -1,2500 +0,0 @@ -82510 -295835 -396248 -887054 -129194 -193174 -656188 -10092 -802944 -632958 -495906 -732973 -830729 -340602 -377547 -368426 -420750 -235642 -146997 -242192 -873752 -850052 -483297 -349572 -275846 -493256 -168216 -41390 -860124 -475035 -193045 -43848 -3267 -11593 -741086 -96163 -295764 -172474 -27337 -808719 -281130 -264041 -602482 -736094 -276211 -189007 -274980 -416847 -139883 -745579 -571962 -191506 -913967 -21922 -212847 -74530 -669162 -685679 -34778 -113440 -812585 -56147 -207585 -804207 -422935 -777458 -357210 -947783 -663495 -400373 -139748 -385189 -277506 -31983 -287740 -830427 -314678 -332219 -893016 -204864 -27714 -897038 -126545 -193065 -141097 -19711 -363865 -457433 -590460 -570725 -475203 -423323 -722640 -244626 -428131 -12420 -231239 -1142 -204174 -225573 -360005 -275972 -173510 -745593 -879496 -478700 -544741 -577830 -239535 -27815 -330191 -942869 -694416 -894202 -185583 -795070 -917829 -391949 -779786 -196210 -312077 -162428 -20949 -120714 -17855 -593885 -896198 -571685 -828236 -811721 -849972 -897521 -237827 -112652 -230627 -101926 -27738 -265121 -761522 -72155 -923958 -90134 -70700 -13665 
[... remainder of the preceding deleted image-ID list elided ...]
diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/floral_test.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/floral_test.jpgl
deleted file mode 100644
index 30b59479b46185f03b983fea6557f77210bb6e56..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/floral_test.jpgl
+++ /dev/null
@@ -1,2500 +0,0 @@
[... 2500 deleted image-ID lines elided ...]
diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/floral_train.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/floral_train.jpgl
deleted file mode 100644
index af1065c5d045f18c383603f46e1d724ca490c880..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/floral_train.jpgl
+++ /dev/null
@@ -1,2500 +0,0 @@
[... 2500 deleted image-ID lines elided ...]
diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/fooddrink_test.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/fooddrink_test.jpgl
deleted file mode 100644
index b989ce7fc7c56b8ff77be7253874638144c6bf68..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/fooddrink_test.jpgl
+++ /dev/null
@@ -1,2500 +0,0 @@
[... 2500 deleted image-ID lines elided ...]
-116940 -828381 -817282 -364700 -359973 -368411 -106499 -940137 -904541 -149882 -387715 -768752 -701845 -126881 -435162 -315692 -898565 -764094 -539544 -755550 -700597 -500978 -119946 -690553 -933139 -755552 -467186 -309598 -413191 -560046 -584346 -326360 -381499 -203583 -266102 -232699 -163562 -693243 -107532 -148995 -802359 -941354 -551256 -617493 -180224 -661415 -811546 -893802 -162199 -108518 -467166 -938399 -661246 -210800 -769750 -277471 -817773 -698256 -940530 -817834 -698369 -576569 -45550 -208677 -849505 -701115 -157948 -412824 -650676 -633739 -334697 -129185 -844541 -179297 -702203 -264066 -270718 -236136 -179248 -632965 -515803 -697902 -479340 -784275 -803085 -368369 -780463 -174831 -584275 -240137 -671219 -390589 -124275 -664687 -84930 -535361 -662804 -107450 -849581 -525938 -900139 -817607 -811455 -467273 -719528 -759735 -818528 -502200 -119940 -169786 -763768 -440104 -511954 -267228 -243854 -699685 -185627 -346177 -709080 -617013 -434775 -132199 -484680 -504654 -124766 -248976 -645515 -151347 -738141 -287947 -515416 -108154 -161970 -938493 -397447 -379747 -125345 -502466 -446273 -895532 -405649 -237467 -153876 -236487 -955785 -463867 -199105 -436836 -139989 -536572 -163579 -184193 -236614 -278623 -546643 -179155 -850726 -720243 -260687 -108586 -466400 -653323 -815989 -458753 -122742 -178932 -267728 -824373 -292290 -939225 -559914 -279528 -328823 -343152 -501327 -440537 -501165 -917919 -849484 -144346 -518776 -599847 -462238 -467381 -292342 -464765 -391366 -517041 -939509 -750727 -666844 -460833 -479830 -627159 -674475 -222031 -799277 -236751 -900111 -148659 -277753 -405584 -231174 -102890 -945207 -264121 -699703 -577174 -465530 -277681 -447004 -817693 -380000 -435316 -157213 -584481 -842285 -699825 -466689 -774096 -724249 -662937 -465963 -699552 -814824 -707364 -746118 -293067 -262517 -391442 -464012 -198440 -180681 -405895 -900080 -272716 -476813 -373712 -209458 -745373 -160836 -648534 -461652 -701222 -744006 -466833 -939584 -674547 -817114 -98105 -222157 -475499 -228011 -524385 -701361 -847320 -108323 -699613 -705494 -633121 -317520 -374344 -649535 -466986 -927496 -240293 -257272 -535261 -126619 -742031 -107944 -933304 -254938 -373633 -933983 -661185 -699323 -380646 -646378 -179144 -400742 -278351 -279490 -113838 -646430 -534510 -219588 -587284 -396748 -383780 -369750 -464440 -258148 -278870 -664838 -702154 -548093 -237326 -752445 -432849 -259692 -498219 -748764 -887237 -183732 -501416 -737784 -815819 -880410 -150921 -808539 -467329 -315798 -124396 -139678 -644214 -300180 -633734 -898551 -343776 -400113 -272887 -720260 -310269 -324469 -301285 -706375 -793197 -787555 -390991 -270631 -124363 -699638 -750764 -240562 -467427 -849573 -701096 -648052 -455362 -112941 -404902 -268915 -364009 -192524 -364788 -764259 -344209 -692574 -343247 -817641 -151435 -534774 -266035 -198456 -439712 -360614 -774034 -237301 -804685 -110374 -192342 -439476 -664513 -672145 -706152 -661291 -134161 -815927 -380493 -250274 -526720 -558325 -742543 -179151 -238935 -653476 -380212 -360896 -811898 -453191 -600893 -323459 -93603 -632080 -667257 -646313 -345720 -464036 -130550 -588540 -410547 -278229 -390219 -432894 -385913 -379321 -197215 -296453 -703540 -467445 -431138 -439657 -507152 -237797 -937359 -234280 -535225 -701150 -380629 -402813 -222169 -405425 -747452 -182103 -437282 -901164 -667856 -367210 -742232 -175397 -942142 -652019 -723299 -621732 -631749 -479217 -247146 -472557 -905812 -270590 -952850 -700358 -265364 -412296 -344351 -236187 -676781 -932751 -600870 -236561 -459635 -127657 -704214 
diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/fooddrink_train.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/fooddrink_train.jpgl
deleted file mode 100644
index f75079fbfc6975be90461dcb8e49714132bcd642..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/fooddrink_train.jpgl
+++ /dev/null
@@ -1,2500 +0,0 @@
-244715
-310400
-744330
-… (remaining fooddrink_train.jpgl image IDs elided; 2,500 removed in total, one per line)
diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/generic_ls_train.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/generic_ls_train.jpgl
deleted file mode 100644
index 8fad95f8a1ffaa70c2fc1f0a1d2df1fca1e15151..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/generic_ls_train.jpgl
+++ /dev/null
@@ -1,20000 +0,0 @@
-417938
-173641
-294617
-… (remaining generic_ls_train.jpgl image IDs elided; 20,000 removed in total, one per line)
-444274 -171281 -585717 -677796 -691067 -749356 -923007 -482908 -617991 -354689 -364727 -622745 -321081 -916687 -600100 -482798 -671277 -926658 -627610 -777625 -395313 -482790 -419161 -312774 -588557 -624709 -755904 -281742 -629494 -938359 -29155 -37141 -91016 -595167 -633260 -292768 -502649 -387537 -819537 -597443 -69547 -429390 -599962 -95147 -126119 -461013 -590460 -37456 -339369 -134365 -88411 -432555 -388956 -329191 -86410 -6189 -786883 -65824 -388813 -821883 -570996 -956713 -755072 -570329 -704166 -357597 -369743 -603923 -624193 -608249 -566123 -886696 -827234 -770107 -438111 -693633 -24606 -328576 -31802 -706325 -674233 -674460 -70570 -852140 -155587 -431723 -822283 -25110 -674175 -221389 -902911 -601275 -677382 -270699 -891057 -791840 -64984 -437329 -671341 -181068 -570083 -328389 -268295 -285522 -769535 -332251 -330346 -516942 -18037 -827545 -48815 -526617 -696919 -222055 -199537 -663628 -724002 -931977 -814114 -324573 -75360 -421498 -650885 -189066 -308322 -918277 -850933 -742323 -365957 -523992 -389882 -546919 -916520 -779137 -783825 -144046 -652267 -684108 -64342 -484982 -210804 -844092 -228835 -57897 -127886 -391357 -335338 -696581 -424443 -894819 -17561 -182772 -44713 -701188 -771111 -69651 -310347 -937996 -26363 -292850 -303633 -81682 -240171 -763693 -939799 -661422 -824634 -327761 -78588 -810337 -261997 -926206 -422066 -157500 -436444 -368679 -220285 -693886 -87685 -321114 -480143 -33919 -53152 -784330 -252292 -954004 -215838 -49691 -646485 -868610 -422608 -405959 -633638 -59904 -335236 -67762 -97832 -350222 -85234 -384088 -323490 -137123 -574695 -542870 -24905 -364723 -385119 -755921 -810067 -276108 -242888 -315683 -298715 -501660 -863751 -522876 -920458 -575509 -898468 -77271 -587547 -534640 -742745 -676449 -464043 -458649 -237227 -181634 -54747 -381838 -334089 -238971 -742648 -85828 -879530 -155638 -423588 -588680 -158535 -756355 -604117 -256696 -381478 -474914 -911769 -423265 -643783 -741412 -335555 -489492 -49936 -632148 -635566 -667348 -763956 -947887 -898434 -772175 -145879 -833632 -763627 -573880 -788455 -81822 -769669 -845806 -199143 -210989 -102106 -899559 -46449 -949608 -773262 -332358 -627351 -248978 -72071 -702279 -821862 -477149 -124615 -649870 -11512 -767881 -283004 -649737 -268049 -104975 -402075 -624506 -21922 -879978 -870853 -938079 -325090 -150568 -176723 -692125 -13388 -844565 -913668 -225021 -935738 -581510 -9757 -177364 -179124 -210879 -934144 -856354 -261827 -386402 -562295 -880526 -336324 -776877 -15869 -273198 -206465 -876835 -550749 -515938 -751461 -899082 -842139 -910904 -279531 -927034 -306691 -348572 -503192 -756755 -368620 -768572 -528708 -42025 -386366 -136917 -747612 -745704 -372517 -495907 -232287 -236387 -862154 -433685 -517707 -22284 -321897 -269331 -873022 -183301 -190758 -155025 -817560 -189232 -810113 -100431 -535904 -425609 -40878 -871089 -21752 -11807 -809764 -242709 -839458 -93829 -190993 -619444 -523922 -824919 -922768 -779683 -281225 -232141 -753547 -761598 -343538 -593979 -315328 -424221 -9924 -571722 -701465 -350972 -165011 -779629 -864387 -717711 -230100 -334792 -298907 -30174 -102319 -516729 -327932 -837713 -831303 -116056 -834312 -9585 -954579 -888817 -207643 -278283 -397804 -75393 -72155 -613003 -10324 -357066 -315784 -735206 -206034 -17062 -654068 -64666 -504458 -42441 -165946 -770237 -131730 -771484 -113518 -626669 -350071 -623032 -883418 -749611 -391303 -96789 -362207 -772561 -763668 -426183 -345886 -144436 -255208 -636181 -422751 -950810 -476340 -167039 -222678 -701530 -301622 -330769 -523250 -420857 -543395 -863562 
-873581 -810701 -920234 -718755 -636953 -925545 -312378 -850100 -600550 -41002 -348886 -699760 -680522 -795463 -359811 -759588 -857447 -11197 -665036 -771355 -413364 -766292 -230640 -142365 -150508 -730840 -65668 -49982 -177335 -831216 -625835 -314808 -361193 -576104 -714209 -623552 -871552 -281739 -86027 -745593 -316617 -218800 -298711 -330782 -385347 -903878 -726068 -699596 -16507 -782170 -405831 -559753 -835454 -894753 -597042 -953777 -638816 -388578 -235311 -725442 -175584 -431346 -568935 -422656 -763885 -390470 -579344 -90283 -845010 -385033 -887282 -51994 -136968 -705023 -880259 -475098 -212382 -636605 -833909 -951162 -51720 -166810 -579856 -462049 -322773 -661404 -16663 -102444 -332483 -754457 -855880 -191019 -331997 -126839 -594024 -298429 -587238 -849479 -328199 -798000 -740984 -1468 -69141 -205082 -747613 -179816 -395330 -324450 -158623 -775787 -689702 -236132 -105797 -437416 -331590 -766468 -444761 -238446 -564272 -394192 -639832 -622228 -836060 -283648 -459300 -674311 -368235 -854207 -476554 -755840 -949691 -659528 -315338 -125900 -251166 -450682 -461963 -82161 -911371 -898658 -570225 -570318 -569334 -803364 -8253 -677907 -173413 -1327 -720018 -862947 -135849 -810971 -803493 -197575 -464080 -173648 -450019 -451190 -288198 -760058 -693280 -462700 -739791 -635340 -284882 -674366 -824633 -537727 -365059 -277507 -744937 -627976 -68731 -301673 -266570 -773838 -678025 -838451 -552757 -745416 -696217 -450226 -896023 -379581 -567655 -734891 -691562 -913799 -326529 -92075 -134270 -94935 -778557 -358363 -932903 -656974 -761445 -823779 -86599 -766319 -910022 -236320 -11454 -447455 -652321 -328879 -642779 -204105 -319999 -482781 -829605 -237115 -238931 -778349 -196710 -178981 -715600 -499311 -140234 -457240 -200697 -244626 -771530 -776322 -835078 -274636 -676646 -674230 -165929 -454530 -486064 -879811 -364932 -686774 -192209 -86866 -236441 -742045 -235684 -767829 -461304 -802333 -266773 -45507 -499204 -934217 -342756 -632514 -913960 -795391 -374758 -126749 -912663 -617870 -144497 -673097 -203627 -815977 -751995 -764392 -775435 -943971 -159953 -600022 -366155 -495864 -357404 -820513 -230517 -829047 -444239 -172891 -38640 -192955 -833508 -922396 -744255 -838363 -177261 -280475 -605605 -70307 -658567 -627911 -10042 -864791 -173471 -527154 -848276 -53013 -338809 -949717 -34244 -667541 -672655 -877100 -679656 -396749 -606107 -403169 -82300 -923199 -255158 -911542 -406605 -477446 -174971 -172316 -344322 -162952 -673039 -442665 -132137 -755031 -282696 -491494 -429344 -588279 -165934 -746907 -910407 -831268 -467167 -103991 -832279 -812581 -745946 -472812 -898178 -610720 -483902 -830350 -559548 -13275 -680723 -699618 -291141 -204551 -127999 -745914 -879270 -855671 -840623 -96538 -934252 -148734 -162539 -118192 -89270 -556399 -570369 -680383 -338141 -951613 -914320 -763415 -52105 -880510 -427052 -908340 -146521 -152292 -584342 -397813 -76932 -33311 -715375 -373709 -716469 -879640 -691082 -594769 -946916 -570332 -518289 -311031 -901890 -501030 -423257 -324789 -836668 -154817 -17434 -113672 -315723 -390479 -407571 -561221 -664445 -448671 -854357 -53666 -878971 -284981 -540785 -650661 -701283 -396254 -92999 -318577 -86607 -693844 -726410 -343733 -122983 -810512 -518454 -39113 -704454 -263284 -406044 -348505 -244465 -209598 -196166 -420444 -863888 -569913 -253579 -354212 -380741 -875343 -467061 -850170 -742037 -936887 -447567 -869701 -723771 -192832 -326487 -625754 -12577 -726407 -219962 -251931 -47384 -173257 -542201 -114248 -193954 -42855 -384450 -278081 -295882 -795796 -667145 -271166 -634862 -185182 
-749385 -884458 -593317 -685774 -64376 -879887 -244172 -535515 -147509 -22993 -268142 -349735 -805045 -426769 -444272 -657077 -278678 -720148 -907698 -921336 -254282 -89184 -331974 -926269 -287931 -799770 -856926 -597980 -158585 -174689 -138619 -34680 -70320 -66950 -527681 -849638 -142604 -113917 -14942 -215027 -429254 -476315 -25108 -903997 -847136 -314678 -754327 -225248 -802713 -927062 -229751 -441856 -518223 -467274 -247962 -402256 -557278 -543491 -276264 -225597 -482330 -783560 -681577 -731309 -819688 -255252 -166737 -365228 -683250 -55898 -406016 -818480 -412091 -236289 -851563 -253105 -704207 -225178 -605630 -738593 -315595 -694343 -660129 -48177 -663733 -439630 -664784 -767389 -450602 -624331 -950077 -298543 -342248 -593923 -12331 -458659 -199446 -263245 -148631 -898001 -198815 -40404 -480278 -206152 -661424 -938488 -326178 -637413 -439819 -398608 -812884 -950754 -256841 -919312 -140239 -309654 -49422 -274384 -861155 -295039 -244913 -371625 -11369 -710289 -797279 -625785 -799297 -337982 -291172 -616854 -703833 -587789 -715412 -693497 -892356 -694780 -516405 -334076 -669548 -935912 -634682 -344497 -395357 -344509 -674265 -570105 -929058 -852716 -258623 -677885 -772309 -634141 -93375 -528783 -223742 -235908 -693848 -300955 -542854 -175448 -497473 -902697 -161361 -242946 -45208 -576573 -573874 -239234 -301767 -637894 -30557 -633378 -766934 -39204 -348584 -927404 -125823 -129380 -476736 -336923 -279201 -139220 -474369 -251689 -196438 -294841 -120713 -166719 -790266 -65588 -797451 -282032 -637538 -163950 -803022 -18942 -171291 -244247 -12272 -229533 -569396 -380005 -85907 -215710 -911972 -454181 -247649 -275437 -75756 -880101 -129482 -874097 -334552 -741418 -116862 -909237 -670763 -795369 -581446 -339609 -177390 -944016 -42610 -39082 -434706 -153261 -214180 -444472 -666728 -45595 -842315 -81942 -943915 -765599 -656291 -797746 -785832 -436878 -288337 -556617 -14988 -356252 -830681 -325124 -129561 -151034 -505715 -510065 -811399 -356004 -593532 -715555 -669733 -327261 -521919 -294603 -684292 -119453 -326325 -917236 -857663 -110265 -927229 -144455 -824505 -545273 -225955 -860466 -39528 -158716 -217433 -868692 -443255 -84811 -910203 -358806 -932354 -629522 -473582 -68877 -152421 -956611 -30051 -772031 -314506 -489243 -229325 -707438 -919314 -25848 -186015 -472570 -522972 -913817 -44821 -315010 -662239 -689706 -633100 -94842 -48649 -659494 -639914 -440401 -211709 -260724 -724080 -124125 -753064 -41216 -388987 -596493 -283279 -257125 -292647 -778491 -98952 -133564 -545233 -437895 -244395 -324937 -36193 -778534 -600785 -7818 -600464 -268686 -37230 -237368 -461693 -715103 -324969 -55085 -242849 -46651 -491128 -818337 -361691 -905393 -893891 -951916 -742723 -421071 -764233 -287186 -204603 -153299 -458660 -546474 -650881 -420713 -240457 -774367 -537804 -711442 -32909 -364668 -239510 -887286 -30594 -862942 -769016 -18607 -361913 -616885 -34015 -696866 -677977 -577312 -858878 -146885 -465053 -612915 -897175 -169734 -276027 -59200 -768962 -37399 -287744 -11560 -137077 -635713 -106333 -168294 -821431 -78841 -940864 -163274 -287924 -198481 -711240 -398830 -894798 -803444 -322890 -891279 -560256 -90545 -566983 -461599 -767029 -368286 -12376 -84279 -884431 -652573 -35809 -22935 -136884 -111504 -188901 -937186 -650848 -315024 -743375 -940069 -675077 -887533 -735274 -114806 -309817 -248232 -704221 -568568 -271116 -369675 -273919 -340426 -236203 -305684 -258128 -338729 -395202 -677948 -892474 -736654 -738274 -631876 -280489 -342945 -600377 -785679 -477772 -419117 -364696 -476685 -80005 -274791 -840658 
-696485 -13460 -482745 -75437 -806756 -570415 -123824 -261112 -704328 -778506 -328592 -838169 -951398 -803403 -848539 -863826 -524083 -140800 -648352 -858129 -356587 -38046 -504434 -446229 -133213 -489787 -338738 -72095 -97943 -482433 -598225 -536438 -879331 -11187 -479218 -913383 -543596 -80095 -362115 -130579 -106187 -724215 -628605 -161214 -428319 -462499 -900050 -32518 -427189 -356280 -295769 -388656 -14750 -902792 -933342 -722265 -860399 -116348 -428093 -432175 -316627 -195067 -682245 -36211 -630103 -952410 -237672 -471103 -955909 -70186 -664295 -364718 -234302 -940837 -209799 -150522 -314751 -599868 -86037 -784417 -475068 -260509 -258245 -618541 -222160 -428565 -600588 -762646 -76938 -278869 -297465 -633618 -280448 -590742 -745095 -199890 -830735 -695470 -196268 -297855 -818703 -737652 -769190 -350670 -937162 -860916 -783120 -314640 -192802 -357020 -351020 -802407 -14838 -677389 -10989 -848725 -316678 -747588 -542198 -424457 -328286 -638881 -324344 -569704 -941626 -730899 -306950 -703858 -437663 -332249 -465683 -331964 -313350 -571839 -840224 -37359 -650597 -279094 -696929 -120521 -858032 -640783 -760285 -93685 -316094 -717724 -405703 -844241 -278254 -118373 -226908 -414873 -162398 -951935 -41309 -310400 -281771 -483412 -251565 -22685 -891522 -891278 -236864 -239163 -458049 -358366 -312258 -795650 -264132 -237329 -446433 -195940 -230234 -287001 -775135 -426119 -582248 -377306 -682954 -63035 -84513 -103240 -831462 -368213 -282053 -397435 -3387 -952405 -66077 -347280 -684420 -673607 -810409 -134519 -240601 -189527 -908194 -432367 -571458 -458929 -693724 -127505 -887641 -770185 -160757 -512933 -238725 -937889 -324644 -892973 -69939 -263965 -42174 -295764 -296324 -221316 -601128 -904169 -918950 -115958 -121557 -590816 -39408 -743346 -215020 -809888 -804838 -486627 -816060 -180257 -107383 -951222 -805239 -315750 -498879 -55005 -11733 -30745 -817770 -467194 -256734 -525632 -516402 -892310 -295812 -865754 -539385 -9154 -913967 -209423 -174815 -263716 -281185 -257902 -172318 -491051 -255871 -273810 -842782 -244283 -64492 -265068 -181659 -897869 -432007 -598051 -275472 -896365 -737235 -409 -719507 -764258 -786009 -903099 -935776 -237248 -597934 -181010 -303215 -807868 -343066 -408110 -24372 -355849 -424188 -878524 -914159 -797399 -362338 -499004 -563154 -273102 -165365 -199991 -663463 -357210 -593907 -199408 -453194 -289984 -581883 -365706 -298648 -287168 -331092 -270624 -880492 -735985 -419991 -459794 -692687 -424213 -57040 -546719 -957462 -23031 -334424 -554885 -715255 -490270 -533780 -778068 -694416 -37346 -286318 -481374 -875647 -840417 -594002 -12121 -559234 -775713 -812592 -674368 -282003 -478351 -835235 -163078 -755401 -464179 -913761 -678196 -577623 -235869 -914669 -475093 -32833 -706669 -50198 -623792 -644451 -732724 -229379 -278072 -206041 -449291 -934162 -442427 -948833 -466241 -745958 -607413 -208961 -904190 -911846 -40985 -435221 -345216 -755936 -403995 -232289 -745410 -756323 -593813 -426037 -405420 -289114 -280784 -79760 -699639 -321758 -447949 -538251 -206341 -504685 -116844 -950937 -240363 -431905 -94075 -646328 -776879 -316902 -17650 -742138 -254955 -524520 -818665 -22283 -911352 -772087 -509836 -887054 -671322 -761522 -501399 -894765 -424988 -945051 -156397 -610411 -696289 -360006 -616580 -870796 -19883 -755574 -801176 -725576 -787682 -581199 -40662 -935556 -391421 -690772 -382638 -907880 -781394 -425816 -879590 -246273 -31996 -924680 -513810 -135002 -938191 -621027 -65885 -599638 -723736 -578798 -10049 -11260 -821328 -807137 -723208 -957011 -845066 -12660 -287004 -515871 
-694028 -314893 -227965 -720931 -584394 -93153 -102973 -512016 -195436 -836139 -849933 -840007 -635617 -940999 -386764 -46647 -13708 -792988 -424244 -637024 -315036 -648355 -138408 -843955 -843160 -198966 -239535 -43312 -176178 -332787 -560528 -368977 -103156 -778210 -65630 -143446 -308174 -212617 -671618 -173733 -904926 -350294 -215590 -786264 -356843 -312714 -275014 -448756 -774992 -806771 -853592 -163572 -505383 -438086 -236050 -51818 -328726 -185969 -878798 -446335 -323175 -524973 -204073 -103206 -276525 -403999 -773161 -341347 -382262 -521973 -650640 -604126 -239533 -792928 -45431 -629969 -253900 -761563 -227200 -469054 -898545 -419104 -236910 -880424 -575469 -144425 -805456 -591985 -288820 -212639 -503223 -850181 -837143 -790938 -151760 -897952 -790661 -832617 -619055 -951591 -751188 -916991 -871370 -153179 -334312 -462009 -911909 -865551 -661288 -813370 -817732 -890325 -143038 -236269 -280908 -123062 -203753 -774445 -809739 -766475 -950495 -349439 -280860 -664205 -778535 -811342 -117288 -192901 -516299 -175303 -403834 -440527 -44909 -775646 -763461 -608735 -247329 -395077 -239503 -196329 -437348 -349543 -126163 -897096 -755138 -777123 -907318 -430844 -635699 -441663 -232599 -939583 -366509 -66379 -389022 -916299 -97835 -914806 -713012 -97984 -814204 -779581 -475203 -51663 -682502 -230271 -291258 -749285 -800894 -950849 -307726 -847039 -427191 -891131 -134626 -49855 -810906 -671805 -105746 -237893 -3827 -648952 -13500 -279809 -329244 -904250 -166800 -336803 -923147 -713521 -741433 -848001 -67390 -354583 -230914 -395211 -132742 -124753 -63409 -181621 -80390 -55098 -275288 -952940 -700866 -726938 -489625 -866800 -275973 -683149 -596187 -10290 -766759 -415221 -59984 -87095 -271089 -691307 -873861 -429224 -433199 -404246 -175368 -786834 -552945 -607178 -181476 -255460 -931580 -747266 -772192 -422600 -394520 -440114 -434524 -934509 -9913 -940456 -808375 -41278 -609943 -150534 -46325 -147538 -889279 -225945 -874262 -908107 -325406 -618921 -845057 -771824 -764236 -851136 -167534 -183314 -719042 -179762 -949401 -215961 -237396 -746001 -126813 -566873 -832957 -78064 -126343 -440491 -23873 -372479 -830670 -770692 -821747 -121493 -364427 -295875 -810022 -103004 -474262 -823036 -337496 -358144 -301339 -770412 -18765 -346166 -316878 -684176 -365582 -324703 -260568 -730998 -445686 -916965 -948313 -165739 -515079 -332462 -692662 -103577 -832359 -396754 -75603 -400928 -760287 -160289 -952092 -483320 -843106 -479104 -630125 -184160 -367412 -173738 -204146 -955920 -700199 -759859 -51656 -587430 -387729 -480564 -236026 -539348 -132921 -546661 -59270 -744124 -75903 -449372 -150942 -927000 -753074 -33928 -102246 -422549 -339386 -279324 -278373 -526971 -96688 -768517 -378487 -185645 -719998 -186532 -249133 -61109 -817089 -119959 -224516 -430005 -527421 -936638 -282562 -267686 -666975 -903446 -659487 -362711 -256353 -409990 -528995 -439548 -64609 -315812 -711293 -944001 -346519 -659394 -7443 -178858 -58391 -90580 -626101 -129344 -850073 -665001 -421033 -644336 -446726 -598503 -703414 -945121 -92720 -318588 -405315 -686440 -851433 -160371 -65849 -668517 -408169 -634988 -633290 -34982 -103984 -734362 -30741 -199048 -865173 -18084 -886554 -830248 -698443 -843127 -265533 -391190 -413934 -913827 -849538 -841040 -261079 -792786 -151490 -678075 -123223 -374774 -714856 -817771 -711954 -771254 -698661 -418653 -833794 -186276 -601731 -228254 -707075 -669567 -512771 -49322 -356533 -41089 -735937 -881670 -300873 -885913 -428726 -459645 -706818 -764591 -14643 -774365 -346845 -388650 -839636 -397940 -275946 -620311 
-886831 -918861 -313224 -357019 -749334 -409834 -528432 -771952 -368709 -446706 -462153 -632040 -912878 -392483 -355582 -10574 -859345 -30408 -456979 -764402 -467113 -633316 -271039 -482836 -609719 -527655 -695439 -10658 -324784 -167207 -802617 -237075 -363720 -638164 -866508 -757891 -125951 -759611 -430145 -809667 -260991 -769589 -20723 -680639 -334759 -378878 -635031 -150856 -500 -904366 -342295 -13005 -488344 -27710 -333860 -12126 -56655 -362375 -287701 -437954 -48165 -103084 -416901 -365108 -147582 -711436 -224816 -853149 -949440 -266199 -137164 -4041 -779732 -332202 -695830 -777068 -163651 -316814 -516584 -770817 -673735 -857502 -75519 -736422 -681772 -706354 -712851 -13414 -954664 -181431 -861544 -483677 -547610 -633751 -717248 -282122 -653718 -186139 -245008 -650634 -779333 -668181 -233360 -664924 -485623 -40478 -778094 -639845 -950998 -594202 -474543 -774327 -439850 -684031 -3267 -24456 -272452 -736702 -772654 -52764 -121071 -85314 -609895 -46089 -564469 -590835 -273222 -845670 -623697 -126809 -647304 -731128 -443238 -690494 -86698 -869993 -839513 -917117 -686936 -735555 -192505 -96903 -32901 -146440 -113465 -51567 -123892 -480346 -108992 -345408 -666004 -545949 -285232 -229970 -650353 -66552 -126448 -433172 -951575 -167276 -777952 -427024 -571270 -934069 -743297 -259504 -677484 -113685 -437719 -619309 -461367 -777499 -944747 -132694 -412866 -693698 -598022 -244880 -564411 -230670 -669322 -297364 -149596 -926239 -703554 -485294 -171236 -504970 -830979 -735719 -443667 -657044 -658558 -287519 -109324 -651412 -188101 -22313 -633792 -528871 -467075 -516396 -633685 -236465 -425320 -436587 -126545 -206221 -83723 -608121 -570696 -37011 -449965 -703692 -30667 -310380 -58512 -433557 -575723 -287925 -152089 -354331 -429416 -21490 -670971 -871582 -801489 -953113 -848563 -71482 -674488 -244786 -166924 -831643 -863029 -778046 -284857 -494483 -874923 -64741 -747428 -311851 -196199 -863329 -418415 -14863 -257690 -12012 -460054 -366178 -150733 -301659 -925212 -555899 -374193 -135428 -328212 -95228 -182744 -490973 -402083 -269526 -667228 -650150 -10110 -333447 -565869 -239607 -849628 -500857 -446033 -259632 -168280 -121228 -862470 -523814 -772566 -865190 -738664 -415095 -103761 -10362 -422714 -52092 -920866 -276479 -294338 -678251 -640435 -277497 -342935 -280963 -756765 -863821 -427029 -399450 -420743 -294172 -34137 -365309 -18206 -864006 -83339 -631526 -412799 -55842 -78810 -634703 -46017 -465676 -662133 -81291 -16519 -111691 -282883 -806739 -145890 -331770 -808048 -150525 -436944 -154105 -244064 -141811 -698890 -676090 -787483 -19662 -828236 -851427 -779102 -927552 -811586 -796866 -31120 -737643 -245186 -360736 -195053 -682537 -391512 -714482 -931705 -905281 -310572 -55081 -353240 -793621 -827592 -324865 -758024 -129352 -956637 -794396 -639603 -882540 -392651 -586496 -247482 -432924 -357078 -846272 -353325 -719954 -345697 -807189 -97702 -800623 -223958 -718684 -888142 -276020 -505595 -213869 -207836 -188990 -231149 -40313 -770234 -424688 -89044 -880274 -609477 -245161 -367622 -420177 -717024 -696564 -763879 -929918 -793874 -388320 -725321 -445522 -56654 -401135 -339283 -806591 -451520 -699608 -257856 -165969 -260548 -652251 -104134 -408761 -685302 -939510 -268878 -255739 -256750 -523086 -35904 -904185 -500247 -193101 -931010 -506168 -858432 -12454 -623322 -173907 -906563 -118112 -232478 -878491 -488743 -945924 -208484 -874084 -129792 -287753 -755946 -271643 -14946 -263598 -80147 -733895 -10117 -38245 -668652 -897237 -284661 -891832 -466905 -563941 -792239 -11720 -12823 -604687 -94595 -103597 
-325092 -243149 -756087 -204432 -85678 -162020 -693791 -380259 -589743 -126245 -635327 -223738 -22896 -603877 -693278 -485382 -850545 -673235 -664131 -8242 -87475 -387371 -765082 -844736 -81548 -933014 -696939 -702134 -67483 -208292 -463606 -84928 -135246 -452929 -744383 -95694 -637385 -702067 -179337 -449146 -628010 -695917 -366914 -768830 -15163 -445781 -32989 -281732 -755358 -31125 -236654 -477494 -11518 -134572 -680600 -937544 -97560 -178367 -746013 -464899 -50085 -696688 -88032 -64526 -769240 -365968 -257859 -475714 -875446 -9884 -914209 -378720 -60831 -476416 -734788 -455077 -791522 -733641 -335151 -918734 -569894 -111765 -162415 -281057 -350581 -748004 -344368 -19175 -263590 -56699 -202404 -371022 -911078 -324556 -335362 -897267 -472642 -278881 -731831 -695754 -701295 -568571 -930266 -80898 -900134 -625693 -161122 -602759 -555576 -41345 -786585 -884904 -315474 -269186 -610929 -21885 -323746 -384451 -593481 -673048 -162459 -24883 -605792 -65895 -206534 -693890 -864253 -638616 -753612 -137040 -591730 -559175 -850261 -368420 -314938 -195163 -515653 -764020 -110455 -247313 -52398 -91804 -954456 -846199 -127855 -333643 -571268 -892923 -955579 -438019 -243705 -685237 -664882 -125957 -733646 -703455 -275339 -674008 -713637 -247189 -755585 -534421 -123092 -94615 -899507 -102774 -952156 -835568 -18490 -284699 -548010 -443000 -903948 -616760 -366031 -67588 -793739 -55779 -13726 -544553 -125193 -166742 -282998 -218831 -433421 -803126 -78335 -18368 -934233 -65321 -236993 -676732 -128849 -618238 -511731 -871304 -357232 -927196 -940970 -741682 -834248 -822598 -764396 -926678 -633149 -403124 -778438 -466935 -721775 -24274 -49283 -827312 -492652 -916953 -475900 -238879 -112486 -332103 -840088 -805487 -955646 -162788 -373863 -39245 -696856 -324233 -727152 -793618 -472074 -457036 -485108 -471346 -899062 -944356 -254983 -819614 -176030 -115344 -611784 -417259 -9414 -851322 -418874 -935523 -790031 -625510 -126051 -17502 -691360 -702908 -307929 -167593 -368259 -453077 -213365 -812032 -712106 -762029 -387452 -30085 -707450 -277388 -482523 -10507 -356273 -123756 -278384 -846101 -105996 -17566 -349341 -326422 -328373 -149462 -106334 -482880 -627147 -638493 -156401 -104795 -17406 -38690 -442022 -703458 -826207 -714313 -570818 -45162 -561885 -806257 -380039 -273242 -767867 -699706 -121963 -742930 -703557 -116752 -437815 -124998 -706747 -438524 -935138 -72166 -154893 -137148 -926909 -227356 -161971 -18940 -64897 -323172 -906607 -756604 -710595 -99416 -372911 -625091 -178632 -7403 -228644 -400240 -875870 -773098 -664268 -330613 -463170 -382734 -744421 -754633 -276319 -471129 -646440 -331514 -594004 -114819 -75341 -707309 -279410 -652967 -68396 -136593 -49194 -799290 -847651 -409591 -747587 -651139 -298723 -772189 -391089 -350366 -382988 -295092 -763458 -879197 -101658 -367283 -952496 -289390 -717001 -335263 -340782 -292767 -883380 -502997 -865543 -848186 -248411 -764367 -190102 -848831 -212815 -943489 -777030 -848551 -830969 -276314 -155255 -879431 -701145 -580822 -802067 -947800 -349346 -703434 -51689 -707177 -309611 -534479 -37290 -751616 -593715 -147544 -339672 -6541 -435582 -784209 -48584 -178843 -688061 -695986 -682720 -443809 -139883 -951107 -426072 -672786 -148249 -468287 -25428 -787000 -236562 -114932 -793667 -699595 -831789 -296993 -639833 -244228 -339608 -70946 -402196 -784919 -599381 -364902 -72138 -431925 -421109 -435878 -799244 -648114 -894306 -134201 -252178 -264036 -906792 -955605 -236294 -926666 -94083 -906885 -327853 -655778 -20742 -430450 -835371 -598276 -930657 -255443 -22087 -606713 -828509 
-446438 -103065 -231102 -276601 -920091 -325108 -664169 -41177 -666246 -560073 -652968 -165033 -411501 -750066 -281270 -432099 -321754 -598254 -350621 -316588 -284672 -31784 -320090 -45163 -426746 -153890 -763212 -120904 -851018 -917120 -613073 -235480 -147597 -161320 -253021 -555308 -861181 -443555 -79297 -252553 -478869 -211895 -64731 -502496 -471924 -84565 -828141 -12386 -809652 -571777 -800603 -41831 -638708 -406231 -498964 -832692 -878855 -245453 -70718 -660649 -887347 -374011 -379680 -101291 -386702 -240898 -756311 -106712 -360210 -940020 -754321 -843297 -297063 -72253 -49814 -312768 -16596 -530736 -762014 -481778 -929215 -935806 -365143 -415115 -648900 -164622 -674627 -242192 -928199 -816821 -34214 -850672 -238804 -156271 -53054 -44831 -345786 -529317 -928331 -639978 -360395 -158114 -129414 -543132 -894649 -678212 -810739 -591623 -290324 -439634 -139949 -129297 -930413 -893051 -35147 -805392 -492459 -838407 -356164 -304113 -228068 -432063 -796584 -753815 -738658 -298610 -556219 -420520 -87976 -163981 -800982 -158465 -706965 -153566 -344003 -435920 -212813 -577256 -210807 -316688 -237288 -535540 -129021 -545782 -708701 -162862 -13120 -781956 -105929 -542818 -205675 -86401 -50798 -643292 -692661 -57935 -892022 -379681 -630382 -544295 -909385 -165210 -52454 -470914 -667196 -192719 -375882 -29387 -693484 -754474 -597558 -946946 -626674 -854136 -712313 -724156 -817839 -881860 -183677 -308508 -480356 -134077 -129662 -350021 -644577 -753913 -372390 -756947 -43848 -840003 -426378 -828673 -55940 -181701 -826237 -894768 -715430 -791835 -90359 -336907 -930496 -9684 -892128 -26442 -390946 -11777 -53444 -13024 -897920 -383994 -567346 -855617 -648752 -265264 -752554 -916843 -871856 -250224 -481883 -400417 -81956 -778156 -924480 -810608 -258178 -761322 -178911 -175047 -204943 -493256 -279595 -101854 -461559 -468336 -251883 -577258 -624840 -684276 -296205 -555543 -10332 -238784 -638800 -390886 -953177 -933073 -891703 -226178 -357578 -650099 -86603 -495587 -268062 -156480 -405162 -739866 -311661 -421099 -465024 -850066 -797933 -806611 -740751 -249361 -14995 -315066 -674499 -767481 -911911 -318991 -622236 -449082 -443391 -151395 -770567 -165478 -262923 -356655 -55629 -896388 -66786 -835087 -68361 -496129 -948136 -41378 -342741 -873135 -873640 -94905 -468507 -888120 -332254 -400889 -349299 -546851 -667296 -291846 -757021 -45475 -817237 -916354 -288570 -43410 -99334 -863640 -74314 -505874 -12235 -859697 -85783 -32036 -181625 -418191 -904182 -341755 -764103 -770783 -348382 -235432 -839257 -659257 -957587 -791581 -626319 -146906 -649931 -200728 -817910 -165342 -838201 -932781 -942838 -1341 -20334 -432637 -588179 -10259 -117967 -667183 -902194 -426949 -231298 -51885 -358613 -483101 -117206 -706435 -480729 -113168 -45101 -364721 -485104 -936652 -480456 -607258 -440415 -903424 -72822 -155775 -855167 -664150 -284703 -339878 -562574 -231092 -314563 -944156 -742959 -927379 -308221 -650777 -98116 -660597 -280521 -669450 -639435 -412957 -114050 -690713 -270179 -245025 -571990 -373643 -333777 -433688 -803326 -956049 -295872 -770288 -486804 -256311 -138835 -117658 -173000 -467175 -623277 -588967 -375891 -945926 -5594 -567939 -366133 -483319 -900639 -947092 -950774 -180283 -942918 -543924 -596823 -898428 -389713 -458624 -538964 -922481 -135181 -199677 -114951 -36635 -605017 -702994 -364470 -63802 -418764 -126651 -400100 -146740 -133037 -78785 -705014 -208079 -419993 -249315 -862949 -950811 -757578 -543152 -164949 -533193 -900880 -544827 -298502 -240533 -401312 -651766 -411270 -10146 -535220 -896892 -403409 -895152 
-594029 -256819 -245495 -100801 -79087 -598753 -504937 -656011 -704375 -808579 -828199 -442102 -15824 -802364 -836812 -674268 -13487 -490874 -861073 -58135 -780353 -865474 -327942 -821686 -328218 -569234 -14936 -438107 -65940 -778966 -323178 -824863 -66259 -385287 -673141 -777458 -703169 -766854 -360211 -403389 -673144 -220374 -316876 -687802 -597407 -50548 -638932 -45531 -263889 -836723 -584249 -40796 -413129 -901440 -314478 -413776 -102521 -60595 -654056 -281906 -722216 -334579 -195724 -919962 -394347 -566457 -263529 -155642 -819411 -934165 -947569 -614966 -43078 -812879 -107604 -177954 -400673 -687839 -670042 -282804 -898412 -770435 -482015 -847620 -774253 -674173 -322579 -431366 -314439 -27611 -439319 -570128 -104631 -303584 -316313 -210740 -482287 -460207 -682026 -122851 -625567 -97802 -108399 -343673 -31399 -368124 -811575 -763012 -243728 -280493 -579266 -183189 -340653 -817730 -836592 -474288 -775539 -137210 -608731 -865059 -216705 -69642 -220991 -350196 -573571 -852966 -66009 -678127 -476839 -817917 -633587 -320028 -932176 -26835 -796234 -360802 -196355 -307780 -13194 -429380 -331086 -108937 -523756 -662356 -804435 -175558 -764504 -226368 -449968 -267930 -560571 -362365 -77354 -642567 -301696 -197786 -21914 -398415 -54736 -8161 -224067 -837776 -711218 -428131 -35858 -929000 -181544 -458180 -45286 -875936 -51642 -378879 -289976 -74293 -171809 -332123 -84752 -134548 -749061 -276363 -109386 -49827 -759598 -633168 -542608 -93865 -556166 -35003 -378691 -239260 -850702 -108053 -88341 -663912 -892402 -330142 -614990 -711728 -643772 -331521 -293153 -903472 -246813 -378950 -72402 -848008 -322504 -273254 -768667 -821338 -227604 -460145 -933111 -229514 -132630 -635319 -560372 -14520 -635578 -535618 -896253 -808212 -697022 -583200 -937354 -271361 -880353 -817391 -474189 -14901 -286195 -894175 -932768 -523651 -927218 -638312 -595991 -147302 -390077 -359314 -884636 -819788 -331099 -417014 -295478 -292806 -795070 -12751 -716539 -756288 -476796 -446635 -883717 -668343 -430293 -278025 -48966 -756606 -827967 -4153 -439715 -523521 -271225 -905364 -274614 -240194 -636972 -297938 -834285 -948748 -184008 -950964 -265270 -423449 -19067 -942870 -391121 -516492 -379740 -810710 -11871 -161066 -42299 -562980 -390387 -623383 -276287 -366011 -368139 -218666 -221278 -904487 -666518 -245499 -385189 -134823 -72123 -253303 -365358 -427558 -606549 -617950 -174742 -873469 -593569 -653028 -774871 -702605 -566763 -901318 -289090 -356049 -345562 -459843 -596735 -236725 -156876 -391220 -522233 -353751 -693965 -339041 -836069 -803280 -436789 -513329 -602602 -491483 -190365 -687799 -891713 -433436 -849641 -919701 -44926 -140339 -45430 -126650 -235467 -814282 -762508 -784100 -588452 -269602 -945892 -840924 -950963 -356986 -666383 -150539 -951515 -301501 -313960 -516578 -559986 -429954 -674562 -722758 -396405 -955675 -640553 -362208 -118132 -672803 -39512 -828062 -622320 -275976 -127649 -37714 -828774 -263572 -378857 -480515 -487117 -248389 -104343 -114795 -150970 -817548 -390266 -886962 -150294 -237390 -415826 -109438 -750097 -434366 -745621 -45347 -700839 -240548 -129254 -557118 -346994 -514673 -164836 -98293 -263050 -34251 -693025 -289398 -217023 -204855 -191558 -834855 -720814 -724497 -239214 -286042 -289420 -39979 -170088 -873176 -382748 -315827 -329173 -266385 -134904 -477117 -636114 -84045 -136787 -863987 -405096 -346540 -948991 -297337 -313713 -938199 -668060 -712299 -103455 -383250 -611646 -155526 -619808 -692451 -14931 -550064 -772154 -67343 -935121 -677174 -29492 -362293 -30779 -474634 -151862 -639504 -685328 
-324881 -729889 -659145 -559087 -885584 -441732 -892391 -823360 -861355 -641981 -938330 -101375 -596632 -245775 -93604 -150887 -943228 -672021 -949116 -880066 -306417 -753169 -164733 -781254 -698407 -811826 -234902 -852684 -18803 -835296 -46853 -889682 -126155 -697092 -590446 -837958 -704196 -719529 -921012 -695636 -795375 -504833 -15125 -751921 -777682 -215987 -135207 -918505 -177693 -13595 -639447 -684278 -326780 -198620 -70299 -912257 -99226 -378991 -659470 -124656 -291599 -596401 -774350 -327588 -618097 -648848 -730872 -76081 -161810 -14963 -570066 -629756 -34375 -881621 -165802 -152835 -97620 -747228 -479211 -143996 -201581 -239608 -723910 -246196 -23685 -593918 -357032 -192157 -63384 -213577 -543811 -277955 -603365 -910698 -121912 -206258 -724376 -314819 -108940 -665434 -745374 -359317 -902898 -237298 -606192 -879449 -546166 -200443 -787060 -13621 -520464 -922586 -682515 -18046 -785059 -687760 -608849 -522910 -429415 -938076 -146427 -435810 -458513 -363899 -669862 -340608 -571989 -117097 -931811 -32249 -751595 -659212 -111887 -272998 -204799 -448477 -282672 -13357 -271492 -136159 -251597 -150719 -639044 -563300 -692959 -86940 -835626 -99797 -84819 -199077 -425706 -547873 -185583 -817721 -134512 -376720 -270123 -736344 -377165 -540593 -941523 -558803 -808949 -779806 -769351 -913121 -685191 -41344 -943284 -175074 -79777 -236373 -588727 -801324 -167996 -273696 -81776 -607218 -236166 -53838 -204491 -806694 -71202 -766089 -65122 -769239 -682925 -177702 -240270 -121760 -848044 -3413 -68155 -531887 -115312 -186585 -402243 -173508 -657110 -900764 -498330 -638116 -660322 -534471 -195781 -110349 -330960 -751978 -486961 -464695 -540202 -525112 -705002 -681774 -372551 -364848 -602758 -41403 -945215 -472430 -270723 -487685 -103703 -876077 -89476 -630740 -71327 -854753 -546467 -516627 -938645 -638609 -354158 -640481 -167556 -75308 -356880 -118387 -438089 -619797 -937783 -757452 -150517 -772179 -199536 -39090 -4375 -850922 -593628 -644127 -226898 -931949 -135244 -185777 -904068 -523924 -434141 -221647 -85916 -115692 -32893 -953034 -25280 -344314 -654788 -36223 -167545 -954689 -426595 -825726 -235992 -440396 -382039 -96406 -855654 -645450 -540193 -319569 -686211 -857823 -17488 -161683 -305974 -923592 -308569 -423317 -345815 -192932 -330191 -330289 -432798 -725913 -952116 -236762 -768753 -864645 -377266 -522182 -414933 -404955 -380714 -289359 -174488 -341669 -518988 -176296 -909118 -818062 -445505 -596165 -249518 -157458 -353993 -436115 -189077 -247595 -101628 -924090 -134216 -83648 -952841 -354873 -799686 -905078 -797785 -109469 -765332 -678504 -264679 -522312 -632012 -248870 -872993 -106656 -80939 -230704 -516378 -534588 -511163 -892689 -466196 -518593 -423149 -404907 -202679 -591622 -674780 -501130 -234279 -786299 -696577 -674876 -759789 -790387 -294047 -83773 -7362 -72274 -456435 -333361 -454143 -940931 -501192 -939496 -210537 -496946 -354704 -266651 -206335 -521917 -468369 -328113 -515364 -938075 -723125 -810926 -416413 -133969 -402535 -731147 -885677 -550385 -486906 -64324 -857438 -953738 -136229 -596511 -224248 -803173 -239644 -46341 -278539 -424279 -170801 -84852 -186848 -585359 -126302 -262544 -314100 -787862 -861285 -386735 -279453 -931762 -94877 -363885 -128249 -108504 -368125 -21333 -511806 -593477 -777390 -897723 -65555 -472463 -255143 -491911 -575040 -601133 -159240 -797750 -711129 -150335 -186935 -133793 -467108 -81799 -695050 -494322 -917520 -872238 -933640 -590617 -626370 -15870 -902620 -66245 -134998 -904750 -237323 -596203 -239541 -120000 -607444 -339643 -27682 -410445 -759328 
-617257 -699067 -462131 -273957 -534816 -205256 -187889 -635010 -356850 -693745 -167397 -287050 -236031 -368426 -193947 -422121 -599966 -111834 -177789 -781698 -204163 -625791 -20254 -813297 -39563 -597162 -303286 -354605 -492704 -527006 -30671 -12714 -313944 -61699 -454561 -765595 -678560 -349874 -382782 -209491 -126471 -176416 -251451 -841642 -799241 -593971 -451856 -25358 -938292 -323546 -439292 -587182 -319109 -579285 -768448 -815843 -555332 -327657 -669162 -828658 -738348 -240853 -795093 -855598 -305000 -871554 -304186 -390197 -874737 -375659 -404091 -785478 -653116 -497882 -22823 -220021 -839998 -394457 -328229 -149186 -680164 -108239 -954187 -782459 -470892 -883286 -417865 -856588 -736531 -180498 -46524 -147569 -702586 -820933 -737782 -333107 -198332 -818748 -224839 -335071 -483181 -896121 -112995 -551374 -449101 -810352 -805316 -603712 -65884 -738650 -660042 -909466 -463633 -755126 -10533 -204443 -794925 -640482 -305037 -777860 -680406 -136718 -256368 -474275 -824703 -180971 -600251 -292531 -30084 -41311 -723220 -245659 -902805 -618196 -28702 -483314 -45451 -265056 -237380 -846739 -439081 -72136 -909513 -210868 -84800 -429250 -885803 -40772 -773908 -500976 -397100 -467299 -54075 -474973 -404693 -588210 -602598 -98178 -716695 -75894 -801625 -456019 -509485 -710783 -189197 -428903 -598090 -450821 -584387 -517883 -527595 -873296 -830427 -38256 -947805 -857676 -325094 -128348 -355620 -202481 -38655 -147610 -170270 -939707 -552736 -632687 -416109 -155482 -101926 -773761 -48921 -905772 -324942 -385 -865674 -458893 -693818 -129000 -178550 -487285 -886562 -65209 -249046 -479219 -190128 -17953 -348918 -594088 -647892 -24990 -81651 -64399 -426123 -324625 -248911 -39543 -41390 -807144 -155073 -419291 -208543 -103382 -673924 -525436 -14842 -408561 -839529 -770162 -395375 -597691 -768844 -139945 -886195 -950094 -84591 -34778 -682301 -242679 -723597 -630707 -669839 -298975 -206451 -244744 -450617 -863559 -279852 -345477 -650413 -798880 -879613 -416885 -484978 -125915 -77118 -803286 -515678 -231059 -466352 -79839 -593904 -591983 -119303 -939231 -666234 -809052 -141305 -52123 -322747 -724439 -707081 -645444 -827310 -817113 -903175 -495422 -499192 -946407 -22401 -20522 -340697 -397092 -448569 -792128 -581710 -665239 -846877 -335295 -35523 -764586 -69132 -287990 -355507 -597671 -623275 -31894 -758363 -217866 -838364 -535003 -12774 -300877 -584511 -760882 -82799 -237416 -474316 -119332 -591273 -334712 -863500 -79447 -246052 -176524 -594946 -911929 -12005 -415171 -108598 -911023 -806760 -129129 -92670 -677973 -237541 -285689 -287264 -841211 -488299 -238698 -789619 -29323 -413242 -677826 -401732 -556393 -181354 -605922 -494678 -461826 -126806 -355894 -309750 -625409 -203 -432771 -850681 -162526 -854623 -445327 -409600 -186789 -635854 -663691 -737356 -628309 -252497 -447760 -764260 -422038 -48843 -822546 -496133 -883322 -429068 -340022 -346152 -239443 -585580 -934708 -636763 -262909 -22150 -284831 -693719 -786845 -583924 -209878 -286995 -689648 -222119 -183651 -407406 -607283 -325091 -494615 -420506 -34867 -735375 -933066 -386055 -242726 -109142 -301300 -650803 -855838 -649621 -324998 -314507 -424351 -556463 -698594 -199049 -453974 -611385 -38878 -268428 -227346 -913757 -767466 -20637 -485148 -899479 -810815 -290818 -608676 -772994 -103215 -209042 -369126 -597976 -23079 -358279 -33200 -373655 -400606 -542287 -763931 -502640 -477058 -374773 -153195 -593479 -915615 -37540 -179336 -332281 -810628 -691200 -733026 -74530 -317662 -578698 -321633 -551383 -570384 -158102 -229902 -231239 -338990 -238288 -165041 
-422123 -341763 -181278 -222022 -96410 -902926 -391381 -284873 -13234 -863355 -261556 -705658 -899808 -734731 -676001 -335855 -539548 -167413 -146182 -846752 -518502 -935712 -696311 -849679 -330306 -466701 -200517 -230117 -171284 -797680 -564218 -613079 -419954 -198603 -126453 -81807 -501865 -242905 -58094 -844226 -422556 -825972 -834089 -612787 -78461 -426501 -355537 -637087 -364270 -63028 -909182 -566707 -915748 -938090 -573685 -327881 -309312 -563985 -236974 -861248 -770514 -809458 -431566 -486332 -767695 -463704 -635872 -2729 -898631 -692567 -253572 -204174 -93887 -21986 -564991 -142334 -726968 -510166 -880953 -755700 -179290 -282007 -210513 -457961 -692405 -640635 -121980 -737913 -602482 -926850 -41989 -321776 -48803 -264729 -209262 -432559 -449038 -186580 -91131 -83003 -215032 -67319 -17298 -857449 -85284 -374963 -35249 -60487 -32055 -126706 -769228 -343640 -647965 -724293 -212464 -694100 -388981 -456205 -55715 -442499 -430703 -763441 -278579 -947173 -404994 -510492 -371278 -9958 -268905 -778306 -103868 -74521 -908556 -674059 -764159 -646011 -170711 -734797 -908226 -527458 -528219 -192933 -119977 -481207 -768023 -903337 -405967 -380017 -467408 -343332 -942339 -161968 -681754 -860602 -822953 -162145 -184200 -314544 -94355 -810638 -802632 -745351 -161663 -504288 -302533 -607302 -74455 -428651 -146349 -671342 -848182 -250510 -592299 -293910 -178543 -893369 -605902 -292742 -636890 -160633 -266046 -43636 -830622 -379485 -564619 -923135 -669430 -947971 -428836 -810448 -944628 -817903 -466537 -282169 -75846 -955527 -438133 -197313 -930201 -874937 -301408 -338126 -871415 -955170 -239454 -101700 -249534 -477001 -178245 -49949 -337375 -397577 -105014 -254564 -364920 -591796 -309813 -328932 -513634 -210121 -477443 -167809 -562054 -540410 -472574 -837859 -79830 -290670 -667971 -349496 -843273 -568826 -843002 -377213 -379484 -814579 -176965 -285629 -356583 -710197 -604291 -793326 -440671 -338374 -251287 -395247 -37499 -854018 -174093 -761963 -555512 -515854 -770184 -125798 -922398 -667078 -50625 -258461 -522586 -263541 -779038 -674209 -757153 -287610 -368708 -25938 -724050 -159231 -547254 -11996 -767454 -282940 -754735 -109309 -778195 -879803 -356346 -779793 -473839 -906760 -732268 -953698 -244223 -36843 -500425 -293431 -644439 -705348 -180258 -176159 -810220 -438455 -391563 -37041 -263000 -947280 -516231 -235974 -405870 -287559 -788533 -97845 -773824 -888213 -506140 -809532 -940817 -835034 -193166 -251856 -771210 -807664 -922220 -863504 -238260 -949542 -411612 -258162 -724415 -684200 -434684 -178309 -623871 -238659 -483229 -368926 -167984 -148927 -738357 -63955 -182790 -879245 -504736 -778400 -545839 -288078 -932994 -703880 -230917 -883122 -112648 -70808 -179333 -247696 -904625 -265301 -713250 -536747 -848533 -353207 -482198 -60699 -1218 -76957 -471106 -65258 -164054 -334633 -749485 -146640 -360221 -763495 -889418 -24925 -54289 -938637 -40884 -721887 -658266 -388890 -11523 -810709 -205807 -903040 -17413 -132507 -507723 -29675 -357762 -850099 -101047 -395359 -851202 -861845 -357998 -251549 -569895 -953692 -664166 -954236 -631686 -632280 -251780 -745230 -404024 -439313 -723031 -471440 -326332 -74440 -412875 -45073 -819586 -633145 -206349 -459138 -432430 -472984 -424741 -847322 -716431 -761773 -927725 -636744 -231360 -308483 -925901 -584654 -83572 -797510 -405198 -938273 -871543 -254417 -290076 -182925 -808261 -24166 -450515 -101021 -801270 -778386 -325076 -440344 -952157 -250450 -356230 -494210 -892646 -666427 -13665 -29153 -10516 -735662 -243465 -151845 -412312 -379914 -768136 -873774 -899471 
-654244 -173510 -83597 -85903 -830726 -826111 -637096 -571912 -146289 -256746 -938274 -56031 -645494 -755686 -67404 -825896 -894586 -20525 -750937 -842053 -188574 -722898 -3102 -951724 -5591 -570725 -357244 -382874 -21168 -115908 -51183 -772769 -365406 -157975 -160736 -912182 -277700 -109043 -925871 -830616 -259470 -78618 -156074 -122976 -321193 -140911 -418867 -677618 -772367 -20446 -950117 -69542 -9320 -331053 -324403 -792351 -390887 -543920 -66075 -390394 -458301 -888325 -732273 -314927 -779604 -816655 -408469 -313600 -254636 -719031 -32498 -317361 -231071 -619554 -651559 -13428 -172431 -256360 -63630 -871883 -104884 -11318 -948665 -384362 -27708 -632063 -65631 -191414 -889524 -913854 -610424 -553541 -275459 -154897 -857402 -612953 -913693 -41128 -116289 -811273 -154631 -248632 -92552 -462540 -446099 -101347 -66434 -113508 -78712 -432112 -316805 -315286 -460380 -520468 -749051 -676583 -550873 -815358 -109870 -288946 -18976 -12737 -64620 -899053 -412172 -14923 -118304 -819661 -902119 -75411 -204508 -888966 -31755 -543301 -678447 -22886 -574618 -121914 -741440 -43730 -694455 -626879 -875743 -891976 -829646 -827524 -639880 -815495 -76614 -703715 -310085 -135917 -560408 -786169 -944489 -639464 -70662 -560099 -339420 -814878 -81181 -836820 -851321 -716618 -142246 -192851 -887450 -651960 -356287 -523470 -639959 -910354 -247033 -705307 -929696 -591282 -84727 -81781 -222108 -927562 -85319 -853953 -146163 -466021 -423214 -728899 -134620 -285657 -444557 -626747 -26453 -146757 -628352 -242887 -178972 -80034 -600366 -852584 -2524 -770378 -511055 -126039 -182776 -56608 -828521 -38664 -58326 -693338 -866918 -277058 -879496 -932104 -173878 -802804 -572254 -380913 -56054 -649647 -380927 -463981 -8291 -900553 -301737 -870312 -308675 -444663 -86554 -115514 -429212 -304338 -850694 -174826 -531261 -49681 -466867 -849008 -12753 -558847 -115833 -678843 -146989 -293064 -918629 -639814 -864587 -80335 -121185 -930469 -195127 -950234 -562622 -74399 -392790 -769787 -577648 -804887 -896813 -7446 -887317 -515193 -179233 -298095 -263339 -162195 -146263 -672184 -693816 -265415 -663726 -109333 -539225 -692410 -868345 -865619 -401981 -251550 -315422 -938626 -743768 -136381 -61864 -544648 -41320 -59016 -388684 -151303 -274788 -6247 -885521 -662658 -501314 -331750 -67620 -930145 -814301 -149427 -41381 -942409 -416893 -296194 -232818 -313550 -803607 -90426 -281015 -925836 -706079 -646245 -741830 -697016 -327798 -432499 -764244 -850570 -657634 -356891 -824421 -99074 -133060 -591072 -632205 -687505 -819596 -298524 -119029 -229749 -196180 -828818 -482472 -115061 -328716 -171277 -117353 -712973 -141809 -426263 -432605 -146572 -196370 -616090 -504539 -25290 -299384 -822468 -955532 -142037 -125827 -265036 -827479 -252278 -14678 -585409 -678964 -276462 -451957 -63226 -97666 -688671 -600222 -167587 -901259 -161746 -769803 -105036 -220384 -952169 -919935 -389870 -200292 -700418 -172834 -244318 -128484 -150992 -741990 -368244 -44791 -560206 -599684 -640860 -422964 -444165 -34245 -883295 -368079 -121531 -79576 -724476 -728123 -235945 -147280 -335153 -588251 -590908 -857947 -209054 -806902 -304150 -284824 -384054 -306790 -161234 -189775 -295744 -873743 -446316 -725291 -550248 -267185 -26129 -669135 -254588 -12240 -856012 -257116 -308670 -540108 -439367 -910456 -278039 -672884 -745853 -575510 -306939 -175003 -387455 -51199 -413935 -140601 -797967 -653525 -638699 -730984 -325088 -602101 -459330 -664462 -825051 -102952 -634280 -890848 -154047 -763547 -825098 -919091 -758962 -66228 -853067 -619227 -881890 -338749 -936753 -743995 -90134 
-180759 -220706 -685438 -699680 -415899 -768994 -493170 -903573 -946909 -373406 -364422 -230639 -822613 -191498 -127332 -576533 -98318 -286481 -438006 -610523 -103238 -821385 -886391 -771049 -802125 -246283 -848891 -867410 -35088 -56490 -633697 -639993 -34104 -640963 -244452 -661019 -808575 -689457 -302756 -257230 -136365 -429208 -11762 -721649 -879539 -233324 -260214 -480203 -263473 -311524 -397345 -775235 -631218 -403678 -137222 -746648 -648046 -415227 -125867 -181381 -874199 -741224 -301609 -250396 -666502 -878961 -198382 -257142 -474673 -240523 -557682 -903309 -495906 -632215 -48103 -609761 -735332 -894627 -779591 -554717 -175600 -468759 -432202 -544418 -126450 -135150 -552834 -785499 -264056 -701758 -731880 -204615 -424073 -276003 -802823 -865892 -281229 -884367 -420596 -176390 -389920 -37149 -905445 -879257 -20518 -588677 -322356 -627421 -450282 -190649 -394507 -287060 -950245 -503298 -656845 -529690 -131762 -13026 -432780 -750606 -726578 -99579 -204949 -287966 -817720 -115130 -661712 -418008 -538922 -158700 -446030 -792347 -706920 -338495 -932546 -177614 -65846 -276435 -99703 -347486 -706982 -573723 -838257 -77168 -836926 -558204 -429384 -21221 -949050 -790722 -850938 -722281 -729361 -75395 -151398 -590582 -340610 -418434 -524206 -747901 -542604 -129432 -950221 -360790 -861253 -243091 -798014 -22410 -663319 -539395 -724058 -31171 -244925 -350201 -467321 -812608 -423025 -52364 -475340 -466932 -290577 -2101 -772874 -528905 -325631 -547336 -561433 -497709 -81691 -13033 -466826 -254285 -619735 -568519 -532786 -161080 -42883 -380579 -605886 -247981 -447808 -53094 -45563 -75704 -601096 -696129 -124678 -952135 -474012 -127625 -438051 -276468 -916801 -420580 -178026 -404027 -887336 -429036 -504973 -824160 -696899 -319916 -412358 -424175 -858678 -696988 -137135 -654751 -895840 -483195 -802386 -349515 -390944 -905216 -820036 -433499 -887320 -679650 -170610 -428780 -754058 -498311 -126805 -37221 -738687 -188667 -483858 -524759 -195706 -36536 -896214 -761618 -860124 -95005 -568892 -332285 -441321 -267522 -864029 -651167 -118779 -434447 -942659 -674587 -467366 -482831 -607228 -601227 -822603 -550910 -24838 -524761 -108528 -716968 -221141 -875635 -309212 -652479 -847856 -856930 -519225 -662674 -208669 -157351 -495594 -142436 -791806 -95543 -285065 -326903 -935333 -181204 -81265 -618162 -778640 -446759 -15936 -305425 -577086 -63788 -851513 -29427 -415697 -623130 -598147 -221703 -54642 -363772 -652390 -505883 -17469 -143716 -449578 -32338 -818707 -499852 -592496 -108985 -623231 -920602 -546802 -11699 -55536 -488440 -409899 -946852 -48514 -683217 -938569 -61714 -366980 -232544 -482487 -854707 -742205 -940537 -101348 -440595 -362168 -431822 -661505 -313073 -396871 -851154 -654658 -2668 -211943 -441239 -437885 -369084 -289143 -763653 -864823 -927457 -843009 -468257 -310582 -700732 -488450 -717520 -619789 -436958 -704218 -724273 -908169 -867799 -270464 -191204 -151708 -83688 -763185 -313068 -55890 -378175 -811913 -404467 -198719 -682507 -658477 -222110 -41343 -65835 -693760 -682562 -564513 -283628 -596549 -58660 -500576 -416220 -558823 -228253 -41036 -569990 -875847 -916455 -801693 -75924 -808240 -888806 -59292 -403664 -199650 -278391 -397323 -808271 -420731 -192917 -325964 -161273 -142736 -875984 -680742 -872758 -207817 -742050 -581008 -851724 -77488 -354399 -836661 -46649 -206167 -244613 -405404 -344024 -956944 -212047 -757930 -635514 -741435 -664437 -472138 -221513 -574811 -269801 -905411 -111813 -830266 -776794 -173490 -156135 -362104 -362484 -566973 -490925 -556723 -673554 -312592 -472333 -422536 
-305422 -791815 -455725 -839949 -268208 -349203 -368494 -707377 -514479 -820553 -136699 -602098 -947688 -623543 -265244 -398941 -22155 -627138 -361562 -852399 -282688 -889827 -930727 -488131 -909651 -275393 -811721 -397810 -768442 -795613 -752795 -598133 -430480 -93517 -904815 -39956 -883608 -649284 -306785 -191557 -424097 -330547 -288231 -768715 -675062 -350475 -350971 -594527 -282835 -164966 -895721 -412040 -514632 -362527 -745444 -853239 -444393 -126117 -805867 -812585 -803276 -851075 -936148 -758345 -454339 -143775 -569904 -296739 -393392 -442020 -371281 -137614 -185963 -899126 -308878 -347047 -403248 -300517 -147298 -172367 -255488 -520878 -147634 -830795 -854751 -205554 -432718 -422741 -125627 -592964 -19460 -132349 -237079 -889206 -930290 -29444 -802648 -71269 -606763 -657240 -765378 -52623 -148614 -946100 -239236 -930059 -946792 -952406 -92965 -428514 -791390 -411149 -830729 -383905 -288906 -464402 -825413 -310296 -850052 -689349 -762757 -764525 -303646 -457703 -70177 -117830 -226817 -64682 -704129 -256783 -13636 -340506 -361958 -861114 -426790 -939370 -278744 -33049 -328971 -433405 -859148 -457639 -481813 -251407 -230632 -59996 -342273 -785913 -733451 -279013 -26304 -797714 -129720 -693757 -479436 -733322 -265874 -871157 -141146 -91119 -240408 -951449 -793642 -362697 -728025 -754661 -288393 -605266 -632017 -294595 -113097 -72109 -411222 -236032 -797969 -400660 -164340 -606360 -229608 -921919 -176458 -56629 -571318 -554857 -848809 -108492 -218638 -297774 -795356 -871211 -253352 -851100 -368913 -281904 -483662 -644701 -432080 -143476 -248900 -584460 -579693 -108040 -45112 -66422 -282985 -498591 -13707 -24831 -735492 -157126 -685359 -741445 -846763 -889274 -573813 -11906 -276288 -897626 -243946 -343395 -55116 -310574 -382865 -639317 -865304 -173643 -514818 -864981 -453076 -828047 -511083 -750323 -711341 -954126 -775679 -524715 -10092 -857512 -229721 -58373 -763837 -408832 -465065 -653218 -768591 -821060 -126217 -779773 -730666 -701305 -941482 -416786 -402265 -505733 -700446 -898005 -632920 -273146 -270915 -450043 -612852 -772184 -181372 -930600 -38481 -888006 -356901 -750361 -439768 -69852 -449242 -864917 -676299 -680605 -501714 -849879 -415587 -738003 -294834 -502994 -372052 -601481 -81557 -38168 -79319 -20267 -826147 -281192 -452028 -176446 -693712 -598008 -589950 -25560 -671730 -923011 -521697 -338491 -703809 -945724 -927494 -771376 -54809 -150308 -724381 -529020 -20414 -853706 -90551 -667307 -236889 -724440 -480196 -530723 -569868 -909376 -534227 -11453 -598830 -411291 -427275 -367628 -206073 -222167 -768022 -496082 -598013 -258263 -820093 -434119 -701216 -764251 -621390 -57031 -764509 -56156 -388858 -627876 -398438 -541453 -24568 -309366 -379898 -497894 -43817 -353260 -24480 -423420 -158171 -187879 -938601 -763400 -301001 -110506 -248527 -782408 -268985 -764435 -639980 -43717 -902746 -59573 -942006 -303586 -893864 -635923 -516506 -454647 -120176 -242864 -947826 -575187 -941773 -21979 -187357 -251837 -938258 -673849 -749030 -956671 -33108 -892404 -119033 -901139 -195848 -83931 -229811 -102349 -234268 -160941 -767521 -623233 -278680 -486635 -442481 -927208 -784770 -671225 -262953 -863362 -857957 -341720 -466852 -54165 -148562 -873739 -11387 -770038 -854611 -64409 -533914 -573609 -582325 -183324 -298414 -91045 -155377 -303288 -894509 -912662 -49907 -147466 -303073 -10138 -832884 -349333 -866827 -693536 -560480 -929914 -943737 -273183 -467367 -755874 -921377 -189011 -861817 -952114 -344253 -369875 -739386 -851135 -81906 -55705 -908261 -75881 -25092 -243148 -96163 -95187 -142317 
-3496 -268283 -821124 -937228 -934909 -61845 -326123 -547300 -167919 -550328 -440058 -447677 -7516 -850993 -927332 -42907 -166012 -704063 -455619 -64808 -464727 -432579 -556283 -940308 -13645 -782258 -873756 -810715 -797634 -504587 -936701 -436879 -177470 -702204 -755365 -567929 -786190 -31121 -383235 -938469 -321495 -782183 -141654 -467425 -708937 -697010 -606795 -904880 -85152 -806565 -663802 -325456 -257184 -164879 -323022 -61741 -886044 -466975 -150768 -564487 -12416 -362245 -403789 -813225 -855853 -579872 -101031 -717739 -653528 -751334 -830657 -930456 -415167 -226903 -160620 -364409 -600522 -732285 -752979 -351666 -354610 -298468 -151480 -423726 -81212 -947074 -83909 -125140 -36730 -257380 -705013 -62216 -950745 -602439 -322490 -141631 -802853 -712265 -562093 -782257 -344330 -257787 -952151 -324616 -866835 -436355 -132680 -224876 -238144 -741629 -533414 -41402 -722160 -832619 -945803 -292544 -34687 -494594 -328020 -118361 -773819 -466532 -801412 -92544 -527424 -139456 -805999 -821712 -108286 -256906 -701693 -439900 -567918 -664232 -4174 -771581 -172434 -86714 -479149 -466922 -767607 -454930 -847874 -742829 -327386 -282813 -543551 -185556 -778529 -849549 -951922 -622781 -757962 -781140 -137093 -638080 -786036 -101920 -709323 -605975 -402993 -790345 -43419 -11613 -59086 -364858 -191455 -155735 -276851 -721101 -67786 -465041 -716479 -570429 -948735 -134476 -569233 -217773 -263708 -278330 -22248 -571402 -12844 -110261 -642804 -900037 -350806 -342339 -144392 -268262 -735344 -136754 -363865 -442887 -534865 -429806 -827740 -849504 -466113 -45678 -918059 -803091 -270189 -196954 -751624 -496068 -146997 -142364 -511039 -201754 -461875 -277270 -648036 -210855 -327692 -84381 -699651 -879971 -577130 -34558 -151831 -69159 -105106 -384089 -925655 -617204 -664889 -423085 -575451 -851587 -235515 -179379 -467161 -290916 -669131 -5577 -527864 -600861 -892995 -678728 -147590 -252708 -377201 -314112 -702346 -240477 -20040 -81455 -235358 -849464 -433037 -417020 -932530 -328327 -409138 -10198 -756749 -855800 -85923 -921124 -164437 -405422 -286986 -55581 -935655 -13571 -742415 -191454 -7453 -726711 -874194 -579695 -474431 -926608 -714740 -183209 -556339 -600199 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/generic_ss_train.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/generic_ss_train.jpgl deleted file mode 100644 index bc12b28dd4b2faa3002449db7f0055c731b0565a..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/generic_ss_train.jpgl +++ /dev/null @@ -1,2500 +0,0 @@ -281044 -395557 -816100 -364937 -954976 -800886 -635560 -70700 -808261 -212056 -537803 -291899 -805316 -643772 -303073 -482487 -737356 -235869 -90866 -771287 -575451 -198429 -834029 -175074 -683304 -751461 -928199 -807452 -571647 -443647 -848157 -831352 -637385 -86698 -269538 -874174 -925212 -429806 -139220 -280291 -116289 -147582 -350294 -843246 -395264 -103084 -339542 -633638 -820717 -672118 -26635 -181772 -349279 -516735 -134823 -357032 -514959 -664902 -693965 -669135 -695439 -580488 -512016 -703858 -372393 -944291 -847220 -443935 -633587 -674659 -74063 -222055 -730555 -40736 -284945 -564573 -818001 -797603 -524464 -74293 -699313 -41065 -146585 -848835 -437848 -622380 -917443 -362168 -284362 -664368 -854730 -644380 -333573 -653028 -188371 -790750 -288199 -571777 -228155 -738099 -293431 -698480 -896903 -108832 -861324 -49356 -109469 
-663740 -165587 -696433 -11512 -350972 -734927 -141818 -822953 -69753 -432356 -181001 -344253 -716268 -215709 -902169 -847477 -925862 -137691 -617257 -638080 -522182 -718159 -779950 -850681 -724481 -894753 -64436 -879673 -903446 -466510 -229867 -674342 -73135 -501842 -832592 -172695 -665036 -260346 -141305 -8456 -673988 -721292 -19866 -802290 -426650 -344333 -695517 -400751 -259851 -334340 -185556 -27750 -287004 -806845 -689457 -46087 -940766 -800851 -152325 -487059 -873739 -669686 -99074 -732039 -198620 -301316 -379484 -229811 -222108 -257280 -902194 -276533 -570357 -224583 -555167 -390387 -755029 -236981 -324110 -742676 -152783 -463633 -180759 -770185 -745542 -346076 -692125 -425946 -223161 -386643 -249440 -534936 -674426 -342942 -108571 -340071 -335263 -106941 -706942 -18442 -527641 -410430 -906099 -643755 -753913 -717042 -662399 -420993 -591730 -481883 -704999 -339792 -802966 -846652 -892310 -330613 -605869 -272629 -439320 -308808 -210804 -274614 -883980 -405959 -661019 -656974 -548969 -422694 -109815 -273696 -203627 -289292 -244228 -77647 -739791 -34042 -829987 -293920 -803471 -112979 -676821 -913827 -941626 -64897 -455747 -550873 -369867 -782186 -639603 -101348 -911322 -31120 -831268 -874097 -909302 -476685 -593135 -134986 -885800 -44831 -606578 -71327 -903954 -922708 -942918 -751530 -916498 -648114 -891131 -545017 -97811 -60010 -516302 -552925 -820837 -297790 -669862 -236887 -261699 -152421 -743408 -873625 -741830 -857159 -799085 -64525 -331143 -650772 -415270 -446241 -210136 -200314 -649249 -405703 -664893 -720298 -693536 -948162 -52625 -284703 -606762 -667390 -144369 -727109 -855672 -649737 -199408 -55914 -388114 -281211 -810723 -362235 -723058 -676646 -274116 -483902 -511018 -49210 -642779 -765316 -686617 -950557 -54532 -46666 -801277 -698874 -278980 -659394 -114795 -114048 -258092 -627036 -761598 -186015 -339369 -817644 -517028 -97848 -99931 -527154 -664268 -652610 -260270 -391381 -546638 -786546 -45112 -364723 -504685 -901259 -668986 -362131 -486804 -109369 -327788 -626091 -822960 -813225 -470363 -726098 -12220 -851573 -274921 -422092 -894817 -575336 -402043 -480403 -350676 -390552 -357169 -79838 -640857 -921176 -288198 -724411 -712250 -797285 -347106 -146762 -395833 -667028 -235895 -929085 -913960 -151463 -864256 -781698 -935112 -357210 -97802 -161273 -64808 -66552 -658477 -329685 -757378 -236166 -755904 -70810 -46651 -24274 -205085 -18959 -947826 -946753 -884281 -864178 -394632 -852684 -663531 -482781 -274565 -436869 -343538 -186608 -904498 -740454 -762772 -82536 -203 -515364 -814826 -166452 -684292 -831303 -793220 -262582 -356811 -908585 -403787 -673016 -366372 -732013 -442481 -287027 -697022 -751559 -923199 -190970 -165781 -162952 -656930 -371991 -85903 -570427 -758418 -876066 -325124 -791382 -682292 -23073 -786834 -774246 -578762 -693541 -908253 -459414 -438455 -714106 -327889 -538251 -316738 -120212 -880953 -122532 -196366 -650625 -601227 -541722 -733761 -101349 -202536 -674311 -268854 -951506 -827701 -105929 -183207 -24325 -520630 -665434 -233775 -38167 -276404 -67483 -45388 -687839 -192392 -439715 -37401 -706669 -289109 -64409 -557483 -628727 -803487 -778557 -773900 -221340 -836812 -743399 -828514 -170419 -282562 -837939 -3413 -626370 -83723 -643624 -368977 -411858 -932802 -938391 -691871 -132349 -146396 -618454 -64886 -416847 -660405 -746856 -387126 -339653 -33457 -240586 -803028 -307424 -382815 -600113 -197270 -472463 -325432 -634679 -44388 -416441 -626991 -89044 -626704 -166174 -822182 -84688 -422360 -17462 -119724 -193887 -590743 -46291 -889254 -596506 -119304 
-934509 -811851 -405953 -144425 -401981 -900623 -760285 -867799 -769239 -81289 -280577 -560372 -108992 -630382 -772561 -571839 -104975 -438868 -331233 -784794 -279454 -872748 -610781 -636890 -239518 -404644 -135145 -596735 -253040 -729889 -290555 -651607 -933111 -786883 -265386 -712012 -861248 -809858 -476839 -883418 -9884 -836793 -460381 -849577 -401417 -56462 -942960 -446352 -653762 -693394 -398545 -737995 -431366 -893864 -187889 -852715 -952920 -781176 -162808 -64593 -474431 -571670 -197495 -389022 -81781 -237323 -592680 -857119 -843228 -664151 -857420 -938747 -95605 -553891 -95694 -134559 -356216 -331092 -706407 -839716 -118036 -235517 -502640 -101043 -579056 -242364 -301723 -892689 -600557 -128573 -296324 -954752 -707372 -288101 -1327 -841073 -436035 -154897 -640039 -778077 -257690 -403169 -166102 -150522 -781734 -210864 -411266 -261731 -87008 -879465 -703645 -942585 -781754 -412910 -522872 -172584 -471103 -487117 -350928 -951515 -445139 -783865 -186601 -686823 -811273 -205170 -570335 -636181 -900553 -637538 -7453 -857589 -786590 -681774 -308508 -136316 -327070 -368161 -37714 -921124 -232915 -315066 -482408 -433309 -376610 -523086 -124235 -465676 -534227 -44724 -265264 -75289 -271660 -623697 -816060 -848972 -845010 -554885 -905066 -486906 -934217 -466537 -777928 -402535 -390548 -228206 -17294 -806256 -189007 -679873 -30710 -669978 -550700 -69673 -220414 -674964 -910022 -241651 -163935 -513810 -945121 -315390 -378950 -847298 -754633 -390394 -691993 -467366 -458102 -790030 -90163 -878895 -820343 -441321 -713637 -446356 -607251 -658421 -883402 -487291 -125903 -340631 -43005 -132137 -865221 -389713 -12143 -231352 -263525 -703722 -698752 -560571 -236901 -579432 -779604 -396521 -934312 -196151 -111078 -843456 -922394 -293153 -806739 -630432 -319683 -636743 -642471 -649666 -380714 -180281 -256783 -425609 -540785 -248411 -745704 -778529 -724415 -301673 -408713 -768817 -512636 -74521 -305967 -689786 -933073 -388684 -673752 -265622 -956955 -764120 -91118 -165173 -898178 -18206 -783825 -938637 -753074 -504587 -610523 -74721 -58035 -493256 -821862 -15959 -673097 -308931 -681772 -173175 -480356 -284859 -889274 -932570 -134512 -18199 -192348 -398674 -821039 -335104 -589886 -292768 -313068 -331711 -942869 -830807 -917872 -803233 -950770 -712252 -405457 -22868 -594085 -230138 -710143 -664131 -570348 -715430 -313303 -725576 -637114 -244867 -51183 -528292 -113494 -7446 -19991 -275745 -930743 -751595 -83341 -727703 -141564 -458187 -256434 -938296 -70662 -483412 -755241 -707177 -699700 -380287 -613064 -580246 -200556 -754971 -9107 -521486 -791993 -652325 -281130 -863206 -624410 -32245 -466352 -512771 -226178 -725345 -694616 -769136 -324748 -438524 -445335 -204551 -245858 -263626 -401022 -400349 -247696 -403759 -368426 -835841 -817494 -857035 -346519 -396631 -215127 -239606 -760882 -758142 -151372 -64682 -328020 -283279 -437570 -73922 -322455 -648981 -650225 -252278 -769497 -940513 -215244 -335400 -666963 -738319 -181683 -194059 -596194 -72233 -65846 -165946 -94083 -14838 -645536 -455371 -915645 -460809 -64671 -354719 -165033 -755585 -636760 -76077 -185120 -32775 -444623 -423428 -218055 -951971 -374019 -14721 -279498 -861659 -793842 -501055 -377482 -208497 -286195 -273957 -44294 -545440 -135850 -904926 -831809 -72119 -188101 -537074 -298771 -17502 -303007 -606192 -81820 -171156 -181431 -952081 -395247 -186935 -289114 -692205 -706079 -471129 -314507 -39959 -369738 -749452 -79299 -821686 -768517 -341669 -244560 -649052 -379535 -870474 -619354 -206523 -422935 -714500 -919701 -229564 -588677 -838169 -881621 
-524649 -848182 -886554 -222966 -255228 -726578 -873743 -162111 -704000 -124302 -927457 -36536 -455658 -813261 -462721 -28580 -679534 -45354 -701857 -106656 -785559 -79839 -501434 -424450 -174224 -571990 -678481 -695904 -849151 -463170 -175696 -382874 -830320 -454561 -128484 -337932 -577623 -520909 -10632 -244963 -404993 -879978 -624506 -282940 -181779 -589354 -876576 -645992 -261079 -108171 -28202 -23988 -640331 -396405 -461690 -861253 -894175 -140790 -720814 -428780 -266717 -870218 -807137 -99024 -882972 -938073 -795835 -244786 -634561 -660125 -929609 -600188 -537804 -863997 -949050 -824919 -64795 -334745 -165951 -270191 -932549 -439768 -806137 -769833 -384260 -180194 -52040 -328549 -24882 -476416 -682537 -396009 -645770 -33453 -156271 -328447 -342246 -477223 -434206 -564409 -373709 -556339 -25973 -663628 -567802 -949717 -693566 -458101 -25936 -582325 -640974 -457703 -705023 -326078 -151034 -516358 -298952 -80147 -286038 -599326 -38640 -829764 -264198 -96789 -467228 -938221 -771476 -588601 -145749 -86762 -917117 -878491 -821338 -624946 -650613 -129352 -726977 -141857 -864387 -690202 -472812 -428861 -180971 -949055 -806819 -296613 -372517 -365787 -574759 -470914 -623032 -357340 -860308 -482472 -597502 -868825 -945648 -684626 -704481 -865611 -220875 -141384 -283013 -405203 -845035 -395375 -585792 -944320 -894819 -16723 -295772 -610948 -622788 -295769 -577086 -374011 -806756 -454026 -96485 -764423 -474786 -835404 -677382 -417259 -328286 -338904 -923007 -339693 -281736 -774158 -762391 -766292 -635525 -221647 -418778 -35536 -466829 -74635 -937544 -747184 -236465 -204084 -142507 -678843 -838201 -570509 -736654 -296620 -627193 -92065 -112648 -244283 -70946 -682502 -807988 -38690 -338126 -338146 -468629 -343034 -802614 -205456 -467115 -287559 -923011 -692662 -743579 -661741 -599356 -694269 -948287 -122267 -134216 -852399 -573318 -835078 -944628 -897767 -475617 -763217 -32055 -542608 -9607 -237331 -825896 -472549 -935738 -491911 -524759 -121280 -794970 -179290 -545215 -756427 -95149 -282998 -315307 -927562 -818784 -956713 -250396 -207817 -648276 -16403 -758490 -775235 -793707 -298131 -343822 -849879 -568911 -240464 -223709 -61537 -701630 -68396 -285698 -125119 -569798 -898468 -696856 -732436 -13707 -841146 -126143 -324528 -397345 -766533 -940408 -654727 -223276 -288611 -99797 -12207 -285449 -107651 -564469 -532271 -474914 -597429 -268878 -606620 -27982 -764260 -403865 -666427 -338141 -541772 -431768 -569945 -697383 -510996 -938359 -472074 -43419 -706982 -821118 -917236 -615744 -258623 -568728 -422877 -14421 -656195 -639814 -595560 -471218 -378175 -404792 -239533 -196781 -166902 -656845 -671347 -71317 -358285 -674556 -327530 -797746 -950799 -412676 -680639 -385663 -573813 -443762 -757312 -113200 -485329 -451609 -625510 -927062 -926254 -482723 -123756 -438606 -867410 -52623 -356126 -22789 -231848 -292647 -268208 -83593 -895668 -29675 -350614 -193947 -275261 -848329 -360802 -756626 -418554 -678511 -629236 -491494 -623233 -417058 -886473 -640327 -117073 -500280 -126018 -794919 -674530 -230701 -841062 -244352 -167746 -902898 -51656 -261274 -798766 -362208 -180542 -229751 -825310 -404954 -915144 -251565 -48830 -832884 -441810 -66434 -597257 -828698 -46449 -319934 -925847 -45539 -340284 -648097 -744839 -208530 -131658 -402436 -726744 -769112 -728057 -709905 -237389 -96372 -699067 -534130 -456499 -855371 -661194 -749930 -755217 -477130 -477001 -279809 -53013 -617779 -810355 -239545 -851011 -622745 -111726 -204143 -416220 -637042 -474543 -333485 -910203 -837979 -342941 -321950 -764435 -806557 -300877 
-691853 -895840 -428857 -136917 -10258 -37221 -88250 -287740 -504970 -53101 -369767 -487260 -570318 -674909 -449049 -96974 -101021 -817720 -772179 -900845 -309543 -161968 -281053 -236993 -852140 -334277 -450610 -468345 -51143 -30731 -391380 -161773 -436938 -155377 -97620 -43119 -673478 -787483 -173510 -52105 -712207 -165933 -292850 -917335 -147298 -809812 -316193 -732710 -715375 -778434 -617316 -717711 -367509 -537975 -350187 -713180 -268428 -653528 -865059 -440173 -252643 -188116 -276287 -68736 -117097 -446011 -119977 -667054 -364681 -5217 -431809 -808198 -671322 -938631 -766228 -230627 -152292 -598202 -825875 -501192 -93375 -270719 -125161 -759729 -350284 -263459 -822546 -439292 -742648 -190278 -682319 -762996 -659670 -537727 -526115 -369275 -622228 -511983 -730550 -64609 -161953 -67649 -764376 -420743 -49422 -387729 -744383 -879270 -157877 -651083 -75846 -577830 -20695 -409899 -619431 -832692 -103382 -546719 -812013 -651766 -200698 -109386 -465319 -757153 -890835 -662206 -54378 -49926 -827229 -452525 -626904 -347812 -13012 -485695 -522884 -498124 -505034 -815860 -60792 -677174 -164700 -712269 -855167 -127625 -231149 -948548 -59200 -10259 -120208 -629425 -899438 -754769 -339678 -443238 -388623 -795093 -544172 -515957 -699706 -956049 -271369 -688061 -42696 -207585 -25995 -587294 -597671 -742825 -289359 -706181 -546919 -476554 -830427 -218800 -244682 -17650 -950736 -344314 -109043 -219207 -629494 -685359 -693719 -403409 -830771 -766319 -170466 -701305 -751334 -286481 -10362 -435878 -808375 -315771 -824213 -491101 -276400 -476790 -356749 -7403 -556563 -579207 -741248 -154105 -785826 -173471 -615150 -427596 -279357 -210979 -570666 -724439 -830209 -435236 -635784 -162428 -278680 -84045 -325511 -912401 -799297 -714183 -563879 -181701 -941563 -559234 -421099 -338282 -314927 -830657 -11073 -495708 -51699 -391666 -255469 -411137 -176296 -592536 -857946 -810364 -327657 -405724 -249467 -375837 -525626 -943984 -899559 -464080 -388578 -312258 -278373 -107104 -328644 -24686 -46576 -701527 -238783 -894509 -835235 -711341 -699510 -579887 -281238 -910336 -64228 -401927 -37798 -696971 -66297 -343696 -262611 -106707 -446726 -861117 -633942 -148425 -72141 -451856 -196438 -503624 -146349 -432590 -505240 -287564 -278254 -441658 -801542 -870795 -911504 -235280 -77270 -944913 -491590 -44701 -474197 -712299 -590936 -435652 -752856 -778110 -356887 -245941 -13434 -512675 -44280 -364246 -85916 -836665 -836064 -692997 -207364 -285901 -710783 -540265 -240363 -298535 -80204 -129009 -291370 -922393 -275459 -108581 -811398 -530723 -673231 -571722 -442518 -369675 -623552 -400673 -742037 -358407 -566312 -495687 -802804 -460063 -682245 -48177 -920002 -674233 -206227 -756629 -263529 -285442 -297882 -585580 -835118 -476672 -90035 -785679 -393860 -763569 -298445 -810801 -255614 -192955 -344188 -282761 -55058 -804435 -488131 -398376 -661360 -328224 -13636 -66435 -518675 -463701 -27059 -942943 -230914 -297551 -424457 -126387 -903567 -334304 -137207 -359811 -773937 -168889 -814623 -933591 -704432 -491191 -440595 -838241 -70583 -189232 -140753 -365706 -622638 -680164 -144800 -427024 -237672 -148631 -506071 -150887 -705658 -11872 -284663 -926850 -830735 -615286 -10053 -678000 -699595 -309455 -137597 -461013 -599966 -623330 -787682 -392805 -448671 -364554 -732086 -128451 -314968 -598869 -483662 -924482 -860887 -438569 -183798 -952401 -616544 -25250 -195269 -886195 -907698 -215903 -88876 -43926 -93233 -902792 -890200 -278330 -87109 -311978 -921317 -945394 -205352 -195436 -657634 -185969 -219962 -61730 -830372 -144423 -375269 -42903 
-25428 -56722 -298723 -556399 -910142 -245130 -101347 -732273 -699355 -438021 -54747 -221183 -30741 -842053 -617409 -635976 -158465 -861049 -516260 -103215 -185583 -839529 -717157 -516578 -433551 -224516 -240270 -249314 -191414 -594751 -694780 -553541 -483785 -491483 -556840 -83929 -126749 -204608 -596084 -909651 -703839 -779459 -226817 -864803 -37988 -262396 -167397 -501435 -565236 -474316 -402617 -796113 -806104 -856007 -932801 -571030 -506140 -326413 -939525 -258195 -459504 -811721 -382970 -99579 -929696 -323546 -94075 -518603 -239510 -755401 -485108 -120697 -674173 -315827 -422556 -237991 -115495 -366025 -706747 -102677 -428836 -827241 -298715 -141188 -279524 -52215 -287667 -828357 -376480 -12995 -166929 -148704 -149507 -885623 -334424 -29764 -592299 -161361 -499106 -102546 -777625 -343324 -419792 -693816 -633260 -594516 -851716 -281904 -751921 -71761 -612925 -121460 -710037 -903532 -520579 -96537 -884376 -864883 -529278 -403124 -312762 -479902 -897571 -773222 -840417 -233926 -121216 -787880 -639914 -412877 -10486 -743000 -863835 -57018 -906413 -491068 -525035 -765599 -35169 -423300 -167413 -171662 -604119 -707103 -434455 -608656 -107383 -295538 -442999 -505564 -851724 -854317 -560073 -60418 -915547 -421127 -503982 -851321 -830642 -836617 -872653 -628292 -819788 -592023 -755686 -913034 -753064 -382143 -81776 -623616 -35858 -330435 -195590 -243557 -643048 -761769 -246283 -33427 -511055 -693287 -864558 -638673 -773824 -512933 -677973 -355748 -502608 -164340 -599891 -602267 -395330 -735937 -109294 -625711 -372551 -848709 -705687 -935858 -246052 -844031 -766271 -173738 -58953 -879098 -113647 -810638 -52567 -428183 -761520 -124894 -524520 -338491 -906746 -862470 -513634 -873858 -146258 -223655 -541455 -679595 -510882 -498879 -424408 -755965 -114806 -693025 -442521 -734933 -770647 -542818 -253498 -803326 -604067 -894843 -139768 -331899 -303559 -72518 -923688 -13824 -787933 -946878 -354339 -41309 -112652 -909182 -766468 -760507 -450564 -336907 -834149 -292507 -242602 -321864 -55536 -80206 -905367 -731309 -578630 -863512 -299371 -922481 -236562 -231375 -327853 -204706 -226783 -454930 -428565 -230571 -301659 -25218 -21440 -854357 -418785 -861401 -98342 -591602 -440115 -170712 -873405 -314808 -817839 -822323 -825831 -281808 -405096 -711377 -19460 -474176 -275955 -793666 -113518 -94877 -866957 -125193 -523436 -527333 -676683 -899082 -30106 -397940 -633949 -188659 -15359 -317662 -905099 -696577 -261923 -920633 -450247 -259612 -265251 -156471 -420494 -262770 -282937 -351020 -560488 -706568 -598060 -594024 -536641 -432112 -759588 -65630 -108360 -52398 -361607 -696769 -841642 -396871 -904815 -716618 -369808 -542870 -55861 -140234 -106823 -11387 -100542 -545062 -268883 -748608 -941110 -61560 -456313 -573205 -31995 -249180 -183559 -644439 -518289 -930015 -924529 -120614 -828236 -504300 -228261 -356280 -677947 -359091 -201754 -14678 -717724 -633316 -786585 -768646 -294834 -899101 -152089 -120474 -104855 -750261 -86614 -426595 -485157 -849464 -711129 -921336 -787907 -44713 -239335 -946963 -857911 -325076 -778386 -956637 -16498 -566817 -861355 -187039 -388625 -160970 -185735 -827740 -664750 -833513 -879040 -494937 -277630 -952940 -663614 -816426 -571318 -524088 -749525 -380927 -901641 -41981 -905772 -521829 -301696 -46153 -65588 -103904 -691937 -886897 -824584 -189539 -161125 -440400 -636995 -843209 -763787 -230546 -206073 -864069 -916953 -298089 -527777 -550470 -485584 -836698 -944246 -146879 -451190 -728899 -162788 -101775 -501895 -426295 -295882 -349735 -817113 -730998 -125798 -857533 -878524 -121963 
-935806 -905364 -47296 -704328 -127662 -46050 -450608 -243705 -689606 -48649 -399450 -65668 -501314 -332209 -263056 -865551 -310347 -362365 -819849 -743266 -818329 -480278 -239214 -342867 -902119 -439548 -147463 -501381 -566123 -750777 -514725 -87024 -572979 -54860 -666937 -434524 -916106 -946909 -518649 -941205 -147517 -434366 -639342 -407203 -875446 -130816 -785471 -938099 -391363 -584342 -547405 -206266 -45162 -94753 -263751 -409729 -293334 -847322 -226904 -853953 -202489 -673952 -556622 -442102 -140911 -280544 -126343 -90393 -885427 -844238 -562295 -875635 -287948 -848891 -237893 -417865 -312768 -332185 -174742 -41102 -939799 -147577 -384252 -275972 -527458 -930145 -106728 -516506 -11624 -502994 -144140 -711964 -769787 -930659 -294995 -418964 -591692 -642835 -326588 -150733 -872896 -43078 -745410 -10084 -561221 -833932 -522233 -863595 -827274 -335914 -412875 -849558 -433405 -422965 -91131 -874937 -899477 -60242 -483677 -678196 -635340 -364924 -642916 -311192 -604173 -449638 -499323 -767867 -888006 -416901 -56654 -85928 -210513 -754735 -25520 -296993 -516756 -662239 -19428 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/generic_test.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/generic_test.jpgl deleted file mode 100644 index fb21e962f1a970b64d0b69a6375f7e899eb62c6c..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/generic_test.jpgl +++ /dev/null @@ -1,20000 +0,0 @@ -629797 -902498 -588000 -448695 -237907 -314147 -306944 -486521 -545526 -126116 -28784 -415178 -713715 -234538 -53043 -497944 -722101 -843320 -334499 -156437 -185173 -413104 -639709 -384014 -438926 -758327 -349734 -722028 -822511 -731886 -534103 -538205 -715540 -120710 -598172 -555465 -427476 -864374 -40090 -200880 -926003 -232781 -596783 -778425 -605838 -645466 -323224 -743468 -799937 -445875 -649970 -941842 -810654 -764043 -405168 -862398 -836686 -347442 -185816 -505443 -310126 -38438 -67623 -80262 -935264 -85677 -579418 -72404 -761062 -432913 -223792 -389200 -518425 -756104 -172870 -204567 -638076 -133732 -72258 -597421 -85307 -303329 -635274 -429957 -723452 -95588 -669016 -317214 -79248 -747440 -25645 -119338 -425795 -463587 -637259 -595614 -234344 -206123 -640600 -846701 -118107 -922662 -162495 -545603 -857235 -477300 -26104 -683854 -434023 -879540 -500240 -748030 -662727 -365779 -709765 -472435 -792286 -288937 -537735 -764910 -475190 -786890 -954237 -43796 -155610 -857567 -938559 -338256 -13187 -812351 -453967 -203847 -1725 -359641 -35130 -763123 -778244 -637105 -81547 -168295 -577571 -893860 -82934 -157450 -620561 -31123 -698884 -650874 -245914 -119748 -71291 -11784 -942310 -405031 -948865 -256253 -518267 -285412 -60763 -276529 -906838 -327870 -174717 -38879 -160330 -946335 -859209 -679529 -118569 -526400 -614649 -901650 -652065 -310975 -109518 -580946 -751287 -11714 -365894 -172684 -611758 -12690 -449659 -437896 -669473 -817520 -284718 -525715 -120583 -376709 -354089 -638876 -36760 -251242 -505607 -919989 -917285 -560360 -671299 -437423 -526769 -684288 -317686 -430415 -430271 -773755 -902731 -456598 -284011 -302435 -482793 -204016 -77645 -196207 -90419 -951490 -366004 -281124 -880040 -546220 -578645 -938255 -810816 -759106 -527102 -785764 -887370 -93710 -277239 -386679 -852700 -581586 -595105 -809425 -913724 -456610 -569993 -863873 -486446 -291593 -809781 -225302 -728157 -422321 -287162 -840176 -597278 -577515 
-905240 -263806 -506026 -884169 -676135 -512490 -864271 -576563 -244555 -845011 -676734 -48265 -141159 -557450 -763200 -926128 -511441 -706313 -340487 -434635 -783853 -524469 -433438 -561425 -349710 -300913 -650641 -759948 -140491 -954726 -637535 -626729 -693402 -234053 -725983 -476889 -854555 -514524 -367474 -263282 -243152 -94941 -437831 -425811 -220949 -843295 -231032 -908713 -442852 -187400 -425684 -278334 -445194 -570689 -182755 -706768 -494200 -505288 -257612 -626174 -639331 -848380 -298152 -413963 -577423 -248043 -206242 -425744 -752789 -133776 -189281 -339468 -907683 -287540 -824599 -759709 -122088 -665824 -426457 -816662 -343092 -894518 -321810 -261307 -847461 -30639 -24455 -102737 -241742 -855903 -258105 -405872 -737940 -310602 -597036 -627049 -203767 -82333 -546201 -894777 -613033 -18801 -458443 -227016 -160230 -713028 -255603 -796157 -778350 -184594 -778282 -373190 -579792 -95277 -853907 -136863 -382668 -254354 -719442 -19105 -305495 -477226 -134918 -120284 -475673 -875881 -899459 -384881 -640779 -866304 -162883 -481000 -864267 -879002 -638777 -935131 -285455 -265613 -247522 -305975 -723200 -852474 -894371 -836053 -579073 -186312 -263452 -21972 -595351 -897115 -936283 -402652 -910184 -637603 -233352 -650443 -204141 -204953 -186619 -163051 -637844 -637019 -818695 -321301 -495930 -611456 -916812 -283828 -124606 -650705 -318398 -3726 -309879 -55737 -456061 -287187 -627883 -826202 -110369 -181253 -21070 -757238 -886834 -520125 -880110 -884778 -286400 -776003 -916944 -847425 -67372 -161679 -771629 -879774 -204904 -794961 -34005 -540005 -716724 -429412 -848132 -517887 -338321 -608292 -784901 -263569 -735485 -755373 -939940 -189068 -340121 -350936 -329186 -463872 -62708 -443850 -344499 -168275 -677828 -851088 -286554 -770231 -578715 -245940 -106921 -32034 -251267 -357305 -11832 -757370 -343478 -84869 -285123 -23077 -716867 -357022 -908876 -283301 -405234 -458988 -307940 -881627 -163019 -944228 -516271 -67745 -693817 -582495 -909646 -650756 -937529 -19141 -339448 -446921 -176593 -189462 -778541 -847751 -416262 -39001 -756635 -521783 -402356 -759926 -738337 -713771 -305742 -163109 -68622 -46603 -679826 -376530 -109751 -189410 -477484 -764555 -737978 -902389 -589578 -714589 -751966 -120248 -650544 -777630 -681609 -78701 -140763 -49715 -689836 -517009 -427422 -417659 -767008 -536775 -743446 -75636 -751286 -574243 -614495 -948269 -672156 -187217 -638916 -829423 -649900 -738903 -129580 -615219 -420011 -337929 -78845 -293662 -802833 -577437 -36032 -714326 -81143 -578559 -342293 -487750 -577637 -18528 -372471 -138841 -201807 -38975 -331433 -756688 -564438 -716944 -455972 -586207 -25324 -480441 -388868 -284413 -185807 -78700 -287694 -9132 -188651 -601727 -764119 -570597 -628481 -430838 -580878 -595697 -135275 -847200 -43955 -938409 -459087 -40934 -11589 -656131 -188756 -771258 -467709 -851005 -416306 -455358 -489215 -36802 -741358 -376715 -756483 -70411 -356500 -388030 -170473 -476580 -598189 -911439 -106309 -831296 -25956 -456725 -67339 -698631 -577945 -790097 -437963 -795109 -830273 -155567 -878989 -84456 -546128 -81826 -949949 -625015 -287838 -505358 -732140 -898794 -772231 -303080 -136160 -738603 -772297 -188585 -865530 -288957 -492664 -278700 -564021 -11791 -49133 -802065 -840177 -122030 -65648 -611561 -795545 -589160 -956261 -117931 -369246 -55696 -612613 -263938 -287886 -36535 -830001 -545435 -468780 -188740 -198143 -433521 -828732 -189052 -455833 -513385 -526508 -257172 -121259 -785480 -75181 -71265 -857549 -395422 -632806 -774340 -96558 -504544 -19193 -504561 -316853 -499136 -570100 
-712356 -581183 -573058 -649901 -894316 -577770 -825020 -772044 -578633 -159412 -408648 -40045 -615725 -804837 -763108 -526683 -581714 -747877 -763437 -427372 -78816 -429141 -149960 -497744 -933842 -637389 -56752 -254972 -154072 -355896 -599265 -818383 -251456 -648640 -655787 -661765 -55121 -814456 -358235 -923140 -737772 -383721 -614691 -502196 -638769 -205109 -405438 -109951 -674551 -315788 -718865 -831438 -585469 -273074 -85328 -742601 -342529 -446168 -464987 -651439 -124491 -598038 -241900 -776490 -552802 -55209 -454137 -464790 -732113 -755310 -333757 -40080 -391135 -120158 -781615 -277183 -707435 -851111 -691274 -237879 -163514 -30999 -735246 -927162 -480720 -622827 -296345 -272463 -500132 -934123 -936539 -109462 -123341 -383113 -458544 -204976 -638964 -390831 -421992 -409830 -893480 -720022 -230799 -188801 -569795 -748336 -116887 -343190 -485165 -66913 -67545 -263384 -835354 -188789 -427460 -662792 -903920 -603973 -158916 -127052 -368952 -656272 -187043 -369653 -666403 -917187 -535079 -532059 -391083 -731126 -925800 -784602 -287863 -453777 -870740 -658562 -917470 -767865 -55078 -306554 -383932 -641341 -924186 -222004 -729260 -786848 -79829 -26065 -577021 -575582 -235435 -190175 -653110 -330259 -505094 -643185 -452572 -906005 -53753 -253340 -527388 -243071 -441109 -720152 -796739 -782400 -188680 -775583 -18982 -667369 -370563 -716482 -759090 -204559 -902462 -772507 -781548 -371896 -802744 -24258 -63879 -345707 -115735 -19164 -667335 -596037 -53737 -578634 -161617 -22653 -593572 -862373 -484004 -56495 -151318 -637594 -765938 -192478 -429489 -168063 -885073 -597006 -109380 -841272 -455539 -279774 -232789 -551587 -650659 -204948 -894111 -769212 -152605 -704652 -712283 -90250 -629339 -772021 -419349 -637086 -854722 -595928 -727870 -394701 -522686 -828820 -488982 -163412 -255879 -678582 -932831 -69523 -5143 -1779 -188467 -61034 -664511 -337530 -530230 -898552 -71906 -764349 -776901 -650580 -638904 -26013 -161407 -409610 -451780 -650238 -797411 -5282 -332737 -436014 -27683 -134560 -757247 -782263 -63400 -651497 -661016 -574125 -155369 -706641 -772291 -727039 -544275 -158903 -625533 -395812 -625601 -369832 -779268 -362505 -436367 -945549 -264167 -732248 -630157 -686685 -569936 -763856 -656029 -754228 -625008 -386438 -109643 -680598 -579734 -947137 -560222 -445337 -385250 -761463 -693741 -502356 -284696 -514729 -830511 -705390 -592949 -638903 -676771 -780481 -770007 -37747 -902443 -331349 -617846 -564192 -902348 -595743 -483124 -133728 -777907 -519101 -154725 -143762 -802501 -69234 -797073 -710077 -486608 -276118 -754773 -155415 -168250 -19206 -116779 -828638 -441813 -756611 -564832 -40032 -528900 -158812 -665110 -816584 -391085 -121383 -477474 -893827 -197846 -437723 -943953 -358767 -927408 -205098 -748899 -393683 -268979 -894314 -177967 -136602 -770679 -896076 -790764 -600184 -593523 -230928 -319006 -45265 -150307 -448632 -637838 -40069 -629941 -455391 -842613 -284898 -924739 -402990 -756213 -665557 -827213 -671387 -733903 -759353 -777307 -160643 -562175 -296421 -67418 -563986 -210946 -708928 -638862 -690863 -436098 -342022 -753595 -387696 -246339 -148762 -44856 -362537 -666887 -33975 -40885 -292957 -121021 -538870 -598839 -287798 -911404 -712539 -761939 -617487 -38955 -324984 -638661 -227590 -577329 -451799 -256738 -497834 -950627 -344498 -726334 -278199 -464811 -902411 -328127 -957645 -43275 -939952 -66787 -607110 -541567 -106060 -445421 -777685 -403737 -71860 -182756 -370910 -611437 -898926 -954045 -400195 -609317 -300664 -160815 -436915 -300627 -412613 -78114 -38120 -449191 -289088 
-63379 -162165 -86519 -109384 -894516 -948760 -269956 -778489 -816537 -83274 -731619 -305094 -249393 -60320 -158790 -829138 -912066 -508119 -342779 -947175 -657474 -754392 -27688 -15856 -124930 -361603 -831378 -756743 -24780 -150674 -204476 -838574 -782296 -943949 -713583 -162564 -524819 -109621 -687316 -515559 -233242 -723429 -799936 -643045 -211550 -493940 -136657 -626660 -225774 -523688 -713700 -289241 -437674 -81731 -278566 -106331 -917447 -948467 -609788 -135165 -608659 -460249 -129640 -42821 -434556 -259730 -79889 -686574 -730894 -768403 -326336 -914298 -12784 -808832 -654755 -299460 -247384 -411313 -318992 -461667 -667793 -13445 -667802 -30676 -132193 -578468 -515914 -204513 -545948 -287440 -457108 -802903 -646356 -223206 -694086 -483875 -642904 -359298 -771792 -771569 -946856 -796464 -228203 -175662 -455467 -294727 -3136 -842990 -281854 -366067 -56493 -911363 -477254 -19098 -69783 -118105 -343675 -106293 -75176 -842309 -10848 -948797 -654206 -764485 -797394 -185184 -598412 -24174 -475360 -328667 -599653 -855308 -344878 -384956 -763486 -24997 -504551 -120129 -907881 -666607 -902495 -727146 -532987 -756485 -908135 -162807 -9730 -839881 -27614 -458150 -403684 -514249 -271317 -11772 -181522 -552679 -751952 -287167 -917417 -317276 -728813 -318856 -85452 -174270 -512521 -272061 -456728 -646418 -875985 -899091 -315434 -82177 -661745 -74361 -438030 -734654 -140536 -22934 -498421 -19199 -772353 -234368 -791997 -189067 -460865 -185733 -37722 -282785 -938299 -609613 -607156 -117218 -4370 -60760 -444654 -734388 -899646 -356443 -571397 -109955 -395713 -742119 -437969 -754483 -668155 -762796 -232685 -386580 -373524 -867805 -327370 -538807 -451593 -159016 -943874 -755927 -129286 -300166 -633811 -26015 -734716 -665341 -34983 -534799 -893961 -462224 -274694 -395639 -948284 -117359 -241418 -564553 -730681 -183947 -200238 -587991 -105836 -625672 -782935 -502970 -129033 -388750 -75449 -729931 -384714 -247866 -764580 -579151 -18365 -656231 -907966 -837011 -717008 -528822 -327796 -796012 -813212 -284371 -109640 -63224 -786843 -841524 -831411 -445735 -349499 -710228 -724499 -828731 -46165 -593994 -396045 -54338 -63535 -764400 -608174 -635048 -7301 -582430 -765153 -129926 -369657 -292552 -388681 -95447 -205253 -201925 -858118 -505658 -15611 -607272 -732121 -598311 -777466 -770221 -590542 -816742 -518732 -930396 -18804 -188737 -919860 -289314 -456432 -274835 -664779 -367973 -43038 -810910 -17579 -919916 -779992 -543637 -283673 -954535 -390619 -638850 -432150 -451722 -285121 -764635 -784491 -259979 -715527 -669553 -72015 -639023 -61107 -30518 -86634 -498218 -18347 -156853 -300725 -284675 -567871 -573040 -851296 -661493 -227143 -7112 -495356 -10575 -808609 -456306 -391395 -591976 -284238 -284598 -164687 -516651 -713649 -500494 -253792 -317890 -487593 -118098 -917395 -436828 -84185 -572622 -730271 -324624 -786499 -661899 -437502 -203884 -595502 -821824 -550401 -300791 -295808 -787928 -336891 -656218 -764266 -358662 -162232 -764550 -359936 -771892 -65856 -327399 -851471 -391611 -713655 -375068 -778333 -230358 -611213 -920036 -239279 -39223 -204875 -911250 -85061 -850786 -109308 -293849 -617746 -468302 -466815 -459303 -45995 -18127 -579753 -221985 -906881 -802790 -226905 -553849 -118031 -570094 -196353 -300753 -731153 -745890 -188240 -474159 -350229 -803511 -17108 -38072 -588278 -163087 -188356 -446653 -756421 -345888 -405780 -284771 -404626 -555762 -240242 -272528 -117136 -468598 -315736 -614055 -403075 -44052 -714573 -514788 -859673 -115137 -327964 -650283 -161565 -607358 -101847 -719452 -674517 -828771 
-156520 -344338 -421037 -836616 -324645 -58736 -649079 -596091 -666715 -39378 -227918 -777309 -930047 -511450 -43257 -768433 -884366 -256655 -640887 -56737 -644255 -672132 -528570 -279948 -436430 -650808 -731935 -648453 -456115 -314785 -712649 -134012 -328412 -684456 -601252 -593760 -382465 -679377 -36745 -698279 -317413 -81110 -303113 -675046 -650373 -342600 -320858 -134704 -501162 -456926 -496794 -606875 -907785 -18219 -484230 -38343 -116756 -818369 -188140 -786931 -501400 -948667 -105108 -504713 -596650 -643017 -795604 -503434 -361973 -248956 -343735 -799417 -704615 -848234 -564482 -119702 -483668 -588064 -547060 -768127 -900920 -26374 -449072 -303574 -77158 -369415 -878419 -546194 -688596 -391531 -713569 -592400 -109591 -716939 -285391 -902291 -302986 -655835 -116806 -202661 -810690 -591682 -830725 -633124 -898547 -709117 -423048 -115490 -853234 -540674 -617548 -593468 -899650 -743309 -732276 -786908 -950102 -253651 -665529 -276218 -251824 -842853 -827596 -660059 -9436 -170824 -590433 -13050 -167862 -132368 -163074 -433484 -282968 -803215 -87033 -803559 -153260 -866017 -551124 -523504 -39989 -790743 -536632 -375205 -802523 -477390 -502662 -769050 -934274 -748628 -514171 -19063 -909879 -726946 -132847 -597329 -477562 -897139 -592985 -791505 -106307 -947508 -284926 -566002 -223452 -640957 -733917 -543698 -412028 -788854 -544646 -603498 -708606 -84365 -938596 -529325 -446734 -716934 -527403 -264597 -65455 -734663 -884982 -638722 -420004 -132245 -18800 -342083 -640531 -45247 -705104 -377536 -246249 -199354 -331538 -40916 -648566 -457900 -477225 -842273 -30592 -24092 -854304 -552480 -244325 -880447 -679288 -457866 -524494 -692743 -205052 -597631 -171235 -848021 -823849 -91044 -19159 -154024 -953289 -276453 -398202 -505656 -78782 -206580 -84124 -573748 -140116 -942248 -597371 -204139 -349219 -868642 -284884 -263562 -119997 -26850 -554578 -66835 -524104 -220455 -16516 -121226 -140784 -265870 -676194 -136591 -360176 -109778 -22659 -357629 -894221 -797405 -640528 -942004 -632803 -109356 -455226 -888106 -222973 -792580 -610560 -237290 -365975 -699744 -237531 -456767 -662691 -77494 -204786 -437245 -459509 -186841 -733616 -13274 -456748 -601148 -802404 -704345 -364504 -233901 -516167 -455661 -510990 -102281 -18503 -67548 -405423 -616761 -825068 -601942 -118951 -857532 -790204 -182727 -435006 -451719 -916647 -570234 -235310 -9488 -716176 -117437 -287969 -684189 -437648 -260331 -817523 -162981 -938523 -393412 -944829 -775691 -760788 -456311 -899255 -256836 -189008 -162610 -205257 -86572 -367848 -387266 -446504 -517033 -577711 -510475 -456487 -61744 -45667 -955317 -189069 -487476 -938289 -199447 -84169 -770037 -188474 -913802 -425971 -8118 -106350 -388533 -215210 -564388 -183533 -36235 -348393 -285746 -162977 -248009 -857121 -386623 -18384 -109439 -902464 -683298 -519824 -278065 -12061 -297638 -682125 -109547 -69607 -796222 -456720 -133887 -559408 -525139 -36446 -500828 -600895 -19022 -865008 -639014 -598321 -204176 -431354 -755204 -188233 -462023 -733554 -504362 -778439 -129795 -69943 -650768 -309538 -716586 -698183 -881685 -437775 -698541 -522675 -879729 -292367 -283216 -421094 -186022 -233817 -686710 -50909 -538130 -684760 -18455 -730308 -162609 -825585 -923251 -80395 -693758 -238996 -543100 -796937 -360859 -669454 -67268 -123119 -342449 -75601 -661504 -18969 -385452 -885355 -516298 -333677 -59217 -42532 -867498 -491037 -731987 -21089 -755362 -284646 -764467 -461313 -444966 -661561 -932838 -795140 -398163 -65105 -680501 -525765 -271319 -522062 -64270 -26271 -257775 -328700 -226506 -161747 -656223 
-533198 -223380 -513430 -86955 -12880 -303403 -449299 -938541 -444911 -734564 -285666 -684291 -631255 -367682 -37733 -626466 -471154 -151130 -776920 -312801 -593947 -803632 -300377 -587184 -615595 -598417 -205423 -724115 -671692 -456383 -902297 -651420 -187682 -439173 -651440 -202298 -157282 -176452 -941432 -809930 -297494 -438043 -670787 -697254 -94655 -258053 -129309 -781390 -51387 -453816 -435986 -57222 -327533 -94734 -102872 -830563 -670047 -638038 -790724 -284905 -135697 -792836 -894478 -284349 -539707 -156133 -625536 -518597 -698471 -109454 -660045 -395730 -429266 -762206 -530933 -480626 -456702 -26651 -637542 -639034 -536288 -525215 -148738 -413278 -61974 -19111 -83686 -952935 -776642 -281435 -638998 -405331 -549644 -25522 -285333 -409204 -333909 -504163 -53039 -650895 -947084 -447682 -189056 -455890 -157028 -693775 -576704 -122086 -44041 -390017 -597258 -109484 -716203 -324429 -362195 -457512 -316772 -680962 -330196 -479058 -778556 -154717 -773818 -597538 -323550 -763809 -454254 -323321 -825252 -67317 -686401 -602492 -204072 -216952 -106232 -475125 -446424 -778321 -705340 -86958 -434805 -458705 -456264 -638097 -705410 -72326 -750597 -598044 -570299 -568283 -825267 -56536 -491039 -652676 -721334 -785519 -344508 -411154 -941507 -777408 -449964 -284762 -74297 -663488 -290739 -97934 -777905 -908404 -529132 -229088 -298815 -651552 -796258 -800038 -67412 -14885 -433090 -56136 -690768 -18623 -804727 -614465 -279060 -463581 -719485 -312551 -515549 -638666 -582906 -586008 -563708 -564491 -463939 -158006 -157499 -571297 -476287 -21294 -634899 -60182 -763113 -712677 -109421 -373883 -748861 -462027 -769540 -285380 -438108 -57306 -524262 -94681 -356151 -124086 -18824 -777507 -158495 -577110 -778308 -67527 -785557 -577181 -328189 -864918 -723952 -350221 -712536 -440450 -905100 -723605 -763490 -338250 -457127 -756893 -453758 -902409 -917073 -19018 -754772 -39114 -311194 -777488 -102166 -883474 -516545 -882385 -600254 -320512 -57257 -109478 -148519 -935551 -434945 -626695 -234564 -44286 -114687 -56561 -648056 -132134 -948217 -434742 -403961 -397555 -66953 -271322 -227663 -258166 -150658 -716898 -419947 -18937 -674005 -80185 -848030 -181521 -129031 -656676 -539248 -511782 -574063 -846521 -54333 -893972 -354340 -751706 -188772 -230154 -297481 -901296 -652064 -931460 -638826 -667113 -761853 -598150 -784678 -750615 -638225 -48283 -249022 -760859 -317618 -30143 -36364 -85910 -846664 -121032 -755382 -653912 -178044 -18677 -472331 -168070 -504280 -808071 -271803 -704032 -282536 -736704 -723392 -761542 -438134 -357041 -94386 -716666 -69342 -292240 -754983 -846651 -755377 -329839 -603836 -236157 -538035 -44309 -388836 -797153 -109748 -406600 -158720 -883094 -13236 -416829 -713678 -667298 -116876 -57284 -881515 -850723 -518410 -77162 -182357 -916959 -812567 -456437 -618452 -785579 -287473 -940152 -419155 -943231 -418201 -76969 -420975 -775544 -561014 -598320 -445149 -122600 -599136 -456145 -455882 -600721 -450741 -614710 -224053 -893692 -649573 -473888 -261926 -743489 -653591 -951260 -286586 -288320 -74669 -18962 -831289 -79953 -614059 -604106 -516059 -56738 -426039 -172618 -767006 -505618 -851084 -863652 -799289 -55757 -57275 -120887 -438083 -291037 -93106 -518448 -644776 -144357 -638327 -266867 -664823 -566172 -596219 -698380 -693109 -325080 -772765 -864216 -754277 -189062 -189047 -387327 -599438 -675491 -713065 -327999 -339576 -536110 -623371 -800164 -574113 -614054 -222038 -296772 -534397 -582194 -857519 -221147 -481913 -453991 -59935 -13057 -771404 -427423 -357324 -690013 -903246 -211486 -258940 -35368 
-526797 -98257 -151795 -929140 -440632 -11485 -306325 -896929 -147556 -46652 -428881 -542874 -303602 -256814 -43949 -338393 -376385 -749070 -113588 -853238 -928489 -904166 -855801 -167030 -935352 -263513 -849509 -230850 -175625 -631294 -738487 -231003 -879864 -879161 -831424 -593920 -889597 -838006 -257328 -191582 -894565 -638408 -482713 -159172 -622649 -68648 -938056 -472389 -121222 -856884 -626544 -226999 -603448 -68217 -674118 -606536 -721353 -566662 -34916 -331031 -541676 -324647 -824551 -868062 -804988 -34336 -819748 -735712 -749509 -102123 -32682 -660403 -795134 -147140 -883607 -422398 -272428 -652691 -151251 -808509 -853103 -85262 -226503 -291190 -132337 -174771 -916809 -32050 -730556 -634165 -176323 -30616 -489791 -278819 -112927 -212665 -546055 -934387 -186392 -208494 -64597 -337287 -631750 -361159 -43733 -772726 -443396 -393421 -338205 -137120 -236921 -424352 -849899 -757476 -32054 -533644 -812751 -430349 -798036 -137089 -278573 -338463 -930695 -904069 -863857 -848457 -368417 -639376 -130812 -771825 -44355 -767830 -375854 -136460 -809215 -472452 -211697 -230973 -186448 -215565 -34881 -743207 -803991 -717002 -765810 -20540 -27926 -75947 -263603 -903885 -147472 -808494 -105054 -735723 -849499 -163686 -738115 -248274 -812579 -701772 -176556 -255596 -892753 -297038 -913783 -17957 -779466 -275508 -606351 -517019 -263503 -526513 -137251 -878477 -921151 -230882 -903220 -91703 -482011 -9787 -231165 -356382 -342150 -505881 -276169 -482638 -778253 -684060 -282874 -926547 -494465 -368633 -12227 -355421 -64510 -927651 -75595 -170168 -13529 -574011 -139213 -847492 -803596 -770650 -16657 -930648 -177956 -501447 -723554 -280732 -771470 -192635 -173217 -230459 -35900 -423724 -71479 -502526 -34127 -627887 -92760 -341499 -339568 -247491 -623705 -802896 -113191 -360169 -238483 -339725 -744179 -12632 -64865 -136949 -696396 -664297 -229913 -904334 -903906 -453689 -169365 -137186 -203436 -663923 -331412 -260112 -485880 -928037 -96190 -347474 -210910 -192748 -638798 -666885 -661991 -857098 -749339 -10113 -448308 -90198 -933069 -377446 -96881 -64047 -309848 -449502 -34869 -210606 -165352 -248062 -393234 -196330 -246141 -166080 -365913 -828604 -131154 -64982 -263484 -166010 -11723 -865066 -86415 -244226 -257179 -842203 -217970 -387468 -64572 -126715 -155284 -164583 -801397 -769324 -897134 -898244 -857160 -817490 -693935 -893768 -406002 -623487 -85558 -41412 -814746 -863909 -317524 -147402 -315088 -186666 -638066 -760598 -893461 -324793 -78046 -773265 -196453 -206135 -821563 -388180 -639382 -49110 -165683 -613445 -926963 -451796 -6220 -126185 -106818 -236656 -798011 -137127 -420172 -462034 -751505 -677609 -911545 -45268 -903449 -141753 -104815 -136838 -74287 -926504 -54703 -137237 -698878 -320620 -848018 -791582 -339253 -289373 -362080 -924072 -388327 -743794 -227594 -502211 -506001 -930194 -563818 -43903 -208518 -345384 -786788 -192557 -606400 -343056 -132400 -90861 -850127 -690695 -653920 -396708 -935297 -669408 -228900 -779128 -230793 -676740 -164902 -394938 -339387 -639891 -210703 -500073 -71500 -429251 -171190 -680679 -154319 -43822 -260511 -879233 -177623 -438343 -667513 -65951 -40676 -56459 -244521 -435040 -802535 -167445 -926973 -163829 -616030 -376067 -224911 -836157 -930021 -695016 -96802 -899742 -72642 -231336 -11488 -639652 -146317 -329013 -602541 -9804 -11659 -824606 -151567 -210559 -821564 -706553 -325132 -648820 -705260 -342718 -303595 -303619 -403626 -157453 -447676 -805320 -185310 -338554 -521302 -314415 -235082 -173737 -289033 -3353 -20238 -237078 -332488 -350884 -639996 -627419 -12988 
-339780 -574223 -296815 -654760 -291582 -461492 -118689 -756590 -849585 -927874 -584108 -698768 -391717 -929179 -467394 -665091 -466514 -177476 -735215 -500184 -361105 -380727 -140622 -238696 -817283 -832774 -501452 -898513 -273166 -362916 -383194 -805351 -277381 -699969 -609504 -368392 -938957 -588041 -480202 -780051 -464587 -251430 -609734 -130794 -148994 -383757 -180109 -824428 -939198 -440357 -599974 -405312 -632330 -320849 -223868 -397045 -380177 -265034 -753740 -729985 -239625 -808916 -699617 -894341 -719797 -769505 -523549 -177799 -124042 -466703 -900033 -754320 -463072 -472659 -567472 -235109 -635830 -197587 -222109 -268092 -474691 -559267 -938550 -368355 -539524 -699986 -232700 -259613 -584473 -811542 -839829 -660628 -838258 -286106 -556084 -819785 -770668 -287048 -239434 -807299 -729923 -382184 -33865 -771924 -682723 -163633 -722138 -653059 -383921 -676035 -254873 -935640 -851153 -59379 -294978 -239352 -664568 -847031 -663300 -693537 -850016 -954823 -115767 -119387 -955584 -887458 -407307 -13779 -945760 -287144 -944588 -112183 -261824 -541996 -872022 -361587 -648500 -64793 -934675 -595660 -254478 -394490 -778647 -197411 -75369 -547434 -390361 -260512 -897352 -944032 -195760 -588475 -574462 -147204 -392824 -385441 -155481 -308582 -943988 -538044 -761620 -751825 -664132 -524079 -589747 -103120 -582260 -578977 -285238 -647954 -62068 -26221 -384808 -168165 -823056 -760006 -33206 -771717 -100693 -70626 -579515 -873608 -315762 -287074 -861150 -315855 -640693 -910839 -757441 -659682 -647798 -696984 -934534 -75854 -240181 -42110 -828510 -394526 -264086 -870017 -787158 -288652 -765434 -237985 -743133 -168331 -693576 -694236 -828675 -897510 -694862 -921383 -850921 -81069 -762324 -924872 -233763 -287266 -768957 -852843 -357136 -417911 -840430 -830506 -13978 -419989 -693674 -136450 -891378 -26830 -518784 -696902 -835001 -822063 -303510 -817181 -834877 -664242 -45915 -540729 -522887 -797888 -282444 -823320 -524863 -167637 -70684 -574998 -165970 -906346 -280943 -405338 -535410 -874287 -809941 -203824 -696512 -712266 -873790 -327849 -875289 -625397 -564625 -229500 -721840 -55080 -778602 -767449 -64611 -897116 -802712 -41062 -955623 -842946 -801836 -262220 -640802 -246951 -786972 -738256 -755245 -24521 -294791 -897977 -781912 -632008 -365642 -140484 -564539 -812916 -547108 -797628 -81589 -930057 -160547 -555555 -830756 -932639 -241892 -604472 -286762 -756645 -793746 -303379 -18213 -767422 -395419 -101511 -411002 -727192 -96948 -45973 -729063 -871749 -391798 -759395 -189594 -73575 -114567 -103928 -668524 -901068 -571981 -570577 -938608 -743443 -266666 -391718 -775294 -873344 -603996 -607234 -332200 -248928 -328746 -856238 -217790 -648108 -419541 -396995 -398600 -874619 -909199 -13594 -847030 -286346 -663712 -834927 -874055 -294139 -120971 -255186 -949878 -927183 -237714 -920389 -136162 -564548 -785136 -16772 -100514 -411335 -771149 -745855 -830388 -273073 -632845 -835007 -800195 -696543 -153291 -228151 -397221 -34128 -60285 -99914 -878657 -190056 -887192 -616526 -794713 -141441 -615604 -635543 -130154 -668803 -919968 -246049 -946320 -874218 -788707 -649204 -788717 -629658 -281581 -418636 -365933 -761396 -886946 -705025 -417412 -756428 -152436 -916074 -632966 -276095 -857638 -132463 -61350 -616961 -897299 -892550 -674940 -115263 -42837 -807416 -536112 -758263 -278428 -189700 -873139 -912690 -623688 -118049 -172694 -195274 -189516 -908894 -33167 -167230 -618181 -931012 -160307 -674567 -758076 -60479 -628393 -779715 -651411 -33045 -294425 -247875 -601983 -411507 -227902 -347273 -138747 -872339 
-846332 -809399 -394432 -755664 -543861 -587161 -86931 -837381 -751537 -957656 -357973 -759656 -886583 -158728 -361969 -801062 -752627 -132541 -517276 -254606 -82046 -153016 -375730 -601071 -573599 -579714 -950634 -527824 -904668 -230805 -824000 -704782 -633208 -921917 -419887 -402815 -677675 -544027 -911961 -831198 -82315 -309087 -563901 -599001 -895792 -160274 -131428 -854684 -807584 -893920 -388805 -829917 -266030 -742942 -874056 -666977 -47052 -927108 -874239 -265623 -71771 -918887 -417374 -346443 -528844 -64542 -785942 -635998 -402119 -275961 -612437 -855150 -166056 -546833 -645561 -703914 -118173 -269904 -238537 -398595 -167971 -132206 -251461 -343806 -298733 -610246 -254421 -386327 -31483 -186064 -838502 -941840 -882254 -955466 -75543 -755061 -700925 -18248 -228095 -388994 -870778 -280091 -857551 -239172 -226823 -136795 -415193 -263833 -101044 -653058 -67364 -593762 -873284 -830769 -884795 -771289 -419516 -517856 -70206 -705756 -771684 -289385 -628320 -666642 -719058 -920829 -99789 -955228 -758107 -104932 -164848 -761427 -564187 -171283 -760457 -66679 -690182 -289255 -287706 -541989 -403268 -848788 -947838 -702001 -67387 -128035 -659164 -932010 -607469 -785359 -909178 -344277 -226972 -571915 -167337 -329777 -285675 -733940 -253913 -53108 -638137 -519422 -279936 -784771 -831469 -863824 -214984 -823235 -697000 -751990 -579345 -825874 -109975 -611386 -755561 -824839 -236917 -600708 -86404 -873601 -167525 -632574 -694110 -264529 -245040 -94892 -255409 -226549 -770111 -601383 -374984 -874253 -957753 -941502 -691023 -132084 -223852 -294023 -954213 -288529 -174447 -254704 -627790 -274894 -390505 -880037 -115804 -390037 -813285 -754520 -697519 -706739 -281747 -74588 -294408 -847905 -721302 -690397 -12850 -567249 -196161 -45877 -847038 -690800 -547271 -618529 -523315 -298846 -887214 -130221 -822038 -245275 -11739 -733117 -759005 -911793 -606467 -105030 -545139 -692450 -362311 -394079 -309237 -938474 -767186 -386460 -848350 -891511 -94799 -632761 -303610 -165987 -331904 -756103 -264050 -700033 -685476 -770287 -616334 -32680 -226544 -674958 -363893 -835051 -603033 -19586 -171341 -761519 -827784 -753216 -625197 -831717 -755884 -627983 -810639 -161858 -285416 -631001 -692175 -321013 -75702 -54970 -951018 -542374 -254986 -333385 -579599 -891130 -543430 -913649 -174600 -68561 -856938 -101643 -599247 -571257 -624843 -209269 -758242 -735310 -772514 -834434 -848373 -750198 -253020 -256700 -562941 -411106 -899787 -911102 -281504 -800825 -579690 -30546 -957256 -659595 -386323 -308981 -570110 -12968 -365339 -739429 -397349 -802070 -72156 -294402 -745065 -571425 -340274 -788053 -298419 -33164 -417471 -304059 -707394 -751836 -542478 -822842 -165360 -891699 -240394 -628331 -944224 -341219 -232863 -954455 -135927 -11193 -705660 -928397 -168712 -694421 -86956 -605910 -29186 -246216 -369988 -253087 -867794 -594076 -705225 -25227 -253834 -775712 -824088 -605374 -253799 -751557 -521889 -31192 -832615 -594051 -869633 -25556 -244050 -828646 -45161 -140921 -694290 -244525 -155381 -574736 -365301 -782134 -693675 -664408 -174623 -900305 -147291 -412256 -402859 -226743 -933958 -836846 -127971 -404433 -702040 -540567 -579927 -139620 -227030 -96937 -674339 -604976 -289104 -32773 -314739 -830704 -770828 -692974 -146216 -943334 -912093 -741529 -123130 -570703 -255038 -789938 -836912 -659241 -419383 -870525 -758235 -368629 -617364 -152958 -366113 -254680 -725305 -297345 -911977 -742583 -288737 -42922 -547443 -674651 -830507 -917053 -119487 -47522 -852858 -338316 -714744 -91073 -605019 -786211 -387084 -182782 -136425 
-199673 -253801 -611629 -238710 -771392 -42678 -645292 -13520 -723959 -383337 -955146 -93245 -114302 -174808 -605727 -636452 -26315 -362200 -627914 -676827 -543366 -816913 -67586 -526625 -255435 -790451 -583202 -90574 -231877 -927202 -233363 -32776 -287537 -255306 -19668 -834955 -24465 -656232 -81150 -593855 -742905 -836651 -623529 -705259 -381120 -874254 -848814 -759677 -115556 -147299 -413224 -873646 -294588 -294668 -759535 -85955 -308705 -345878 -701773 -25651 -294268 -705258 -771131 -625339 -275966 -739465 -316356 -387095 -282884 -871676 -888217 -391491 -570889 -37912 -697024 -298779 -54544 -863937 -726954 -126114 -397719 -682565 -60026 -183115 -579475 -800974 -200732 -281989 -393825 -946526 -105734 -755197 -886211 -941752 -596661 -828210 -41961 -296805 -294719 -230809 -946671 -676242 -144371 -618375 -211605 -417878 -103073 -158050 -932025 -569326 -384741 -622844 -864395 -78956 -54901 -603037 -308161 -205062 -943607 -736606 -13569 -99360 -103217 -372067 -658877 -656248 -923643 -347530 -935456 -100664 -255198 -908879 -836669 -287333 -593354 -906839 -627093 -696622 -31853 -135613 -725827 -274336 -664653 -412740 -314701 -716938 -784909 -348898 -236057 -941774 -282559 -254015 -322900 -731142 -829208 -655322 -745568 -350912 -873887 -238925 -104775 -694355 -332476 -675048 -365267 -150806 -755838 -126226 -115333 -792551 -72773 -569879 -80529 -668887 -844555 -24516 -254613 -119564 -625623 -898092 -287868 -593196 -234600 -182183 -297254 -570328 -98759 -381764 -886783 -320767 -957154 -388010 -372455 -13971 -861525 -96500 -134951 -40672 -697070 -633560 -956333 -753753 -912015 -693027 -518457 -772301 -697109 -633805 -936515 -840289 -843227 -876114 -602072 -834984 -843892 -800794 -891189 -340305 -26086 -403329 -263328 -76168 -286939 -118987 -124880 -391779 -950317 -315858 -349419 -34339 -635123 -90432 -696863 -774213 -694248 -263492 -816570 -760880 -745623 -393221 -375823 -62954 -694376 -909261 -879227 -33151 -693249 -348991 -204700 -226885 -68483 -548043 -617222 -887285 -23725 -771291 -323886 -284541 -409974 -33043 -86100 -127610 -810700 -768377 -131962 -264611 -881442 -253011 -707136 -767334 -845549 -855982 -70725 -662397 -187559 -569479 -204024 -420209 -262227 -394377 -87093 -322219 -553985 -825686 -873242 -255476 -136756 -830280 -559726 -99120 -874228 -770645 -702222 -763523 -64750 -941618 -278325 -116094 -176307 -346561 -327458 -32694 -548095 -224240 -623480 -232047 -559140 -820207 -837761 -373598 -55680 -304112 -786027 -703296 -256249 -148843 -700569 -81528 -381966 -546630 -821102 -880201 -17329 -830651 -551207 -938406 -716634 -927310 -60129 -760711 -869300 -96520 -904725 -603955 -894180 -831288 -254848 -74100 -567726 -748962 -279958 -911251 -83925 -860966 -652805 -691846 -177945 -625468 -67801 -664168 -948844 -747554 -617467 -393641 -413303 -593518 -85346 -741816 -624983 -570085 -690431 -911287 -231009 -627973 -128375 -663837 -32963 -668669 -736072 -62693 -572889 -29180 -295129 -254225 -187464 -75583 -383008 -50010 -596897 -601165 -906668 -845681 -373022 -931128 -596250 -135551 -675997 -253301 -571939 -568947 -674862 -785217 -828439 -824638 -772342 -577031 -167758 -635317 -145869 -254600 -761546 -295697 -873806 -522369 -571762 -330753 -357292 -955667 -594159 -560044 -358655 -948310 -832138 -799179 -287752 -770410 -770180 -680644 -772488 -914604 -343588 -181785 -60916 -838104 -735469 -847029 -691107 -133890 -878673 -653499 -704631 -819702 -695578 -659492 -368215 -598972 -801028 -772558 -365198 -663549 -18100 -415413 -547040 -759319 -322907 -415604 -918658 -879892 -42608 -125025 -816558 -42918 
-611760 -955435 -519699 -521396 -245163 -52657 -885748 -830712 -772637 -122151 -525378 -637401 -696575 -570903 -64706 -355690 -933078 -321701 -316735 -547568 -552142 -579889 -545587 -360749 -932369 -223948 -564149 -384883 -164213 -627651 -759625 -331439 -735697 -153222 -20515 -835061 -60459 -659811 -244927 -254627 -894567 -826164 -367106 -311904 -715326 -330743 -664962 -600122 -377584 -564534 -20385 -716373 -778361 -238979 -385516 -244813 -62337 -759103 -254459 -603640 -871032 -115390 -844167 -925405 -305171 -413310 -795095 -349410 -924695 -172462 -183321 -778272 -167185 -147505 -274673 -298427 -759618 -850860 -618581 -114883 -328735 -802406 -682394 -379297 -419204 -745451 -697013 -251380 -276032 -671712 -591572 -55424 -721587 -103207 -303389 -294832 -69137 -185561 -571908 -91452 -318997 -568343 -161061 -838507 -582251 -289103 -667439 -605495 -662577 -281896 -533259 -740734 -659596 -330843 -209384 -916260 -50611 -251003 -580891 -843159 -650794 -692632 -900454 -920564 -927061 -874190 -921532 -786053 -325115 -758278 -632577 -649721 -13750 -348569 -117223 -605704 -401007 -778324 -335047 -795559 -627455 -113684 -394513 -525413 -126240 -835000 -810419 -339612 -830482 -906117 -736714 -229678 -696898 -827281 -852750 -395760 -538693 -766079 -255505 -84286 -274936 -656303 -568315 -904579 -125955 -173431 -387690 -890379 -176092 -836638 -281280 -365229 -917102 -239276 -680080 -243157 -521795 -776214 -274198 -239483 -805388 -416980 -41441 -579329 -929777 -767574 -797788 -327933 -635695 -951597 -522217 -790605 -127974 -571951 -786132 -676416 -300677 -254976 -875824 -75350 -230241 -254978 -63101 -255620 -680504 -829319 -603031 -70910 -616722 -579710 -799287 -116317 -28395 -742355 -135339 -818407 -760043 -805339 -30950 -322820 -521068 -152625 -932585 -410268 -25850 -894519 -244996 -323467 -758953 -752752 -604484 -840266 -696450 -102653 -660864 -570058 -256450 -822875 -292851 -836775 -647080 -807319 -678400 -914799 -823805 -253100 -42027 -756544 -926462 -850890 -253797 -738550 -604112 -857187 -62503 -724432 -870465 -875317 -277465 -571142 -359917 -87151 -659333 -692284 -103154 -930929 -96975 -874223 -308173 -11877 -907725 -707082 -914515 -390041 -786250 -722168 -343677 -168184 -398033 -691183 -732284 -692016 -72797 -46316 -242050 -141514 -670729 -419790 -60146 -864254 -910332 -807164 -800765 -297738 -786582 -873886 -658799 -746067 -410763 -676360 -546203 -837393 -766018 -692739 -571042 -533484 -835003 -390310 -941730 -861088 -820096 -287795 -229460 -850068 -308953 -50746 -692963 -128163 -943141 -86323 -112339 -523613 -17295 -829465 -254801 -541865 -294512 -681639 -104940 -794161 -807207 -255258 -192046 -356505 -937817 -159020 -124962 -151797 -388008 -834881 -243790 -733369 -29108 -783876 -570336 -571333 -521915 -293952 -196348 -887234 -348996 -632638 -770422 -361783 -831641 -196098 -634945 -147278 -938764 -233105 -696572 -816738 -84689 -676514 -746285 -13017 -160007 -954793 -601932 -246212 -835951 -875493 -926843 -37720 -588213 -757699 -279728 -823279 -612302 -152756 -525358 -557950 -649013 -743405 -11837 -418349 -562221 -115446 -890807 -235487 -632535 -756524 -570035 -33121 -132073 -419997 -269004 -385294 -372034 -244731 -669549 -667271 -923957 -832424 -905312 -345916 -15131 -281259 -314441 -552972 -676620 -944394 -217929 -101868 -18404 -786478 -394144 -261371 -73787 -50761 -571896 -41165 -693918 -262381 -648829 -653007 -805431 -566687 -829771 -776735 -874650 -874216 -795127 -921404 -570303 -254858 -516630 -913972 -262079 -831645 -126916 -246414 -606581 -827581 -86596 -62301 -828467 -693569 -606729 
-350641 -564495 -331701 -802354 -86865 -369277 -105190 -673845 -664200 -274678 -577595 -133415 -828395 -653008 -145815 -11133 -915637 -382877 -124061 -245168 -545595 -685479 -153493 -891536 -716187 -405793 -17956 -579749 -645386 -521464 -69560 -96133 -337336 -183159 -874318 -640490 -681794 -667355 -95012 -740389 -571755 -106285 -772186 -934137 -923857 -617140 -809394 -677722 -192985 -336773 -874994 -595956 -398251 -287769 -893845 -846327 -711043 -244888 -687294 -163533 -569398 -717730 -83509 -119341 -274680 -225546 -929509 -580456 -413530 -790716 -121939 -528970 -172429 -394275 -716905 -753985 -417687 -654531 -244375 -755178 -419397 -834060 -372514 -696871 -70697 -113367 -372549 -152702 -784849 -538286 -347158 -184157 -372058 -114082 -55585 -938160 -578943 -370948 -55647 -364538 -842736 -785558 -525198 -534736 -93565 -46057 -253843 -765251 -911238 -931999 -940547 -588374 -759694 -921437 -546097 -835824 -687382 -919918 -263234 -731038 -756108 -767537 -32774 -385950 -234158 -822940 -771172 -281548 -284052 -153932 -955016 -779109 -839307 -611794 -849906 -759675 -18596 -932971 -870802 -852735 -890550 -935680 -873573 -281591 -951645 -663882 -50623 -402219 -71717 -293726 -125952 -281637 -663852 -280504 -42434 -913855 -594022 -956717 -627505 -666699 -115251 -878546 -97004 -759568 -836732 -12487 -197343 -281490 -62486 -230630 -132449 -547989 -658212 -716592 -874301 -406870 -624480 -383241 -848519 -197021 -518838 -168119 -836794 -319669 -13718 -658559 -751759 -786126 -943909 -277668 -770417 -778436 -50077 -633106 -548056 -89254 -786192 -932296 -891663 -183938 -519850 -537866 -21072 -617367 -225532 -47396 -324938 -19730 -255047 -66692 -526369 -538272 -851522 -416444 -856666 -606747 -695516 -786308 -766103 -840494 -900904 -135263 -69538 -610089 -636694 -391025 -706123 -665443 -80122 -716583 -698943 -597388 -377414 -823837 -69524 -540800 -605685 -955664 -159160 -715216 -236168 -767219 -239743 -153138 -51863 -742835 -281212 -745482 -685447 -860000 -918997 -633210 -255575 -922983 -617405 -61146 -570776 -59916 -712318 -926361 -709007 -759666 -771052 -931239 -929303 -830087 -940101 -328337 -634633 -401607 -167762 -281197 -758217 -287890 -894587 -248484 -894211 -253495 -955632 -265286 -226083 -664660 -781959 -86782 -194046 -170803 -623858 -933011 -272408 -613718 -258334 -923103 -353446 -714893 -794614 -254736 -843632 -693721 -351000 -650249 -892049 -256711 -648599 -847753 -854323 -39369 -194083 -99165 -615734 -380178 -189263 -262528 -925925 -880273 -741646 -756527 -297302 -189779 -124883 -730077 -695172 -103723 -634120 -691813 -53872 -844408 -547318 -517082 -755969 -340783 -56143 -125967 -21053 -593461 -292532 -824761 -908916 -263419 -44819 -302996 -382683 -81717 -253770 -722266 -75288 -872576 -236170 -62986 -253803 -12156 -525196 -880225 -245003 -72279 -669970 -281612 -345484 -161119 -26228 -570979 -661940 -165897 -136319 -388812 -528286 -834750 -743630 -217973 -712206 -260918 -694109 -911186 -871486 -132174 -934291 -835101 -522309 -765021 -401381 -136049 -955617 -673853 -54028 -588202 -683927 -588485 -740339 -648954 -757522 -621053 -745434 -635787 -556331 -798112 -388802 -411163 -185673 -185493 -859214 -409522 -607451 -675050 -809327 -278746 -930969 -894190 -743752 -782357 -186860 -605034 -829823 -873393 -242987 -907568 -386664 -392421 -153109 -159403 -875299 -805574 -911760 -571967 -420168 -292712 -416898 -647013 -872884 -227183 -174459 -810809 -650899 -756326 -324615 -675763 -289378 -716716 -690583 -12935 -254892 -276970 -579653 -319787 -653011 -876086 -593890 -627865 -695105 -750531 -571423 -321039 
-792813 -791164 -917493 -873847 -751717 -788705 -525040 -418969 -113369 -348582 -834860 -900330 -254195 -538186 -772591 -734505 -278048 -667197 -913214 -419102 -734945 -873842 -571384 -144899 -26222 -155590 -632226 -873278 -318850 -947128 -172483 -666398 -90357 -871851 -312200 -298631 -934848 -857131 -902121 -182281 -694075 -387763 -569188 -817555 -741722 -96879 -757690 -180769 -134345 -917227 -418294 -615687 -855423 -571772 -695748 -418933 -607188 -127723 -42414 -151042 -232987 -800270 -921557 -34493 -70354 -834667 -696531 -722575 -168169 -910235 -784049 -863139 -716703 -617376 -51574 -753189 -793814 -521987 -167145 -635475 -871279 -945357 -724298 -755684 -197204 -362533 -663899 -275251 -349221 -44373 -619102 -286450 -826220 -45970 -185309 -11546 -857027 -864188 -593141 -50228 -547351 -712633 -50813 -406034 -944993 -781471 -239397 -165894 -411372 -18912 -873987 -716864 -403844 -563796 -827889 -386578 -777747 -925648 -37128 -54744 -766691 -627981 -658322 -288309 -924875 -714787 -12093 -12489 -168151 -754747 -238590 -575266 -551380 -741476 -203485 -674210 -835987 -372183 -571114 -623460 -660997 -255159 -376086 -766621 -802866 -872773 -394172 -365909 -822574 -570156 -681156 -669486 -627746 -874681 -99926 -21456 -31162 -81670 -394199 -152894 -785754 -338528 -177674 -663688 -344224 -605504 -655459 -793581 -795117 -225847 -942032 -690980 -830432 -782191 -586659 -845572 -633189 -28364 -802537 -637909 -891578 -945787 -760864 -754387 -785995 -570421 -659141 -762564 -769455 -893865 -268936 -243026 -141531 -108071 -79580 -179077 -885301 -752871 -745581 -305944 -232718 -648497 -523459 -627466 -68754 -12611 -261523 -803237 -803242 -908870 -605264 -696966 -726918 -40833 -25831 -741426 -303154 -576066 -527123 -793442 -316635 -802503 -34217 -178573 -347157 -255404 -844014 -812892 -199819 -26319 -403311 -26254 -255400 -107693 -870751 -627986 -135519 -612830 -696503 -155527 -524666 -616791 -43414 -534921 -245283 -770986 -927857 -518219 -932044 -562399 -778119 -152980 -144453 -277182 -863941 -863130 -804764 -721702 -342843 -343834 -693815 -525356 -624619 -34241 -677510 -229038 -926226 -86418 -869935 -713979 -827886 -836788 -67410 -770406 -359280 -693336 -955613 -628007 -928255 -71865 -627939 -786266 -869665 -864003 -167814 -954452 -828146 -353413 -644600 -813099 -939360 -538421 -152274 -272556 -835580 -706734 -602090 -153967 -544292 -147329 -17236 -205339 -394456 -854120 -20240 -956503 -786135 -847906 -153388 -311331 -568732 -353716 -571946 -954829 -757660 -570272 -603538 -779744 -340755 -254110 -534401 -564239 -810956 -779802 -302765 -291539 -693909 -419474 -26052 -417566 -118101 -152836 -255479 -706404 -90593 -163433 -760452 -243490 -535851 -138702 -372016 -55724 -362442 -480256 -354289 -596912 -230198 -650838 -195945 -418119 -714829 -848487 -303983 -807711 -895200 -289414 -256173 -472005 -682441 -382333 -437732 -814619 -826084 -841249 -132327 -190134 -488998 -208180 -435772 -780362 -889192 -141687 -419948 -131136 -75914 -124492 -10133 -424969 -743026 -836046 -148748 -845565 -529314 -757424 -807995 -242088 -682620 -343116 -391560 -886616 -468993 -808172 -567005 -485411 -420183 -334149 -948961 -756278 -340455 -751466 -834374 -591748 -704478 -216065 -663524 -371844 -840477 -112172 -770297 -818945 -756258 -808049 -947157 -261908 -386099 -797386 -527672 -327179 -888744 -77024 -321020 -427484 -935346 -940332 -941591 -412164 -25035 -448331 -485573 -921029 -664876 -751285 -543950 -115898 -280695 -168236 -630625 -627079 -321315 -747893 -334877 -110344 -401762 -880131 -820055 -64280 -659310 -481464 -24814 -627985 
-740686 -738160 -436888 -346105 -296832 -67937 -35925 -84780 -185283 -518978 -77636 -641003 -551288 -11971 -626937 -743467 -908373 -654778 -473544 -4423 -810774 -757414 -427469 -519094 -10204 -293069 -591780 -448260 -732884 -473144 -562478 -899385 -482716 -449835 -948744 -25133 -438090 -449468 -418660 -650734 -784558 -657104 -203336 -844335 -99223 -886796 -460420 -10149 -433472 -14905 -634368 -163102 -534545 -956640 -810904 -1850 -439513 -485550 -349703 -673173 -166766 -850970 -556829 -42230 -158446 -24327 -822942 -44253 -332168 -704875 -656025 -846091 -681608 -100207 -144850 -304868 -146194 -685467 -316895 -368385 -111482 -887731 -682971 -436034 -101908 -257307 -781535 -175404 -287488 -386312 -166408 -640944 -401899 -932041 -257588 -321408 -880026 -756675 -827226 -111727 -306769 -717672 -449773 -920119 -389138 -94101 -9510 -122076 -515804 -103920 -450467 -15813 -785219 -524384 -190008 -10012 -44380 -939795 -629951 -894339 -139240 -112238 -854150 -117835 -436275 -172935 -529073 -127606 -851670 -450833 -426526 -878454 -825657 -167727 -635972 -470539 -420015 -22429 -755925 -555777 -476948 -85622 -661431 -443062 -770581 -609897 -321967 -433204 -727092 -630735 -834202 -368961 -737906 -450672 -433550 -814504 -573065 -698739 -764458 -38427 -871233 -366777 -321352 -268214 -699153 -448536 -353624 -445158 -921280 -717695 -940605 -673922 -449298 -895610 -179301 -113481 -38740 -132881 -940914 -337435 -232774 -750801 -935897 -693033 -444059 -301725 -780427 -104435 -856008 -418366 -10240 -229944 -528801 -442249 -941593 -158592 -447866 -519022 -499091 -634501 -174289 -63838 -146882 -38367 -403497 -691483 -368498 -842731 -688874 -682105 -266127 -489152 -358460 -334105 -887398 -567753 -888 -15197 -607167 -172543 -9907 -552871 -588649 -727016 -851233 -880470 -437171 -630162 -537973 -25185 -677843 -410792 -751088 -314495 -281131 -556230 -302838 -563315 -473074 -575995 -665802 -443933 -16626 -658330 -772436 -335659 -727063 -880214 -105825 -129566 -895419 -603771 -560645 -4187 -652896 -759161 -319824 -322002 -164695 -929448 -728684 -643061 -772434 -932727 -550665 -490827 -112365 -422320 -886844 -687090 -92203 -556383 -431640 -824875 -610188 -319730 -437418 -10243 -146842 -10510 -83825 -420118 -593495 -141597 -737594 -449124 -3504 -933182 -650563 -321833 -777509 -11771 -828101 -830650 -276085 -289161 -64459 -725369 -779226 -218873 -84474 -864970 -387143 -485051 -64481 -438927 -815855 -576690 -660456 -755599 -154942 -870840 -126224 -298925 -818296 -292482 -346780 -449717 -300421 -98992 -212927 -573148 -334524 -588240 -838661 -408308 -470195 -408224 -508574 -679016 -150703 -145618 -422363 -683167 -580455 -940935 -88579 -438012 -821930 -172960 -468844 -306619 -198619 -448409 -404452 -268527 -335672 -630800 -420842 -121269 -422695 -287968 -938424 -331223 -429980 -326397 -173779 -372238 -802562 -625443 -722261 -265396 -890941 -619478 -818498 -420641 -169853 -10440 -823043 -136329 -860711 -674968 -320014 -391312 -546586 -663904 -649219 -448049 -743174 -685508 -802863 -825522 -252218 -593829 -10189 -309301 -176129 -297797 -109812 -519569 -483609 -96374 -455853 -952695 -488002 -545121 -679923 -227989 -747365 -405611 -612933 -450595 -36441 -760343 -772196 -273496 -449369 -483633 -650798 -751404 -681667 -690497 -721673 -10193 -437076 -623099 -298011 -335510 -427597 -437572 -25086 -845072 -675058 -714834 -820201 -24633 -200253 -210724 -188433 -606881 -857156 -134498 -802230 -868998 -847313 -808281 -280349 -695451 -769145 -210131 -677996 -590367 -474231 -948680 -623079 -385813 -450492 -682200 -757469 -258190 -158123 
-385595 -534378 -167798 -924857 -53003 -300015 -894787 -746194 -448426 -892555 -588036 -701192 -824511 -658772 -887726 -938949 -763882 -25214 -483761 -757557 -36800 -903481 -473860 -799655 -474213 -943691 -181370 -587832 -261162 -25433 -182716 -836815 -482691 -524857 -40576 -910676 -635958 -810876 -673020 -946386 -321821 -153602 -533187 -943006 -796183 -672410 -338218 -120576 -786949 -442092 -232728 -172974 -723992 -248518 -271971 -843089 -51370 -834201 -689798 -439724 -782221 -863364 -563538 -619291 -616852 -607349 -65737 -321368 -388983 -156242 -404928 -82206 -77364 -539372 -912241 -749984 -851255 -142102 -735044 -569376 -761549 -772620 -577180 -100830 -162569 -708482 -452846 -561640 -772073 -485331 -790358 -9941 -554028 -479836 -306633 -863200 -742732 -485772 -103376 -422455 -259418 -425280 -309451 -437746 -855441 -772710 -770179 -145678 -38085 -122656 -404204 -482040 -634586 -553980 -944771 -826210 -249297 -842140 -107650 -517048 -755360 -906712 -353216 -795712 -419950 -475256 -649404 -328503 -757474 -407394 -25199 -419498 -114138 -873346 -524549 -679921 -297025 -457780 -125870 -16662 -911376 -940342 -146872 -77398 -944721 -412223 -570614 -728936 -874358 -597503 -875905 -763481 -221022 -202331 -826080 -913797 -705054 -705166 -229981 -618202 -124253 -515488 -251485 -772033 -425396 -807940 -86305 -92937 -7339 -682450 -835220 -743427 -85176 -281755 -459551 -295627 -340784 -653049 -135451 -111703 -347542 -631004 -689791 -770626 -15040 -507471 -49435 -915135 -816576 -594833 -342519 -546178 -415274 -792358 -475551 -111728 -614609 -48799 -744423 -8921 -284843 -427417 -852066 -229663 -85938 -25130 -186065 -929602 -10571 -248981 -940841 -659835 -910052 -660078 -467225 -506339 -421058 -329164 -630696 -166474 -420987 -450294 -791218 -374466 -872344 -24828 -441464 -36583 -727203 -420472 -756953 -12953 -935583 -300676 -398481 -86409 -790838 -714895 -887306 -335602 -141466 -813296 -116408 -546902 -450763 -304449 -348988 -24747 -607138 -945537 -737854 -229841 -634259 -260489 -259765 -481119 -56705 -319866 -288397 -647888 -684732 -831428 -378683 -138736 -229748 -821226 -369769 -689627 -555286 -778191 -541704 -887351 -170900 -54418 -299994 -447872 -62994 -756922 -145742 -146543 -672003 -145269 -481149 -44049 -436811 -504838 -280576 -395378 -485330 -851470 -302462 -333367 -321222 -248753 -554577 -69732 -457030 -743429 -421036 -778487 -770373 -223750 -919168 -283282 -843733 -656463 -664223 -303676 -857514 -120575 -640590 -853961 -926478 -90394 -380944 -671860 -23454 -91240 -437291 -490779 -726942 -760407 -569770 -742712 -681080 -27223 -300851 -802075 -426379 -650359 -44712 -62772 -437613 -77060 -949033 -642880 -285796 -432546 -825905 -533114 -843270 -906332 -251582 -596109 -689797 -64550 -626705 -81543 -736277 -801387 -528583 -921483 -84970 -284602 -806178 -738897 -756262 -349589 -485855 -879964 -880023 -573717 -698497 -192422 -947207 -924018 -381018 -59097 -919956 -200770 -711648 -223027 -420504 -682740 -879712 -407497 -505622 -426163 -689274 -800857 -673881 -234414 -24583 -825893 -910392 -276040 -455465 -42812 -154798 -343079 -682616 -86700 -341482 -417780 -31468 -101443 -777163 -682976 -711921 -910405 -439425 -729141 -356524 -111389 -309933 -488068 -62484 -564511 -115820 -546113 -1620 -561655 -680108 -605705 -641631 -704877 -382800 -349581 -879563 -24764 -436461 -917191 -952453 -751780 -418973 -284622 -260082 -933128 -504448 -264539 -369477 -218566 -504127 -70567 -448194 -452845 -60434 -910386 -449198 -309917 -484994 -836845 -21785 -283191 -545231 -895415 -578432 -735925 -38447 -658800 -349390 -174813 
-755259 -689894 -287185 -262614 -554811 -249383 -105071 -434207 -555320 -556392 -100989 -286796 -833236 -462052 -432900 -830809 -429181 -386165 -449763 -280470 -869420 -390306 -105162 -925703 -32859 -450588 -472718 -461015 -672658 -136689 -334573 -514634 -857358 -763631 -743550 -449467 -757828 -892827 -893604 -743571 -477129 -171926 -257994 -808282 -717664 -627253 -928205 -844598 -390597 -24391 -389137 -451953 -287023 -728666 -870155 -609837 -328049 -25244 -732471 -844174 -576279 -760213 -667212 -301823 -448911 -328803 -952426 -957271 -642453 -556882 -630244 -151843 -67720 -474158 -739564 -761544 -244186 -313288 -166871 -137713 -788293 -383810 -528526 -425200 -334575 -725882 -684689 -743024 -803281 -680563 -335903 -501958 -431756 -561611 -229864 -38738 -579732 -57324 -476835 -402564 -156227 -875170 -387805 -639572 -822406 -460159 -556733 -594046 -887564 -757053 -372423 -482265 -436421 -946844 -436354 -143094 -125430 -158803 -319974 -78375 -477467 -895465 -554512 -649248 -751527 -795493 -635960 -485719 -887105 -486638 -334338 -871305 -594043 -787610 -340599 -356152 -484930 -86402 -281846 -922996 -879805 -229062 -555955 -674529 -940896 -609517 -419882 -620615 -55135 -672856 -619922 -478680 -177466 -846668 -179993 -630506 -675222 -476970 -650786 -298727 -299439 -843203 -271253 -418437 -32361 -649805 -10187 -335506 -271000 -726125 -605836 -629881 -841789 -39480 -892993 -919718 -269535 -111589 -85894 -865417 -311624 -535387 -819640 -300460 -460384 -833291 -436797 -807590 -223975 -616201 -264120 -844977 -420959 -80972 -747852 -757854 -743585 -541934 -16549 -240560 -25288 -921988 -947916 -778490 -673895 -321400 -436959 -368575 -232230 -467783 -368707 -391740 -882210 -167153 -810209 -829322 -515964 -301007 -219696 -416251 -594780 -12890 -284785 -462139 -761901 -804346 -867118 -236346 -698109 -682557 -98850 -121658 -280809 -423503 -303012 -228388 -879484 -180798 -94820 -611524 -397682 -165452 -375377 -410724 -424106 -682909 -594871 -226982 -841733 -945024 -111684 -483665 -678333 -658555 -593550 -230091 -120618 -533412 -742135 -135034 -742182 -320581 -461822 -682928 -641154 -650883 -418651 -12209 -24638 -323551 -926163 -342821 -625117 -640187 -274841 -435968 -230059 -619823 -288453 -337506 -698257 -904050 -279599 -954234 -164756 -643148 -421056 -853652 -917063 -569432 -661461 -211549 -554515 -829132 -145121 -180606 -732264 -344211 -381904 -497271 -771822 -447848 -594734 -942362 -716484 -863356 -117180 -438101 -420541 -679816 -875074 -322584 -741561 -448446 -349608 -151793 -724158 -322969 -202775 -234179 -116337 -111845 -787019 -878470 -689715 -587927 -287030 -539208 -149503 -98408 -477084 -553839 -906391 -487897 -720465 -682402 -684116 -120366 -921982 -64598 -262315 -406987 -833850 -234461 -403620 -816597 -577316 -443003 -192339 -24957 -165639 -768576 -188811 -619493 -736607 -220306 -118697 -485868 -340483 -283476 -99090 -878755 -319761 -357287 -827616 -498642 -174841 -541987 -232082 -880199 -22700 -103475 -847191 -39373 -418282 -937895 -636774 -260615 -335328 -111659 -439028 -333949 -725996 -55483 -713679 -275323 -458769 -10586 -41994 -448402 -802608 -935070 -894796 -173014 -493797 -432414 -167130 -25249 -636306 -793478 -15918 -827425 -676179 -532187 -462141 -764064 -332450 -742856 -773502 -854118 -285100 -808286 -27717 -145067 -682539 -431266 -426818 -870270 -608690 -399666 -64499 -567527 -200174 -234491 -460610 -427376 -489391 -422416 -279842 -167706 -569906 -35605 -111612 -563969 -145799 -349182 -388984 -736613 -823361 -151411 -659152 -562292 -833669 -487259 -890831 -305806 -402310 -810902 
-674858 -693274 -926539 -309729 -742118 -792104 -248524 -795172 -12010 -851844 -285597 -848369 -84917 -769898 -12946 -85698 -153958 -135127 -449180 -674543 -155844 -588516 -17493 -590597 -104073 -808273 -154811 -840145 -182950 -682863 -537829 -925861 -737888 -622887 -444610 -67455 -334545 -844299 -725523 -333559 -8750 -943925 -145676 -451581 -22780 -617446 -640299 -190118 -953627 -934567 -844415 -157357 -855729 -879520 -64167 -763491 -278900 -167078 -30105 -416737 -894782 -422531 -280748 -1930 -388814 -336253 -321727 -39068 -682614 -819410 -340476 -312675 -331172 -65463 -600560 -361459 -779999 -454616 -77374 -572181 -448922 -98348 -321879 -919178 -126095 -908078 -55077 -339389 -167754 -494694 -751291 -278666 -667250 -738221 -229937 -47967 -938513 -228850 -278124 -162812 -319490 -651844 -910772 -741570 -356496 -216940 -756274 -891051 -942865 -925601 -943429 -153231 -369765 -693258 -78704 -453902 -725401 -86429 -109656 -248405 -34475 -61764 -134665 -614349 -918496 -717738 -742005 -650762 -274882 -158930 -472300 -762769 -641129 -432715 -566971 -94916 -556085 -314252 -295733 -11680 -25258 -232225 -121378 -123314 -420463 -934377 -492001 -42867 -135322 -642711 -768644 -86446 -874740 -623879 -24723 -100813 -247684 -933091 -874987 -532269 -177875 -808107 -285354 -116960 -87106 -98480 -930750 -682563 -559437 -61070 -617927 -515866 -631887 -693333 -807818 -706173 -225415 -510918 -871585 -468843 -682961 -101919 -24650 -827683 -200727 -817254 -427418 -416149 -505709 -772130 -579879 -61893 -204922 -228318 -382187 -261418 -676307 -734089 -315567 -451305 -413435 -419285 -880419 -230299 -686518 -546301 -33177 -313616 -279015 -667291 -764912 -437438 -163763 -603078 -139497 -770153 -578033 -440147 -680958 -619896 -640019 -843849 -891920 -273319 -63865 -883898 -682930 -142986 -777369 -88366 -25279 -721318 -173436 -737065 -337429 -592134 -808939 -305972 -895533 -314285 -335306 -762283 -332601 -916312 -333889 -248785 -799135 -570624 -427588 -724112 -669013 -226684 -89224 -298214 -109615 -939256 -623131 -555184 -455784 -264077 -372103 -111558 -544876 -63864 -681136 -181025 -475373 -287093 -673293 -47242 -906315 -448484 -281264 -460210 -618344 -555170 -826229 -833633 -289355 -616479 -426347 -702957 -7007 -615148 -427010 -681743 -698881 -350421 -262355 -101033 -946772 -641339 -638329 -157299 -770087 -329426 -446744 -87007 -800937 -420872 -420450 -225864 -368698 -248080 -485723 -281630 -554904 -535472 -447912 -95051 -550214 -832928 -609701 -947154 -720408 -286354 -659243 -485274 -121243 -482161 -895510 -640639 -650522 -180205 -670032 -262251 -873541 -384694 -151122 -412730 -18049 -871011 -111745 -273132 -733784 -338125 -449359 -671430 -838454 -64502 -257995 -619636 -375586 -174505 -139358 -895386 -391322 -454225 -875199 -305428 -425475 -77640 -681169 -485313 -427356 -66144 -843309 -855086 -148702 -801464 -404808 -927024 -440624 -106872 -918235 -736460 -330795 -85934 -39420 -857173 -19710 -828745 -244175 -303957 -871188 -905579 -798827 -733550 -404015 -114351 -385967 -73492 -807624 -399227 -387401 -698924 -827625 -941928 -427061 -329153 -109041 -845015 -220336 -432527 -485179 -420664 -629991 -285341 -200584 -448787 -836872 -771718 -64618 -460848 -320738 -408523 -52534 -334037 -447187 -606154 -32864 -552952 -436173 -648639 -289298 -792149 -411103 -227640 -659335 -298585 -549084 -188755 -101867 -11895 -407432 -832327 -575411 -888367 -555427 -807422 -22734 -146499 -435229 -424984 -952312 -274729 -54725 -334587 -711939 -751347 -187296 -365089 -878512 -741411 -450657 -361851 -333620 -853936 -606150 -801151 -682688 
-761912 -556997 -899519 -336060 -301614 -62088 -295886 -820095 -220734 -687868 -534494 -99058 -627426 -231181 -753949 -86271 -879878 -111397 -819787 -490377 -717741 -300848 -417619 -544119 -594019 -844956 -247964 -605984 -808248 -43203 -832990 -427243 -45692 -88174 -727027 -233260 -659393 -458688 -426557 -628421 -328032 -534704 -879301 -157873 -32570 -424557 -11944 -689699 -682647 -613711 -342712 -858885 -630586 -819936 -145734 -448620 -630814 -190958 -303445 -229000 -545240 -108537 -319522 -524371 -230149 -918760 -472446 -717033 -65805 -818787 -448568 -610737 -693095 -937394 -477127 -32783 -320077 -620747 -642993 -248002 -284677 -257553 -580006 -321576 -599224 -379502 -770262 -309244 -847194 -918470 -447876 -682000 -816178 -757543 -427594 -777605 -479725 -760774 -630481 -69346 -43274 -177796 -56685 -828065 -438011 -111789 -295908 -273544 -606702 -490768 -948650 -192599 -226760 -70471 -62522 -64519 -24684 -491108 -117119 -258782 -596784 -633304 -299965 -11410 -682288 -26953 -756538 -438032 -362161 -447171 -448720 -37212 -144301 -202301 -686173 -296930 -592137 -323053 -142815 -105136 -422646 -633476 -866059 -554271 -368351 -158464 -408550 -567372 -703906 -250590 -472404 -930516 -925673 -435008 -439974 -25047 -334080 -763634 -829204 -879059 -932753 -272745 -64422 -69649 -657938 -425973 -407614 -954771 -232996 -905006 -402745 -751928 -295099 -170195 -298166 -774724 -287345 -846482 -554523 -123742 -809496 -448540 -262833 -756377 -682393 -845047 -768679 -162322 -132121 -77847 -556798 -763703 -555569 -675003 -587917 -451467 -111801 -36844 -369746 -449988 -807954 -556405 -145552 -81766 -343638 -26288 -258079 -7822 -486222 -2920 -182646 -162559 -819660 -929271 -564503 -263025 -314855 -230231 -682367 -606603 -283643 -808104 -226820 -95057 -488142 -23826 -9447 -391651 -121202 -858654 -693623 -844109 -755242 -77985 -322008 -681076 -42728 -684279 -691259 -290202 -641920 -490019 -618187 -450450 -86255 -604198 -666534 -717201 -150695 -564265 -42626 -865202 -882032 -705271 -844168 -516897 -464912 -59995 -922704 -878696 -124339 -180923 -25043 -252444 -224971 -450860 -718782 -481959 -912551 -317531 -725352 -630613 -53093 -423816 -793068 -553770 -290334 -333679 -802478 -726920 -321932 -170241 -138184 -422550 -361229 -677725 -88469 -390090 -895359 -485489 -618332 -684487 -111737 -829840 -75490 -175597 -911385 -830674 -473969 -256808 -539279 -301785 -950926 -654486 -850604 -435645 -538668 -682400 -419254 -650557 -450537 -508412 -88209 -952832 -151037 -777991 -841256 -474354 -425702 -430287 -12905 -940199 -449220 -919789 -828321 -683899 -24215 -683900 -336394 -946837 -513477 -157129 -50255 -69231 -877124 -167657 -139075 -449360 -791575 -305246 -162164 -498589 -315017 -604978 -735296 -337092 -334572 -120147 -342925 -930587 -248526 -321926 -887527 -383498 -102333 -619367 -281129 -316524 -14877 -476131 -230086 -532119 -446733 -134897 -627836 -885848 -544485 -450552 -891072 -129182 -924265 -476558 -223462 -607184 -115925 -608711 -675847 -64353 -727009 -48902 -179987 -625057 -642398 -716697 -490230 -724441 -121751 -285257 -478259 -316354 -25266 -16952 -850980 -143466 -104999 -525120 -726035 -382858 -911684 -687431 -830794 -301343 -367451 -234123 -333943 -556906 -607361 -156356 -466995 -626691 -7468 -617999 -533526 -649382 -816190 -289107 -772421 -33953 -427184 -850404 -82414 -413304 -701926 -918686 -561468 -104252 -386600 -288574 -846639 -13105 -232030 -86275 -756624 -680953 -384843 -314641 -10186 -650247 -25276 -484009 -527148 -563370 -280714 -245095 -387125 -129229 -774298 -59072 -554770 -278359 -24478 -937956 
-619885 -182692 -532445 -287450 -754544 -687000 -865141 -736368 -588312 -388258 -133085 -44005 -146878 -249507 -482614 -679864 -859946 -683719 -813373 -380991 -742194 -767467 -532989 -529247 -684296 -450723 -340779 -912256 -93476 -807858 -847137 -554875 -299010 -331497 -726498 -112340 -730807 -25020 -945761 -168180 -763544 -577580 -295622 -763514 -167695 -134016 -450802 -348663 -664824 -652219 -50171 -857407 -461295 -936213 -663843 -131965 -663685 -432203 -346438 -181736 -429991 -419896 -172141 -650891 -334112 -905265 -500420 -554317 -215078 -880194 -134801 -680051 -717677 -698522 -228847 -485649 -25106 -64987 -305645 -333663 -133673 -157922 -153725 -314713 -543999 -365767 -472008 -288740 -447001 -421135 -65723 -845028 -885025 -163050 -420845 -674258 -65632 -319424 -762493 -611689 -296328 -424307 -285593 -545111 -841255 -939431 -248887 -615013 -448892 -217525 -775891 -689729 -11594 -629552 -629319 -229115 -937207 -32765 -848506 -470505 -301820 -135001 -649505 -167176 -678362 -643058 -775020 -810653 -50947 -106016 -245984 -89014 -443436 -642909 -615031 -457123 -378155 -864907 -893830 -932820 -119522 -673597 -12030 -350148 -702164 -687781 -703946 -346481 -577159 -475639 -802120 -93927 -432025 -479073 -633402 -837666 -75816 -651394 -363994 -309409 -212811 -83927 -115497 -609797 -724654 -617187 -267670 -174341 -111894 -208335 -413085 -9589 -442446 -83916 -61421 -404600 -315229 -74486 -871567 -827515 -744258 -214193 -650916 -494741 -61101 -21951 -559723 -325966 -336894 -312822 -905392 -367996 -38625 -34789 -724366 -14880 -336085 -95258 -257986 -288086 -133344 -92608 -746137 -334108 -53367 -459829 -12570 -88890 -343236 -906093 -308330 -870697 -135454 -304671 -461151 -94540 -306841 -70948 -260766 -260487 -11955 -570539 -18137 -44324 -668771 -239726 -189955 -634329 -449144 -630115 -206814 -761574 -602803 -65393 -68262 -618736 -934524 -781715 -133757 -679748 -755476 -102746 -14687 -354632 -940620 -363483 -263700 -464948 -18123 -50888 -112629 -710097 -505695 -209431 -350182 -51594 -43950 -82255 -129270 -742046 -291087 -482280 -369149 -659532 -793133 -525162 -764389 -763658 -863213 -306935 -356572 -46635 -11374 -335315 -315652 -116564 -292773 -35216 -133990 -538931 -421197 -494149 -50266 -555029 -623357 -9553 -71034 -193894 -24025 -24996 -327252 -360197 -591878 -769536 -793958 -670659 -938298 -303655 -709891 -151434 -14982 -926171 -94094 -21661 -658965 -912997 -80270 -726081 -49028 -305631 -668047 -70546 -361292 -144333 -167535 -16771 -3991 -713934 -301340 -343026 -702995 -160995 -581053 -92979 -64157 -42909 -61709 -371256 -938917 -71280 -314674 -24994 -124369 -847134 -444681 -17043 -72772 -140540 -36282 -912699 -703710 -286969 -12970 -381819 -771538 -25512 -28889 -741672 -362524 -159583 -516199 -761811 -387150 -654612 -262755 -12712 -109573 -874523 -91366 -49376 -501397 -335448 -943583 -23474 -207060 -18264 -70473 -337130 -312173 -664861 -826238 -560397 -372446 -71000 -125181 -162202 -21384 -67312 -781773 -13106 -48897 -431907 -148919 -305433 -20320 -431952 -44483 -243730 -343608 -276222 -795495 -49910 -20843 -703907 -709809 -492058 -526562 -49853 -209470 -822384 -523895 -358143 -84588 -780186 -773810 -118714 -114209 -661987 -392876 -67809 -354393 -110509 -835194 -29476 -707413 -676754 -89063 -949351 -475277 -83831 -597337 -334693 -81804 -67102 -900081 -576910 -856916 -286027 -78526 -487768 -14820 -732171 -564359 -633786 -871115 -780863 -203649 -119072 -36743 -75742 -372305 -60163 -591683 -566811 -42274 -372934 -31907 -83906 -480369 -71120 -29105 -95539 -9450 -132217 -480294 -821354 -529115 -340723 
-13982 -209182 -708895 -7296 -728672 -719070 -263064 -782229 -409719 -298687 -294323 -10986 -14604 -78867 -458224 -625898 -593905 -764162 -11796 -24153 -162021 -348885 -320610 -449560 -85644 -851705 -799202 -44246 -379608 -717364 -810232 -625799 -334536 -664933 -9633 -446208 -762539 -208571 -368418 -780897 -48856 -121092 -50067 -123028 -177547 -16485 -44021 -769024 -22239 -64031 -537498 -30078 -650644 -11619 -465056 -66410 -179004 -174853 -777995 -936737 -769554 -58505 -56503 -721595 -47569 -13462 -74423 -79302 -622726 -479476 -316867 -235516 -223266 -828552 -501340 -273013 -774329 -626828 -41809 -126613 -304134 -703801 -28879 -847915 -375376 -199787 -892026 -18013 -616044 -60366 -222905 -747948 -527512 -134618 -73618 -4012 -169785 -49710 -762424 -180865 -341471 -793822 -724362 -130270 -7707 -593276 -39176 -420552 -321799 -832849 -608481 -541030 -427585 -355841 -314498 -124483 -893904 -5461 -751374 -178487 -48251 -870102 -345731 -43127 -71270 -88352 -939640 -957079 -312208 -303465 -427671 -268090 -28567 -70649 -118026 -29727 -179877 -230752 -242308 -58520 -273338 -776395 -11584 -856767 -418486 -178916 -590492 -9452 -249932 -236505 -673560 -901467 -95421 -513439 -672909 -746180 -30809 -55055 -45264 -459038 -501917 -267136 -560511 -91885 -236112 -129213 -15643 -825384 -316904 -788749 -870021 -14720 -56108 -319423 -526356 -170359 -242624 -506098 -637756 -356232 -50105 -125074 -301660 -897606 -747815 -267927 -654709 -158928 -353398 -66201 -704326 -70633 -570500 -121097 -674123 -77648 -455685 -99775 -943090 -330840 -368686 -12038 -389221 -740101 -489341 -275649 -18830 -306468 -255379 -957009 -168001 -151517 -738483 -626414 -93866 -73929 -354087 -119931 -733874 -357311 -21076 -67194 -782691 -78861 -275184 -956979 -8153 -224411 -208771 -316879 -525458 -323361 -407648 -567973 -215614 -802780 -903486 -215820 -62102 -710200 -952217 -383917 -43240 -121055 -288002 -739553 -82383 -300101 -208976 -788554 -91984 -167019 -468442 -654446 -60566 -690598 -77322 -140272 -673891 -316043 -930624 -64775 -186816 -547999 -546093 -475081 -202304 -559969 -611586 -166897 -211674 -361135 -741483 -34489 -824425 -141888 -588298 -248591 -761679 -919082 -439935 -368043 -262645 -319993 -843742 -301835 -601130 -793553 -124987 -654358 -464048 -673632 -467338 -558976 -316638 -527694 -34549 -9877 -335442 -53961 -463894 -434026 -623839 -527142 -42428 -841604 -23401 -234688 -31596 -104914 -209539 -482439 -219092 -786072 -278328 -54689 -305395 -316216 -881222 -270564 -125066 -27299 -889413 -711748 -661067 -633177 -286116 -521582 -208083 -277049 -800756 -40322 -383698 -847571 -425705 -88424 -140631 -478171 -62779 -945837 -12745 -927616 -590097 -528800 -367298 -94196 -693114 -199439 -1480 -288627 -492095 -62777 -372497 -11532 -623060 -178585 -487358 -3849 -522256 -445401 -443079 -166603 -17389 -8507 -221744 -54727 -77326 -847657 -938756 -379458 -461052 -134777 -642789 -92597 -886094 -181616 -750708 -175693 -191075 -60576 -701280 -927780 -373172 -606090 -512791 -817940 -34837 -13006 -49161 -12427 -122488 -626049 -13178 -882565 -295795 -315815 -251301 -24380 -626103 -166962 -79296 -155506 -451853 -793795 -852673 -25370 -938995 -45636 -423762 -35200 -58894 -267004 -538390 -335332 -303518 -320481 -68733 -50270 -122505 -315383 -763954 -332242 -673355 -19498 -75834 -60570 -377534 -20836 -45303 -747923 -753772 -44582 -79450 -139998 -761721 -64673 -655983 -20258 -322390 -468746 -709150 -95579 -32347 -63103 -821435 -140580 -124485 -538128 -310271 -335414 -232415 -9592 -65742 -42173 -50196 -606519 -233764 -11850 -703448 -315018 -771346 
-611735 -390936 -920910 -58891 -471898 -905995 -66149 -738030 -148423 -925188 -554439 -821560 -763538 -66339 -336260 -558958 -673070 -342778 -606660 -883360 -78720 -669133 -423189 -672405 -950688 -232669 -65015 -263637 -709788 -656926 -231008 -10353 -559309 -594599 -196295 -288009 -230353 -871303 -123533 -224061 -575315 -475967 -142828 -113763 -943329 -341252 -472437 -127915 -151860 -11918 -51825 -361820 -24267 -167458 -45171 -262027 -676065 -46599 -433487 -902238 -729261 -88087 -793937 -363869 -527645 -150448 -62399 -606319 -866145 -277435 -892863 -702709 -619030 -264022 -320887 -475454 -195020 -84907 -869348 -943812 -58470 -68128 -42017 -556484 -653283 -60481 -122967 -11427 -271085 -488087 -307793 -26855 -933192 -278376 -135495 -29372 -704064 -703871 -12485 -924217 -175661 -935759 -21714 -42098 -798145 -709141 -238929 -719133 -559549 -334442 -921819 -35140 -21085 -174534 -329020 -625736 -60982 -663831 -718945 -13186 -57708 -369304 -26206 -309556 -793594 -937055 -71829 -423940 -919078 -784073 -446704 -64257 -652800 -197418 -671365 -567093 -206440 -45042 -387420 -14652 -180036 -611027 -283661 -41923 -38192 -327940 -854955 -821414 -559923 -697074 -590651 -747486 -267277 -195091 -432064 -449076 -803052 -460413 -288744 -227660 -885455 -707468 -943634 -684115 -863330 -59976 -913144 -534968 -85937 -279321 -722921 -196821 -8614 -52805 -674285 -236502 -83677 -13510 -442253 -281561 -95202 -168677 -12043 -647463 -130728 -140897 -477422 -905008 -288728 -24891 -86103 -101745 -472541 -317768 -682118 -956279 -60500 -649774 -17109 -121663 -914313 -567544 -134794 -871067 -907814 -256902 -940387 -896788 -858796 -13523 -124292 -264536 -27076 -761425 -197081 -167956 -60665 -558555 -62101 -203374 -234321 -483251 -335322 -640430 -670568 -547478 -20686 -952329 -791693 -736669 -337687 -271023 -478654 -698634 -351008 -48620 -226666 -334041 -168143 -903598 -748396 -61351 -181576 -832375 -11908 -623873 -625863 -560238 -503681 -471976 -9503 -177137 -913041 -112517 -772571 -222168 -516625 -263512 -329150 -446378 -517792 -459334 -53431 -50202 -393967 -774086 -177767 -814289 -209137 -383955 -717415 -555571 -631478 -166798 -465009 -795946 -442992 -677496 -263763 -93922 -288550 -77581 -301781 -75522 -74488 -199071 -198627 -903778 -769547 -114190 -135452 -351744 -84174 -675143 -180162 -180179 -16609 -125103 -94691 -939232 -947345 -793933 -422989 -42332 -558220 -237588 -494610 -442985 -45211 -11782 -34810 -188783 -125688 -24031 -91030 -522825 -174625 -134360 -309844 -682028 -174712 -852696 -583481 -447729 -73679 -23829 -718040 -166955 -936173 -52855 -901244 -22212 -110263 -737789 -125136 -831373 -10656 -61857 -569918 -131255 -378502 -209101 -368621 -654560 -856920 -143451 -60821 -144029 -831265 -42238 -468072 -556152 -926986 -148505 -413269 -335375 -18252 -50249 -903633 -750968 -738354 -750404 -665726 -474190 -854124 -65217 -41959 -74508 -34964 -837126 -237150 -632941 -68592 -332569 -34779 -702077 -497484 -119965 -68759 -15162 -473351 -397806 -4447 -372429 -180707 -12582 -386657 -735841 -479887 -495794 -717334 -863277 -930959 -13077 -769842 -933140 -199345 -793662 -704295 -869987 -425630 -557406 -17229 -29650 -617275 -178987 -93682 -772062 -145649 -760449 -875160 -833268 -460995 -310647 -731334 -956418 -304751 -703353 -892373 -192488 -42039 -255593 -257394 -790590 -773992 -94835 -567330 -793663 -690380 -30578 -465788 -104871 -334946 -758092 -792837 -905557 -267293 -602488 -782567 -17139 -460283 -892520 -440467 -201796 -947352 -14853 -412677 -383719 -369338 -47366 -221757 -335443 -442415 -249356 -814794 -90494 -115125 
-149983 -632846 -479973 -196899 -43693 -717722 -119287 -581079 -903523 -814883 -93928 -249148 -559515 -78866 -408901 -372510 -765962 -463288 -693123 -946486 -270490 -149433 -27663 -5494 -271652 -264989 -139995 -416125 -43331 -148991 -795510 -858093 -13289 -135455 -591686 -623554 -328540 -110249 -198488 -306900 -380885 -47325 -131298 -792382 -51209 -843897 -930899 -792287 -336973 -452973 -84020 -132620 -57863 -678776 -751507 -703070 -47211 -941750 -26289 -249377 -271236 -384003 -4344 -20896 -656699 -112913 -273942 -486189 -574466 -679423 -21488 -10394 -651830 -627545 -890526 -94870 -151857 -20561 -825551 -450780 -73989 -262653 -20496 -57674 -70807 -271801 -95597 -35112 -887713 -343234 -118325 -315032 -810187 -818606 -277096 -199680 -270648 -356373 -21173 -616716 -343632 -950874 -774067 -120138 -34619 -334735 -903546 -159619 -69626 -7419 -337948 -67166 -149246 -263098 -120228 -75552 -296974 -94955 -79240 -36999 -61518 -73370 -23583 -458809 -926595 -491993 -354550 -458935 -519851 -467123 -635285 -70639 -881725 -257820 -742032 -759336 -118288 -793057 -9927 -468205 -107927 -69333 -342688 -68767 -386684 -638814 -242843 -75851 -938736 -37049 -174905 -429605 -92123 -560509 -62333 -390012 -226801 -750701 -338287 -289943 -20782 -664517 -736734 -60533 -328452 -16463 -926806 -518427 -390202 -534559 -427390 -600829 -801220 -271815 -259638 -183790 -49256 -23228 -42241 -99934 -431524 -769602 -664909 -704444 -626570 -13116 -191056 -704066 -334640 -231831 -225507 -26276 -95259 -301842 -45609 -335228 -126397 -486645 -433512 -956884 -645557 -461314 -58110 -49975 -555387 -9979 -315142 -830981 -452948 -77551 -276168 -446637 -688412 -665765 -945653 -633193 -663235 -77650 -51137 -21947 -131048 -289399 -180175 -277913 -931991 -772527 -144111 -480847 -81553 -209650 -735033 -753867 -329737 -340450 -945309 -636884 -277870 -907535 -901188 -176347 -931011 -54724 -550447 -199887 -54397 -413449 -249920 -813908 -377891 -247151 -18637 -488280 -92501 -291061 -61373 -569351 -210526 -188770 -20391 -397607 -335312 -22470 -687855 -60524 -638633 -88593 -928039 -332641 -472098 -50612 -413268 -225012 -566552 -462240 -883046 -120132 -736464 -248222 -268251 -12327 -11977 -664274 -335601 -441829 -213690 -863416 -198768 -12833 -48451 -75917 -485552 -307986 -260436 -718845 -443028 -15114 -483727 -903492 -364476 -693059 -275028 -536462 -342958 -29183 -316323 -121832 -424867 -903465 -451598 -317739 -691323 -869503 -269835 -335463 -903568 -45140 -366471 -44964 -737752 -636518 -867415 -103547 -52854 -747345 -237710 -387975 -800305 -108516 -25547 -395028 -262699 -167302 -11864 -23128 -732638 -824655 -45308 -161260 -625094 -101822 -379270 -30703 -126359 -715253 -412584 -335475 -73619 -609731 -864986 -60223 -159934 -533297 -460214 -695381 -942329 -56149 -42344 -641762 -678739 -330066 -161986 -11591 -12510 -686869 -756045 -20649 -811309 -141230 -129110 -349266 -297034 -428904 -867519 -267903 -72502 -14788 -661830 -847717 -461342 -34481 -63574 -63716 -634848 -2166 -846201 -130258 -438171 -527993 -956686 -6857 -717482 -276224 -602408 -447472 -205865 -248436 -15881 -475894 -34573 -415081 -35231 -380988 -12319 -863263 -930046 -952952 -53766 -236001 -505407 -192258 -161078 -190661 -193159 -100990 -416974 -351226 -410657 -118768 -809748 -900895 -84204 -251957 -45575 -859262 -83814 -66413 -143618 -198387 -58095 -50310 -956797 -804987 -471338 -197253 -50143 -647708 -575197 -28908 -501419 -636539 -54399 -650086 -699176 -937980 -325542 -512653 -13452 -185261 -517782 -12567 -635809 -41957 -347478 -617516 -793970 -1350 -43282 -305299 -97568 -846192 
-453996 -911865 -942634 -457032 -196398 -334763 -380050 -4726 -51000 -465396 -612392 -227049 -60979 -836881 -401072 -140679 -933041 -150467 -427068 -75456 -356481 -384085 -654032 -120220 -34469 -49108 -125144 -107856 -604811 -600807 -791199 -805523 -141732 -255864 -155376 -63607 -295895 -952604 -222719 -709841 -360901 -287929 -197354 -103976 -956940 -182112 -200225 -115524 -167776 -20764 -192881 -390416 -337971 -855932 -279079 -721596 -271364 -224737 -784189 -224907 -782541 -350737 -377882 -284030 -6952 -317617 -110326 -545559 -651649 -769553 -332937 -859126 -550453 -560539 -37001 -166776 -21993 -101522 -202470 -35909 -186811 -79294 -22785 -77179 -88142 -881872 -48626 -289166 -703698 -236694 -54664 -119735 -387600 -645351 -95242 -366016 -9965 -101683 -292652 -167609 -712253 -129007 -931121 -48503 -367673 -493704 -171334 -836809 -943484 -13321 -768741 -99903 -99060 -648979 -793818 -74429 -16945 -560446 -198457 -693320 -534292 -8960 -10060 -747805 -224678 -534140 -699390 -693238 -605656 -642905 -79211 -479851 -793748 -57150 -501069 -29505 -461589 -709932 -543886 -351035 -95086 -679674 -436157 -664783 -549537 -279433 -628266 -262383 -751491 -82870 -830988 -665961 -670990 -768739 -678665 -852708 -25450 -17821 -773298 -622793 -121947 -326420 -179311 -429846 -292022 -26878 -1006 -106861 -323538 -41747 -93618 -224254 -682030 -196134 -780185 -56230 -17576 -35190 -833661 -131369 -647502 -88191 -692278 -69556 -379588 -394731 -689004 -93473 -694634 -119746 -913150 -104289 -42318 -21971 -750592 -445696 -348485 -335240 -236210 -192563 -536109 -806973 -483528 -352830 -263855 -277116 -457484 -257764 -78545 -138873 -326439 -199936 -520098 -109340 -387791 -688795 -864315 -644610 -40336 -51021 -791606 -16325 -20400 -599233 -232005 -559395 -308401 -135444 -169480 -692746 -110508 -335135 -942883 -554508 -251596 -130964 -953012 -663048 -712865 -63004 -635176 -205946 -896677 -3261 -753824 -119057 -200693 -260811 -557355 -209601 -119079 -346254 -640072 -524161 -261758 -21321 -467833 -42745 -88293 -431754 -481650 -303143 -89267 -297795 -782277 -879104 -130305 -347161 -324507 -53930 -89164 -871169 -781752 -402048 -106764 -76904 -60624 -250853 -551348 -265940 -42431 -64990 -9846 -222675 -829141 -304799 -848065 -23528 -266313 -194075 -19406 -773421 -274100 -59273 -57877 -515801 -129222 -771508 -176314 -11635 -113389 -168711 -466889 -48152 -111991 -401562 -80530 -177491 -58338 -149343 -678497 -610939 -76073 -575539 -8627 -139089 -13693 -380705 -23427 -65854 -840240 -73095 -12867 -72394 -764622 -720000 -77381 -97341 -58357 -60184 -454340 -622057 -407232 -287091 -224301 -340671 -593789 -74104 -836896 -151371 -319502 -63121 -499053 -200020 -567877 -61132 -577365 -373529 -201842 -128890 -719487 -893996 -35214 -750344 -927791 -742009 -647783 -3478 -28839 -306776 -710164 -139739 -30181 -197048 -39494 -928109 -54156 -869608 -616155 -67562 -480073 -837569 -9899 -129393 -60111 -484368 -903326 -859002 -457729 -486577 -37829 -927943 -952608 -426080 -380672 -224498 -94584 -248949 -846176 -943079 -458798 -22499 -45319 -240845 -170153 -928248 -956172 -409210 -482529 -921794 -20462 -168376 -459966 -661223 -721776 -316733 -74171 -105146 -61782 -249472 -151712 -199993 -92607 -236097 -324269 -777331 -343233 -300533 -140575 -61126 -166763 -13492 -24908 -259994 -922212 -45428 -275113 -266955 -348852 -45409 -460642 -46223 -329329 -624850 -827942 -271041 -129463 -287545 -367417 -811404 -272974 -372133 -60464 -368491 -144526 -590195 -124701 -45346 -770043 -442750 -630275 -455672 -59116 -69471 -251278 -78865 -104316 -134807 -845800 -280482 
-956165 -329273 -151479 -28374 -93247 -316302 -344050 -203975 -162924 -373667 -368642 -39919 -258102 -654720 -781253 -158157 -260997 -334158 -151306 -359338 -902647 -458450 -201369 -10551 -318601 -44811 -50574 -811796 -29646 -614671 -308467 -937861 -21669 -76054 -607993 -605813 -923059 -269407 -862615 -814723 -12584 -333468 -367901 -13566 -861940 -858576 -886158 -68678 -223869 -364770 -289284 -45625 -635526 -580194 -494738 -902631 -263698 -214806 -938233 -85569 -83794 -73493 -603909 -26825 -206241 -347352 -589676 -223421 -120522 -663903 -480319 -79250 -693314 -612992 -903559 -21930 -23700 -131253 -763068 -247515 -434717 -347313 -334795 -751536 -119919 -486997 -35933 -83138 -61114 -46723 -922463 -500946 -673518 -480176 -762640 -6746 -62370 -35071 -901270 -179262 -32138 -397305 -903555 -671429 -599788 -439595 -671384 -13279 -208275 -52396 -69481 -54043 -822978 -381215 -295853 -208523 -224700 -34788 -7462 -72104 -387058 -231728 -747974 -6867 -40178 -291243 -284110 -650507 -373294 -819511 -600358 -932591 -574459 -618560 -738223 -76017 -292861 -566328 -492626 -588410 -325356 -871559 -773983 -256702 -249089 -749241 -441705 -120198 -279408 -17485 -934557 -190979 -549699 -88407 -60787 -197355 -124515 -624590 -704385 -334030 -14672 -92148 -304710 -99087 -192565 -210886 -444009 -127619 -46566 -885853 -52281 -554624 -798658 -196751 -68609 -885553 -863642 -364579 -2802 -45131 -29631 -12687 -309689 -908292 -61727 -786689 -82160 -451702 -704107 -227536 -416852 -205909 -80279 -234550 -317326 -88527 -31005 -254933 -64686 -848212 -836310 -134899 -516702 -594497 -267478 -149603 -458684 -897654 -215605 -248854 -956984 -279530 -55009 -333252 -251859 -332364 -717902 -66069 -836068 -807022 -45376 -127923 -582387 -586471 -719173 -881897 -305287 -429159 -160594 -192492 -33066 -215381 -793616 -803196 -89257 -440533 -6663 -459654 -45289 -49591 -133649 -13420 -237788 -73610 -14802 -29114 -240431 -271365 -57044 -197277 -62134 -408732 -477169 -460856 -63623 -363062 -467658 -626005 -524202 -12969 -673974 -250108 -669002 -642006 -475351 -75963 -666221 -25976 -723450 -787777 -786410 -904066 -269020 -276604 -451912 -62853 -936516 -890711 -724087 -381063 -57078 -625734 -80850 -803010 -814769 -355999 -29181 -790503 -337109 -761974 -154949 -71110 -334085 -356980 -458687 -79814 -93963 -494104 -928110 -792546 -107193 -793604 -167309 -885929 -20311 -297642 -324640 -911528 -663871 -34791 -224888 -497849 -174719 -83968 -768653 -849727 -128145 -95285 -383251 -491815 -625635 -44857 -10691 -703498 -54972 -62759 -179886 -903593 -75780 -769730 -395723 -335265 -743987 -343606 -60186 -180026 -711303 -15679 -45420 -84011 -674325 -15765 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/landscape_test.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/landscape_test.jpgl deleted file mode 100644 index 0a7d999428c225ddec080e2843e169f6400394b0..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/landscape_test.jpgl +++ /dev/null @@ -1,2500 +0,0 @@ -839829 -660628 -838258 -286106 -556084 -819785 -770668 -287048 -239434 -807299 -729923 -382184 -33865 -771924 -682723 -163633 -722138 -653059 -383921 -676035 -254873 -935640 -851153 -59379 -294978 -239352 -664568 -847031 -663300 -693537 -850016 -954823 -115767 -119387 -955584 -887458 -407307 -13779 -945760 -287144 -944588 -112183 -261824 -541996 -872022 -361587 -648500 -64793 -934675 -595660 -254478 
-394490 -778647 -197411 -75369 -547434 -390361 -260512 -897352 -944032 -195760 -588475 -574462 -147204 -392824 -385441 -155481 -308582 -943988 -538044 -761620 -751825 -664132 -524079 -589747 -103120 -582260 -578977 -285238 -647954 -62068 -26221 -384808 -168165 -823056 -760006 -33206 -771717 -100693 -70626 -579515 -873608 -315762 -287074 -861150 -315855 -640693 -910839 -757441 -659682 -647798 -696984 -934534 -75854 -240181 -42110 -828510 -394526 -264086 -870017 -787158 -288652 -765434 -237985 -743133 -168331 -693576 -694236 -828675 -897510 -694862 -921383 -850921 -81069 -762324 -924872 -233763 -287266 -768957 -852843 -357136 -417911 -840430 -830506 -13978 -419989 -693674 -136450 -891378 -26830 -518784 -696902 -835001 -822063 -303510 -817181 -834877 -664242 -45915 -540729 -522887 -797888 -282444 -823320 -524863 -167637 -70684 -574998 -165970 -906346 -280943 -405338 -535410 -874287 -809941 -203824 -696512 -712266 -873790 -327849 -875289 -625397 -564625 -229500 -721840 -55080 -778602 -767449 -64611 -897116 -802712 -41062 -955623 -842946 -801836 -262220 -640802 -246951 -786972 -738256 -755245 -24521 -294791 -897977 -781912 -632008 -365642 -140484 -564539 -812916 -547108 -797628 -81589 -930057 -160547 -555555 -830756 -932639 -241892 -604472 -286762 -756645 -793746 -303379 -18213 -767422 -395419 -101511 -411002 -727192 -96948 -45973 -729063 -871749 -391798 -759395 -189594 -73575 -114567 -103928 -668524 -901068 -571981 -570577 -938608 -743443 -266666 -391718 -775294 -873344 -603996 -607234 -332200 -248928 -328746 -856238 -217790 -648108 -419541 -396995 -398600 -874619 -909199 -13594 -847030 -286346 -663712 -834927 -874055 -294139 -120971 -255186 -949878 -927183 -237714 -920389 -136162 -564548 -785136 -16772 -100514 -411335 -771149 -745855 -830388 -273073 -632845 -835007 -800195 -696543 -153291 -228151 -397221 -34128 -60285 -99914 -878657 -190056 -887192 -616526 -794713 -141441 -615604 -635543 -130154 -668803 -919968 -246049 -946320 -874218 -788707 -649204 -788717 -629658 -281581 -418636 -365933 -761396 -886946 -705025 -417412 -756428 -152436 -916074 -632966 -276095 -857638 -132463 -61350 -616961 -897299 -892550 -674940 -115263 -42837 -807416 -536112 -758263 -278428 -189700 -873139 -912690 -623688 -118049 -172694 -195274 -189516 -908894 -33167 -167230 -618181 -931012 -160307 -674567 -758076 -60479 -628393 -779715 -651411 -33045 -294425 -247875 -601983 -411507 -227902 -347273 -138747 -872339 -846332 -809399 -394432 -755664 -543861 -587161 -86931 -837381 -751537 -957656 -357973 -759656 -886583 -158728 -361969 -801062 -752627 -132541 -517276 -254606 -82046 -153016 -375730 -601071 -573599 -579714 -950634 -527824 -904668 -230805 -824000 -704782 -633208 -921917 -419887 -402815 -677675 -544027 -911961 -831198 -82315 -309087 -563901 -599001 -895792 -160274 -131428 -854684 -807584 -893920 -388805 -829917 -266030 -742942 -874056 -666977 -47052 -927108 -874239 -265623 -71771 -918887 -417374 -346443 -528844 -64542 -785942 -635998 -402119 -275961 -612437 -855150 -166056 -546833 -645561 -703914 -118173 -269904 -238537 -398595 -167971 -132206 -251461 -343806 -298733 -610246 -254421 -386327 -31483 -186064 -838502 -941840 -882254 -955466 -75543 -755061 -700925 -18248 -228095 -388994 -870778 -280091 -857551 -239172 -226823 -136795 -415193 -263833 -101044 -653058 -67364 -593762 -873284 -830769 -884795 -771289 -419516 -517856 -70206 -705756 -771684 -289385 -628320 -666642 -719058 -920829 -99789 -955228 -758107 -104932 -164848 -761427 -564187 -171283 -760457 -66679 -690182 -289255 -287706 -541989 -403268 -848788 
-947838 -702001 -67387 -128035 -659164 -932010 -607469 -785359 -909178 -344277 -226972 -571915 -167337 -329777 -285675 -733940 -253913 -53108 -638137 -519422 -279936 -784771 -831469 -863824 -214984 -823235 -697000 -751990 -579345 -825874 -109975 -611386 -755561 -824839 -236917 -600708 -86404 -873601 -167525 -632574 -694110 -264529 -245040 -94892 -255409 -226549 -770111 -601383 -374984 -874253 -957753 -941502 -691023 -132084 -223852 -294023 -954213 -288529 -174447 -254704 -627790 -274894 -390505 -880037 -115804 -390037 -813285 -754520 -697519 -706739 -281747 -74588 -294408 -847905 -721302 -690397 -12850 -567249 -196161 -45877 -847038 -690800 -547271 -618529 -523315 -298846 -887214 -130221 -822038 -245275 -11739 -733117 -759005 -911793 -606467 -105030 -545139 -692450 -362311 -394079 -309237 -938474 -767186 -386460 -848350 -891511 -94799 -632761 -303610 -165987 -331904 -756103 -264050 -700033 -685476 -770287 -616334 -32680 -226544 -674958 -363893 -835051 -603033 -19586 -171341 -761519 -827784 -753216 -625197 -831717 -755884 -627983 -810639 -161858 -285416 -631001 -692175 -321013 -75702 -54970 -951018 -542374 -254986 -333385 -579599 -891130 -543430 -913649 -174600 -68561 -856938 -101643 -599247 -571257 -624843 -209269 -758242 -735310 -772514 -834434 -848373 -750198 -253020 -256700 -562941 -411106 -899787 -911102 -281504 -800825 -579690 -30546 -957256 -659595 -386323 -308981 -570110 -12968 -365339 -739429 -397349 -802070 -72156 -294402 -745065 -571425 -340274 -788053 -298419 -33164 -417471 -304059 -707394 -751836 -542478 -822842 -165360 -891699 -240394 -628331 -944224 -341219 -232863 -954455 -135927 -11193 -705660 -928397 -168712 -694421 -86956 -605910 -29186 -246216 -369988 -253087 -867794 -594076 -705225 -25227 -253834 -775712 -824088 -605374 -253799 -751557 -521889 -31192 -832615 -594051 -869633 -25556 -244050 -828646 -45161 -140921 -694290 -244525 -155381 -574736 -365301 -782134 -693675 -664408 -174623 -900305 -147291 -412256 -402859 -226743 -933958 -836846 -127971 -404433 -702040 -540567 -579927 -139620 -227030 -96937 -674339 -604976 -289104 -32773 -314739 -830704 -770828 -692974 -146216 -943334 -912093 -741529 -123130 -570703 -255038 -789938 -836912 -659241 -419383 -870525 -758235 -368629 -617364 -152958 -366113 -254680 -725305 -297345 -911977 -742583 -288737 -42922 -547443 -674651 -830507 -917053 -119487 -47522 -852858 -338316 -714744 -91073 -605019 -786211 -387084 -182782 -136425 -199673 -253801 -611629 -238710 -771392 -42678 -645292 -13520 -723959 -383337 -955146 -93245 -114302 -174808 -605727 -636452 -26315 -362200 -627914 -676827 -543366 -816913 -67586 -526625 -255435 -790451 -583202 -90574 -231877 -927202 -233363 -32776 -287537 -255306 -19668 -834955 -24465 -656232 -81150 -593855 -742905 -836651 -623529 -705259 -381120 -874254 -848814 -759677 -115556 -147299 -413224 -873646 -294588 -294668 -759535 -85955 -308705 -345878 -701773 -25651 -294268 -705258 -771131 -625339 -275966 -739465 -316356 -387095 -282884 -871676 -888217 -391491 -570889 -37912 -697024 -298779 -54544 -863937 -726954 -126114 -397719 -682565 -60026 -183115 -579475 -800974 -200732 -281989 -393825 -946526 -105734 -755197 -886211 -941752 -596661 -828210 -41961 -296805 -294719 -230809 -946671 -676242 -144371 -618375 -211605 -417878 -103073 -158050 -932025 -569326 -384741 -622844 -864395 -78956 -54901 -603037 -308161 -205062 -943607 -736606 -13569 -99360 -103217 -372067 -658877 -656248 -923643 -347530 -935456 -100664 -255198 -908879 -836669 -287333 -593354 -906839 -627093 -696622 -31853 -135613 -725827 -274336 -664653 
-412740 -314701 -716938 -784909 -348898 -236057 -941774 -282559 -254015 -322900 -731142 -829208 -655322 -745568 -350912 -873887 -238925 -104775 -694355 -332476 -675048 -365267 -150806 -755838 -126226 -115333 -792551 -72773 -569879 -80529 -668887 -844555 -24516 -254613 -119564 -625623 -898092 -287868 -593196 -234600 -182183 -297254 -570328 -98759 -381764 -886783 -320767 -957154 -388010 -372455 -13971 -861525 -96500 -134951 -40672 -697070 -633560 -956333 -753753 -912015 -693027 -518457 -772301 -697109 -633805 -936515 -840289 -843227 -876114 -602072 -834984 -843892 -800794 -891189 -340305 -26086 -403329 -263328 -76168 -286939 -118987 -124880 -391779 -950317 -315858 -349419 -34339 -635123 -90432 -696863 -774213 -694248 -263492 -816570 -760880 -745623 -393221 -375823 -62954 -694376 -909261 -879227 -33151 -693249 -348991 -204700 -226885 -68483 -548043 -617222 -887285 -23725 -771291 -323886 -284541 -409974 -33043 -86100 -127610 -810700 -768377 -131962 -264611 -881442 -253011 -707136 -767334 -845549 -855982 -70725 -662397 -187559 -569479 -204024 -420209 -262227 -394377 -87093 -322219 -553985 -825686 -873242 -255476 -136756 -830280 -559726 -99120 -874228 -770645 -702222 -763523 -64750 -941618 -278325 -116094 -176307 -346561 -327458 -32694 -548095 -224240 -623480 -232047 -559140 -820207 -837761 -373598 -55680 -304112 -786027 -703296 -256249 -148843 -700569 -81528 -381966 -546630 -821102 -880201 -17329 -830651 -551207 -938406 -716634 -927310 -60129 -760711 -869300 -96520 -904725 -603955 -894180 -831288 -254848 -74100 -567726 -748962 -279958 -911251 -83925 -860966 -652805 -691846 -177945 -625468 -67801 -664168 -948844 -747554 -617467 -393641 -413303 -593518 -85346 -741816 -624983 -570085 -690431 -911287 -231009 -627973 -128375 -663837 -32963 -668669 -736072 -62693 -572889 -29180 -295129 -254225 -187464 -75583 -383008 -50010 -596897 -601165 -906668 -845681 -373022 -931128 -596250 -135551 -675997 -253301 -571939 -568947 -674862 -785217 -828439 -824638 -772342 -577031 -167758 -635317 -145869 -254600 -761546 -295697 -873806 -522369 -571762 -330753 -357292 -955667 -594159 -560044 -358655 -948310 -832138 -799179 -287752 -770410 -770180 -680644 -772488 -914604 -343588 -181785 -60916 -838104 -735469 -847029 -691107 -133890 -878673 -653499 -704631 -819702 -695578 -659492 -368215 -598972 -801028 -772558 -365198 -663549 -18100 -415413 -547040 -759319 -322907 -415604 -918658 -879892 -42608 -125025 -816558 -42918 -611760 -955435 -519699 -521396 -245163 -52657 -885748 -830712 -772637 -122151 -525378 -637401 -696575 -570903 -64706 -355690 -933078 -321701 -316735 -547568 -552142 -579889 -545587 -360749 -932369 -223948 -564149 -384883 -164213 -627651 -759625 -331439 -735697 -153222 -20515 -835061 -60459 -659811 -244927 -254627 -894567 -826164 -367106 -311904 -715326 -330743 -664962 -600122 -377584 -564534 -20385 -716373 -778361 -238979 -385516 -244813 -62337 -759103 -254459 -603640 -871032 -115390 -844167 -925405 -305171 -413310 -795095 -349410 -924695 -172462 -183321 -778272 -167185 -147505 -274673 -298427 -759618 -850860 -618581 -114883 -328735 -802406 -682394 -379297 -419204 -745451 -697013 -251380 -276032 -671712 -591572 -55424 -721587 -103207 -303389 -294832 -69137 -185561 -571908 -91452 -318997 -568343 -161061 -838507 -582251 -289103 -667439 -605495 -662577 -281896 -533259 -740734 -659596 -330843 -209384 -916260 -50611 -251003 -580891 -843159 -650794 -692632 -900454 -920564 -927061 -874190 -921532 -786053 -325115 -758278 -632577 -649721 -13750 -348569 -117223 -605704 -401007 -778324 -335047 -795559 -627455 
-113684 -394513 -525413 -126240 -835000 -810419 -339612 -830482 -906117 -736714 -229678 -696898 -827281 -852750 -395760 -538693 -766079 -255505 -84286 -274936 -656303 -568315 -904579 -125955 -173431 -387690 -890379 -176092 -836638 -281280 -365229 -917102 -239276 -680080 -243157 -521795 -776214 -274198 -239483 -805388 -416980 -41441 -579329 -929777 -767574 -797788 -327933 -635695 -951597 -522217 -790605 -127974 -571951 -786132 -676416 -300677 -254976 -875824 -75350 -230241 -254978 -63101 -255620 -680504 -829319 -603031 -70910 -616722 -579710 -799287 -116317 -28395 -742355 -135339 -818407 -760043 -805339 -30950 -322820 -521068 -152625 -932585 -410268 -25850 -894519 -244996 -323467 -758953 -752752 -604484 -840266 -696450 -102653 -660864 -570058 -256450 -822875 -292851 -836775 -647080 -807319 -678400 -914799 -823805 -253100 -42027 -756544 -926462 -850890 -253797 -738550 -604112 -857187 -62503 -724432 -870465 -875317 -277465 -571142 -359917 -87151 -659333 -692284 -103154 -930929 -96975 -874223 -308173 -11877 -907725 -707082 -914515 -390041 -786250 -722168 -343677 -168184 -398033 -691183 -732284 -692016 -72797 -46316 -242050 -141514 -670729 -419790 -60146 -864254 -910332 -807164 -800765 -297738 -786582 -873886 -658799 -746067 -410763 -676360 -546203 -837393 -766018 -692739 -571042 -533484 -835003 -390310 -941730 -861088 -820096 -287795 -229460 -850068 -308953 -50746 -692963 -128163 -943141 -86323 -112339 -523613 -17295 -829465 -254801 -541865 -294512 -681639 -104940 -794161 -807207 -255258 -192046 -356505 -937817 -159020 -124962 -151797 -388008 -834881 -243790 -733369 -29108 -783876 -570336 -571333 -521915 -293952 -196348 -887234 -348996 -632638 -770422 -361783 -831641 -196098 -634945 -147278 -938764 -233105 -696572 -816738 -84689 -676514 -746285 -13017 -160007 -954793 -601932 -246212 -835951 -875493 -926843 -37720 -588213 -757699 -279728 -823279 -612302 -152756 -525358 -557950 -649013 -743405 -11837 -418349 -562221 -115446 -890807 -235487 -632535 -756524 -570035 -33121 -132073 -419997 -269004 -385294 -372034 -244731 -669549 -667271 -923957 -832424 -905312 -345916 -15131 -281259 -314441 -552972 -676620 -944394 -217929 -101868 -18404 -786478 -394144 -261371 -73787 -50761 -571896 -41165 -693918 -262381 -648829 -653007 -805431 -566687 -829771 -776735 -874650 -874216 -795127 -921404 -570303 -254858 -516630 -913972 -262079 -831645 -126916 -246414 -606581 -827581 -86596 -62301 -828467 -693569 -606729 -350641 -564495 -331701 -802354 -86865 -369277 -105190 -673845 -664200 -274678 -577595 -133415 -828395 -653008 -145815 -11133 -915637 -382877 -124061 -245168 -545595 -685479 -153493 -891536 -716187 -405793 -17956 -579749 -645386 -521464 -69560 -96133 -337336 -183159 -874318 -640490 -681794 -667355 -95012 -740389 -571755 -106285 -772186 -934137 -923857 -617140 -809394 -677722 -192985 -336773 -874994 -595956 -398251 -287769 -893845 -846327 -711043 -244888 -687294 -163533 -569398 -717730 -83509 -119341 -274680 -225546 -929509 -580456 -413530 -790716 -121939 -528970 -172429 -394275 -716905 -753985 -417687 -654531 -244375 -755178 -419397 -834060 -372514 -696871 -70697 -113367 -372549 -152702 -784849 -538286 -347158 -184157 -372058 -114082 -55585 -938160 -578943 -370948 -55647 -364538 -842736 -785558 -525198 -534736 -93565 -46057 -253843 -765251 -911238 -931999 -940547 -588374 -759694 -921437 -546097 -835824 -687382 -919918 -263234 -731038 -756108 -767537 -32774 -385950 -234158 -822940 -771172 -281548 -284052 -153932 -955016 -779109 -839307 -611794 -849906 -759675 -18596 -932971 -870802 -852735 -890550 -935680 
-873573 -281591 -951645 -663882 -50623 -402219 -71717 -293726 -125952 -281637 -663852 -280504 -42434 -913855 -594022 -956717 -627505 -666699 -115251 -878546 -97004 -759568 -836732 -12487 -197343 -281490 -62486 -230630 -132449 -547989 -658212 -716592 -874301 -406870 -624480 -383241 -848519 -197021 -518838 -168119 -836794 -319669 -13718 -658559 -751759 -786126 -943909 -277668 -770417 -778436 -50077 -633106 -548056 -89254 -786192 -932296 -891663 -183938 -519850 -537866 -21072 -617367 -225532 -47396 -324938 -19730 -255047 -66692 -526369 -538272 -851522 -416444 -856666 -606747 -695516 -786308 -766103 -840494 -900904 -135263 -69538 -610089 -636694 -391025 -706123 -665443 -80122 -716583 -698943 -597388 -377414 -823837 -69524 -540800 -605685 -955664 -159160 -715216 -236168 -767219 -239743 -153138 -51863 -742835 -281212 -745482 -685447 -860000 -918997 -633210 -255575 -922983 -617405 -61146 -570776 -59916 -712318 -926361 -709007 -759666 -771052 -931239 -929303 -830087 -940101 -328337 -634633 -401607 -167762 -281197 -758217 -287890 -894587 -248484 -894211 -253495 -955632 -265286 -226083 -664660 -781959 -86782 -194046 -170803 -623858 -933011 -272408 -613718 -258334 -923103 -353446 -714893 -794614 -254736 -843632 -693721 -351000 -650249 -892049 -256711 -648599 -847753 -854323 -39369 -194083 -99165 -615734 -380178 -189263 -262528 -925925 -880273 -741646 -756527 -297302 -189779 -124883 -730077 -695172 -103723 -634120 -691813 -53872 -844408 -547318 -517082 -755969 -340783 -56143 -125967 -21053 -593461 -292532 -824761 -908916 -263419 -44819 -302996 -382683 -81717 -253770 -722266 -75288 -872576 -236170 -62986 -253803 -12156 -525196 -880225 -245003 -72279 -669970 -281612 -345484 -161119 -26228 -570979 -661940 -165897 -136319 -388812 -528286 -834750 -743630 -217973 -712206 -260918 -694109 -911186 -871486 -132174 -934291 -835101 -522309 -765021 -401381 -136049 -955617 -673853 -54028 -588202 -683927 -588485 -740339 -648954 -757522 -621053 -745434 -635787 -556331 -798112 -388802 -411163 -185673 -185493 -859214 -409522 -607451 -675050 -809327 -278746 -930969 -894190 -743752 -782357 -186860 -605034 -829823 -873393 -242987 -907568 -386664 -392421 -153109 -159403 -875299 -805574 -911760 -571967 -420168 -292712 -416898 -647013 -872884 -227183 -174459 -810809 -650899 -756326 -324615 -675763 -289378 -716716 -690583 -12935 -254892 -276970 -579653 -319787 -653011 -876086 -593890 -627865 -695105 -750531 -571423 -321039 -792813 -791164 -917493 -873847 -751717 -788705 -525040 -418969 -113369 -348582 -834860 -900330 -254195 -538186 -772591 -734505 -278048 -667197 -913214 -419102 -734945 -873842 -571384 -144899 -26222 -155590 -632226 -873278 -318850 -947128 -172483 -666398 -90357 -871851 -312200 -298631 -934848 -857131 -902121 -182281 -694075 -387763 -569188 -817555 -741722 -96879 -757690 -180769 -134345 -917227 -418294 -615687 -855423 -571772 -695748 -418933 -607188 -127723 -42414 -151042 -232987 -800270 -921557 -34493 -70354 -834667 -696531 -722575 -168169 -910235 -784049 -863139 -716703 -617376 -51574 -753189 -793814 -521987 -167145 -635475 -871279 -945357 -724298 -755684 -197204 -362533 -663899 -275251 -349221 -44373 -619102 -286450 -826220 -45970 -185309 -11546 -857027 -864188 -593141 -50228 -547351 -712633 -50813 -406034 -944993 -781471 -239397 -165894 -411372 -18912 -873987 -716864 -403844 -563796 -827889 -386578 -777747 -925648 -37128 -54744 -766691 -627981 -658322 -288309 -924875 -714787 -12093 -12489 -168151 -754747 -238590 -575266 -551380 -741476 -203485 -674210 -835987 -372183 -571114 -623460 -660997 -255159 
-376086 -766621 -802866 -872773 -394172 -365909 -822574 -570156 -681156 -669486 -627746 -874681 -99926 -21456 -31162 -81670 -394199 -152894 -785754 -338528 -177674 -663688 -344224 -605504 -655459 -793581 -795117 -225847 -942032 -690980 -830432 -782191 -586659 -845572 -633189 -28364 -802537 -637909 -891578 -945787 -760864 -754387 -785995 -570421 -659141 -762564 -769455 -893865 -268936 -243026 -141531 -108071 -79580 -179077 -885301 -752871 -745581 -305944 -232718 -648497 -523459 -627466 -68754 -12611 -261523 -803237 -803242 -908870 -605264 -696966 -726918 -40833 -25831 -741426 -303154 -576066 -527123 -793442 -316635 -802503 -34217 -178573 -347157 -255404 -844014 -812892 -199819 -26319 -403311 -26254 -255400 -107693 -870751 -627986 -135519 -612830 -696503 -155527 -524666 -616791 -43414 -534921 -245283 -770986 -927857 -518219 -932044 -562399 -778119 -152980 -144453 -277182 -863941 -863130 -804764 -721702 -342843 -343834 -693815 -525356 -624619 -34241 -677510 -229038 -926226 -86418 -869935 -713979 -827886 -836788 -67410 -770406 -359280 -693336 -955613 -628007 -928255 -71865 -627939 -786266 -869665 -864003 -167814 -954452 -828146 -353413 -644600 -813099 -939360 -538421 -152274 -272556 -835580 -706734 -602090 -153967 -544292 -147329 -17236 -205339 -394456 -854120 -20240 -956503 -786135 -847906 -153388 -311331 -568732 -353716 -571946 -954829 -757660 -570272 -603538 -779744 -340755 -254110 -534401 -564239 -810956 -779802 -302765 -291539 -693909 -419474 -26052 -417566 -118101 -152836 -255479 -706404 -90593 -163433 -760452 -243490 -535851 -138702 -372016 -55724 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/landscape_train.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/landscape_train.jpgl deleted file mode 100644 index 92c02e2fe2a759e4ffe8fbf3d4495dc9db66ea9a..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/landscape_train.jpgl +++ /dev/null @@ -1,2500 +0,0 @@ -696328 -862119 -879613 -384185 -281446 -628307 -830807 -757501 -388813 -69722 -851587 -639495 -346456 -836182 -893864 -860282 -168361 -683847 -903472 -652592 -327588 -292540 -521589 -916498 -571477 -16744 -564335 -544636 -756005 -765999 -286038 -894819 -750603 -940536 -53094 -172944 -672118 -954809 -330036 -671996 -66569 -540108 -167687 -669839 -33198 -762411 -272209 -419991 -244744 -832592 -518293 -726968 -716268 -631526 -81212 -395247 -52811 -253105 -953143 -172352 -26907 -90551 -594085 -948984 -276533 -664956 -693338 -243465 -185645 -784998 -814100 -26836 -848551 -784330 -365968 -204855 -824552 -349158 -674419 -693581 -907772 -765005 -185777 -693851 -728123 -591612 -117353 -568927 -648972 -703839 -633438 -84067 -287931 -697092 -816426 -116348 -604119 -581630 -955579 -891713 -759999 -681772 -80204 -657385 -836027 -569808 -14922 -161647 -535904 -616671 -52405 -192984 -687839 -771545 -129720 -668343 -912108 -77270 -84565 -755809 -872348 -407469 -911929 -710023 -314788 -807664 -32901 -617610 -923624 -834188 -850681 -593979 -123892 -420399 -918734 -417938 -851154 -74999 -415171 -809938 -276287 -308584 -767484 -797629 -126039 -835118 -170428 -375790 -834089 -675064 -366133 -880686 -649556 -666492 -911771 -932880 -81501 -837794 -298415 -824584 -633580 -79299 -953113 -801166 -155562 -800540 -234628 -61310 -378524 -873774 -344297 -368420 -253726 -117212 -13072 -387278 -693791 -322280 -911972 -32893 -37399 -328266 -147544 -939157 -843106 
-851724 -758414 -210764 -315784 -938626 -325406 -286775 -911504 -640661 -114806 -418964 -745416 -516979 -604893 -55875 -956671 -45388 -790226 -83614 -148513 -803575 -923938 -706747 -580541 -40313 -398465 -713939 -621051 -153022 -254983 -382663 -418936 -72166 -852584 -749385 -388890 -713541 -570335 -635866 -915547 -270699 -178580 -289143 -567802 -547873 -390479 -387413 -30084 -18197 -314775 -376720 -873743 -684292 -857052 -86027 -397401 -662658 -287990 -391303 -955675 -771111 -602820 -255150 -255371 -543138 -921051 -718291 -353260 -326441 -288681 -873756 -846209 -60640 -64620 -215903 -733895 -372517 -916296 -142127 -371797 -527777 -24368 -691649 -238681 -938330 -391810 -893857 -12395 -600861 -192719 -568278 -574228 -775511 -377442 -696616 -830248 -638116 -185344 -97539 -356865 -692179 -771960 -779725 -664698 -51885 -33042 -253318 -664827 -408832 -339566 -564600 -666574 -226178 -749930 -522884 -588719 -909219 -348881 -102979 -97412 -81822 -101513 -280620 -208543 -245161 -547610 -19753 -816326 -255496 -726977 -282870 -772189 -726938 -303563 -254282 -892460 -298711 -254955 -37988 -377221 -934542 -235471 -255137 -338282 -926528 -40478 -648046 -45073 -656288 -874121 -65884 -891522 -743001 -132680 -101375 -255488 -767700 -303717 -21914 -237941 -752142 -365724 -827878 -404792 -196898 -791760 -196166 -70486 -872653 -716270 -944489 -81291 -733646 -90716 -760882 -934509 -324969 -872707 -124125 -686937 -183274 -705343 -115705 -51728 -834855 -282122 -12376 -873176 -593569 -797783 -916801 -588276 -569396 -125111 -854611 -894689 -364842 -950545 -239503 -130579 -232289 -309965 -607218 -890835 -697016 -516492 -756340 -767867 -674142 -46302 -824967 -96406 -391314 -81956 -607143 -381339 -232478 -778399 -418662 -403248 -330547 -668450 -327932 -741412 -411751 -121760 -937856 -696899 -37494 -22896 -186687 -873135 -772654 -667377 -595511 -62961 -603876 -112418 -247813 -692265 -946643 -254261 -847322 -840297 -603365 -955491 -388653 -262763 -343622 -585466 -894640 -851313 -255423 -824911 -636890 -544295 -788635 -856940 -363474 -162518 -740984 -524973 -617779 -695416 -261974 -150508 -605740 -835087 -770319 -195956 -579086 -525061 -716677 -19662 -420223 -916520 -170610 -287559 -910336 -847136 -134642 -161633 -951751 -563879 -544050 -757930 -545208 -254846 -670042 -718850 -152783 -570427 -757452 -254592 -235255 -349433 -771880 -287948 -278201 -910849 -751595 -624303 -744369 -278391 -124753 -157528 -664987 -682507 -767029 -775704 -948144 -236518 -34042 -521917 -722265 -915457 -165951 -516302 -946105 -643518 -387503 -418434 -176416 -115665 -292507 -271660 -741867 -196206 -324748 -633947 -883404 -878529 -693926 -287753 -389022 -772561 -284672 -237230 -809484 -55694 -835130 -547300 -702618 -349543 -905772 -83705 -271893 -226904 -181683 -787907 -175885 -60491 -578903 -957587 -664437 -137597 -785576 -950495 -772566 -935712 -313073 -405724 -547405 -740267 -293156 -626541 -226903 -772192 -276781 -261951 -81820 -383235 -904880 -716431 -933640 -801899 -693484 -533958 -72046 -548267 -48803 -383596 -935333 -857676 -47055 -383560 -600222 -869623 -850938 -190278 -939170 -872965 -873022 -171867 -178367 -934603 -178222 -818665 -871552 -929278 -938199 -415892 -696485 -148425 -192209 -398616 -674268 -71236 -190253 -656974 -152292 -294841 -909118 -212813 -693328 -121076 -281429 -86597 -282940 -281917 -230507 -567918 -837776 -46651 -102973 -675589 -256360 -305166 -828923 -784997 -879593 -571739 -874262 -607251 -701294 -636972 -101924 -722216 -277879 -658622 -81807 -922220 -955597 -931977 -768591 -150719 -391357 -275946 -869009 -245130 
-930052 -693071 -20368 -116289 -365228 -830642 -803173 -932768 -419235 -390430 -909000 -761462 -237871 -851513 -17889 -62991 -256434 -173508 -879978 -847875 -132349 -841211 -391472 -590887 -268142 -888954 -75360 -152835 -897011 -180822 -349210 -749030 -397323 -634141 -61560 -275953 -279852 -348842 -935882 -394213 -147610 -251407 -727152 -601251 -768705 -195685 -742205 -254966 -571883 -75393 -394192 -880274 -13541 -43410 -850181 -365480 -602438 -50256 -910455 -314478 -769168 -761322 -706785 -361367 -570430 -951449 -926425 -873691 -846763 -909152 -912248 -930905 -704268 -147590 -34244 -699760 -875847 -176723 -756288 -694455 -66786 -625525 -13825 -872817 -37031 -730900 -96799 -358315 -632514 -23873 -288906 -873858 -758345 -226368 -950770 -743266 -349312 -344322 -308174 -117282 -260014 -886234 -341763 -155775 -547254 -264057 -665953 -810887 -695050 -151558 -838364 -137691 -95624 -185686 -24372 -851433 -767389 -836838 -891832 -923592 -279449 -365525 -735332 -523992 -239260 -623241 -64399 -252553 -60590 -579340 -277099 -304194 -823785 -770335 -626802 -244228 -765317 -947611 -53101 -31105 -730396 -101775 -197567 -922313 -921471 -697090 -201982 -161288 -842601 -923007 -676583 -116340 -50889 -146423 -753074 -162770 -418874 -113894 -824174 -762014 -827461 -13821 -13713 -884431 -373517 -906746 -365805 -227060 -662606 -72268 -946717 -255158 -129482 -848667 -820553 -739471 -257388 -316617 -51410 -671288 -604329 -12248 -237368 -857192 -66154 -543106 -93829 -638042 -331154 -634703 -722160 -872888 -692451 -54289 -281482 -917872 -681754 -256750 -824919 -18076 -386799 -263640 -636040 -770237 -658567 -876066 -86599 -693025 -85610 -926293 -677057 -605630 -55842 -948453 -790443 -784794 -680639 -21440 -228018 -541963 -634988 -803121 -255460 -134904 -356583 -14628 -871554 -72649 -330327 -206070 -418653 -563941 -118779 -217866 -593311 -276210 -37995 -25938 -402286 -945652 -230711 -690940 -562295 -244941 -253033 -769240 -382239 -281524 -298975 -13707 -386804 -294582 -874174 -860802 -262810 -230234 -386759 -356901 -255451 -293153 -51633 -66422 -33049 -775646 -129000 -664730 -72138 -717157 -411501 -55581 -93604 -859345 -898178 -392790 -818166 -571479 -704221 -657240 -652251 -148615 -921664 -687602 -84702 -150970 -740454 -825861 -324556 -521637 -171236 -328343 -888817 -696433 -153352 -244485 -368244 -519028 -674570 -253771 -938469 -570348 -682720 -153261 -607228 -523827 -852715 -889274 -49076 -18976 -605812 -894586 -542372 -25995 -590446 -136609 -567006 -750066 -241651 -563300 -833988 -666753 -77039 -758367 -810638 -18843 -716711 -828763 -164586 -528759 -851749 -381834 -287060 -821060 -242946 -782033 -115117 -230388 -727181 -761576 -700732 -847220 -403834 -131512 -875343 -744383 -730978 -658553 -97836 -282053 -680164 -312250 -837514 -656981 -42883 -945924 -664295 -286757 -264872 -548010 -817237 -748608 -696647 -21596 -873994 -103243 -733328 -937889 -766320 -957462 -228253 -920431 -318954 -101047 -37011 -521697 -869993 -239518 -932544 -580399 -785471 -786117 -716324 -300056 -759588 -604067 -570017 -230245 -636034 -66435 -946946 -391745 -849879 -52040 -633290 -920602 -253392 -745161 -230286 -282809 -778529 -772545 -289163 -258718 -330306 -140800 -121071 -668719 -581199 -388578 -80034 -735711 -277142 -714856 -191414 -520579 -830622 -356944 -420092 -33453 -596567 -825869 -664893 -769190 -164383 -807452 -116549 -908585 -50065 -404879 -412875 -758077 -109470 -812576 -606349 -781734 -228158 -253303 -857024 -330730 -101196 -812435 -103240 -725602 -759340 -779153 -825043 -955346 -594036 -868711 -259504 -546919 -419131 -851573 
-928199 -359091 -521486 -183189 -403182 -794396 -393392 -294603 -208663 -658711 -419117 -604141 -785823 -342741 -388214 -255252 -830596 -696217 -41278 -391711 -386340 -541772 -569234 -774445 -85907 -947041 -245105 -53940 -818337 -763627 -588050 -60809 -937162 -652479 -846666 -377447 -695397 -591730 -810918 -906713 -905393 -941661 -19947 -371338 -636763 -263965 -906843 -652892 -836665 -121922 -180204 -308931 -231102 -672787 -615744 -390648 -853043 -659487 -541722 -84591 -384450 -818748 -281718 -757824 -750538 -733078 -876126 -894649 -750366 -894627 -772635 -691688 -408761 -359811 -401417 -115196 -358806 -814204 -795598 -759560 -269189 -579014 -912171 -386643 -881621 -30492 -921638 -899319 -278531 -730229 -55560 -211895 -179290 -139357 -183009 -382055 -162113 -691853 -753913 -856128 -757021 -623737 -772355 -636114 -799430 -315439 -282597 -64324 -46089 -683879 -785679 -569798 -930536 -403022 -601275 -938427 -29492 -823938 -160471 -324644 -777390 -834286 -837855 -770886 -26224 -40779 -853096 -365850 -724156 -282032 -855671 -891278 -65885 -525132 -774992 -55629 -155754 -954476 -807343 -914490 -126065 -18181 -571403 -759729 -122171 -825633 -932104 -524761 -944280 -938073 -566457 -382972 -419943 -546474 -168245 -931705 -113565 -18009 -266046 -600130 -579827 -55765 -33367 -113561 -728586 -417361 -245186 -376419 -850922 -875255 -292578 -831600 -525035 -767607 -635525 -62432 -841434 -119385 -934069 -294745 -255565 -684626 -143456 -204143 -621390 -619797 -381395 -314544 -176548 -835011 -686456 -664462 -782218 -297569 -873642 -238755 -54642 -29299 -921719 -750201 -278072 -834029 -603615 -365309 -772432 -635461 -945753 -313224 -176528 -741685 -521195 -812608 -263751 -524520 -255121 -37762 -662674 -286272 -418680 -526617 -218055 -627115 -910909 -829525 -341755 -788052 -255277 -626462 -197967 -680742 -669131 -696904 -185963 -298648 -570818 -934375 -173597 -835078 -47606 -919314 -281052 -830481 -197313 -874098 -706920 -847223 -400751 -635834 -760889 -525373 -102741 -887417 -523814 -627144 -835081 -332247 -853592 -340550 -113508 -927386 -323588 -170466 -401459 -363788 -777446 -350938 -632687 -852991 -41344 -705825 -848563 -344472 -932119 -760246 -693242 -26923 -153179 -151372 -822960 -78841 -942268 -280780 -259858 -570532 -857589 -45141 -243557 -89184 -324110 -685237 -75395 -404994 -160620 -927196 -108992 -23788 -658421 -579940 -625763 -756323 -228129 -864387 -73301 -954403 -603893 -78903 -281988 -912247 -227180 -759328 -636000 -522182 -677389 -870853 -802892 -901641 -245497 -126580 -255195 -661712 -749061 -801542 -887317 -245025 -635088 -279454 -294112 -120601 -936753 -381862 -761132 -356110 -549385 -20267 -262470 -631876 -790266 -328192 -42174 -254515 -133037 -822995 -81682 -20342 -759160 -923677 -130839 -834312 -874084 -640481 -418975 -909302 -362235 -898099 -159231 -541266 -101641 -387863 -949116 -235515 -133081 -289238 -667348 -817118 -747266 -257326 -745946 -932670 -158535 -546993 -794970 -196355 -706942 -756947 -625242 -254571 -924529 -874097 -934620 -288611 -101700 -831410 -807342 -258362 -742865 -955303 -46524 -388987 -799241 -90426 -593671 -244283 -945394 -90545 -311809 -931580 -926239 -128244 -67725 -786205 -401839 -666401 -607444 -397810 -695704 -382721 -55499 -416901 -329257 -854317 -63125 -521502 -314507 -403711 -927218 -273102 -776796 -254295 -544223 -936893 -855654 -905203 -632450 -388781 -693394 -718684 -544513 -154965 -770675 -784346 -71202 -588655 -870326 -297063 -752332 -690494 -36412 -83582 -405420 -298716 -185597 -574985 -20254 -163612 -956489 -770662 -100542 -14688 -758490 -144425 
-232915 -181283 -394347 -196251 -391371 -195436 -159682 -166043 -298335 -626699 -913799 -287069 -758271 -658477 -745095 -575211 -632314 -11795 -692760 -287610 -297456 -417055 -264036 -552731 -830726 -768817 -160322 -564638 -357223 -128416 -392651 -840623 -277955 -99800 -393793 -64886 -605366 -589381 -33108 -216104 -818190 -70662 -843009 -663023 -938601 -420052 -570117 -943050 -71761 -116081 -384451 -830079 -382734 -301185 -23629 -412065 -332119 -803487 -917443 -745150 -134626 -397813 -420178 -418778 -349598 -84913 -254417 -751987 -60896 -114795 -684420 -834751 -41989 -244613 -942409 -70946 -948833 -287409 -921317 -53666 -581020 -523250 -405697 -707075 -119534 -38168 -280475 -394520 -394154 -900553 -167494 -134782 -138644 -523762 -581845 -228068 -168416 -281736 -874005 -397393 -768963 -343538 -180598 -253579 -372551 -828804 -72322 -39528 -593481 -254859 -161167 -880101 -127977 -13603 -229060 -279047 -622745 -916930 -770783 -115958 -244665 -631686 -647996 -52105 -292768 -848854 -404024 -33111 -747428 -238965 -397726 -913761 -828509 -568935 -114832 -404498 -31125 -639950 -56144 -621719 -116338 -773161 -832074 -121944 -735555 -782258 -253021 -741445 -738099 -663713 -168064 -579266 -714609 -165365 -935556 -108832 -952156 -674539 -245466 -386617 -810906 -546561 -892923 -734577 -132622 -162398 -945002 -244352 -239334 -799444 -18838 -523349 -674175 -747949 -55683 -12774 -545475 -382865 -122267 -31121 -396485 -108023 -570318 -894765 -735659 -954236 -24636 -338285 -322356 -848538 -522464 -155377 -741435 -26102 -735274 -747613 -683304 -886195 -412172 -627752 -735206 -294959 -817917 -697022 -879148 -686833 -591183 -749889 -693673 -93867 -907761 -745444 -866835 -706813 -21989 -289264 -250510 -346896 -758152 -617870 -915615 -696057 -75743 -873861 -692951 -234020 -786190 -676355 -938993 -30847 -321927 -243705 -803450 -282685 -795375 -115312 -640860 -282835 -939834 -211464 -253040 -605729 -938296 -696216 -101824 -954976 -773262 -785913 -40673 -85828 -303826 -94794 -573041 -696929 -697025 -648520 -703722 -310659 -254961 -716561 -403865 -419161 -322455 -418596 -229811 -720148 -282672 -114714 -683771 -520712 -578762 -46146 -278039 -824634 -766039 -878895 -588279 -225842 -931953 -261699 -910407 -707456 -210136 -807137 -951656 -812714 -624936 -610720 -106341 -872896 -664784 -398545 -165878 -26300 -793220 -923867 -945724 -955326 -570696 -121054 -892979 -339128 -418382 -55701 -732467 -839716 -340534 -822283 -172156 -781382 -658765 -943915 -552736 -775660 -795093 -692662 -835718 -812519 -840003 -755006 -24905 -605978 -134722 -787814 -273957 -631892 -315328 -49481 -81455 -922768 -659963 -255254 -165919 -758544 -915933 -785854 -321758 -572005 -286769 -617396 -364540 -931472 -37264 -533583 -535515 -147309 -182790 -782183 -741629 -786644 -167996 -634561 -114586 -411218 -56031 -828692 -172108 -755031 -325432 -33500 -593918 -48165 -900764 -704063 -521055 -302756 -834765 -54809 -130845 -97461 -67224 -717147 -954835 -742323 -745915 -691871 -266285 -663691 -341305 -835060 -693890 -521919 -294896 -693698 -395375 -893891 -687471 -536725 -332309 -754971 -30452 -545215 -915144 -871370 -345477 -786268 -308001 -281317 -121460 -535474 -633837 -282007 -20154 -45116 -102999 -676449 -233775 -153146 -55032 -255469 -844226 -911909 -80280 -164836 -695439 -305669 -420444 -905351 -153427 -72071 -931762 -891879 -657110 -674679 -756044 -259337 -693965 -914964 -350581 -872315 -857823 -253351 -92544 -163267 -24951 -418752 -944628 -857533 -612549 -850545 -55890 -951036 -679295 -801176 -894543 -794792 -887230 -365406 -618368 -696693 -772519 
-824972 -900122 -71571 -594769 -832617 -880178 -217590 -135145 -365803 -889057 -72135 -340578 -263847 -932968 -935112 -239408 -254145 -650645 -822613 -394312 -571912 -349299 -13202 -617196 -195484 -874205 -785312 -396871 -666188 -917668 -618189 -879372 -652070 -408355 -52345 -800982 -226185 -800987 -314893 -325486 -920980 -892463 -161033 -817424 -750060 -758277 -638962 -263598 -659679 -260247 -909651 -331669 -354377 -767521 -346152 -590936 -873021 -366155 -96317 -535312 -943139 -418867 -31292 -854207 -288945 -648772 -795479 -381650 -69939 -806257 -285525 -349123 -415665 -766170 -916354 -181017 -398012 -254842 -745230 -850251 -786011 -147569 -920234 -696581 -280521 -573029 -159226 -349203 -308889 -906199 -923174 -165041 -707459 -778557 -610781 -836069 -546802 -768994 -242905 -919922 -86338 -398433 -542459 -89044 -246050 -696509 -60587 -54378 -745853 -146950 -744421 -163295 -759268 -571795 -725296 -925348 -821847 -369613 -527519 -243946 -712250 -608673 -308245 -607157 -618584 -955527 -696564 -134201 -955170 -798014 -634682 -846265 -734188 -101573 -605642 -546166 -86085 -606763 -707416 -571989 -828774 -544172 -864587 -356170 -657301 -92965 -819688 -195724 -591602 -753099 -254808 -759594 -387857 -868660 -418487 -142104 -886473 -765559 -933055 -938477 -873296 -823036 -916299 -801036 -24341 -150180 -605323 -233926 -403655 -766468 -384132 -716363 -71999 -777131 -933105 -254618 -708037 -570848 -635010 -892356 -279754 -118623 -828521 -180981 -173419 -745182 -800894 -26635 -406231 -96376 -52270 -921874 -245009 -242849 -60595 -302962 -551874 -563985 -956516 -758016 -418948 -649052 -837978 -916106 -244465 -852028 -909182 -593971 -546138 -571458 -336979 -895212 -362207 -415650 -402535 -343943 -695830 -732641 -835111 -735344 -246283 -375802 -696519 -953550 -357236 -568339 -745621 -119920 -725291 -275955 -404039 -652325 -628727 -389870 -807166 -24456 -232899 -766291 -550700 -914212 -96974 -364902 -284859 -60321 -332285 -155638 -132991 -122028 -833513 -864980 -606310 -211759 -772567 -900837 -101741 -365633 -837939 -365224 -117206 -956063 -797287 -232751 -207942 -918505 -255401 -70307 -636014 -751624 -376610 -115063 -872748 -200435 -617323 -43489 -824863 -161234 -239498 -742930 -294191 -664151 -402385 -873768 -607322 -160371 -46448 -772310 -803326 -736654 -388981 -891024 -254588 -70722 -388623 -795365 -244905 -838194 -322583 -338809 -157458 -255143 -285901 -67441 -563536 -529317 -244786 -377303 -64897 -896532 -758001 -943228 -909161 -645580 -734797 -388656 -52364 -753064 -760507 -914043 -655612 -821118 -420126 -879465 -840372 -880268 -605886 -188667 -908264 -20414 -741323 -319395 -831789 -785826 -383437 -761499 -372417 -168606 -30741 -114480 -420124 -783825 -72249 -272337 -921049 -387729 -419993 -521732 -35536 -756311 -398376 -734479 -785342 -280489 -288337 -829913 -255042 -873640 -54808 -271039 -693712 -649782 -569842 -547840 -365296 -730555 -417720 -627421 -261997 -704196 -141593 -96410 -244880 -873327 -245469 -96903 -628010 -873405 -881860 -239533 -714380 -523638 -547381 -817454 -658266 -878838 -604114 -938016 -696994 -828141 -54665 -826222 -767452 -615286 -805265 -401752 -542201 -420199 -34301 -731128 -171514 -134077 -93233 -945805 -398127 -103112 -714034 -244867 -927404 -887450 -856354 -887336 -274636 -99059 -604084 -303646 -238789 -253572 -891676 -778046 -843456 -81942 -545949 -17294 -239541 -340015 -403787 -284824 -11040 -402236 -931949 -837713 -277058 -254646 -52398 -696547 -252643 -141761 -239461 -801437 -762360 -563601 -517028 -714209 -200360 -536641 -807354 -697032 -874529 -297598 -586383 
-653134 -547336 -556463 -185735 -782170 -766228 -20207 -618975 -704835 -714006 -190993 -819849 -160633 -349163 -56114 -254636 -955531 -141146 -744937 -794925 -294965 -917763 -873800 -315595 -932736 -608676 -693929 -161066 -541453 -280102 -657388 -934217 -417052 -955532 -851535 -856995 -116624 -652321 -805015 -323490 -361691 -803028 -848383 -751995 -255224 -820502 -941335 -237329 -408351 -254564 -223161 -314611 -200398 -906767 -409138 -394507 -668517 -696919 -151708 -838247 -224516 -939266 -368863 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/portrait_test.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/portrait_test.jpgl deleted file mode 100644 index 119e44aff936702096a250012dec90beceadd030..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/portrait_test.jpgl +++ /dev/null @@ -1,2500 +0,0 @@ -362442 -480256 -354289 -596912 -230198 -650838 -195945 -418119 -714829 -848487 -303983 -807711 -895200 -289414 -256173 -472005 -682441 -382333 -437732 -814619 -826084 -841249 -132327 -190134 -488998 -208180 -435772 -780362 -889192 -141687 -419948 -131136 -75914 -124492 -10133 -424969 -743026 -836046 -148748 -845565 -529314 -757424 -807995 -242088 -682620 -343116 -391560 -886616 -468993 -808172 -567005 -485411 -420183 -334149 -948961 -756278 -340455 -751466 -834374 -591748 -704478 -216065 -663524 -371844 -840477 -112172 -770297 -818945 -756258 -808049 -947157 -261908 -386099 -797386 -527672 -327179 -888744 -77024 -321020 -427484 -935346 -940332 -941591 -412164 -25035 -448331 -485573 -921029 -664876 -751285 -543950 -115898 -280695 -168236 -630625 -627079 -321315 -747893 -334877 -110344 -401762 -880131 -820055 -64280 -659310 -481464 -24814 -627985 -740686 -738160 -436888 -346105 -296832 -67937 -35925 -84780 -185283 -518978 -77636 -641003 -551288 -11971 -626937 -743467 -908373 -654778 -473544 -4423 -810774 -757414 -427469 -519094 -10204 -293069 -591780 -448260 -732884 -473144 -562478 -899385 -482716 -449835 -948744 -25133 -438090 -449468 -418660 -650734 -784558 -657104 -203336 -844335 -99223 -886796 -460420 -10149 -433472 -14905 -634368 -163102 -534545 -956640 -810904 -1850 -439513 -485550 -349703 -673173 -166766 -850970 -556829 -42230 -158446 -24327 -822942 -44253 -332168 -704875 -656025 -846091 -681608 -100207 -144850 -304868 -146194 -685467 -316895 -368385 -111482 -887731 -682971 -436034 -101908 -257307 -781535 -175404 -287488 -386312 -166408 -640944 -401899 -932041 -257588 -321408 -880026 -756675 -827226 -111727 -306769 -717672 -449773 -920119 -389138 -94101 -9510 -122076 -515804 -103920 -450467 -15813 -785219 -524384 -190008 -10012 -44380 -939795 -629951 -894339 -139240 -112238 -854150 -117835 -436275 -172935 -529073 -127606 -851670 -450833 -426526 -878454 -825657 -167727 -635972 -470539 -420015 -22429 -755925 -555777 -476948 -85622 -661431 -443062 -770581 -609897 -321967 -433204 -727092 -630735 -834202 -368961 -737906 -450672 -433550 -814504 -573065 -698739 -764458 -38427 -871233 -366777 -321352 -268214 -699153 -448536 -353624 -445158 -921280 -717695 -940605 -673922 -449298 -895610 -179301 -113481 -38740 -132881 -940914 -337435 -232774 -750801 -935897 -693033 -444059 -301725 -780427 -104435 -856008 -418366 -10240 -229944 -528801 -442249 -941593 -158592 -447866 -519022 -499091 -634501 -174289 -63838 -146882 -38367 -403497 -691483 -368498 -842731 -688874 -682105 -266127 -489152 -358460 -334105 
-887398 -567753 -888 -15197 -607167 -172543 -9907 -552871 -588649 -727016 -851233 -880470 -437171 -630162 -537973 -25185 -677843 -410792 -751088 -314495 -281131 -556230 -302838 -563315 -473074 -575995 -665802 -443933 -16626 -658330 -772436 -335659 -727063 -880214 -105825 -129566 -895419 -603771 -560645 -4187 -652896 -759161 -319824 -322002 -164695 -929448 -728684 -643061 -772434 -932727 -550665 -490827 -112365 -422320 -886844 -687090 -92203 -556383 -431640 -824875 -610188 -319730 -437418 -10243 -146842 -10510 -83825 -420118 -593495 -141597 -737594 -449124 -3504 -933182 -650563 -321833 -777509 -11771 -828101 -830650 -276085 -289161 -64459 -725369 -779226 -218873 -84474 -864970 -387143 -485051 -64481 -438927 -815855 -576690 -660456 -755599 -154942 -870840 -126224 -298925 -818296 -292482 -346780 -449717 -300421 -98992 -212927 -573148 -334524 -588240 -838661 -408308 -470195 -408224 -508574 -679016 -150703 -145618 -422363 -683167 -580455 -940935 -88579 -438012 -821930 -172960 -468844 -306619 -198619 -448409 -404452 -268527 -335672 -630800 -420842 -121269 -422695 -287968 -938424 -331223 -429980 -326397 -173779 -372238 -802562 -625443 -722261 -265396 -890941 -619478 -818498 -420641 -169853 -10440 -823043 -136329 -860711 -674968 -320014 -391312 -546586 -663904 -649219 -448049 -743174 -685508 -802863 -825522 -252218 -593829 -10189 -309301 -176129 -297797 -109812 -519569 -483609 -96374 -455853 -952695 -488002 -545121 -679923 -227989 -747365 -405611 -612933 -450595 -36441 -760343 -772196 -273496 -449369 -483633 -650798 -751404 -681667 -690497 -721673 -10193 -437076 -623099 -298011 -335510 -427597 -437572 -25086 -845072 -675058 -714834 -820201 -24633 -200253 -210724 -188433 -606881 -857156 -134498 -802230 -868998 -847313 -808281 -280349 -695451 -769145 -210131 -677996 -590367 -474231 -948680 -623079 -385813 -450492 -682200 -757469 -258190 -158123 -385595 -534378 -167798 -924857 -53003 -300015 -894787 -746194 -448426 -892555 -588036 -701192 -824511 -658772 -887726 -938949 -763882 -25214 -483761 -757557 -36800 -903481 -473860 -799655 -474213 -943691 -181370 -587832 -261162 -25433 -182716 -836815 -482691 -524857 -40576 -910676 -635958 -810876 -673020 -946386 -321821 -153602 -533187 -943006 -796183 -672410 -338218 -120576 -786949 -442092 -232728 -172974 -723992 -248518 -271971 -843089 -51370 -834201 -689798 -439724 -782221 -863364 -563538 -619291 -616852 -607349 -65737 -321368 -388983 -156242 -404928 -82206 -77364 -539372 -912241 -749984 -851255 -142102 -735044 -569376 -761549 -772620 -577180 -100830 -162569 -708482 -452846 -561640 -772073 -485331 -790358 -9941 -554028 -479836 -306633 -863200 -742732 -485772 -103376 -422455 -259418 -425280 -309451 -437746 -855441 -772710 -770179 -145678 -38085 -122656 -404204 -482040 -634586 -553980 -944771 -826210 -249297 -842140 -107650 -517048 -755360 -906712 -353216 -795712 -419950 -475256 -649404 -328503 -757474 -407394 -25199 -419498 -114138 -873346 -524549 -679921 -297025 -457780 -125870 -16662 -911376 -940342 -146872 -77398 -944721 -412223 -570614 -728936 -874358 -597503 -875905 -763481 -221022 -202331 -826080 -913797 -705054 -705166 -229981 -618202 -124253 -515488 -251485 -772033 -425396 -807940 -86305 -92937 -7339 -682450 -835220 -743427 -85176 -281755 -459551 -295627 -340784 -653049 -135451 -111703 -347542 -631004 -689791 -770626 -15040 -507471 -49435 -915135 -816576 -594833 -342519 -546178 -415274 -792358 -475551 -111728 -614609 -48799 -744423 -8921 -284843 -427417 -852066 -229663 -85938 -25130 -186065 -929602 -10571 -248981 -940841 -659835 -910052 -660078 
-467225 -506339 -421058 -329164 -630696 -166474 -420987 -450294 -791218 -374466 -872344 -24828 -441464 -36583 -727203 -420472 -756953 -12953 -935583 -300676 -398481 -86409 -790838 -714895 -887306 -335602 -141466 -813296 -116408 -546902 -450763 -304449 -348988 -24747 -607138 -945537 -737854 -229841 -634259 -260489 -259765 -481119 -56705 -319866 -288397 -647888 -684732 -831428 -378683 -138736 -229748 -821226 -369769 -689627 -555286 -778191 -541704 -887351 -170900 -54418 -299994 -447872 -62994 -756922 -145742 -146543 -672003 -145269 -481149 -44049 -436811 -504838 -280576 -395378 -485330 -851470 -302462 -333367 -321222 -248753 -554577 -69732 -457030 -743429 -421036 -778487 -770373 -223750 -919168 -283282 -843733 -656463 -664223 -303676 -857514 -120575 -640590 -853961 -926478 -90394 -380944 -671860 -23454 -91240 -437291 -490779 -726942 -760407 -569770 -742712 -681080 -27223 -300851 -802075 -426379 -650359 -44712 -62772 -437613 -77060 -949033 -642880 -285796 -432546 -825905 -533114 -843270 -906332 -251582 -596109 -689797 -64550 -626705 -81543 -736277 -801387 -528583 -921483 -84970 -284602 -806178 -738897 -756262 -349589 -485855 -879964 -880023 -573717 -698497 -192422 -947207 -924018 -381018 -59097 -919956 -200770 -711648 -223027 -420504 -682740 -879712 -407497 -505622 -426163 -689274 -800857 -673881 -234414 -24583 -825893 -910392 -276040 -455465 -42812 -154798 -343079 -682616 -86700 -341482 -417780 -31468 -101443 -777163 -682976 -711921 -910405 -439425 -729141 -356524 -111389 -309933 -488068 -62484 -564511 -115820 -546113 -1620 -561655 -680108 -605705 -641631 -704877 -382800 -349581 -879563 -24764 -436461 -917191 -952453 -751780 -418973 -284622 -260082 -933128 -504448 -264539 -369477 -218566 -504127 -70567 -448194 -452845 -60434 -910386 -449198 -309917 -484994 -836845 -21785 -283191 -545231 -895415 -578432 -735925 -38447 -658800 -349390 -174813 -755259 -689894 -287185 -262614 -554811 -249383 -105071 -434207 -555320 -556392 -100989 -286796 -833236 -462052 -432900 -830809 -429181 -386165 -449763 -280470 -869420 -390306 -105162 -925703 -32859 -450588 -472718 -461015 -672658 -136689 -334573 -514634 -857358 -763631 -743550 -449467 -757828 -892827 -893604 -743571 -477129 -171926 -257994 -808282 -717664 -627253 -928205 -844598 -390597 -24391 -389137 -451953 -287023 -728666 -870155 -609837 -328049 -25244 -732471 -844174 -576279 -760213 -667212 -301823 -448911 -328803 -952426 -957271 -642453 -556882 -630244 -151843 -67720 -474158 -739564 -761544 -244186 -313288 -166871 -137713 -788293 -383810 -528526 -425200 -334575 -725882 -684689 -743024 -803281 -680563 -335903 -501958 -431756 -561611 -229864 -38738 -579732 -57324 -476835 -402564 -156227 -875170 -387805 -639572 -822406 -460159 -556733 -594046 -887564 -757053 -372423 -482265 -436421 -946844 -436354 -143094 -125430 -158803 -319974 -78375 -477467 -895465 -554512 -649248 -751527 -795493 -635960 -485719 -887105 -486638 -334338 -871305 -594043 -787610 -340599 -356152 -484930 -86402 -281846 -922996 -879805 -229062 -555955 -674529 -940896 -609517 -419882 -620615 -55135 -672856 -619922 -478680 -177466 -846668 -179993 -630506 -675222 -476970 -650786 -298727 -299439 -843203 -271253 -418437 -32361 -649805 -10187 -335506 -271000 -726125 -605836 -629881 -841789 -39480 -892993 -919718 -269535 -111589 -85894 -865417 -311624 -535387 -819640 -300460 -460384 -833291 -436797 -807590 -223975 -616201 -264120 -844977 -420959 -80972 -747852 -757854 -743585 -541934 -16549 -240560 -25288 -921988 -947916 -778490 -673895 -321400 -436959 -368575 -232230 -467783 -368707 -391740 
-882210 -167153 -810209 -829322 -515964 -301007 -219696 -416251 -594780 -12890 -284785 -462139 -761901 -804346 -867118 -236346 -698109 -682557 -98850 -121658 -280809 -423503 -303012 -228388 -879484 -180798 -94820 -611524 -397682 -165452 -375377 -410724 -424106 -682909 -594871 -226982 -841733 -945024 -111684 -483665 -678333 -658555 -593550 -230091 -120618 -533412 -742135 -135034 -742182 -320581 -461822 -682928 -641154 -650883 -418651 -12209 -24638 -323551 -926163 -342821 -625117 -640187 -274841 -435968 -230059 -619823 -288453 -337506 -698257 -904050 -279599 -954234 -164756 -643148 -421056 -853652 -917063 -569432 -661461 -211549 -554515 -829132 -145121 -180606 -732264 -344211 -381904 -497271 -771822 -447848 -594734 -942362 -716484 -863356 -117180 -438101 -420541 -679816 -875074 -322584 -741561 -448446 -349608 -151793 -724158 -322969 -202775 -234179 -116337 -111845 -787019 -878470 -689715 -587927 -287030 -539208 -149503 -98408 -477084 -553839 -906391 -487897 -720465 -682402 -684116 -120366 -921982 -64598 -262315 -406987 -833850 -234461 -403620 -816597 -577316 -443003 -192339 -24957 -165639 -768576 -188811 -619493 -736607 -220306 -118697 -485868 -340483 -283476 -99090 -878755 -319761 -357287 -827616 -498642 -174841 -541987 -232082 -880199 -22700 -103475 -847191 -39373 -418282 -937895 -636774 -260615 -335328 -111659 -439028 -333949 -725996 -55483 -713679 -275323 -458769 -10586 -41994 -448402 -802608 -935070 -894796 -173014 -493797 -432414 -167130 -25249 -636306 -793478 -15918 -827425 -676179 -532187 -462141 -764064 -332450 -742856 -773502 -854118 -285100 -808286 -27717 -145067 -682539 -431266 -426818 -870270 -608690 -399666 -64499 -567527 -200174 -234491 -460610 -427376 -489391 -422416 -279842 -167706 -569906 -35605 -111612 -563969 -145799 -349182 -388984 -736613 -823361 -151411 -659152 -562292 -833669 -487259 -890831 -305806 -402310 -810902 -674858 -693274 -926539 -309729 -742118 -792104 -248524 -795172 -12010 -851844 -285597 -848369 -84917 -769898 -12946 -85698 -153958 -135127 -449180 -674543 -155844 -588516 -17493 -590597 -104073 -808273 -154811 -840145 -182950 -682863 -537829 -925861 -737888 -622887 -444610 -67455 -334545 -844299 -725523 -333559 -8750 -943925 -145676 -451581 -22780 -617446 -640299 -190118 -953627 -934567 -844415 -157357 -855729 -879520 -64167 -763491 -278900 -167078 -30105 -416737 -894782 -422531 -280748 -1930 -388814 -336253 -321727 -39068 -682614 -819410 -340476 -312675 -331172 -65463 -600560 -361459 -779999 -454616 -77374 -572181 -448922 -98348 -321879 -919178 -126095 -908078 -55077 -339389 -167754 -494694 -751291 -278666 -667250 -738221 -229937 -47967 -938513 -228850 -278124 -162812 -319490 -651844 -910772 -741570 -356496 -216940 -756274 -891051 -942865 -925601 -943429 -153231 -369765 -693258 -78704 -453902 -725401 -86429 -109656 -248405 -34475 -61764 -134665 -614349 -918496 -717738 -742005 -650762 -274882 -158930 -472300 -762769 -641129 -432715 -566971 -94916 -556085 -314252 -295733 -11680 -25258 -232225 -121378 -123314 -420463 -934377 -492001 -42867 -135322 -642711 -768644 -86446 -874740 -623879 -24723 -100813 -247684 -933091 -874987 -532269 -177875 -808107 -285354 -116960 -87106 -98480 -930750 -682563 -559437 -61070 -617927 -515866 -631887 -693333 -807818 -706173 -225415 -510918 -871585 -468843 -682961 -101919 -24650 -827683 -200727 -817254 -427418 -416149 -505709 -772130 -579879 -61893 -204922 -228318 -382187 -261418 -676307 -734089 -315567 -451305 -413435 -419285 -880419 -230299 -686518 -546301 -33177 -313616 -279015 -667291 -764912 -437438 -163763 -603078 -139497 
-770153 -578033 -440147 -680958 -619896 -640019 -843849 -891920 -273319 -63865 -883898 -682930 -142986 -777369 -88366 -25279 -721318 -173436 -737065 -337429 -592134 -808939 -305972 -895533 -314285 -335306 -762283 -332601 -916312 -333889 -248785 -799135 -570624 -427588 -724112 -669013 -226684 -89224 -298214 -109615 -939256 -623131 -555184 -455784 -264077 -372103 -111558 -544876 -63864 -681136 -181025 -475373 -287093 -673293 -47242 -906315 -448484 -281264 -460210 -618344 -555170 -826229 -833633 -289355 -616479 -426347 -702957 -7007 -615148 -427010 -681743 -698881 -350421 -262355 -101033 -946772 -641339 -638329 -157299 -770087 -329426 -446744 -87007 -800937 -420872 -420450 -225864 -368698 -248080 -485723 -281630 -554904 -535472 -447912 -95051 -550214 -832928 -609701 -947154 -720408 -286354 -659243 -485274 -121243 -482161 -895510 -640639 -650522 -180205 -670032 -262251 -873541 -384694 -151122 -412730 -18049 -871011 -111745 -273132 -733784 -338125 -449359 -671430 -838454 -64502 -257995 -619636 -375586 -174505 -139358 -895386 -391322 -454225 -875199 -305428 -425475 -77640 -681169 -485313 -427356 -66144 -843309 -855086 -148702 -801464 -404808 -927024 -440624 -106872 -918235 -736460 -330795 -85934 -39420 -857173 -19710 -828745 -244175 -303957 -871188 -905579 -798827 -733550 -404015 -114351 -385967 -73492 -807624 -399227 -387401 -698924 -827625 -941928 -427061 -329153 -109041 -845015 -220336 -432527 -485179 -420664 -629991 -285341 -200584 -448787 -836872 -771718 -64618 -460848 -320738 -408523 -52534 -334037 -447187 -606154 -32864 -552952 -436173 -648639 -289298 -792149 -411103 -227640 -659335 -298585 -549084 -188755 -101867 -11895 -407432 -832327 -575411 -888367 -555427 -807422 -22734 -146499 -435229 -424984 -952312 -274729 -54725 -334587 -711939 -751347 -187296 -365089 -878512 -741411 -450657 -361851 -333620 -853936 -606150 -801151 -682688 -761912 -556997 -899519 -336060 -301614 -62088 -295886 -820095 -220734 -687868 -534494 -99058 -627426 -231181 -753949 -86271 -879878 -111397 -819787 -490377 -717741 -300848 -417619 -544119 -594019 -844956 -247964 -605984 -808248 -43203 -832990 -427243 -45692 -88174 -727027 -233260 -659393 -458688 -426557 -628421 -328032 -534704 -879301 -157873 -32570 -424557 -11944 -689699 -682647 -613711 -342712 -858885 -630586 -819936 -145734 -448620 -630814 -190958 -303445 -229000 -545240 -108537 -319522 -524371 -230149 -918760 -472446 -717033 -65805 -818787 -448568 -610737 -693095 -937394 -477127 -32783 -320077 -620747 -642993 -248002 -284677 -257553 -580006 -321576 -599224 -379502 -770262 -309244 -847194 -918470 -447876 -682000 -816178 -757543 -427594 -777605 -479725 -760774 -630481 -69346 -43274 -177796 -56685 -828065 -438011 -111789 -295908 -273544 -606702 -490768 -948650 -192599 -226760 -70471 -62522 -64519 -24684 -491108 -117119 -258782 -596784 -633304 -299965 -11410 -682288 -26953 -756538 -438032 -362161 -447171 -448720 -37212 -144301 -202301 -686173 -296930 -592137 -323053 -142815 -105136 -422646 -633476 -866059 -554271 -368351 -158464 -408550 -567372 -703906 -250590 -472404 -930516 -925673 -435008 -439974 -25047 -334080 -763634 -829204 -879059 -932753 -272745 -64422 -69649 -657938 -425973 -407614 -954771 -232996 -905006 -402745 -751928 -295099 -170195 -298166 -774724 -287345 -846482 -554523 -123742 -809496 -448540 -262833 -756377 -682393 -845047 -768679 -162322 -132121 -77847 -556798 -763703 -555569 -675003 -587917 -451467 -111801 -36844 -369746 -449988 -807954 -556405 -145552 -81766 -343638 -26288 -258079 -7822 -486222 -2920 -182646 -162559 -819660 -929271 -564503 
-263025 -314855 -230231 -682367 -606603 -283643 -808104 -226820 -95057 -488142 -23826 -9447 -391651 -121202 -858654 -693623 -844109 -755242 -77985 -322008 -681076 -42728 -684279 -691259 -290202 -641920 -490019 -618187 -450450 -86255 -604198 -666534 -717201 -150695 -564265 -42626 -865202 -882032 -705271 -844168 -516897 -464912 -59995 -922704 -878696 -124339 -180923 -25043 -252444 -224971 -450860 -718782 -481959 -912551 -317531 -725352 -630613 -53093 -423816 -793068 -553770 -290334 -333679 -802478 -726920 -321932 -170241 -138184 -422550 -361229 -677725 -88469 -390090 -895359 -485489 -618332 -684487 -111737 -829840 -75490 -175597 -911385 -830674 -473969 -256808 -539279 -301785 -950926 -654486 -850604 -435645 -538668 -682400 -419254 -650557 -450537 -508412 -88209 -952832 -151037 -777991 -841256 -474354 -425702 -430287 -12905 -940199 -449220 -919789 -828321 -683899 -24215 -683900 -336394 -946837 -513477 -157129 -50255 -69231 -877124 -167657 -139075 -449360 -791575 -305246 -162164 -498589 -315017 -604978 -735296 -337092 -334572 -120147 -342925 -930587 -248526 -321926 -887527 -383498 -102333 -619367 -281129 -316524 -14877 -476131 -230086 -532119 -446733 -134897 -627836 -885848 -544485 -450552 -891072 -129182 -924265 -476558 -223462 -607184 -115925 -608711 -675847 -64353 -727009 -48902 -179987 -625057 -642398 -716697 -490230 -724441 -121751 -285257 -478259 -316354 -25266 -16952 -850980 -143466 -104999 -525120 -726035 -382858 -911684 -687431 -830794 -301343 -367451 -234123 -333943 -556906 -607361 -156356 -466995 -626691 -7468 -617999 -533526 -649382 -816190 -289107 -772421 -33953 -427184 -850404 -82414 -413304 -701926 -918686 -561468 -104252 -386600 -288574 -846639 -13105 -232030 -86275 -756624 -680953 -384843 -314641 -10186 -650247 -25276 -484009 -527148 -563370 -280714 -245095 -387125 -129229 -774298 -59072 -554770 -278359 -24478 -937956 -619885 -182692 -532445 -287450 -754544 -687000 -865141 -736368 -588312 -388258 -133085 -44005 -146878 -249507 -482614 -679864 -859946 -683719 -813373 -380991 -742194 -767467 -532989 -529247 -684296 -450723 -340779 -912256 -93476 -807858 -847137 -554875 -299010 -331497 -726498 -112340 -730807 -25020 -945761 -168180 -763544 -577580 -295622 -763514 -167695 -134016 -450802 -348663 -664824 -652219 -50171 -857407 -461295 -936213 -663843 -131965 -663685 -432203 -346438 -181736 -429991 -419896 -172141 -650891 -334112 -905265 -500420 -554317 -215078 -880194 -134801 -680051 -717677 -698522 -228847 -485649 -25106 -64987 -305645 -333663 -133673 -157922 -153725 -314713 -543999 -365767 -472008 -288740 -447001 -421135 -65723 -845028 -885025 -163050 -420845 -674258 -65632 -319424 -762493 -611689 -296328 -424307 -285593 -545111 -841255 -939431 -248887 -615013 -448892 -217525 -775891 -689729 -11594 -629552 -629319 -229115 -937207 -32765 -848506 -470505 -301820 -135001 -649505 -167176 -678362 -643058 -775020 -810653 -50947 -106016 -245984 -89014 -443436 -642909 -615031 -457123 -378155 -864907 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/portrait_train.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/portrait_train.jpgl deleted file mode 100644 index 9d11ee42316cdaff69575215f999809949a42ecd..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/portrait_train.jpgl +++ /dev/null @@ -1,2500 +0,0 @@ -944016 -848831 -334696 -473839 -105746 -750452 -659670 -67390 -871369 -174093 -167143 
-309609 -101395 -546638 -816081 -808253 -876835 -161320 -769217 -426928 -779179 -827310 -529494 -844911 -39198 -162995 -750787 -734927 -876571 -66347 -825875 -619933 -116761 -396694 -577726 -515704 -382974 -649974 -433499 -171284 -802333 -917120 -886854 -504396 -222035 -321081 -420603 -145836 -564288 -567929 -852399 -418617 -516241 -678612 -432961 -608121 -405315 -680737 -156346 -932998 -427303 -423534 -857447 -132018 -650736 -912182 -277686 -411379 -842782 -488897 -454530 -682586 -556323 -333891 -898412 -484143 -120614 -229602 -672021 -324177 -167708 -285562 -316805 -886738 -848891 -880526 -280291 -900639 -425960 -115815 -258128 -792166 -678528 -350201 -418554 -608849 -13024 -895408 -319701 -556622 -349439 -855880 -143619 -723597 -803867 -420475 -804262 -436035 -788144 -408713 -351942 -895668 -284156 -310584 -111493 -436153 -229725 -933111 -556840 -672655 -693816 -322349 -188101 -261556 -769225 -261967 -844241 -759598 -956316 -437329 -596518 -750491 -343730 -505963 -425335 -59124 -427595 -295628 -682739 -52555 -627602 -321780 -420171 -760285 -404644 -726658 -599894 -280138 -434110 -446123 -750105 -268294 -207643 -167761 -554629 -635560 -929609 -405203 -263626 -629788 -486332 -880424 -686774 -677948 -146425 -484039 -22685 -321910 -728821 -454893 -650772 -301316 -347609 -863355 -741224 -459645 -733497 -289984 -432605 -594202 -336306 -35249 -142317 -162788 -672884 -553278 -419972 -390965 -911352 -10227 -608868 -635514 -573723 -512771 -678378 -111867 -514287 -676001 -772533 -871430 -151870 -151862 -940766 -436411 -806152 -556617 -851321 -143446 -229749 -819586 -386750 -336258 -418360 -682245 -523436 -763981 -807988 -682519 -309366 -719096 -173058 -593813 -355917 -348886 -497882 -102106 -44831 -841049 -651635 -699801 -450786 -937232 -396768 -53066 -430844 -425320 -134599 -309212 -674059 -949223 -795369 -513329 -240604 -444682 -550064 -106824 -422556 -298715 -642610 -527424 -934364 -24990 -280493 -321369 -287089 -486658 -682292 -776879 -624392 -146495 -838577 -262186 -161214 -449597 -568728 -773824 -303215 -865723 -99830 -229634 -938290 -24978 -438110 -719352 -281739 -688671 -650464 -162952 -913736 -852816 -808320 -333904 -85972 -192932 -544648 -449850 -533290 -24686 -25218 -492585 -64521 -674385 -943937 -340653 -360819 -13005 -890106 -619830 -804838 -336287 -477130 -710027 -121844 -321130 -466829 -725321 -727703 -618921 -174323 -294505 -879245 -10198 -444472 -485081 -624304 -247981 -278444 -431768 -696122 -433560 -335876 -121865 -667102 -685473 -716591 -132760 -145005 -630773 -488440 -111691 -808063 -862973 -545017 -224006 -449166 -228261 -167397 -240477 -867799 -791382 -382300 -770369 -66297 -880492 -652573 -119122 -845973 -717531 -642881 -920932 -673849 -229325 -249250 -826207 -190213 -281238 -321897 -485148 -418785 -751199 -30268 -821039 -268283 -128878 -125477 -12864 -853595 -131762 -673141 -319965 -421127 -248527 -115908 -649112 -111872 -232300 -480564 -111813 -228810 -467175 -520630 -326332 -52444 -859472 -567783 -83688 -530723 -10309 -482831 -523521 -443996 -627876 -562622 -604173 -940817 -556545 -42058 -556166 -408556 -940859 -294483 -714024 -24883 -150942 -515193 -596630 -295772 -668181 -947092 -131379 -447996 -921193 -664371 -380039 -63384 -915670 -106282 -470413 -822603 -505560 -420522 -442093 -268640 -244767 -808178 -436929 -277184 -408554 -755005 -277998 -427596 -674360 -305684 -810715 -851075 -315814 -274614 -286042 -447234 -649931 -451852 -858516 -634964 -556283 -768753 -594692 -245954 -701042 -563434 -335996 -464513 -550910 -342608 -894386 -843955 -343696 -342481 -826734 
-483785 -46570 -813303 -110171 -42719 -887533 -652974 -863562 -396783 -432011 -834431 -167556 -475617 -415280 -276959 -703557 -297079 -111863 -418415 -829646 -555351 -133213 -24818 -630382 -450821 -902231 -230100 -556399 -702346 -436126 -230290 -63030 -431723 -726385 -167746 -690772 -17951 -224912 -605975 -307908 -26129 -319156 -562574 -921133 -528219 -322338 -152089 -660557 -16958 -334526 -450043 -450674 -564427 -129792 -675848 -435415 -263056 -146830 -287714 -623929 -794968 -799244 -625903 -321635 -659257 -143996 -905488 -756604 -292544 -556723 -327070 -594516 -134014 -485131 -384260 -397724 -867080 -737995 -412358 -135002 -10023 -544409 -922672 -391270 -552681 -751128 -135256 -54766 -824953 -747612 -101031 -761566 -847039 -336324 -280963 -437895 -144800 -555323 -483909 -649887 -53013 -281742 -446235 -167005 -228644 -461367 -944001 -677860 -229608 -619789 -341582 -474024 -641393 -633378 -231096 -140601 -484982 -656195 -356626 -810841 -706325 -237126 -764525 -574618 -803022 -375100 -651643 -449968 -167809 -205016 -485770 -708711 -871181 -132742 -461850 -108923 -758418 -934708 -910698 -646428 -674659 -444274 -449965 -65370 -791993 -876077 -447677 -263948 -738682 -140234 -458187 -869306 -120208 -302766 -448302 -725900 -25195 -853476 -411314 -390138 -164879 -438868 -290243 -446635 -693844 -449058 -555776 -25358 -228952 -93511 -223249 -449862 -854136 -484051 -449638 -184200 -158540 -930659 -682004 -682725 -38937 -870213 -674288 -555167 -854018 -594663 -840007 -219020 -940970 -312768 -556219 -763310 -627976 -93983 -422676 -182744 -153123 -295757 -767608 -822953 -10606 -556563 -470708 -840079 -594817 -629969 -129815 -295039 -896813 -630764 -925847 -264054 -935900 -275478 -420607 -666123 -437885 -215091 -305422 -574234 -225021 -229971 -931062 -697489 -321573 -479338 -682556 -766759 -502867 -477058 -726462 -307660 -309599 -46050 -420743 -14750 -218666 -449540 -814730 -770048 -228258 -124454 -422694 -421110 -611393 -898151 -593135 -432964 -297364 -805239 -941582 -298502 -648072 -207817 -885800 -113917 -36211 -778184 -633260 -286851 -412866 -566763 -895488 -512330 -426434 -791690 -415270 -847874 -550328 -86037 -93658 -760595 -43078 -546588 -554857 -308338 -158232 -519550 -898001 -64592 -25280 -588557 -854917 -879098 -910904 -418191 -891131 -809888 -789675 -125869 -230138 -544525 -274796 -619431 -741830 -169557 -450019 -167593 -703439 -281044 -189066 -289109 -426524 -325938 -205014 -150733 -265541 -339657 -703458 -283468 -86125 -627193 -165601 -788485 -636978 -555543 -880223 -295750 -145397 -302418 -33928 -871304 -395296 -675528 -162413 -162808 -851011 -577016 -426743 -670028 -725797 -492652 -259851 -575187 -791326 -785267 -298535 -321324 -930023 -120697 -185407 -309455 -864951 -64557 -551432 -624193 -899507 -450247 -546851 -921176 -710918 -528453 -445716 -776878 -726068 -468759 -121058 -12263 -526199 -938273 -825831 -64409 -418688 -152325 -155549 -334197 -439698 -573813 -25192 -106728 -390077 -231001 -691472 -210783 -420115 -229950 -710143 -190623 -11818 -303286 -814319 -55913 -809623 -618456 -689702 -77167 -619354 -418588 -298840 -106869 -435616 -830166 -750175 -48921 -117097 -33313 -229629 -693793 -133533 -791581 -386130 -408561 -879431 -482116 -885135 -427558 -279572 -104855 -735130 -429806 -455492 -25274 -868345 -156074 -342935 -218638 -434133 -712133 -836820 -764233 -791509 -643020 -524206 -530470 -10335 -422456 -178372 -450054 -436958 -638340 -827424 -356891 -617950 -281057 -281544 -748404 -723058 -12466 -668060 -357066 -333107 -682742 -321864 -598200 -678728 -449858 -778021 -682119 -432356 
-146521 -171486 -310349 -625594 -771581 -482899 -541320 -717024 -593657 -513810 -5594 -166719 -871543 -10138 -485104 -630103 -524530 -33919 -488102 -730998 -491727 -808271 -322890 -85678 -656291 -630790 -949357 -450515 -158716 -287088 -751381 -162539 -630432 -855916 -844759 -450804 -555512 -937340 -146163 -844980 -449831 -725345 -10362 -921440 -678361 -848709 -849549 -714482 -524844 -460279 -637868 -920091 -689786 -838241 -69761 -418477 -854339 -34484 -684276 -594086 -807189 -434962 -534390 -136525 -520038 -296836 -501497 -522975 -706709 -853800 -619055 -448910 -452028 -921366 -879197 -349279 -447889 -484610 -504539 -755840 -945833 -611784 -458049 -66996 -448486 -481893 -427126 -943603 -228206 -754500 -606941 -657077 -327778 -106656 -1468 -380888 -334312 -88250 -552742 -733451 -534860 -556861 -146357 -880066 -144140 -421122 -321412 -727023 -180194 -728025 -886632 -682537 -485091 -850702 -103703 -927062 -111834 -217023 -336803 -808198 -701693 -551327 -432889 -691993 -232536 -112653 -539422 -7074 -470892 -185621 -24682 -450835 -880510 -498330 -436567 -453396 -879794 -952406 -913817 -533193 -772495 -439424 -206227 -947173 -336275 -241798 -290186 -294172 -954664 -25108 -830771 -683149 -298447 -210691 -880388 -25096 -483101 -236134 -266570 -33409 -705002 -86961 -686791 -772179 -363222 -485064 -442330 -738332 -408469 -64376 -902478 -727109 -658907 -258231 -67413 -25104 -898428 -146757 -321633 -432499 -179337 -708769 -556229 -32338 -839998 -229772 -666222 -246924 -462153 -711749 -689457 -427581 -920002 -459504 -900605 -675077 -695669 -600588 -25520 -354028 -579886 -44792 -906563 -144436 -450636 -297803 -308508 -875936 -425151 -247962 -717711 -716479 -682391 -925857 -949401 -859718 -1947 -488209 -27968 -426412 -321335 -90383 -420177 -408169 -450602 -181772 -349496 -335630 -445505 -303288 -803535 -939828 -448884 -68396 -112174 -407203 -319738 -170088 -866930 -594451 -424408 -640553 -426589 -516506 -104134 -206131 -273722 -25092 -702279 -3827 -818480 -385 -851018 -845035 -156401 -101170 -466690 -404531 -733685 -943284 -335902 -836926 -242834 -43902 -426217 -776374 -574715 -111887 -640331 -135037 -46561 -751440 -949989 -763217 -865166 -419291 -786692 -842139 -284889 -442172 -228703 -770248 -848625 -88657 -791390 -625510 -150125 -851457 -845057 -462009 -328447 -418008 -731831 -921377 -415198 -689706 -431925 -342828 -10259 -350567 -664484 -9915 -577783 -282003 -751695 -807935 -399103 -273242 -864645 -630740 -417682 -422360 -555182 -86410 -476026 -576306 -522148 -925872 -148249 -328373 -138408 -444272 -851322 -96909 -422788 -523496 -746150 -13100 -30603 -276013 -138619 -696138 -244560 -444663 -894041 -865221 -514025 -569334 -37290 -808212 -736594 -594024 -385192 -258092 -776329 -848725 -302497 -483229 -798810 -63788 -168280 -633732 -146585 -235358 -859078 -650885 -854730 -792239 -415095 -726717 -611128 -32043 -836793 -449082 -920996 -556346 -447319 -917396 -229428 -29893 -727164 -385287 -644439 -516402 -321229 -225248 -485584 -658558 -158499 -258162 -750893 -78481 -422600 -431636 -835454 -288231 -607461 -795613 -933014 -146762 -926678 -576573 -268905 -321957 -305802 -618742 -761541 -479991 -85152 -714313 -12454 -496946 -935202 -486466 -556764 -680979 -309376 -682954 -386997 -99705 -490591 -419511 -166742 -522674 -866421 -96223 -328601 -145749 -564380 -779916 -167919 -314267 -879295 -237227 -172752 -640039 -948287 -458768 -420804 -245851 -884826 -22284 -630232 -151463 -864803 -24831 -443811 -663223 -325616 -315750 -682471 -302946 -941205 -82438 -751559 -384877 -427189 -932065 -932530 -122887 -554328 
-681158 -948162 -764137 -55706 -78633 -851306 -434119 -729196 -262837 -817910 -650826 -314751 -893520 -499025 -485183 -938076 -672717 -618162 -36193 -269387 -448569 -167562 -279809 -736422 -533780 -77268 -569814 -675801 -484403 -555596 -865754 -830657 -168294 -763828 -416820 -172834 -118192 -904790 -162979 -229753 -318582 -664150 -854456 -859380 -715555 -717968 -823956 -335400 -425533 -910456 -448132 -707372 -942229 -447567 -625835 -886554 -457954 -10123 -230115 -493871 -237094 -234268 -741418 -629684 -148562 -616087 -230130 -442102 -937511 -769228 -763913 -522910 -450057 -948313 -141321 -871873 -531857 -10117 -387936 -318953 -515757 -910214 -624877 -210663 -232798 -878524 -626319 -921455 -925545 -145593 -286986 -794389 -888966 -759611 -509485 -824854 -784100 -289090 -422549 -632818 -60242 -750936 -682942 -842053 -248449 -678964 -155222 -808048 -231360 -687872 -835034 -562746 -891057 -449199 -189780 -407040 -883980 -841258 -874321 -853918 -662564 -336076 -382206 -17400 -328113 -598389 -391421 -182776 -175003 -881227 -485724 -181544 -871883 -64573 -276003 -139220 -122059 -529278 -333447 -420520 -482880 -940378 -428514 -429739 -523213 -492656 -338696 -760287 -468345 -465052 -209653 -726098 -697732 -223831 -581080 -648297 -626311 -370230 -270639 -905367 -460881 -485623 -111571 -369738 -709101 -701334 -799176 -295872 -877100 -203653 -64589 -58386 -240898 -232413 -639778 -453596 -449101 -216705 -809052 -77271 -81265 -914603 -932801 -723208 -756755 -321114 -649666 -478463 -875870 -247313 -485750 -92668 -409990 -109140 -408422 -870228 -421100 -743396 -146885 -845010 -475243 -180931 -769669 -408110 -556621 -830301 -813225 -555359 -244172 -756606 -546448 -85928 -431905 -658239 -528871 -730899 -833347 -382638 -436359 -468650 -118606 -875984 -643909 -258502 -429390 -517758 -666518 -267904 -336333 -111873 -421033 -450589 -450351 -158973 -11438 -794743 -343502 -22410 -444495 -594819 -422343 -365709 -336084 -343673 -605903 -223992 -934968 -808240 -421121 -362521 -328538 -753104 -12005 -786590 -829547 -684553 -639603 -422714 -769462 -268051 -144476 -295032 -63643 -353932 -870868 -209466 -527782 -650855 -760417 -61031 -811386 -485491 -577258 -921336 -726635 -256783 -432590 -556393 -305967 -101347 -524807 -730533 -477223 -573685 -457186 -64525 -592680 -954557 -172608 -268582 -301871 -172434 -543759 -650881 -318865 -914296 -854162 -12995 -297813 -803286 -2524 -594751 -501714 -146864 -449437 -841040 -64593 -354676 -652390 -287001 -472430 -429287 -870748 -326047 -290277 -749888 -467227 -419049 -727072 -692736 -629522 -458472 -755104 -84381 -121280 -531383 -307424 -521757 -111370 -640677 -869949 -751316 -388956 -117288 -420857 -803240 -448608 -232343 -640094 -457757 -450538 -940884 -848008 -784364 -321193 -655468 -696722 -302611 -679264 -787701 -78296 -167074 -84607 -296650 -725585 -301339 -679873 -827479 -628605 -682319 -440114 -476685 -310572 -485294 -136229 -555698 -228125 -433551 -803471 -742866 -86280 -333361 -751334 -522233 -611600 -166924 -619309 -368139 -111765 -640965 -121592 -554865 -718231 -123531 -13120 -338848 -930631 -951269 -855838 -641981 -327072 -788533 -554325 -474275 -389920 -448572 -897952 -305037 -674752 -682502 -117106 -684554 -935688 -238736 -449373 -468507 -876599 -880353 -543395 -128412 -449956 -25290 -300829 -745615 -145559 -217368 -945051 -808182 -162985 -175303 -281015 -756087 -608731 -456016 -553635 -7727 -304604 -639464 -520878 -688061 -819599 -866918 -86603 -85240 -477351 -515198 -772154 -468637 -633545 -506551 -786534 -880047 -659344 -618238 -150294 -246418 -412957 -673956 
-57040 -810739 -465053 -726643 -926254 -150992 -127999 -385754 -756913 -148916 -409 -921596 -98991 -450819 -626101 -685472 -308964 -55536 -681614 -682218 -336156 -99416 -693757 -450226 -23234 -693745 -436105 -334792 -485351 -772175 -319934 -892474 -672879 -369818 -895650 -879803 -708701 -442665 -685438 -817318 -713223 -661360 -380927 -204551 -230369 -533960 -100872 -427569 -297790 -381838 -447949 -95605 -919150 -395833 -77354 -217967 -264085 -526565 -284661 -247482 -687850 -156435 -874923 -658086 -480196 -226559 -294102 -217921 -635340 -525281 -25110 -472234 -447221 -553541 -58650 -433437 -817362 -810355 -67040 -298095 -263885 -916965 -659394 -935409 -328875 -658286 -282562 -485297 -447455 -650202 -321950 -309750 -327657 -847305 -90107 -37387 -937354 -228376 -933066 -309817 -94708 -52454 -64500 -705378 -221015 -899112 -166853 -690517 -588995 -327788 -340188 -524602 -809695 -682085 -642652 -395058 -234605 -229051 -844627 -265874 -24644 -768572 -202404 -679934 -171447 -420484 -650597 -48584 -218839 -775787 -505874 -98576 -673687 -282366 -279270 -747184 -759197 -104631 -851202 -229902 -276762 -774327 -490311 -25188 -486064 -317662 -301429 -807868 -99579 -819614 -119881 -405404 -447760 -305864 -636145 -448535 -527421 -505595 -420713 -15824 -182925 -38245 -833822 -726887 -678560 -757962 -357128 -288542 -40831 -918629 -146726 -312429 -449581 -780788 -227086 -104975 -592182 -121353 -59534 -301390 -863751 -287701 -421109 -816194 -837869 -318096 -290818 -11872 -9585 -491518 -726411 -556750 -684176 -308368 -727094 -732755 -751415 -573514 -704432 -485382 -732711 -417058 -758024 -26849 -775804 -11706 -450682 -269186 -83339 -440562 -104725 -432739 -674625 -98178 -85284 -37041 -116862 -257329 -477462 -682879 -403878 -309312 -132694 -13012 -230271 -233243 -319999 -513018 -925901 -686823 -349504 -39313 -804944 -930413 -180281 -145699 -300575 -25494 -230341 -271193 -230132 -175525 -898631 -812689 -420818 -521660 -485398 -235467 -579207 -437751 -476102 -935696 -56490 -522972 -146349 -810723 -136381 -438040 -827635 -485826 -614966 -388320 -594670 -57018 -897869 -617764 -498993 -498272 -10224 -935655 -247189 -819537 -286995 -170172 -432925 -918668 -228835 -812032 -513664 -851100 -27862 -99226 -311031 -69753 -926850 -606254 -289034 -601122 -689784 -736433 -336261 -86279 -639917 -10146 -220374 -128348 -474262 -850073 -67405 -725418 -154158 -868825 -492550 -906413 -404955 -573512 -743180 -63731 -29617 -42950 -422608 -722471 -280797 -258021 -528729 -630125 -333512 -258192 -444775 -226564 -742178 -728899 -743768 -458707 -420137 -229751 -736702 -562093 -687364 -86248 -486295 -132418 -554334 -741053 -876936 -11814 -25929 -215452 -162526 -831115 -25224 -844733 -10989 -485397 -298196 -267885 -553891 -163032 -777123 -764396 -846538 -119453 -145879 -764174 -750097 -41999 -115161 -775539 -280233 -500425 -481574 -132250 -531748 -419188 -783957 -711753 -910354 -456019 -294398 -212382 -663651 -109019 -174488 -338252 -703733 -488852 -396685 -875743 -420506 -66449 -117036 -751423 -256353 -157613 -648450 -294047 -880435 -156480 -564411 -30557 -244913 -429907 -443667 -910618 -531260 -248860 -556339 -117805 -78362 -408418 -642480 -407571 -828813 -768849 -627764 -137644 -777387 -159459 -693256 -736695 -619227 -756765 -23791 -306616 -331086 -3362 -534936 -463760 -281438 -230355 -514379 -592842 -938075 -617662 -311524 -232749 -616579 -298543 -704806 -437663 -24766 -611621 -929083 -766656 -384088 -120801 -450614 -276525 -591985 -650777 -718564 -498117 -7527 -449899 -22248 -946916 -251126 -531887 -134734 -181634 -412653 -167793 
-768829 -919566 -853239 -318991 -128451 -879971 -333259 -772535 -440462 -894768 -913677 -483677 -756705 -755921 -149212 -661299 -577075 -60568 -555837 -162094 -848329 -663297 -247976 -85612 -85923 -145976 -768751 -35904 -10660 -652726 -226946 -297670 -931391 -18789 -841642 -556222 -24930 -215961 -782573 -329270 -734407 -824421 -630707 -728167 -632939 -745542 -326101 -74314 -257653 -488477 -484306 -24676 -553962 -17549 -904387 -678843 -830320 -281211 -146879 -895152 -491051 -48705 -945181 -703692 -629427 -193116 -878971 -437358 -161282 -658709 -144857 -106114 -620682 -8671 -336344 -101039 -320400 -409716 -550470 -630420 -133876 -690957 -850672 -938569 -395357 -577937 -166800 -241881 -146393 -663238 -925836 -819661 -316307 -179853 -217862 -10641 -163089 -501113 -270189 -642916 -359557 -36812 -34193 -24568 -482433 -652610 -439384 -386591 -10887 -532245 -615331 -633830 -368699 -24604 -257491 -170753 -591622 -734933 -297869 -620311 -483871 -930290 -940895 -485695 -321754 -450564 -258245 -594527 -879040 -127332 -84279 -827026 -920866 -56629 -439320 -703923 -324784 -37492 -226778 -823846 -431817 -714219 -335855 -678075 -329178 -64527 -270238 -338210 -37714 -10118 -797817 -129026 -144516 -10632 -251403 -423214 -784183 -257776 -398830 -65899 -569846 -121737 -272625 -819596 -24882 -426378 -783812 -943892 -132284 -217425 -21670 -910905 -427353 -836592 -440527 -775235 -91045 -321901 -334579 -891255 -543747 -141486 -287924 -695240 -204859 -449689 -558027 -802648 -477117 -63028 -24606 -517604 -479104 -674426 -405786 -912257 -483195 -178054 -84791 -135429 -447278 -810022 -593894 -552757 -673735 -337833 -141685 -431751 -252292 -273528 -730340 -440326 -504970 -579482 -146196 -591165 -863890 -334523 -167544 -798255 -9191 -598258 -146289 -300955 -871157 -298516 -564815 -64443 -417447 -910329 -868165 -731830 -357244 -284817 -845066 -426295 -556905 -146564 -411371 -449291 -229867 -229087 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/stilllife_test.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/stilllife_test.jpgl deleted file mode 100644 index 8ec9e8c8bf8141d8e5f8f51d1bb4fa57a2676992..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/stilllife_test.jpgl +++ /dev/null @@ -1,2500 +0,0 @@ -893830 -932820 -119522 -673597 -12030 -350148 -702164 -687781 -703946 -346481 -577159 -475639 -802120 -93927 -432025 -479073 -633402 -837666 -75816 -651394 -363994 -309409 -212811 -83927 -115497 -609797 -724654 -617187 -267670 -174341 -111894 -208335 -413085 -9589 -442446 -83916 -61421 -404600 -315229 -74486 -871567 -827515 -744258 -214193 -650916 -494741 -61101 -21951 -559723 -325966 -336894 -312822 -905392 -367996 -38625 -34789 -724366 -14880 -336085 -95258 -257986 -288086 -133344 -92608 -746137 -334108 -53367 -459829 -12570 -88890 -343236 -906093 -308330 -870697 -135454 -304671 -461151 -94540 -306841 -70948 -260766 -260487 -11955 -570539 -18137 -44324 -668771 -239726 -189955 -634329 -449144 -630115 -206814 -761574 -602803 -65393 -68262 -618736 -934524 -781715 -133757 -679748 -755476 -102746 -14687 -354632 -940620 -363483 -263700 -464948 -18123 -50888 -112629 -710097 -505695 -209431 -350182 -51594 -43950 -82255 -129270 -742046 -291087 -482280 -369149 -659532 -793133 -525162 -764389 -763658 -863213 -306935 -356572 -46635 -11374 -335315 -315652 -116564 -292773 -35216 -133990 -538931 -421197 -494149 -50266 -555029 -623357 
-9553 -71034 -193894 -24025 -24996 -327252 -360197 -591878 -769536 -793958 -670659 -938298 -303655 -709891 -151434 -14982 -926171 -94094 -21661 -658965 -912997 -80270 -726081 -49028 -305631 -668047 -70546 -361292 -144333 -167535 -16771 -3991 -713934 -301340 -343026 -702995 -160995 -581053 -92979 -64157 -42909 -61709 -371256 -938917 -71280 -314674 -24994 -124369 -847134 -444681 -17043 -72772 -140540 -36282 -912699 -703710 -286969 -12970 -381819 -771538 -25512 -28889 -741672 -362524 -159583 -516199 -761811 -387150 -654612 -262755 -12712 -109573 -874523 -91366 -49376 -501397 -335448 -943583 -23474 -207060 -18264 -70473 -337130 -312173 -664861 -826238 -560397 -372446 -71000 -125181 -162202 -21384 -67312 -781773 -13106 -48897 -431907 -148919 -305433 -20320 -431952 -44483 -243730 -343608 -276222 -795495 -49910 -20843 -703907 -709809 -492058 -526562 -49853 -209470 -822384 -523895 -358143 -84588 -780186 -773810 -118714 -114209 -661987 -392876 -67809 -354393 -110509 -835194 -29476 -707413 -676754 -89063 -949351 -475277 -83831 -597337 -334693 -81804 -67102 -900081 -576910 -856916 -286027 -78526 -487768 -14820 -732171 -564359 -633786 -871115 -780863 -203649 -119072 -36743 -75742 -372305 -60163 -591683 -566811 -42274 -372934 -31907 -83906 -480369 -71120 -29105 -95539 -9450 -132217 -480294 -821354 -529115 -340723 -13982 -209182 -708895 -7296 -728672 -719070 -263064 -782229 -409719 -298687 -294323 -10986 -14604 -78867 -458224 -625898 -593905 -764162 -11796 -24153 -162021 -348885 -320610 -449560 -85644 -851705 -799202 -44246 -379608 -717364 -810232 -625799 -334536 -664933 -9633 -446208 -762539 -208571 -368418 -780897 -48856 -121092 -50067 -123028 -177547 -16485 -44021 -769024 -22239 -64031 -537498 -30078 -650644 -11619 -465056 -66410 -179004 -174853 -777995 -936737 -769554 -58505 -56503 -721595 -47569 -13462 -74423 -79302 -622726 -479476 -316867 -235516 -223266 -828552 -501340 -273013 -774329 -626828 -41809 -126613 -304134 -703801 -28879 -847915 -375376 -199787 -892026 -18013 -616044 -60366 -222905 -747948 -527512 -134618 -73618 -4012 -169785 -49710 -762424 -180865 -341471 -793822 -724362 -130270 -7707 -593276 -39176 -420552 -321799 -832849 -608481 -541030 -427585 -355841 -314498 -124483 -893904 -5461 -751374 -178487 -48251 -870102 -345731 -43127 -71270 -88352 -939640 -957079 -312208 -303465 -427671 -268090 -28567 -70649 -118026 -29727 -179877 -230752 -242308 -58520 -273338 -776395 -11584 -856767 -418486 -178916 -590492 -9452 -249932 -236505 -673560 -901467 -95421 -513439 -672909 -746180 -30809 -55055 -45264 -459038 -501917 -267136 -560511 -91885 -236112 -129213 -15643 -825384 -316904 -788749 -870021 -14720 -56108 -319423 -526356 -170359 -242624 -506098 -637756 -356232 -50105 -125074 -301660 -897606 -747815 -267927 -654709 -158928 -353398 -66201 -704326 -70633 -570500 -121097 -674123 -77648 -455685 -99775 -943090 -330840 -368686 -12038 -389221 -740101 -489341 -275649 -18830 -306468 -255379 -957009 -168001 -151517 -738483 -626414 -93866 -73929 -354087 -119931 -733874 -357311 -21076 -67194 -782691 -78861 -275184 -956979 -8153 -224411 -208771 -316879 -525458 -323361 -407648 -567973 -215614 -802780 -903486 -215820 -62102 -710200 -952217 -383917 -43240 -121055 -288002 -739553 -82383 -300101 -208976 -788554 -91984 -167019 -468442 -654446 -60566 -690598 -77322 -140272 -673891 -316043 -930624 -64775 -186816 -547999 -546093 -475081 -202304 -559969 -611586 -166897 -211674 -361135 -741483 -34489 -824425 -141888 -588298 -248591 -761679 -919082 -439935 -368043 -262645 -319993 -843742 -301835 -601130 -793553 -124987 
-654358 -464048 -673632 -467338 -558976 -316638 -527694 -34549 -9877 -335442 -53961 -463894 -434026 -623839 -527142 -42428 -841604 -23401 -234688 -31596 -104914 -209539 -482439 -219092 -786072 -278328 -54689 -305395 -316216 -881222 -270564 -125066 -27299 -889413 -711748 -661067 -633177 -286116 -521582 -208083 -277049 -800756 -40322 -383698 -847571 -425705 -88424 -140631 -478171 -62779 -945837 -12745 -927616 -590097 -528800 -367298 -94196 -693114 -199439 -1480 -288627 -492095 -62777 -372497 -11532 -623060 -178585 -487358 -3849 -522256 -445401 -443079 -166603 -17389 -8507 -221744 -54727 -77326 -847657 -938756 -379458 -461052 -134777 -642789 -92597 -886094 -181616 -750708 -175693 -191075 -60576 -701280 -927780 -373172 -606090 -512791 -817940 -34837 -13006 -49161 -12427 -122488 -626049 -13178 -882565 -295795 -315815 -251301 -24380 -626103 -166962 -79296 -155506 -451853 -793795 -852673 -25370 -938995 -45636 -423762 -35200 -58894 -267004 -538390 -335332 -303518 -320481 -68733 -50270 -122505 -315383 -763954 -332242 -673355 -19498 -75834 -60570 -377534 -20836 -45303 -747923 -753772 -44582 -79450 -139998 -761721 -64673 -655983 -20258 -322390 -468746 -709150 -95579 -32347 -63103 -821435 -140580 -124485 -538128 -310271 -335414 -232415 -9592 -65742 -42173 -50196 -606519 -233764 -11850 -703448 -315018 -771346 -611735 -390936 -920910 -58891 -471898 -905995 -66149 -738030 -148423 -925188 -554439 -821560 -763538 -66339 -336260 -558958 -673070 -342778 -606660 -883360 -78720 -669133 -423189 -672405 -950688 -232669 -65015 -263637 -709788 -656926 -231008 -10353 -559309 -594599 -196295 -288009 -230353 -871303 -123533 -224061 -575315 -475967 -142828 -113763 -943329 -341252 -472437 -127915 -151860 -11918 -51825 -361820 -24267 -167458 -45171 -262027 -676065 -46599 -433487 -902238 -729261 -88087 -793937 -363869 -527645 -150448 -62399 -606319 -866145 -277435 -892863 -702709 -619030 -264022 -320887 -475454 -195020 -84907 -869348 -943812 -58470 -68128 -42017 -556484 -653283 -60481 -122967 -11427 -271085 -488087 -307793 -26855 -933192 -278376 -135495 -29372 -704064 -703871 -12485 -924217 -175661 -935759 -21714 -42098 -798145 -709141 -238929 -719133 -559549 -334442 -921819 -35140 -21085 -174534 -329020 -625736 -60982 -663831 -718945 -13186 -57708 -369304 -26206 -309556 -793594 -937055 -71829 -423940 -919078 -784073 -446704 -64257 -652800 -197418 -671365 -567093 -206440 -45042 -387420 -14652 -180036 -611027 -283661 -41923 -38192 -327940 -854955 -821414 -559923 -697074 -590651 -747486 -267277 -195091 -432064 -449076 -803052 -460413 -288744 -227660 -885455 -707468 -943634 -684115 -863330 -59976 -913144 -534968 -85937 -279321 -722921 -196821 -8614 -52805 -674285 -236502 -83677 -13510 -442253 -281561 -95202 -168677 -12043 -647463 -130728 -140897 -477422 -905008 -288728 -24891 -86103 -101745 -472541 -317768 -682118 -956279 -60500 -649774 -17109 -121663 -914313 -567544 -134794 -871067 -907814 -256902 -940387 -896788 -858796 -13523 -124292 -264536 -27076 -761425 -197081 -167956 -60665 -558555 -62101 -203374 -234321 -483251 -335322 -640430 -670568 -547478 -20686 -952329 -791693 -736669 -337687 -271023 -478654 -698634 -351008 -48620 -226666 -334041 -168143 -903598 -748396 -61351 -181576 -832375 -11908 -623873 -625863 -560238 -503681 -471976 -9503 -177137 -913041 -112517 -772571 -222168 -516625 -263512 -329150 -446378 -517792 -459334 -53431 -50202 -393967 -774086 -177767 -814289 -209137 -383955 -717415 -555571 -631478 -166798 -465009 -795946 -442992 -677496 -263763 -93922 -288550 -77581 -301781 -75522 -74488 -199071 -198627 
-903778 -769547 -114190 -135452 -351744 -84174 -675143 -180162 -180179 -16609 -125103 -94691 -939232 -947345 -793933 -422989 -42332 -558220 -237588 -494610 -442985 -45211 -11782 -34810 -188783 -125688 -24031 -91030 -522825 -174625 -134360 -309844 -682028 -174712 -852696 -583481 -447729 -73679 -23829 -718040 -166955 -936173 -52855 -901244 -22212 -110263 -737789 -125136 -831373 -10656 -61857 -569918 -131255 -378502 -209101 -368621 -654560 -856920 -143451 -60821 -144029 -831265 -42238 -468072 -556152 -926986 -148505 -413269 -335375 -18252 -50249 -903633 -750968 -738354 -750404 -665726 -474190 -854124 -65217 -41959 -74508 -34964 -837126 -237150 -632941 -68592 -332569 -34779 -702077 -497484 -119965 -68759 -15162 -473351 -397806 -4447 -372429 -180707 -12582 -386657 -735841 -479887 -495794 -717334 -863277 -930959 -13077 -769842 -933140 -199345 -793662 -704295 -869987 -425630 -557406 -17229 -29650 -617275 -178987 -93682 -772062 -145649 -760449 -875160 -833268 -460995 -310647 -731334 -956418 -304751 -703353 -892373 -192488 -42039 -255593 -257394 -790590 -773992 -94835 -567330 -793663 -690380 -30578 -465788 -104871 -334946 -758092 -792837 -905557 -267293 -602488 -782567 -17139 -460283 -892520 -440467 -201796 -947352 -14853 -412677 -383719 -369338 -47366 -221757 -335443 -442415 -249356 -814794 -90494 -115125 -149983 -632846 -479973 -196899 -43693 -717722 -119287 -581079 -903523 -814883 -93928 -249148 -559515 -78866 -408901 -372510 -765962 -463288 -693123 -946486 -270490 -149433 -27663 -5494 -271652 -264989 -139995 -416125 -43331 -148991 -795510 -858093 -13289 -135455 -591686 -623554 -328540 -110249 -198488 -306900 -380885 -47325 -131298 -792382 -51209 -843897 -930899 -792287 -336973 -452973 -84020 -132620 -57863 -678776 -751507 -703070 -47211 -941750 -26289 -249377 -271236 -384003 -4344 -20896 -656699 -112913 -273942 -486189 -574466 -679423 -21488 -10394 -651830 -627545 -890526 -94870 -151857 -20561 -825551 -450780 -73989 -262653 -20496 -57674 -70807 -271801 -95597 -35112 -887713 -343234 -118325 -315032 -810187 -818606 -277096 -199680 -270648 -356373 -21173 -616716 -343632 -950874 -774067 -120138 -34619 -334735 -903546 -159619 -69626 -7419 -337948 -67166 -149246 -263098 -120228 -75552 -296974 -94955 -79240 -36999 -61518 -73370 -23583 -458809 -926595 -491993 -354550 -458935 -519851 -467123 -635285 -70639 -881725 -257820 -742032 -759336 -118288 -793057 -9927 -468205 -107927 -69333 -342688 -68767 -386684 -638814 -242843 -75851 -938736 -37049 -174905 -429605 -92123 -560509 -62333 -390012 -226801 -750701 -338287 -289943 -20782 -664517 -736734 -60533 -328452 -16463 -926806 -518427 -390202 -534559 -427390 -600829 -801220 -271815 -259638 -183790 -49256 -23228 -42241 -99934 -431524 -769602 -664909 -704444 -626570 -13116 -191056 -704066 -334640 -231831 -225507 -26276 -95259 -301842 -45609 -335228 -126397 -486645 -433512 -956884 -645557 -461314 -58110 -49975 -555387 -9979 -315142 -830981 -452948 -77551 -276168 -446637 -688412 -665765 -945653 -633193 -663235 -77650 -51137 -21947 -131048 -289399 -180175 -277913 -931991 -772527 -144111 -480847 -81553 -209650 -735033 -753867 -329737 -340450 -945309 -636884 -277870 -907535 -901188 -176347 -931011 -54724 -550447 -199887 -54397 -413449 -249920 -813908 -377891 -247151 -18637 -488280 -92501 -291061 -61373 -569351 -210526 -188770 -20391 -397607 -335312 -22470 -687855 -60524 -638633 -88593 -928039 -332641 -472098 -50612 -413268 -225012 -566552 -462240 -883046 -120132 -736464 -248222 -268251 -12327 -11977 -664274 -335601 -441829 -213690 -863416 -198768 -12833 -48451 -75917 
-485552 -307986 -260436 -718845 -443028 -15114 -483727 -903492 -364476 -693059 -275028 -536462 -342958 -29183 -316323 -121832 -424867 -903465 -451598 -317739 -691323 -869503 -269835 -335463 -903568 -45140 -366471 -44964 -737752 -636518 -867415 -103547 -52854 -747345 -237710 -387975 -800305 -108516 -25547 -395028 -262699 -167302 -11864 -23128 -732638 -824655 -45308 -161260 -625094 -101822 -379270 -30703 -126359 -715253 -412584 -335475 -73619 -609731 -864986 -60223 -159934 -533297 -460214 -695381 -942329 -56149 -42344 -641762 -678739 -330066 -161986 -11591 -12510 -686869 -756045 -20649 -811309 -141230 -129110 -349266 -297034 -428904 -867519 -267903 -72502 -14788 -661830 -847717 -461342 -34481 -63574 -63716 -634848 -2166 -846201 -130258 -438171 -527993 -956686 -6857 -717482 -276224 -602408 -447472 -205865 -248436 -15881 -475894 -34573 -415081 -35231 -380988 -12319 -863263 -930046 -952952 -53766 -236001 -505407 -192258 -161078 -190661 -193159 -100990 -416974 -351226 -410657 -118768 -809748 -900895 -84204 -251957 -45575 -859262 -83814 -66413 -143618 -198387 -58095 -50310 -956797 -804987 -471338 -197253 -50143 -647708 -575197 -28908 -501419 -636539 -54399 -650086 -699176 -937980 -325542 -512653 -13452 -185261 -517782 -12567 -635809 -41957 -347478 -617516 -793970 -1350 -43282 -305299 -97568 -846192 -453996 -911865 -942634 -457032 -196398 -334763 -380050 -4726 -51000 -465396 -612392 -227049 -60979 -836881 -401072 -140679 -933041 -150467 -427068 -75456 -356481 -384085 -654032 -120220 -34469 -49108 -125144 -107856 -604811 -600807 -791199 -805523 -141732 -255864 -155376 -63607 -295895 -952604 -222719 -709841 -360901 -287929 -197354 -103976 -956940 -182112 -200225 -115524 -167776 -20764 -192881 -390416 -337971 -855932 -279079 -721596 -271364 -224737 -784189 -224907 -782541 -350737 -377882 -284030 -6952 -317617 -110326 -545559 -651649 -769553 -332937 -859126 -550453 -560539 -37001 -166776 -21993 -101522 -202470 -35909 -186811 -79294 -22785 -77179 -88142 -881872 -48626 -289166 -703698 -236694 -54664 -119735 -387600 -645351 -95242 -366016 -9965 -101683 -292652 -167609 -712253 -129007 -931121 -48503 -367673 -493704 -171334 -836809 -943484 -13321 -768741 -99903 -99060 -648979 -793818 -74429 -16945 -560446 -198457 -693320 -534292 -8960 -10060 -747805 -224678 -534140 -699390 -693238 -605656 -642905 -79211 -479851 -793748 -57150 -501069 -29505 -461589 -709932 -543886 -351035 -95086 -679674 -436157 -664783 -549537 -279433 -628266 -262383 -751491 -82870 -830988 -665961 -670990 -768739 -678665 -852708 -25450 -17821 -773298 -622793 -121947 -326420 -179311 -429846 -292022 -26878 -1006 -106861 -323538 -41747 -93618 -224254 -682030 -196134 -780185 -56230 -17576 -35190 -833661 -131369 -647502 -88191 -692278 -69556 -379588 -394731 -689004 -93473 -694634 -119746 -913150 -104289 -42318 -21971 -750592 -445696 -348485 -335240 -236210 -192563 -536109 -806973 -483528 -352830 -263855 -277116 -457484 -257764 -78545 -138873 -326439 -199936 -520098 -109340 -387791 -688795 -864315 -644610 -40336 -51021 -791606 -16325 -20400 -599233 -232005 -559395 -308401 -135444 -169480 -692746 -110508 -335135 -942883 -554508 -251596 -130964 -953012 -663048 -712865 -63004 -635176 -205946 -896677 -3261 -753824 -119057 -200693 -260811 -557355 -209601 -119079 -346254 -640072 -524161 -261758 -21321 -467833 -42745 -88293 -431754 -481650 -303143 -89267 -297795 -782277 -879104 -130305 -347161 -324507 -53930 -89164 -871169 -781752 -402048 -106764 -76904 -60624 -250853 -551348 -265940 -42431 -64990 -9846 -222675 -829141 -304799 -848065 -23528 -266313 
-194075 -19406 -773421 -274100 -59273 -57877 -515801 -129222 -771508 -176314 -11635 -113389 -168711 -466889 -48152 -111991 -401562 -80530 -177491 -58338 -149343 -678497 -610939 -76073 -575539 -8627 -139089 -13693 -380705 -23427 -65854 -840240 -73095 -12867 -72394 -764622 -720000 -77381 -97341 -58357 -60184 -454340 -622057 -407232 -287091 -224301 -340671 -593789 -74104 -836896 -151371 -319502 -63121 -499053 -200020 -567877 -61132 -577365 -373529 -201842 -128890 -719487 -893996 -35214 -750344 -927791 -742009 -647783 -3478 -28839 -306776 -710164 -139739 -30181 -197048 -39494 -928109 -54156 -869608 -616155 -67562 -480073 -837569 -9899 -129393 -60111 -484368 -903326 -859002 -457729 -486577 -37829 -927943 -952608 -426080 -380672 -224498 -94584 -248949 -846176 -943079 -458798 -22499 -45319 -240845 -170153 -928248 -956172 -409210 -482529 -921794 -20462 -168376 -459966 -661223 -721776 -316733 -74171 -105146 -61782 -249472 -151712 -199993 -92607 -236097 -324269 -777331 -343233 -300533 -140575 -61126 -166763 -13492 -24908 -259994 -922212 -45428 -275113 -266955 -348852 -45409 -460642 -46223 -329329 -624850 -827942 -271041 -129463 -287545 -367417 -811404 -272974 -372133 -60464 -368491 -144526 -590195 -124701 -45346 -770043 -442750 -630275 -455672 -59116 -69471 -251278 -78865 -104316 -134807 -845800 -280482 -956165 -329273 -151479 -28374 -93247 -316302 -344050 -203975 -162924 -373667 -368642 -39919 -258102 -654720 -781253 -158157 -260997 -334158 -151306 -359338 -902647 -458450 -201369 -10551 -318601 -44811 -50574 -811796 -29646 -614671 -308467 -937861 -21669 -76054 -607993 -605813 -923059 -269407 -862615 -814723 -12584 -333468 -367901 -13566 -861940 -858576 -886158 -68678 -223869 -364770 -289284 -45625 -635526 -580194 -494738 -902631 -263698 -214806 -938233 -85569 -83794 -73493 -603909 -26825 -206241 -347352 -589676 -223421 -120522 -663903 -480319 -79250 -693314 -612992 -903559 -21930 -23700 -131253 -763068 -247515 -434717 -347313 -334795 -751536 -119919 -486997 -35933 -83138 -61114 -46723 -922463 -500946 -673518 -480176 -762640 -6746 -62370 -35071 -901270 -179262 -32138 -397305 -903555 -671429 -599788 -439595 -671384 -13279 -208275 -52396 -69481 -54043 -822978 -381215 -295853 -208523 -224700 -34788 -7462 -72104 -387058 -231728 -747974 -6867 -40178 -291243 -284110 -650507 -373294 -819511 -600358 -932591 -574459 -618560 -738223 -76017 -292861 -566328 -492626 -588410 -325356 -871559 -773983 -256702 -249089 -749241 -441705 -120198 -279408 -17485 -934557 -190979 -549699 -88407 -60787 -197355 -124515 -624590 -704385 -334030 -14672 -92148 -304710 -99087 -192565 -210886 -444009 -127619 -46566 -885853 -52281 -554624 -798658 -196751 -68609 -885553 -863642 -364579 -2802 -45131 -29631 -12687 -309689 -908292 -61727 -786689 -82160 -451702 -704107 -227536 -416852 -205909 -80279 -234550 -317326 -88527 -31005 -254933 -64686 -848212 -836310 -134899 -516702 -594497 -267478 -149603 -458684 -897654 -215605 -248854 -956984 -279530 -55009 -333252 -251859 -332364 -717902 -66069 -836068 -807022 -45376 -127923 -582387 -586471 -719173 -881897 -305287 -429159 -160594 -192492 -33066 -215381 -793616 -803196 -89257 -440533 -6663 -459654 -45289 -49591 -133649 -13420 -237788 -73610 -14802 -29114 -240431 -271365 -57044 -197277 -62134 -408732 -477169 -460856 -63623 -363062 -467658 -626005 -524202 -12969 -673974 -250108 -669002 -642006 -475351 -75963 -666221 -25976 -723450 -787777 -786410 -904066 -269020 -276604 -451912 -62853 -936516 -890711 -724087 -381063 -57078 -625734 -80850 -803010 -814769 -355999 -29181 -790503 -337109 -761974 
-154949 -71110 -334085 -356980 -458687 -79814 -93963 -494104 -928110 -792546 -107193 -793604 -167309 -885929 -20311 -297642 -324640 -911528 -663871 -34791 -224888 -497849 -174719 -83968 -768653 -849727 -128145 -95285 -383251 -491815 -625635 -44857 -10691 -703498 -54972 -62759 -179886 -903593 -75780 -769730 -395723 -335265 -743987 -343606 -60186 -180026 -711303 -15679 -45420 -84011 -674325 -15765 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/stilllife_train.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/stilllife_train.jpgl deleted file mode 100644 index 9875b427395c4cf06e5bef5f2e7c2aae625b084a..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/aesthetics_image_lists/stilllife_train.jpgl +++ /dev/null @@ -1,2500 +0,0 @@ -212815 -400673 -192901 -151742 -522586 -507723 -273146 -766251 -255463 -348100 -848809 -300641 -207378 -18206 -956637 -13218 -608897 -41831 -702605 -130491 -956589 -777583 -360006 -606736 -662206 -389013 -939731 -481660 -141631 -60990 -565959 -666502 -357578 -789981 -346738 -816714 -543491 -558565 -140579 -335598 -383439 -691067 -793642 -115833 -557334 -906665 -896121 -313773 -758125 -395323 -309654 -545440 -14827 -93312 -24316 -461263 -335307 -567655 -177789 -677874 -316313 -58373 -738687 -768023 -360210 -12577 -306785 -413082 -14876 -92286 -786238 -510166 -474604 -844092 -174342 -483683 -556976 -88341 -930015 -581710 -188715 -335175 -748061 -309836 -371625 -854809 -126448 -49926 -354339 -332863 -305725 -943254 -719042 -125765 -847714 -679650 -550248 -99797 -335317 -705014 -211943 -903462 -726041 -58186 -35165 -616580 -7516 -64492 -251565 -402083 -389961 -169734 -563549 -62974 -838086 -105014 -64829 -300763 -709883 -847676 -209042 -712973 -34982 -93587 -129297 -356921 -261439 -224067 -142327 -292531 -74063 -13205 -166970 -212056 -18715 -43821 -17290 -80461 -751530 -243036 -129458 -45627 -161866 -28594 -55940 -312579 -43817 -72356 -78598 -6880 -436938 -450984 -792988 -31242 -210683 -64732 -149280 -13026 -11720 -298952 -295875 -900880 -855672 -90866 -518988 -445325 -32224 -105116 -396009 -70177 -96571 -768058 -545782 -807144 -263529 -204615 -369767 -453254 -114048 -38655 -98878 -732710 -808990 -53179 -354399 -761459 -334890 -1561 -206563 -703999 -305342 -334304 -141811 -533938 -432780 -829873 -848971 -279524 -368377 -229721 -94083 -818648 -266199 -627241 -704399 -762566 -8232 -449163 -735599 -100233 -457765 -849517 -839949 -335338 -452320 -126640 -93929 -298771 -155565 -191279 -400294 -191204 -328286 -677947 -37230 -115514 -946126 -459300 -466819 -307102 -275026 -227604 -805443 -77488 -298801 -60845 -138394 -58464 -45208 -263284 -773853 -199890 -387156 -403389 -33933 -306939 -113755 -186942 -940308 -45633 -374508 -591029 -939799 -268262 -278478 -848730 -651412 -764020 -61109 -183301 -25333 -781254 -45347 -612852 -2918 -742317 -687802 -282937 -335472 -462721 -93517 -130824 -368964 -444019 -719954 -51820 -452078 -315494 -663769 -558823 -8284 -9912 -64692 -321279 -395156 -50085 -139949 -687733 -377516 -208665 -301659 -58615 -197190 -830757 -55898 -19984 -132507 -927219 -458002 -726783 -372393 -785680 -46435 -34824 -679595 -8640 -661286 -23729 -431809 -124678 -654658 -208497 -180032 -266041 -6773 -599966 -635460 -649249 -195067 -847709 -769239 -129648 -45497 -330922 -704363 -606583 -936148 -625091 -4043 -61492 -70191 -11187 -271267 -240457 -774365 -704471 -262603 -358663 -577906 -523756 -60706 
-617447 -22135 -837099 -9526 -723736 -209598 -368977 -163651 -606352 -251451 -501660 -66818 -18199 -61845 -827545 -203142 -534773 -10110 -864558 -118036 -457568 -59904 -306205 -196954 -932176 -236002 -664924 -273749 -882742 -334610 -517079 -431241 -469411 -102349 -179233 -76077 -94355 -734279 -316627 -281484 -501937 -882540 -936428 -751188 -212685 -208292 -902926 -489032 -367038 -457893 -885584 -85903 -8863 -151760 -342968 -858178 -127804 -714183 -208484 -13181 -937027 -445781 -44577 -787670 -453076 -750684 -937037 -580956 -9084 -453973 -93311 -9929 -111317 -475097 -45018 -209357 -801181 -58135 -403463 -951916 -42907 -935228 -325367 -68430 -4174 -368161 -643048 -379753 -75408 -7453 -587139 -793943 -294831 -946053 -208145 -693278 -903748 -653967 -193101 -95061 -39910 -391563 -35659 -472549 -93289 -83909 -262894 -247473 -424457 -486496 -204943 -358184 -545007 -59996 -99790 -33623 -25560 -793572 -287004 -333804 -235513 -574860 -734362 -13392 -154047 -180257 -163982 -900640 -415264 -60746 -711240 -565236 -38874 -277072 -118454 -196781 -8838 -453077 -37751 -529049 -404714 -287564 -292810 -112821 -858432 -11948 -624410 -234618 -35038 -588601 -265056 -356161 -897782 -920358 -363311 -926838 -846652 -49865 -58035 -251983 -195848 -460063 -192955 -316688 -887641 -130969 -308528 -491191 -677071 -664691 -765599 -450699 -892973 -566312 -892670 -92240 -860399 -41981 -15010 -308256 -12645 -7864 -222966 -763886 -51183 -129352 -45557 -233292 -114240 -34814 -502379 -810971 -784690 -49814 -654544 -633618 -625567 -435279 -197199 -43451 -103004 -930266 -223382 -20695 -7446 -263119 -244690 -20183 -409600 -416109 -316660 -387042 -364121 -166929 -115419 -13414 -573205 -278081 -74521 -952457 -476968 -723031 -261753 -647689 -49727 -334089 -161968 -475068 -81946 -272998 -318588 -51143 -792786 -661404 -468549 -315416 -17528 -795627 -787862 -9916 -30635 -149462 -929696 -904729 -328598 -307365 -936701 -773908 -747026 -667407 -344363 -946100 -73135 -174423 -235491 -735937 -278102 -674342 -317621 -739586 -368708 -256696 -232819 -316902 -372052 -335061 -124235 -952381 -24446 -799290 -844238 -23310 -48126 -283102 -312378 -249220 -694269 -448851 -459835 -82536 -467440 -366980 -76335 -91131 -127649 -42025 -747965 -33098 -874359 -947074 -296620 -204603 -78957 -308483 -84339 -370290 -934165 -787060 -21988 -559234 -849159 -22936 -747483 -885696 -27620 -956765 -229970 -37448 -786009 -175047 -66075 -9154 -381043 -435281 -322576 -449093 -300316 -745722 -477009 -44791 -17986 -396222 -956955 -223655 -851716 -10350 -782237 -13390 -23407 -48830 -864883 -827234 -701465 -745374 -118872 -774367 -848238 -47221 -464695 -187626 -12989 -258181 -332554 -258020 -44909 -271161 -896961 -301737 -38088 -335221 -17842 -584137 -295882 -515293 -802364 -485157 -22283 -16663 -438006 -133060 -93685 -706079 -859158 -215027 -781754 -14842 -88032 -48569 -487058 -354196 -627880 -527451 -521823 -179255 -338211 -14863 -488344 -864584 -127857 -11260 -114050 -443000 -35809 -45354 -60961 -120209 -354699 -439850 -398218 -7862 -491775 -813297 -590582 -461579 -870439 -123272 -155642 -445522 -781698 -249518 -653751 -764095 -761769 -7839 -126703 -209024 -853063 -262636 -814000 -48487 -782648 -95694 -709323 -709498 -398438 -58391 -20693 -863758 -9219 -194044 -259632 -917268 -232311 -48966 -249133 -264765 -320090 -709905 -369546 -239146 -333573 -268084 -144349 -868610 -705687 -455359 -545763 -846175 -651607 -814279 -848950 -180656 -358144 -12737 -382782 -425687 -197436 -831019 -379412 -913034 -342564 -1494 -119304 -333665 -370127 -903176 -184160 -61730 -57897 -423317 
-68731
-65555
-651766
[... remaining deleted lines of the preceding image-ID list, one "-<image_id>" per line ...]
diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/challenges.txt b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/challenges.txt
deleted file mode 100644
index 6b6bb2d0c33745bff26ecdfab737bdadaf26cef2..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/challenges.txt
+++ /dev/null
@@ -1,1398 +0,0 @@
-1396 100_Meters
-1004 100_Years_Old
-1329 100_Years_Old_II
-608 12_Days_Of_Christmas
[... 1398 "<challenge_id> <challenge_name>" lines in total, ending with "-181 Zodiac" ...]
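For context: the deleted challenges.txt stored one "<challenge_id> <challenge_name>" pair per line. A minimal Python sketch for loading such a file, assuming a local copy at the hypothetical path AVA_dataset/challenges.txt (not part of this diff):

# Sketch: load AVA challenge IDs -> names from a local copy of challenges.txt.
# The path is an assumption; point it at wherever the file actually lives.
def load_challenges(path="AVA_dataset/challenges.txt"):
    challenges = {}
    with open(path, "r") as f:
        for line in f:
            parts = line.strip().split(maxsplit=1)  # "1396 100_Meters"
            if len(parts) == 2:
                challenge_id, name = parts
                challenges[int(challenge_id)] = name
    return challenges

# Example: load_challenges()[1396] would return "100_Meters".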
diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/styles.txt b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/styles.txt
deleted file mode 100644
index 544f637d49401c8ad233effed089721e5983160f..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/styles.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-1 Complementary_Colors
-2 Duotones
-3 HDR
-4 Image_Grain
-5 Light_On_White
-6 Long_Exposure
-7 Macro
-8 Motion_Blur
-9 Negative_Image
-10 Rule_of_Thirds
-11 Shallow_DOF
-12 Silhouettes
-13 Soft_Focus
-14 Vanishing_Point
diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/test.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/test.jpgl
deleted file mode 100644
index b7f3984217b7caa9e8ff95b66dec87d79f96b07e..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/test.jpgl
+++ /dev/null
@@ -1,2809 +0,0 @@
-615561
-615681
-615746
[... 2809 lines in total, one "-<image_id>" per line ...]
diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/test.multilab b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/test.multilab
deleted file mode 100644
index 053f42bbb4811b09a488856b0cfca8e98158774f..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/test.multilab
+++ /dev/null
@@ -1,2809 +0,0 @@
-1 0 0 0 0 0 0 0 0 1 0 1 1 0
-1 0 0 0 0 0 0 0 0 0 0 0 0 0
-1 0 0 0 0 0 1 0 0 0 0 0 0 0
[... 2809 rows in total, one "-<14 space-separated 0/1 flags>" line per image in test.jpgl ...]
0 1 0 0 0 -1 0 0 0 0 0 1 0 0 0 1 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -1 0 0 0 0 0 0 0 0 0 1 0 0 0 -1 0 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 1 0 0 0 -1 0 0 0 0 0 0 1 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 1 0 0 0 -1 0 0 0 0 0 0 0 0 1 1 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 1 0 0 0 -1 0 0 0 0 0 0 0 0 1 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 1 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 1 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 1 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 1 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 1 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 1 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 1 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 
0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 1 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 1 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 1 0 0 -0 1 0 0 0 0 1 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 1 0 0 -0 1 0 0 0 0 0 0 0 1 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 1 -0 1 0 0 0 0 0 0 0 1 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 1 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 1 0 0 0 -0 1 1 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 1 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 0 0 0 1 0 0 0 0 0 1 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 
0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 1 0 0 0 1 0 1 0 -0 1 0 0 0 0 0 0 0 0 0 1 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 1 0 -0 1 0 0 0 0 0 0 0 0 0 1 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 1 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 1 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 1 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 1 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 1 0 0 0 0 -0 0 1 0 0 1 0 0 0 0 0 0 0 0 -1 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -1 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 1 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -1 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 1 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 1 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 1 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 1 1 0 0 1 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -1 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 1 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 
0 0 0 0 0 0 0 0 0 -1 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 1 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 1 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 1 0 0 0 0 -1 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 1 0 0 -0 1 0 1 0 0 0 0 0 0 1 0 0 0 -0 1 0 1 0 0 0 1 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 1 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 1 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 1 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 1 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 1 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 1 0 0 0 -1 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 1 1 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 1 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 1 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 1 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 1 0 0 0 -0 0 0 1 0 0 0 0 0 0 1 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 1 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 1 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 1 0 1 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 1 0 0 0 0 -1 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 1 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 1 0 0 0 1 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 
0 -0 0 0 1 0 0 0 0 0 0 0 1 0 0 -0 0 0 1 0 0 0 0 0 0 0 1 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 1 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 1 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 1 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 1 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 1 0 0 0 0 1 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 1 0 0 0 -0 0 0 1 0 0 0 0 0 0 1 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 1 0 0 0 -0 0 0 1 0 0 0 0 0 1 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 1 0 0 1 0 -1 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 1 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 1 0 -0 0 0 0 0 0 0 0 0 0 0 0 1 0 -0 0 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 1 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 1 0 1 0 0 0 0 0 1 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 1 0 1 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 1 0 0 0 1 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 1 0 0 1 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 1 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 
0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 1 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 1 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 1 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 1 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 1 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 1 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 1 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 1 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 1 0 0 0 0 0 0 0 -0 0 0 1 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 1 0 -0 0 0 0 1 0 1 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 1 0 0 0 -0 
0 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 1 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 1 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -1 0 0 0 0 1 0 0 0 0 0 1 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -1 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 1 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 1 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 1 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 1 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 
0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 1 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 1 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 1 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -1 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 1 0 0 0 -0 0 0 0 0 1 1 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 1 0 0 -0 1 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 1 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -1 0 0 0 0 1 0 0 0 0 0 1 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 1 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -1 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 1 0 0 0 -0 1 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 1 1 1 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -1 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 
0 0 0 1 0 0 0 0 0 0 0 -0 0 0 1 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 1 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 1 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 1 0 0 0 -1 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 1 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 1 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 1 1 0 0 0 -1 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 
0 1 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 1 0 0 0 1 0 0 0 -1 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 1 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 1 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -1 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 1 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 1 0 0 0 0 1 0 0 0 1 0 0 0 -1 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 1 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 1 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -1 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 1 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 1 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 
0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 1 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 1 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 1 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 1 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 1 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 1 0 0 0 1 0 0 1 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 1 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 1 0 0 0 -0 0 0 0 0 0 0 1 0 0 1 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 
0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -1 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 1 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 1 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 1 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 1 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -1 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 1 0 0 0 0 0 0 -0 1 0 0 0 0 0 1 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 1 0 0 0 0 1 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 1 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 1 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 1 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 
0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 1 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 1 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -1 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 1 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 1 -1 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 0 -0 1 0 0 0 0 0 0 1 0 0 0 0 1 -0 0 0 0 0 0 1 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 1 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 1 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 
0 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 1 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 0 0 0 0 -0 0 0 0 0 0 0 0 1 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 0 1 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 0 1 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 1 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 1 0 0 1 0 0 0 0 -0 0 1 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 1 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 1 0 0 0 0 0 1 0 0 0 0 -0 0 0 1 0 0 0 0 0 1 1 0 0 0 -0 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 1 0 -0 0 0 0 0 1 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 1 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 1 0 1 0 0 0 1 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 1 0 0 0 1 0 0 0 0 -0 0 0 1 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 1 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 1 0 0 0 -0 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 1 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 1 0 0 0 1 0 1 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 1 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 1 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 1 0 0 0 0 0 1 0 0 1 0 -0 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 1 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 1 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 1 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 1 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 1 0 0 -0 0 0 1 0 0 0 0 0 1 0 0 1 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 1 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 0 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 1 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 1 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 1 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 0 0 0 -0 0 0 0 0 0 1 0 0 1 0 0 0 0 -0 0 0 0 0 0 0 0 0 1 0 1 0 0 -0 0 0 0 0 0 0 0 0 
[... remaining deleted lines of this file omitted: each is a row of 14 space-separated 0/1 label flags ...]
diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/train.jpgl b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/train.jpgl
deleted file mode 100644
index cd34ad82e6fc492131a30a6ce4fb8db66f070f5b..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/train.jpgl
+++ /dev/null
@@ -1,11270 +0,0 @@
-615485
-615595
-615709
-615751
-616023
[... remaining 11,265 deleted lines omitted: one AVA image ID per line ...]
-96800 -96825 -96852 -96865 -96877 -96895 -96907 -96913 -96922 -96931 -96941 -96954 -96964 -96975 -96981 -96987 -96994 -96999 -97005 -509928 -510227 -510413 -510486 -510632 -510703 -510771 -510831 -510934 -511025 -511069 -511170 -511232 -511474 -511554 -511640 -511738 -511790 -511920 -512031 -512206 -512284 -512348 -512411 -512442 -512473 -512503 -512541 -512570 -512599 -512661 -512689 -512727 -512753 -512791 -512828 -512874 -512892 -1187 -1300 -1350 -801174 -801397 -235765 -236529 -237262 -603781 -924788 -374861 -376039 -376946 -377273 -193041 -194321 -195269 -195622 -195832 -196198 -196216 -196329 -196425 -933852 -934535 -647377 -647612 -647893 -648054 -648352 -648601 -648752 -648851 -648997 -649040 -13366 -396508 -397349 -398154 -398501 -64312 -64484 -64680 -64765 -64862 -856904 -857896 -858142 -615501 -615627 -615731 -615818 -616042 -616135 -616232 -616374 -616541 -616577 -616691 -616769 -616893 -616927 -616982 -617096 -617139 -617187 -617231 -617275 -617297 -617311 -617324 -617350 -617373 -617390 -617405 -617414 -617429 -617446 -617467 -617477 -617486 -617500 -21477 -21578 -21669 -21714 -21756 -21803 -21831 -21852 -21895 -21920 -21931 -21948 -21957 -21966 -21978 -21990 -21995 -323369 -323596 -323632 -323719 -323807 -323952 -324034 -324164 -324280 -324385 -324452 -324528 -324609 -324625 -324674 -324742 -324827 -324901 -324957 -325073 -325131 -325182 -325204 -325366 -325391 -325443 -325483 -325530 -325616 -325813 -325881 -325904 -325966 -325978 -326028 -326058 -326078 -326097 -326111 -326144 -326156 -326207 -326226 -326252 -326330 -326360 -326392 -326409 -326423 -326448 -326475 -326489 -326497 -326528 -326570 -326580 -326593 -918056 -918214 -918276 -918350 -918449 -918601 -918658 -918743 -918861 -918916 -918954 -918983 -919013 -919074 -919091 -919120 -919131 -919155 -919185 -237469 -237677 -237779 -237907 -237991 -238082 -238170 -238244 -238374 -238451 -238479 -238590 -238759 -238792 -238881 -238926 -238968 -239143 -239194 -239348 -239392 -239482 -239558 -239601 -239617 -239643 -239676 -239721 -239738 -239745 -239812 -239841 -239905 -239964 -240015 -240059 -240074 -240114 -240139 -240181 -240198 -240225 -240231 -240270 -240291 -240322 -240356 -240368 -240385 -240399 -240416 -240431 -240449 -240466 -240493 -240512 -240529 -240538 -240553 -240576 -240589 -240597 -240608 -296044 -296151 -296179 -296248 -296290 -296328 -296367 -296380 -296410 -296447 -296458 -296487 -296530 -296620 -296686 -296755 -296803 -296827 -296838 -296899 -296926 -296964 -296981 -297001 -297034 -297051 -297067 -297112 -297133 -297219 -297225 -297261 -297278 -297316 -297360 -297406 -297423 -297444 -297481 -297506 -297536 -297552 -297569 -297587 -297638 -297682 -297706 -297751 -297801 -297849 -297881 -297916 -297938 -297955 -297970 -297985 -298001 -298028 -298039 -298054 -298089 -298104 -298129 -298140 -298151 -298163 -298189 -298216 -298249 -298261 -298281 -298308 -298325 -298335 -298363 -298396 -298417 -298426 -298431 -298440 -298450 -298460 -298472 -298484 -298499 -298505 -298524 -298540 -298549 -298559 -298566 -298583 -298604 -298612 -298617 -298625 -298632 -298638 -298650 -298687 -298713 -298723 -298728 -298736 -298749 -298760 -298778 -298801 -298828 -298848 -298854 -298865 -298875 -298884 -298894 -298900 -298913 -298926 -298952 -298960 -298974 -298998 -609334 -609514 -609696 -609873 -610079 -610182 -610242 -610359 -610411 -610523 -610587 -610616 -610677 -610745 -610809 -610885 -610923 -610934 -611020 -611063 -611104 -611195 -611332 -611383 -611397 -611411 -611437 -611465 -611474 -611485 -611506 -611522 -611534 
-611553 -611559 -611577 -611588 -611607 -611626 -611646 -611693 -611704 -611727 -611734 -611739 -611747 -611756 -611764 -611790 -611803 -611816 -611825 -611834 -612223 -612234 -612258 -612272 -612283 -797702 -797888 -798167 -798253 -798273 -798382 -798503 -798616 -798702 -798756 -798799 -798830 -798866 -798900 -798936 -798962 -799027 -799060 -799090 -799110 -799134 -799149 -799179 -799191 -799202 -799218 -799234 -799242 -799255 -799264 -799273 -799278 -799289 -799302 -799310 -799319 -799331 -799343 -799356 -799362 -799368 -799375 -799387 -799404 -799416 -799422 -799432 -799438 -799445 -799450 -799455 -22054 -22117 -22199 -22257 -22283 -22307 -22362 -22405 -22429 -22462 -22470 -22490 -22614 -22626 -22635 -22665 -22686 -22693 -22699 -22705 -22726 -22732 -22743 -22751 -22757 -22767 -22776 -22784 -22790 -887333 -887852 -887925 -888090 -888140 -888178 -888201 -888241 -888284 -888352 -888372 -888767 -888794 -888817 -888829 -888849 -888911 -888933 -888954 -888968 -888985 -888995 -889033 -889049 -889073 -889091 -889192 -889210 -889225 -889250 -889259 -889277 -820036 -820171 -820368 -820455 -820509 -820530 -820575 -820631 -820692 -820717 -820735 -820779 -820806 -820838 -820860 -820896 -820907 -820919 -820941 -820960 -820971 -820986 -820991 -821003 -821017 -821040 -821049 -821066 -821090 -821096 -821102 -426152 -426253 -426317 -426392 -426414 -426484 -426501 -426533 -426564 -426601 -426669 -426717 -426745 -426766 -426789 -426804 -480683 -480727 -480824 -480970 -481149 -481209 -481340 -481434 -481510 -481574 -481671 -481721 -481773 -481850 -481893 -481956 -482013 -482058 -482155 -482198 -482264 -482322 -482387 -482468 -482510 -482624 -482728 -482865 -482899 -482980 -483010 -483082 -483123 -483147 -483163 -483181 -483235 -483266 -483298 -483311 -483345 -483390 -483412 -483520 -483540 -483582 -483592 -483603 -483611 -483623 -483643 -483662 -483683 -483699 -483732 -483747 -483755 -483765 -483790 -483813 -483828 -483841 -483851 -483862 -483873 -483882 -483898 -483914 -483925 -483931 -483957 -483973 -483984 -483989 -484002 -484013 -484020 -484027 -801277 -801728 -801869 -802027 -802207 -802340 -802364 -802381 -802435 -802478 -802495 -802522 -802565 -802572 -802608 -802627 -802640 -802650 -802664 -802671 -802715 -802722 -802732 -802744 -802758 -802783 -802801 -802817 -802827 -802837 -802851 -802858 -802872 -802887 -802899 -245296 -245640 -245931 -245984 -246275 -246506 -246707 -246765 -246836 -246927 -246969 -247147 -247305 -247330 -247358 -247482 -247494 -247522 -247606 -247696 -247798 -247831 -247868 -247929 -247979 -248004 -248034 -248051 -248065 -248081 -248108 -248117 -248132 -248165 -248210 -248235 -248245 -248255 -248265 -248280 -248296 -248346 -248362 -248377 -248387 -248394 -248412 -248422 -248436 -248447 -248467 -248482 -248496 -248518 -248529 -429846 -430159 -430421 -430594 -430707 -430842 -430897 -431012 -431072 -431095 -431140 -431232 -431296 -431308 -431362 -431402 -431441 -431538 -431598 -431636 -431659 -431705 -431755 -431762 -431802 -431817 -431852 -431873 -431885 -431895 -431907 -431931 -431947 -431952 -431976 -431995 -432011 -432024 -432032 -432059 -432069 -432076 -432082 -432090 -432099 -432106 -432112 -951400 -951467 -951615 -951752 -951916 -952014 -952145 -952229 -952297 -952325 -952343 -952362 -952388 -952402 -952407 -952422 -952448 -952457 -767647 -768058 -768369 -768448 -768571 -768664 -768703 -768766 -768850 -768910 -768969 -769017 -769129 -769171 -769255 -769288 -769299 -769326 -769351 -769373 -769384 -769427 -769434 -769450 -769470 -769478 -769489 -769510 -769523 -769540 -769550 
-769558 -769586 -769596 -769603 -769608 -246699 -246867 -246953 -247077 -247151 -247263 -247335 -247402 -247489 -247590 -247667 -247816 -247908 -248006 -248123 -248194 -248285 -248438 -248548 -248592 -248802 -248860 -248894 -248915 -248956 -248976 -248994 -249022 -249046 -249089 -249145 -249193 -249204 -249249 -249274 -249312 -249336 -249377 -249392 -249402 -249415 -249427 -249441 -249462 -249484 -249520 -249547 -249564 -249571 -249581 -249594 -249600 -249608 -149663 -149789 -149818 -149870 -149904 -149994 -150044 -150058 -150110 -150159 -150180 -150215 -150251 -150284 -150301 -150309 -150337 -150359 -150382 -150429 -150448 -150478 -150516 -150527 -150546 -150588 -150610 -150650 -150691 -150705 -150732 -150753 -150769 -150806 -150834 -150865 -150904 -150957 -150982 -151110 -151165 -151240 -151270 -151296 -151312 -151326 -151357 -151370 -151375 -151392 -151399 -151423 -151438 -151456 -151465 -151479 -151488 -151501 -151517 -151566 -151579 -151591 -151614 -151646 -151654 -151672 -151683 -151688 -151702 -151717 -151728 -151744 -151760 -151769 -151786 -151795 -151806 -151823 -151831 -151840 -151845 -151851 -151859 -151865 -151871 -151877 -622023 -622229 -622382 -622592 -622672 -622745 -622844 -623048 -623178 -623194 -623252 -623279 -623325 -623359 -623380 -623480 -623500 -623529 -623576 -623661 -623680 -623697 -623713 -623753 -623785 -623810 -623837 -623845 -623854 -623861 -623873 -721062 -721206 -721382 -721704 -721808 -721884 -721908 -722010 -722140 -722289 -722452 -722486 -722563 -722592 -722666 -722701 -722725 -722756 -722768 -722786 -722803 -722861 -722909 -722919 -722925 -420008 -420222 -420585 -420929 -421282 -421549 -421663 -421715 -421823 -422084 -422139 -422274 -422470 -422523 -422557 -422654 -422686 -422718 -422754 -422862 -422970 -423013 -423032 -423046 -423054 -423085 -423117 -423143 -423150 -423176 -423192 -423207 -423231 -423240 -423254 -423263 -423271 -423283 -423292 -423300 -423324 -423355 -423361 -423380 -423400 -423421 -423428 -423434 -209188 -209519 -209823 -209941 -210136 -210330 -210441 -210513 -210692 -210764 -210840 -210927 -210977 -211023 -211213 -211306 -211357 -211431 -211465 -211479 -211501 -211550 -211576 -211632 -211677 -211697 -211720 -211756 -211771 -211786 -211793 -211800 -211821 -211838 -211849 -211861 -211872 -211887 -211895 -211900 -890400 -890551 -890804 -890843 -890971 -891040 -891204 -891259 -891305 -891327 -891389 -891417 -891501 -891524 -891535 -891552 -891578 -891624 -891651 -891681 -891703 -891752 -891794 -891817 -891846 -37929 -38005 -38052 -38109 -38181 -38228 -38261 -38296 -38341 -38390 -38408 -38449 -38476 -38520 -38608 -38628 -38660 -38678 -38721 -38727 -38754 -38774 -38803 -38813 -38836 -38858 -38871 -38876 -38888 -38900 -38914 -38935 -38951 -38962 -38968 -38976 -38989 -38996 -39002 -39008 -196767 -196888 -197022 -197179 -197427 -197624 -197764 -197846 -197894 -197917 -197960 -198048 -198115 -198154 -198220 -198256 -198284 -198338 -198376 -198423 -198480 -198505 -198569 -198623 -198655 -198700 -198756 -198786 -198802 -198830 -198921 -198935 -198977 -199036 -199048 -199074 -199106 -199126 -199158 -199175 -199216 -199251 -199279 -199292 -199304 -199314 -199328 -199337 -199345 -199351 -199380 -199398 -199406 -199413 -199425 -199441 -199447 -199466 -199475 -199492 -199511 -199520 -199528 -199536 -199543 -199581 -199589 -199596 -199608 -199635 -199650 -199656 -199666 -199672 -911492 -911709 -911916 -911981 -912067 -912265 -912345 -912419 -912461 -912489 -912505 -912542 -912559 -912607 -912621 -912640 -912647 -912655 -912666 -912687 -912701 -912706 
-912721 -912733 -912740 -912748 -912759 -912767 -912775 -912795 -912802 -912809 -912815 -50909 -51103 -51148 -51176 -51252 -51333 -51360 -51385 -51439 -51481 -51514 -51554 -51571 -51594 -51618 -51629 -51648 -51663 -51685 -51693 -51705 -51717 -51722 -51734 -51744 -51759 -51770 -51778 -51790 -51799 -51807 -51819 -51824 -93635 -93723 -93794 -93870 -93958 -94029 -94128 -94228 -94245 -94280 -94329 -94364 -94390 -94401 -94445 -94470 -94485 -94513 -94536 -94566 -94617 -94652 -94677 -94709 -94739 -94773 -94793 -94865 -94928 -94948 -94980 -94999 -95015 -95042 -95057 -95077 -95108 -95134 -95158 -95170 -95186 -95201 -95225 -95239 -95251 -95272 -95295 -95313 -95323 -95338 -597321 -597506 -597673 -598083 -598384 -598554 -598830 -599005 -599129 -599203 -599250 -599277 -599340 -599369 -599437 -599480 -599586 -599619 -599661 -599679 -599709 -599787 -599846 -599901 -599936 -599948 -599967 -599985 -600021 -600044 -600061 -600075 -600099 -600112 -600129 -600141 -600184 -600196 -600204 -600226 -600251 -600302 -600311 -600321 -600337 -600352 -600368 -600376 -600385 -7869 -7887 -7896 -7904 -7911 -7918 -7926 -7931 -7938 -7943 -7950 -7956 -7965 -7976 -7984 -7989 -7999 -8012 -8019 -8026 -8037 -8043 -8048 -8057 -8065 -8070 -8076 -8085 -8094 -8104 -8109 -8118 -8126 -8131 -8138 -8146 -8157 -8163 -8170 -8176 -8182 -8190 -8196 -8202 -8212 -8219 -8226 -8232 -8237 -8242 -8248 -8253 -8258 -8265 -8270 -8275 -8281 -8288 -8297 -8303 -8308 -350052 -351039 -351489 -351741 -351911 -352294 -352486 -352731 -352819 -352892 -353012 -353054 -353137 -353168 -353218 -353283 -353306 -353328 -353351 -353375 -353388 -353404 -353412 -353440 -353468 -353510 -353516 -353569 -353614 -353649 -353671 -353716 -353734 -353741 -353751 -66053 -66446 -66669 -66799 -66889 -66927 -66962 -66977 -66987 -67016 -67027 -67041 -67050 -67063 -67082 -67107 -67119 -67133 -67141 -67155 -67163 -67169 -67176 -923239 -923318 -923471 -923516 -923612 -923670 -923745 -923865 -923991 -924018 -924046 -924061 -924097 -924123 -924166 -924177 -924187 -924220 -924230 -924241 -924258 -924267 -924285 -924293 -733011 -733078 -733244 -733319 -733364 -733430 -733513 -733548 -733619 -733677 -733791 -733946 -733969 -734032 -734159 -734191 -734203 -734225 -734256 -734285 -734326 -734337 -734385 -734401 -734432 -734476 -734505 -734524 -734534 -734554 -734573 -734595 -734613 -734626 -734650 -734657 -734671 -734703 -734722 -734748 -323336 -323411 -323527 -323550 -323595 -323645 -323716 -323742 -323790 -323922 -323969 -323984 -324026 -324100 -324148 -324201 -324241 -324275 -324333 -324342 -324380 -324418 -324429 -324449 -324520 -324555 -324590 -324640 -324652 -324725 -324787 -324847 -324860 -324907 -324949 -324975 -325010 -325052 -325091 -325115 -325148 -325172 -325191 -325314 -325352 -325377 -325403 -325442 -325456 -325489 -325509 -325528 -325606 -325682 -325846 -325890 -325903 -325953 -325982 -326012 -326029 -326054 -326067 -326100 -326116 -326131 -326147 -326177 -326194 -326211 -326246 -326265 -326274 -326284 -326336 -326352 -326386 -326398 -326411 -326425 -326442 -326458 -326467 -326494 -326506 -326584 -667441 -667570 -667659 -667793 -667859 -668011 -668048 -668120 -668193 -668230 -668288 -668344 -668394 -668450 -668500 -668559 -668589 -668622 -668652 -668682 -668710 -668936 -668949 -669003 -669093 -669166 -669250 -669277 -669293 -669351 -669388 -669408 -669435 -669452 -669486 -669521 -669548 -669568 -669574 -669606 -669636 -669655 -669664 -669677 -669713 -669742 -669750 -669768 -669779 -669785 -669800 -669808 -669820 -669829 -669839 -669848 -669853 -669862 -669871 -669883 
-669895 -669921 -669938 -669955 -669970 -669984 -669999 -670022 -670033 -670046 -670056 -910837 -910896 -910917 -911017 -911074 -911145 -911157 -911197 -911256 -911265 -911283 -911308 -911376 -911419 -911449 -911526 -911544 -911556 -911575 -911606 -911676 -911685 -911717 -911758 -911767 -911778 -911789 -911811 -911822 -911838 -911854 -911863 -911873 -911885 -911896 -815334 -815455 -815665 -815809 -815865 -815922 -815988 -816036 -816100 -816130 -816182 -816234 -816308 -816359 -816392 -816401 -816426 -816470 -816512 -816537 -816561 -816582 -816607 -816654 -816679 -816698 -816714 -816731 -816757 -816780 -816809 -816833 -816865 -816878 -816892 -816905 -816913 -816922 -816927 -934264 -934647 -934782 -934863 -934929 -934967 -935052 -935128 -935195 -935224 -935259 -935336 -935358 -935386 -935417 -935442 -935453 -935479 -935492 -935517 -935533 -935547 -935573 -935601 -935620 -935632 -935648 -935661 -935678 -935689 -231489 -231838 -231973 -232107 -232227 -232301 -232341 -232388 -232438 -232475 -232550 -232585 -232645 -232714 -232755 -232791 -232814 -232838 -232873 -232900 -232959 -233031 -233075 -233113 -233277 -233337 -233373 -233407 -233776 -233805 -233819 -233869 -233909 -233918 -233930 -233945 -233977 -234020 -234054 -234103 -234134 -234158 -234174 -234189 -234267 -234279 -234293 -234302 -234315 -234328 -234363 -234392 -234408 -234423 -234444 -234454 -234465 -234485 -234500 -234508 -234535 -234553 -234577 -234589 -234601 -234609 -234622 -234632 -234644 -234656 -499486 -499850 -500035 -500134 -500236 -500310 -500392 -500475 -500498 -500527 -500566 -500651 -500728 -500801 -500876 -500934 -500998 -501023 -501055 -501095 -501128 -501164 -501188 -501263 -501308 -501388 -501406 -501453 -501503 -501575 -501597 -501621 -501682 -501701 -501736 -501764 -501807 -501823 -501841 -501853 -501866 -501916 -501989 -502015 -502031 -502052 -502068 -502087 -502104 -502120 -502170 -502196 -502227 -502241 -502264 -502323 -502354 -502371 -502386 -502395 -502428 -502443 -502454 -502467 -502522 -502535 -502567 -502600 -502619 -502629 -502643 -502651 -502660 -502681 -502691 -388219 -388695 -388924 -389176 -389273 -389524 -389606 -389672 -389796 -389899 -389940 -390000 -390066 -390122 -390172 -390341 -390395 -390505 -390584 -390609 -390669 -390727 -390787 -390849 -390866 -390930 -390965 -390986 -391025 -391062 -391083 -391109 -391140 -391173 -391193 -391226 -391240 -391260 -391290 -391317 -391366 -391389 -391408 -391425 -391435 -391451 -391501 -391522 -391534 -391564 -391576 -391591 -391621 -391633 -391652 -391663 -391672 -391696 -391715 -391728 -391764 -391779 -391794 -391811 -457870 -457900 -458090 -458116 -458189 -458348 -458429 -458511 -458646 -458753 -458893 -459022 -459180 -459287 -459330 -459340 -459378 -459396 -459447 -459507 -459558 -459671 -459794 -459836 -459881 -459905 -459966 -460008 -460091 -460152 -460206 -460237 -460355 -460393 -460431 -460444 -460486 -460519 -460567 -460610 -460635 -460658 -460721 -460791 -460809 -460860 -460893 -460910 -460983 -460994 -461023 -461055 -461093 -461103 -461128 -461158 -461206 -461242 -461283 -461301 -461324 -461341 -461357 -461377 -461412 -461426 -461443 -461458 -461480 -461514 -461555 -461565 -461573 -461587 -461595 -461607 -461618 -461628 -461649 -461657 -461673 -461691 -58302 -58473 -58554 -58624 -58650 -58675 -58781 -58829 -58851 -58863 -58898 -58924 -58945 -58960 -59006 -59018 -59027 -59042 -59058 -59081 -59097 -59114 -59125 -59155 -59174 -59200 -59214 -59225 -59238 -59249 -59261 -59276 -59298 -59303 -59309 -717874 -717958 -717984 -718164 -718184 -718254 -718337 
-718455 -718560 -718597 -718615 -718649 -718690 -718756 -718807 -718853 -718884 -718959 -718992 -719037 -719053 -719118 -719158 -719234 -719354 -719419 -719442 -719458 -719485 -719507 -719534 -719552 -719593 -719622 -719647 -719658 -719797 -719853 -719882 -719924 -719958 -719975 -720014 -720022 -720038 -720055 -720063 -720073 -720087 -720117 -720130 -720138 -720147 -720156 -720162 -720170 -720176 -720200 -720213 -720242 -720249 -720259 -720289 -720297 -269071 -269189 -269231 -269273 -269331 -269397 -269494 -269534 -269573 -269620 -269692 -269792 -269843 -269891 -269948 -269959 -269977 -270062 -270126 -270179 -270222 -270246 -270307 -270366 -270416 -270438 -270455 -270490 -270524 -270595 -270610 -270641 -270678 -270700 -270721 -270747 -270780 -270799 -270825 -270879 -270910 -270925 -270952 -270991 -271003 -271030 -271045 -271074 -271090 -271104 -271110 -271120 -271135 -271165 -271210 -271222 -271236 -271249 -271266 -271278 -271299 -271316 -271325 -271335 -271346 -271357 -271365 -271376 -271384 -898464 -898549 -898663 -898745 -898832 -898911 -898943 -898971 -899031 -899053 -899086 -899093 -899166 -899183 -899210 -899219 -899252 -899267 -899298 -899309 -899326 -899342 -899376 -899388 -899406 -899430 -899440 -899459 -899471 -899493 -899507 -899521 -899532 -899548 -899561 -899576 -899584 -899596 -899610 -899615 -899625 -899630 -899637 -899644 -899649 -899654 -586008 -586383 -586531 -586663 -586848 -586932 -587068 -587244 -587359 -587450 -587546 -587628 -587754 -587888 -587957 -587987 -588050 -588082 -588117 -588150 -588187 -588222 -588251 -588276 -588297 -588326 -588342 -588376 -588401 -588420 -588453 -588485 -588521 -588540 -588555 -588565 -588581 -588608 -588651 -588657 -588680 -588714 -588729 -588750 -588760 -588772 -588792 -588803 -588817 -588834 -588854 -588898 -588924 -588962 -175911 -176117 -176208 -176338 -176450 -176595 -176889 -177036 -177122 -177348 -177447 -177478 -177505 -177569 -177648 -177672 -177715 -177766 -177803 -177905 -177946 -177975 -177999 -178037 -178066 -178116 -178138 -178196 -178239 -178255 -178270 -178293 -178324 -178342 -178358 -178372 -178427 -178444 -178459 -178482 -178490 -178499 -178529 -178536 -178553 -178562 -178568 -178574 -178581 -384203 -384380 -384492 -384759 -384881 -385076 -385135 -385168 -385367 -385516 -385668 -385703 -385893 -385933 -386025 -386099 -386138 -386228 -386258 -386307 -386347 -386400 -386481 -386563 -386617 -386684 -386742 -386785 -386859 -386912 -386978 -387028 -387067 -387084 -387107 -387140 -387157 -387229 -387253 -387302 -387331 -387393 -387455 -387472 -387517 -387530 -387559 -387600 -387620 -387654 -387672 -387692 -387727 -387735 -387763 -387778 -387791 -387808 -387831 -387846 -387857 -387867 -387875 -387893 -387910 -387939 -387970 -388008 -388020 -69737 -70129 -70252 -70298 -70320 -70382 -70437 -70496 -70536 -70570 -70600 -70633 -70649 -70671 -70699 -70708 -70719 -70734 -70749 -70779 -70792 -70802 -70818 -70833 -70846 -70885 -70899 -70904 -70914 -70919 -70927 -70934 -70945 -70955 -381364 -381541 -381686 -381825 -381955 -382111 -382190 -382237 -382334 -382430 -382616 -382668 -382711 -382742 -382800 -382859 -382957 -383018 -383115 -383178 -383230 -383271 -383338 -383384 -383414 -383498 -383560 -383622 -383658 -383716 -383726 -383761 -383791 -383835 -383868 -383899 -383932 -383969 -384030 -384044 -384075 -384085 -384103 -384119 -384134 -384156 -384164 -384190 -48465 -48572 -48714 -48771 -48815 -48894 -48921 -49001 -49026 -49038 -49080 -49090 -49105 -49117 -49126 -49133 -49143 -49165 -49193 -49198 -589115 -589320 -589551 -589680 -589919 
-590053 -590245 -590413 -590501 -590616 -590716 -590815 -590933 -591051 -591096 -591164 -591204 -591247 -591298 -591457 -591515 -591543 -591574 -591591 -591633 -591665 -591684 -591704 -591727 -591774 -591792 -591797 -591814 -591826 -591831 -676998 -677071 -677182 -677366 -677412 -677467 -677527 -677580 -677654 -677701 -677736 -677802 -677871 -677935 -677993 -678037 -678140 -678164 -678222 -678250 -678288 -678354 -678367 -678389 -678422 -678442 -678465 -678481 -678498 -678512 -678550 -678563 -678571 -678583 -678599 -678612 -678625 -678639 -678649 -95797 -95839 -95919 -96050 -96124 -96154 -96203 -96232 -96299 -96331 -96356 -96375 -96393 -96406 -96418 -96455 -96474 -96495 -96520 -96564 -96621 -96640 -96674 -96697 -96711 -96732 -96751 -96762 -96781 -96791 -96801 -96846 -96858 -96869 -96878 -96899 -96908 -96915 -96923 -96936 -96948 -96956 -96966 -96977 -96983 -96988 -96995 -97000 -97007 -510019 -510238 -510426 -510512 -510641 -510710 -510775 -510882 -510978 -511039 -511080 -511188 -511244 -511476 -511561 -511671 -511752 -511791 -511927 -512087 -512236 -512293 -512354 -512413 -512446 -512482 -512521 -512546 -512577 -512615 -512663 -512696 -512746 -512758 -512812 -512842 -512878 -512897 -1270 -1307 -1351 -801184 -801441 -235915 -236753 -602003 -924455 -374418 -375421 -376053 -376952 -377407 -193084 -194746 -195397 -195675 -195839 -196208 -196251 -196347 -933530 -933958 -934566 -647459 -647650 -647927 -648098 -648492 -648654 -648766 -648858 -649001 -649055 -13396 -396871 -397814 -398334 -398513 -64378 -64507 -64715 -64813 -64908 -857521 -857946 -858165 -615518 -615652 -615744 -615830 -616044 -616159 -616242 -616390 -616544 -616592 -616724 -616854 -616901 -616954 -617006 -617097 -617152 -617211 -617234 -617289 -617298 -617313 -617328 -617351 -617376 -617396 -617407 -617418 -617440 -617448 -617468 -617480 -617492 -617509 -21500 -21596 -21676 -21740 -21774 -21805 -21842 -21857 -21899 -21922 -21932 -21949 -21960 -21968 -21983 -21991 -323279 -323372 -323599 -323635 -323735 -323814 -323995 -324088 -324190 -324309 -324399 -324462 -324544 -324612 -324629 -324689 -324746 -324881 -324929 -324969 -325081 -325143 -325196 -325235 -325370 -325396 -325449 -325500 -325534 -325631 -325824 -325882 -325906 -325970 -325980 -326038 -326068 -326080 -326106 -326123 -326146 -326158 -326210 -326231 -326259 -326333 -326374 -326395 -326410 -326427 -326460 -326477 -326491 -326498 -326529 -326571 -326581 -917919 -918057 -918226 -918298 -918352 -918510 -918608 -918661 -918750 -918866 -918918 -918958 -918990 -919016 -919075 -919094 -919121 -919132 -919167 -919189 -237531 -237688 -237788 -237938 -238048 -238133 -238172 -238251 -238399 -238453 -238499 -238681 -238770 -238821 -238897 -238929 -238971 -239156 -239230 -239365 -239393 -239501 -239562 -239606 -239621 -239644 -239677 -239733 -239740 -239746 -239814 -239886 -239907 -240002 -240043 -240064 -240081 -240124 -240142 -240186 -240200 -240226 -240232 -240273 -240304 -240350 -240358 -240375 -240386 -240408 -240417 -240434 -240452 -240472 -240495 -240513 -240531 -240542 -240556 -240578 -240591 -240602 -240610 -296080 -296155 -296190 -296259 -296311 -296335 -296368 -296381 -296421 -296449 -296459 -296515 -296576 -296623 -296688 -296772 -296805 -296830 -296859 -296902 -296938 -296966 -296983 -297008 -297038 -297055 -297073 -297114 -297141 -297220 -297233 -297265 -297298 -297336 -297363 -297408 -297427 -297456 -297488 -297512 -297546 -297557 -297571 -297598 -297648 -297685 -297708 -297773 -297803 -297855 -297882 -297925 -297939 -297959 -297972 -297986 -298010 -298030 -298041 -298064 
-298097 -298108 -298130 -298141 -298152 -298164 -298192 -298242 -298253 -298263 -298288 -298309 -298327 -298339 -298371 -298411 -298419 -298427 -298432 -298444 -298451 -298461 -298476 -298485 -298500 -298509 -298526 -298543 -298550 -298560 -298567 -298585 -298606 -298613 -298618 -298626 -298634 -298639 -298654 -298700 -298714 -298724 -298730 -298738 -298750 -298762 -298779 -298810 -298829 -298850 -298855 -298870 -298877 -298887 -298895 -298901 -298917 -298927 -298953 -298962 -298975 -609237 -609350 -609593 -609783 -609907 -610088 -610184 -610284 -610368 -610413 -610529 -610592 -610635 -610678 -610746 -610847 -610894 -610926 -610939 -611022 -611066 -611120 -611205 -611333 -611386 -611401 -611416 -611452 -611467 -611475 -611494 -611509 -611523 -611537 -611554 -611565 -611578 -611590 -611610 -611629 -611653 -611694 -611707 -611728 -611735 -611740 -611748 -611757 -611768 -611793 -611805 -611819 -611826 -611835 -612229 -612238 -612260 -612274 -612284 -797730 -797905 -798172 -798261 -798354 -798400 -798511 -798617 -798707 -798763 -798803 -798835 -798867 -798903 -798948 -798986 -799033 -799061 -799091 -799112 -799135 -799151 -799180 -799192 -799211 -799224 -799237 -799244 -799258 -799265 -799274 -799283 -799290 -799303 -799311 -799322 -799333 -799345 -799357 -799364 -799369 -799380 -799391 -799406 -799417 -799425 -799433 -799439 -799446 -799451 -799457 -22057 -22145 -22213 -22267 -22284 -22308 -22371 -22409 -22450 -22463 -22471 -22491 -22617 -22627 -22637 -22667 -22688 -22695 -22700 -22708 -22728 -22733 -22744 -22752 -22758 -22768 -22777 -22785 -22791 -887550 -887873 -887973 -888110 -888142 -888179 -888210 -888268 -888286 -888359 -888376 -888771 -888796 -888819 -888831 -888855 -888914 -888934 -888957 -888969 -888986 -888997 -889034 -889065 -889074 -889092 -889193 -889214 -889240 -889253 -889264 -889278 -820058 -820206 -820373 -820465 -820513 -820549 -820590 -820634 -820694 -820719 -820748 -820781 -820828 -820844 -820878 -820900 -820908 -820922 -820944 -820962 -820974 -820987 -820994 -821004 -821025 -821041 -821053 -821070 -821091 -821097 -821103 -426155 -426266 -426340 -426396 -426420 -426485 -426503 -426539 -426569 -426609 -426691 -426729 -426753 -426768 -426790 -426813 -480689 -480746 -480847 -480990 -481180 -481222 -481352 -481440 -481518 -481616 -481687 -481722 -481806 -481871 -481897 -481959 -482015 -482093 -482173 -482217 -482268 -482326 -482389 -482473 -482514 -482641 -482738 -482878 -482900 -482989 -483012 -483083 -483124 -483155 -483167 -483197 -483246 -483279 -483299 -483314 -483351 -483392 -483430 -483523 -483560 -483587 -483593 -483605 -483613 -483626 -483653 -483665 -483684 -483701 -483735 -483748 -483758 -483770 -483795 -483814 -483829 -483842 -483852 -483864 -483874 -483885 -483899 -483919 -483926 -483933 -483963 -483974 -483985 -483991 -484003 -484014 -484023 -484028 -801308 -801773 -801872 -802074 -802222 -802347 -802371 -802409 -802456 -802479 -802499 -802523 -802566 -802573 -802614 -802629 -802642 -802651 -802666 -802672 -802717 -802727 -802734 -802745 -802760 -802784 -802804 -802818 -802828 -802838 -802853 -802859 -802874 -802889 -802900 -245466 -245659 -245939 -245994 -246279 -246508 -246709 -246796 -246890 -246933 -247045 -247148 -247306 -247331 -247361 -247483 -247506 -247534 -247649 -247704 -247813 -247833 -247875 -247933 -247980 -248007 -248035 -248054 -248066 -248083 -248109 -248124 -248151 -248175 -248217 -248238 -248246 -248256 -248269 -248286 -248336 -248347 -248364 -248381 -248388 -248401 -248415 -248424 -248439 -248460 -248468 -248483 -248498 -248524 -248530 -429891 
-430232 -430445 -430637 -430748 -430865 -430918 -431034 -431073 -431096 -431156 -431241 -431299 -431310 -431374 -431405 -431463 -431547 -431610 -431639 -431666 -431720 -431756 -431764 -431804 -431822 -431858 -431874 -431886 -431897 -431908 -431934 -431948 -431953 -431978 -432002 -432014 -432025 -432041 -432061 -432072 -432077 -432084 -432091 -432101 -432107 -951282 -951408 -951490 -951616 -951757 -951918 -952025 -952206 -952230 -952312 -952329 -952344 -952373 -952393 -952403 -952410 -952426 -952449 -952458 -767725 -768256 -768386 -768488 -768573 -768669 -768714 -768767 -768874 -768915 -768980 -769024 -769130 -769201 -769257 -769294 -769301 -769328 -769365 -769378 -769388 -769428 -769435 -769451 -769471 -769479 -769500 -769513 -769530 -769542 -769552 -769560 -769587 -769597 -769604 -246492 -246717 -246917 -246959 -247106 -247190 -247301 -247352 -247472 -247490 -247597 -247698 -247823 -247922 -248043 -248127 -248219 -248294 -248480 -248559 -248658 -248804 -248868 -248900 -248927 -248963 -248978 -248997 -249026 -249073 -249108 -249148 -249198 -249206 -249250 -249275 -249319 -249342 -249379 -249393 -249403 -249416 -249429 -249444 -249469 -249498 -249522 -249548 -249565 -249573 -249583 -249595 -249601 -249610 -149694 -149790 -149825 -149872 -149960 -149995 -150050 -150073 -150125 -150162 -150183 -150220 -150253 -150289 -150302 -150315 -150344 -150362 -150386 -150434 -150453 -150483 -150517 -150528 -150556 -150593 -150613 -150678 -150694 -150712 -150735 -150761 -150780 -150812 -150836 -150867 -150914 -150966 -150995 -151133 -151167 -151248 -151275 -151297 -151318 -151341 -151363 -151371 -151386 -151393 -151404 -151424 -151450 -151457 -151466 -151480 -151489 -151504 -151552 -151567 -151581 -151599 -151621 -151649 -151662 -151673 -151684 -151694 -151707 -151718 -151732 -151755 -151761 -151773 -151787 -151796 -151809 -151826 -151834 -151841 -151846 -151852 -151860 -151866 -151872 -151878 -622085 -622236 -622447 -622609 -622695 -622746 -622886 -623066 -623181 -623195 -623258 -623281 -623330 -623366 -623441 -623481 -623510 -623541 -623608 -623664 -623684 -623699 -623714 -623756 -623790 -623825 -623838 -623847 -623855 -623863 -623876 -721065 -721247 -721478 -721712 -721817 -721886 -721910 -722041 -722158 -722295 -722463 -722487 -722566 -722631 -722673 -722707 -722728 -722757 -722770 -722787 -722806 -722865 -722911 -722920 -722928 -420037 -420408 -420608 -420943 -421283 -421553 -421667 -421740 -421857 -422087 -422186 -422278 -422472 -422533 -422572 -422659 -422688 -422728 -422760 -422867 -422972 -423014 -423039 -423047 -423056 -423098 -423128 -423144 -423163 -423183 -423197 -423215 -423234 -423242 -423256 -423264 -423274 -423285 -423294 -423301 -423329 -423356 -423364 -423382 -423411 -423422 -423429 -208981 -209206 -209669 -209838 -209997 -210138 -210355 -210448 -210540 -210724 -210804 -210848 -210943 -210979 -211030 -211214 -211311 -211386 -211439 -211466 -211491 -211504 -211557 -211605 -211644 -211679 -211705 -211721 -211758 -211777 -211787 -211794 -211805 -211824 -211841 -211851 -211864 -211873 -211889 -211896 -889911 -890469 -890564 -890807 -890860 -890996 -891069 -891212 -891278 -891306 -891353 -891394 -891440 -891506 -891527 -891536 -891553 -891587 -891628 -891663 -891684 -891715 -891753 -891808 -891819 -891848 -37969 -38010 -38055 -38129 -38188 -38239 -38278 -38298 -38351 -38396 -38409 -38451 -38477 -38537 -38620 -38631 -38665 -38681 -38722 -38733 -38757 -38782 -38804 -38814 -38848 -38859 -38872 -38877 -38892 -38902 -38919 -38939 -38955 -38963 -38969 -38977 -38990 -38998 -39003 -39009 -196770 
-196891 -197030 -197202 -197524 -197647 -197784 -197848 -197895 -197923 -197963 -198055 -198116 -198175 -198226 -198270 -198285 -198339 -198381 -198435 -198481 -198506 -198610 -198632 -198656 -198704 -198761 -198787 -198803 -198841 -198922 -198937 -199011 -199037 -199052 -199078 -199110 -199131 -199164 -199178 -199217 -199252 -199285 -199294 -199305 -199315 -199329 -199338 -199346 -199352 -199385 -199399 -199408 -199415 -199426 -199442 -199451 -199467 -199481 -199497 -199513 -199521 -199531 -199537 -199545 -199582 -199590 -199597 -199612 -199639 -199651 -199657 -199667 -199674 -911537 -911763 -911918 -912017 -912131 -912291 -912352 -912424 -912463 -912490 -912528 -912549 -912572 -912609 -912629 -912641 -912648 -912661 -912668 -912689 -912702 -912715 -912727 -912734 -912742 -912754 -912762 -912769 -912789 -912796 -912803 -912810 -912816 -50973 -51108 -51161 -51181 -51286 -51334 -51366 -51387 -51461 -51498 -51516 -51556 -51575 -51599 -51619 -51631 -51649 -51678 -51687 -51697 -51706 -51718 -51726 -51735 -51746 -51760 -51771 -51779 -51791 -51800 -51811 -51820 -51825 -93648 -93758 -93797 -93875 -93966 -94037 -94163 -94231 -94246 -94283 -94348 -94366 -94392 -94405 -94456 -94473 -94492 -94523 -94547 -94586 -94624 -94655 -94681 -94722 -94743 -94785 -94818 -94866 -94930 -94955 -94987 -95003 -95029 -95045 -95062 -95088 -95111 -95136 -95164 -95171 -95187 -95202 -95229 -95241 -95252 -95277 -95298 -95314 -95324 -95339 -597361 -597542 -597917 -598090 -598502 -598753 -598839 -599022 -599142 -599208 -599252 -599285 -599341 -599372 -599438 -599503 -599601 -599620 -599665 -599685 -599776 -599807 -599847 -599903 -599937 -599951 -599968 -599987 -600022 -600047 -600062 -600079 -600100 -600115 -600132 -600145 -600187 -600197 -600213 -600230 -600254 -600303 -600312 -600323 -600340 -600356 -600370 -600377 -600387 -7871 -7888 -7897 -7905 -7912 -7920 -7927 -7932 -7939 -7944 -7951 -7957 -7967 -7977 -7985 -7991 -8000 -8013 -8020 -8027 -8038 -8044 -8049 -8058 -8066 -8071 -8078 -8086 -8096 -8105 -8114 -8121 -8127 -8132 -8140 -8147 -8159 -8164 -8172 -8178 -8183 -8191 -8197 -8204 -8213 -8221 -8227 -8233 -8238 -8243 -8249 -8254 -8259 -8266 -8271 -8276 -8284 -8290 -8298 -8304 -350166 -351272 -351544 -351746 -351937 -352311 -352512 -352736 -352846 -352902 -353015 -353083 -353146 -353198 -353219 -353296 -353308 -353329 -353362 -353379 -353389 -353406 -353413 -353446 -353472 -353511 -353517 -353583 -353616 -353654 -353701 -353717 -353735 -353742 -353753 -66151 -66449 -66676 -66824 -66899 -66934 -66963 -66979 -66988 -67017 -67031 -67042 -67051 -67064 -67087 -67108 -67123 -67134 -67142 -67156 -67165 -67170 -67177 -923258 -923392 -923479 -923538 -923626 -923692 -923756 -923878 -923997 -924030 -924049 -924068 -924099 -924139 -924168 -924180 -924201 -924221 -924231 -924247 -924261 -924271 -924287 -732905 -733029 -733100 -733266 -733320 -733369 -733432 -733531 -733554 -733621 -733707 -733804 -733948 -733970 -734033 -734161 -734193 -734213 -734231 -734262 -734287 -734327 -734340 -734387 -734407 -734444 -734477 -734506 -734525 -734544 -734555 -734576 -734603 -734615 -734630 -734652 -734659 -734675 -734711 -734723 -734753 -323337 -323484 -323538 -323568 -323598 -323664 -323720 -323752 -323805 -323923 -323975 -323990 -324042 -324111 -324150 -324215 -324264 -324308 -324334 -324350 -324394 -324419 -324442 -324451 -324525 -324556 -324624 -324643 -324662 -324728 -324791 -324849 -324880 -324915 -324951 -324983 -325018 -325065 -325097 -325124 -325149 -325174 -325205 -325315 -325356 -325379 -325406 -325446 -325466 -325490 -325510 -325538 
-325626 -325691 -325850 -325893 -325912 -325960 -325986 -326017 -326036 -326057 -326081 -326101 -326117 -326133 -326151 -326179 -326195 -326220 -326248 -326267 -326275 -326323 -326338 -326354 -326388 -326401 -326418 -326430 -326444 -326459 -326472 -326502 -326507 -326585 -667443 -667574 -667714 -667802 -667863 -668013 -668059 -668156 -668195 -668259 -668294 -668348 -668402 -668455 -668503 -668568 -668597 -668623 -668663 -668690 -668737 -668939 -668960 -669011 -669098 -669177 -669257 -669279 -669322 -669353 -669394 -669416 -669436 -669468 -669489 -669526 -669550 -669569 -669594 -669610 -669637 -669656 -669667 -669686 -669728 -669743 -669751 -669771 -669781 -669787 -669802 -669809 -669821 -669831 -669841 -669849 -669854 -669864 -669872 -669886 -669896 -669926 -669950 -669962 -669973 -669990 -670006 -670025 -670034 -670047 -670059 -910840 -910899 -910939 -911052 -911078 -911147 -911163 -911198 -911257 -911273 -911289 -911333 -911381 -911423 -911463 -911528 -911545 -911559 -911581 -911617 -911677 -911695 -911720 -911759 -911768 -911780 -911797 -911812 -911826 -911845 -911855 -911865 -911876 -911888 -911898 -815355 -815490 -815761 -815812 -815873 -815926 -815989 -816058 -816105 -816139 -816194 -816243 -816311 -816363 -816393 -816404 -816429 -816471 -816520 -816543 -816570 -816583 -816637 -816657 -816685 -816703 -816715 -816734 -816758 -816781 -816810 -816851 -816869 -816879 -816893 -816906 -816915 -816923 -934179 -934286 -934669 -934788 -934867 -934934 -935000 -935069 -935139 -935199 -935227 -935264 -935337 -935360 -935400 -935423 -935443 -935464 -935481 -935493 -935523 -935539 -935549 -935583 -935607 -935621 -935638 -935651 -935663 -935682 -935692 -231516 -231844 -231997 -232141 -232229 -232302 -232344 -232389 -232444 -232478 -232559 -232598 -232654 -232715 -232766 -232806 -232816 -232839 -232884 -232901 -232965 -233038 -233077 -233114 -233279 -233348 -233379 -233763 -233781 -233806 -233821 -233876 -233914 -233919 -233933 -233952 -233978 -234032 -234058 -234104 -234147 -234162 -234175 -234191 -234268 -234280 -234296 -234311 -234320 -234340 -234364 -234393 -234410 -234424 -234445 -234455 -234468 -234489 -234502 -234509 -234536 -234564 -234578 -234590 -234602 -234612 -234626 -234637 -234646 -234657 -499487 -499855 -500046 -500138 -500240 -500322 -500419 -500480 -500510 -500533 -500588 -500653 -500763 -500812 -500879 -500950 -501008 -501026 -501057 -501102 -501135 -501170 -501190 -501266 -501309 -501400 -501426 -501462 -501507 -501576 -501603 -501646 -501685 -501711 -501744 -501777 -501809 -501827 -501842 -501856 -501869 -501922 -501993 -502016 -502032 -502058 -502070 -502093 -502105 -502122 -502173 -502197 -502228 -502247 -502275 -502326 -502356 -502375 -502390 -502398 -502432 -502448 -502457 -502477 -502526 -502545 -502568 -502601 -502621 -502637 -502644 -502653 -502667 -502682 -502692 -388235 -388718 -388968 -389200 -389279 -389529 -389639 -389676 -389801 -389915 -389949 -390017 -390068 -390135 -390204 -390342 -390444 -390506 -390585 -390626 -390674 -390731 -390789 -390858 -390893 -390939 -390970 -390995 -391032 -391072 -391085 -391117 -391147 -391181 -391199 -391231 -391241 -391266 -391303 -391318 -391370 -391390 -391417 -391427 -391437 -391477 -391511 -391525 -391551 -391565 -391578 -391593 -391622 -391636 -391653 -391665 -391677 -391703 -391716 -391735 -391767 -391780 -391798 -391814 -457876 -457950 -458091 -458137 -458194 -458349 -458443 -458518 -458649 -458784 -458894 -459043 -459186 -459292 -459334 -459354 -459379 -459408 -459455 -459509 -459572 -459723 -459796 -459850 -459882 -459910 
-459967 -460009 -460116 -460166 -460207 -460245 -460360 -460397 -460432 -460451 -460502 -460523 -460569 -460612 -460644 -460682 -460722 -460795 -460812 -460880 -460899 -460917 -460986 -461013 -461031 -461062 -461094 -461105 -461148 -461200 -461212 -461255 -461285 -461307 -461325 -461345 -461361 -461391 -461413 -461433 -461450 -461460 -461488 -461517 -461556 -461567 -461576 -461589 -461597 -461609 -461622 -461631 -461651 -461664 -461681 -461693 -58332 -58491 -58555 -58626 -58654 -58730 -58784 -58839 -58852 -58891 -58899 -58925 -58949 -58966 -59011 -59019 -59030 -59043 -59061 -59085 -59102 -59115 -59127 -59157 -59183 -59204 -59216 -59227 -59240 -59253 -59262 -59282 -59299 -59305 -59310 -717892 -717960 -717985 -718166 -718187 -718261 -718378 -718465 -718563 -718598 -718627 -718651 -718714 -718782 -718812 -718856 -718886 -718966 -718993 -719040 -719066 -719132 -719161 -719245 -719364 -719421 -719450 -719470 -719487 -719509 -719545 -719556 -719594 -719623 -719651 -719659 -719798 -719859 -719883 -719928 -719961 -719976 -720015 -720025 -720039 -720056 -720066 -720074 -720088 -720119 -720131 -720140 -720149 -720157 -720164 -720171 -720177 -720204 -720231 -720243 -720250 -720261 -720290 -720298 -269094 -269193 -269243 -269301 -269334 -269407 -269498 -269535 -269596 -269631 -269697 -269801 -269844 -269911 -269954 -269966 -269984 -270066 -270130 -270189 -270227 -270251 -270320 -270380 -270418 -270443 -270456 -270505 -270528 -270598 -270621 -270649 -270680 -270705 -270722 -270756 -270783 -270807 -270835 -270885 -270914 -270939 -270960 -270993 -271016 -271035 -271046 -271083 -271091 -271105 -271114 -271125 -271149 -271166 -271213 -271225 -271237 -271254 -271267 -271287 -271307 -271317 -271326 -271336 -271352 -271360 -271367 -271377 -898281 -898476 -898555 -898682 -898759 -898886 -898912 -898947 -898990 -899032 -899062 -899087 -899106 -899170 -899184 -899211 -899220 -899254 -899268 -899299 -899310 -899330 -899343 -899381 -899394 -899407 -899431 -899443 -899460 -899474 -899495 -899508 -899522 -899537 -899549 -899567 -899577 -899586 -899602 -899611 -899616 -899626 -899632 -899638 -899645 -899650 -899655 -586061 -586388 -586538 -586786 -586867 -586964 -587082 -587250 -587379 -587451 -587583 -587636 -587792 -587925 -587959 -587991 -588058 -588085 -588130 -588164 -588202 -588223 -588255 -588278 -588312 -588327 -588356 -588381 -588405 -588422 -588461 -588497 -588522 -588545 -588557 -588566 -588596 -588647 -588652 -588665 -588684 -588716 -588735 -588751 -588764 -588782 -588793 -588806 -588824 -588838 -588879 -588901 -588939 -588965 -175917 -176129 -176243 -176344 -176493 -176742 -176916 -177051 -177172 -177358 -177465 -177491 -177510 -177581 -177654 -177674 -177721 -177770 -177805 -177924 -177951 -177978 -178003 -178045 -178069 -178128 -178149 -178207 -178241 -178256 -178271 -178295 -178328 -178353 -178360 -178382 -178428 -178447 -178460 -178484 -178492 -178507 -178530 -178538 -178558 -178563 -178569 -178575 -178585 -384206 -384445 -384681 -384765 -384882 -385083 -385152 -385218 -385392 -385559 -385680 -385807 -385903 -385942 -386032 -386113 -386139 -386233 -386261 -386312 -386361 -386407 -386486 -386600 -386623 -386725 -386759 -386801 -386867 -386920 -386985 -387029 -387069 -387092 -387110 -387143 -387177 -387232 -387260 -387303 -387339 -387410 -387458 -387476 -387521 -387535 -387561 -387604 -387622 -387655 -387677 -387696 -387728 -387740 -387767 -387781 -387792 -387814 -387838 -387851 -387860 -387869 -387878 -387898 -387927 -387944 -387972 -388010 -388022 -69777 -70132 -70265 -70300 -70322 -70401 -70448 
-70503 -70537 -70573 -70610 -70637 -70652 -70681 -70700 -70710 -70724 -70736 -70758 -70781 -70797 -70803 -70819 -70834 -70847 -70887 -70900 -70906 -70915 -70920 -70928 -70935 -70946 -70958 -381405 -381559 -381726 -381834 -382039 -382120 -382198 -382250 -382335 -382433 -382624 -382677 -382718 -382749 -382821 -382914 -382973 -383028 -383134 -383179 -383238 -383274 -383340 -383385 -383446 -383512 -383566 -383627 -383673 -383717 -383727 -383768 -383812 -383836 -383886 -383914 -383941 -383975 -384033 -384052 -384078 -384088 -384104 -384120 -384137 -384157 -384166 -384191 -48487 -48584 -48730 -48788 -48816 -48901 -48945 -49011 -49030 -49039 -49081 -49094 -49106 -49118 -49128 -49134 -49154 -49172 -49194 -49199 -589134 -589355 -589594 -589756 -589943 -590098 -590252 -590424 -590534 -590649 -590768 -590836 -590941 -591061 -591112 -591165 -591214 -591272 -591326 -591473 -591527 -591548 -591581 -591616 -591642 -591666 -591689 -591714 -591730 -591777 -591793 -591798 -591815 -591827 -591833 -677019 -677105 -677186 -677382 -677414 -677472 -677536 -677595 -677664 -677713 -677746 -677826 -677878 -677944 -677994 -678045 -678143 -678171 -678226 -678251 -678316 -678361 -678374 -678392 -678424 -678443 -678472 -678483 -678504 -678529 -678554 -678565 -678572 -678588 -678601 -678613 -678626 -678642 -678652 -95817 -95875 -95982 -96062 -96125 -96163 -96205 -96251 -96307 -96335 -96357 -96376 -96398 -96408 -96438 -96458 -96478 -96500 -96538 -96587 -96623 -96649 -96678 -96698 -96720 -96733 -96753 -96763 -96784 -96795 -96802 -96847 -96859 -96871 -96879 -96900 -96909 -96916 -96924 -96937 -96950 -96957 -96967 -96978 -96984 -96989 -96996 -97001 -97008 -510106 -510322 -510455 -510517 -510687 -510745 -510796 -510895 -510982 -511049 -511083 -511209 -511272 -511481 -511618 -511715 -511757 -511817 -511983 -512129 -512256 -512294 -512357 -512422 -512447 -512485 -512522 -512556 -512578 -512638 -512664 -512705 -512749 -512770 -512813 -512845 -512883 -512898 -1279 -1328 -800283 -801261 -801551 -236108 -236921 -602752 -924565 -374509 -375521 -376384 -377173 -377438 -193156 -194777 -195436 -195760 -195954 -196210 -196254 -196368 -933814 -934069 -934580 -647463 -647760 -648015 -648111 -648500 -648696 -648789 -648929 -649016 -13318 -395603 -397221 -398075 -398444 -64145 -64398 -64536 -64727 -64825 -65023 -857527 -858053 -858212 diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/train.lab b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/train.lab deleted file mode 100644 index dcb9569c6b5066e9be421240eae85d5289e93f03..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/style_image_lists/train.lab +++ /dev/null @@ -1,11270 +0,0 @@ -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 
[... tail of a deleted bulk data file, continued from above: several thousand more removed lines, each a bare integer label between 1 and 14, elided here for length ...]
diff --git a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/tags.txt b/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/tags.txt
deleted file mode 100644
index 834e49af64700d578c60c4b66dbf1a943fed266c..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/detection/NIMA_ID0158_for_TensorFlow/AVA_dataset/tags.txt
+++ /dev/null
@@ -1,66 +0,0 @@
-1 Abstract
-24 Action
-31 Advertisement
-66 Analog
-19 Animals
-20 Architecture
-43 Astrophotography
-57 Birds
-21 Black and White
-51 Blur
-64 Camera Phones
-16 Candid
-50 Children
-2 Cityscape
-34 Digital Art
-37 Diptych / Triptych
-49 DPChallenge GTGs
-12 Emotive
-4 Family
-3 Fashion
-63 Fish Eye
-38 Floral
-40 Food and Drink
-53 High Dynamic Range (HDR)
-45 History
-58 Horror
-5 Humorous
-46 Infrared
-65 Insects, etc
-6 Interior
-14 Landscape
-62 Lensbaby
-22 Macro
-56 Maternity
-44 Military
-59 Music
-15 Nature
-26 Nude
-55 Overlays
-33 Panoramic
-13 Performance
-32 Persuasive
-52 Photo-Impressionism
-25 Photojournalism
-60 Pinhole/Zone Plate
-30 Political
-17 Portraiture
-27 Rural
-41 Science and Technology
-35 Seascapes
-47 Self Portrait
-7 Sky
-8 Snapshot
-9 Sports
-18 Still Life
-61 Street
-29 Studio
-54 Texture Library
-48 Textures
-36 Traditional Art
-39 Transportation
-23 Travel
-10 Urban
-11 Vintage
-28 Water
-42 Wedding
diff --git a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/Dockerfile b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/Dockerfile
index 6076e6fcde50a005f94e3d57f1c03ec2d7b448aa..0dd97d97fd08921d2ac594e511efce5c452e4b79 100644
--- a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/Dockerfile
+++ b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/Dockerfile
@@ -1,9 +1,7 @@
 ARG FROM_IMAGE_NAME=ascend-tensorflow-arm:20.1.0
 FROM ${FROM_IMAGE_NAME}
-
+USER root
 RUN apt -y install libgl1-mesa-glx
 COPY requirements.txt .
 RUN pip3.7 install -r requirements.txt
-
-RUN ln -s /usr/bin/python3.7 /usr/bin/python3
\ No newline at end of file
diff --git a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/docker_start.sh b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/docker_start.sh
index e99bbe7685ba6f302f694e54a5d15ebe0f1c6b08..949698ab8238841d4b1d1adef4cf6387b0642e2f 100644
--- a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/docker_start.sh
+++ b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/docker_start.sh
@@ -1,5 +1,19 @@
 #!/bin/bash
+#Copyright 2022 Huawei Technologies Co., Ltd
+
+#Licensed under the Apache License, Version 2.0 (the "License");
+#you may not use this file except in compliance with the License.
+#You may obtain a copy of the License at
+
+#http://www.apache.org/licenses/LICENSE-2.0
+
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.
+
 docker_image=$1
 data_dir=$2
 model_dir=$3
diff --git a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/infer/docker_start_infer.sh b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/infer/docker_start_infer.sh
index 72889067a499eb55e93cac635d4b00454799524a..69f4fbbf409fdef7a78bc6c3a6d484f93bdec952 100644
--- a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/infer/docker_start_infer.sh
+++ b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/infer/docker_start_infer.sh
@@ -38,7 +38,7 @@ function param_check() {
 
 param_check
 
-docker run -it \
+docker run -it -u root \
     --device=/dev/davinci0 \
     --device=/dev/davinci_manager \
     --device=/dev/devmm_svm \
diff --git a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/modelarts/args_modelarts_multi.py b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/modelarts/args_modelarts_multi.py
index 2d7d2032f135a8f9330f61eeffd7f99ff50d093a..7ae30ea0cbadcb0115bad2844c437896d8996223 100644
--- a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/modelarts/args_modelarts_multi.py
+++ b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/modelarts/args_modelarts_multi.py
@@ -45,7 +45,7 @@ if not os.path.exists(save_dir):
 if not os.path.exists(log_dir):
     os.makedirs(log_dir)
 
-work_path = '/cache/user-job-dir/YoloV3_for_TensorFlow'
+work_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "../")
 
 ### Some paths
 train_file = os.path.join(work_path, './modelarts/coco2014_trainval_modelarts.txt')  # The path of the training txt file.
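The ModelArts launcher config above (and its single-device twin just below) stops hard-coding the ModelArts cache path '/cache/user-job-dir/YoloV3_for_TensorFlow' and instead derives work_path from the script's own location, so the relative data paths keep working from any checkout directory. A minimal sketch of what the new expression evaluates to, using a hypothetical checkout path that is not taken from the repository:

    import os

    # Hypothetical location of args_modelarts_multi.py in a checkout
    # (illustrative only; any checkout location behaves the same way):
    script = "/home/work/YoloV3_ID0076_for_TensorFlow/modelarts/args_modelarts_multi.py"

    # The expression introduced above, with `script` standing in for __file__:
    work_path = os.path.join(os.path.abspath(os.path.dirname(script)), "../")
    print(work_path)
    # /home/work/YoloV3_ID0076_for_TensorFlow/modelarts/../
    # i.e. the model's root directory wherever it was checked out;
    # os.path.normpath(work_path) would collapse the trailing "modelarts/../".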
diff --git a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/modelarts/args_modelarts_single.py b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/modelarts/args_modelarts_single.py
index 3cf9c95680169187bfb77d8251d09177f4173576..7f5d4bd356c4835bdfaeda5be64088b23627a90f 100644
--- a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/modelarts/args_modelarts_single.py
+++ b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/modelarts/args_modelarts_single.py
@@ -45,7 +45,7 @@ if not os.path.exists(save_dir):
 if not os.path.exists(log_dir):
     os.makedirs(log_dir)
 
-work_path = '/cache/user-job-dir/YoloV3_for_TensorFlow'
+work_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "../")
 
 ### Some paths
 train_file = os.path.join(work_path, './modelarts/coco2014_trainval_modelarts.txt')  # The path of the training txt file.
diff --git a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/test/train_performance_1p.sh
index 714d7a8b5ddbaa3291cebf414bbff110e202a79f..ad5504a6d7fbb1e61758be164d293b9a02d19b1b 100644
--- a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/test/train_performance_1p.sh
+++ b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/test/train_performance_1p.sh
@@ -135,6 +135,7 @@ do
         --mode single \
         --data_url $data_path/coco \
         --train_url ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt \
+        --total_epoches 1 \
         --over_dump ${over_dump} \
         --over_dump_path ${over_dump_path} \
         > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
diff --git a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/test/train_performance_8p.sh b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/test/train_performance_8p.sh
index 2d7e9b1c5e1df38e2afd4f664087c6374ff7d270..79e72a8da8dad4b8ff3dce733fc8f2538d976ed9 100644
--- a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/test/train_performance_8p.sh
+++ b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/test/train_performance_8p.sh
@@ -152,6 +152,7 @@ do
         --mode single \
         --data_url $data_path/coco \
         --train_url ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt \
+        --total_epoches 1 \
         --over_dump ${over_dump} \
         --over_dump_path ${over_dump_path} \
         > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
diff --git a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/train.py b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/train.py
index 9fdc9b34e8a921e006f19e303db69325e84fe114..2348373849d32bebe03a546faeb4c475cd495c5a 100644
--- a/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/train.py
+++ b/TensorFlow/built-in/cv/detection/YoloV3_ID0076_for_TensorFlow/train.py
@@ -71,7 +71,8 @@ parser.add_argument("--save_dir", default='./training/',
                     help="path of ckpt.")
 parser.add_argument("--batch_size", type=int, default=16,
                     help="batchsize.")
-
+parser.add_argument("--total_epoches", type=int, default=200,
+                    help="epoches of train.")
 # modify for npu overflow start
 # enable overflow
 parser.add_argument("--over_dump", type=str, default="False",
@@ -108,7 +109,8 @@ if args_input.save_dir:
     args.save_dir = args_input.save_dir
 if args_input.batch_size:
     args.batch_size = args_input.batch_size
-
+if args_input.total_epoches:
+    args.total_epoches = args_input.total_epoches
 print('setting train mode %s.'
       % args_input.mode)
 # setting loggers
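The --total_epoches flag added to train.py is what lets the two performance scripts above cap a benchmark run at a single epoch; note that the "if args_input.total_epoches:" guard mirrors the existing batch_size handling, so an explicit value of 0 would fall back to the config default. A standalone sketch of the parsing behaviour, mirroring the argparse lines from the diff rather than importing the real train.py:

    import argparse

    # Mirror of the argument added to train.py above.
    parser = argparse.ArgumentParser()
    parser.add_argument("--total_epoches", type=int, default=200,
                        help="epoches of train.")

    # The performance scripts pass "--total_epoches 1" to keep the run short:
    assert parser.parse_args(["--total_epoches", "1"]).total_epoches == 1

    # Without the flag, the default of 200 epochs applies:
    assert parser.parse_args([]).total_epoches == 200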
diff --git a/TensorFlow/built-in/cv/image_classification/AM3_ID1260_for_TensorFlow/datasets/mini_imagenet_class_label_dict3.txt b/TensorFlow/built-in/cv/image_classification/AM3_ID1260_for_TensorFlow/datasets/mini_imagenet_class_label_dict3.txt
deleted file mode 100644
index 0d93f41323c8f320ae043d931d2774548139cc71..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/image_classification/AM3_ID1260_for_TensorFlow/datasets/mini_imagenet_class_label_dict3.txt
+++ /dev/null
@@ -1,1000 +0,0 @@
-n02119789 1 kit_fox
-n02100735 2 English_setter
-n02110185 3 Siberian_husky
[... 995 further deleted lines of the same "wordnet_id index class_name" form, -n02096294 4 Australian_terrier through -n04325704 998 stole, elided here for length ...]
-n07831146 999 carbonara
-n03255030 1000 dumbbell
\ No newline at end of file
diff --git a/TensorFlow/built-in/cv/image_classification/Densenet_3D_ID0121_for_TensorFlow/test/set_ranktable.py b/TensorFlow/built-in/cv/image_classification/Densenet_3D_ID0121_for_TensorFlow/test/set_ranktable.py
new file mode 100644
index 0000000000000000000000000000000000000000..c25b51462c5df2325462786688d4a206ee29fb9a
--- /dev/null
+++ b/TensorFlow/built-in/cv/image_classification/Densenet_3D_ID0121_for_TensorFlow/test/set_ranktable.py
@@ -0,0 +1,1740 @@
+import argparse
+parser = argparse.ArgumentParser()
+parser.add_argument('-n', '--npu_nums', type=int, default='2', help='nums of npu')
+parser.add_argument('-c', '--conf_path', type=str, default='./', help='the path of server_info')
+FLAGS = parser.parse_args()
+
+import json
+import os
+server = []
+server_conf = []
+server_list = ["0", "1", "2", "3", "4", "5", "6", "7"]
+if os.path.isdir(FLAGS.conf_path):
+    for f in os.listdir(FLAGS.conf_path):
+        if (f.split("_")[-1]).split(".")[0] in server_list and (f.split("_")[-1]).split(".")[1] == 'info' and f.split("_")[0] == 'server':
+            server_conf.append(f)
+
+
+
+
+
+
+rank_address = []
+for i in range(FLAGS.npu_nums):
+    for x in server_conf:
+        if (x.split("_")[-1]).split(".")[0] == str(i):
+            server.append(x.split("_")[1])
+            l = FLAGS.conf_path + "/" + x
+            with open(l, "r") as a:
+                s = a.readlines()
+                for s_ in s:
+                    if 'address_0' in s_:
+                        rank_address.append(s_.split("=")[-1][:-1])
+                for s_ in s:
+                    if 'address_1' in s_:
+                        rank_address.append(s_.split("=")[-1][:-1])
+                for s_ in s:
+                    if 'address_2' in s_:
+                        rank_address.append(s_.split("=")[-1][:-1])
+                for s_ in s:
+                    if 'address_3' in s_:
+                        rank_address.append(s_.split("=")[-1][:-1])
+                for s_ in s:
+                    if 'address_4' in s_:
+                        rank_address.append(s_.split("=")[-1][:-1])
+                for s_ in s:
+                    if 'address_5' in s_:
+                        rank_address.append(s_.split("=")[-1][:-1])
+                for s_ in s:
+                    if 'address_6' in s_:
+                        rank_address.append(s_.split("=")[-1][:-1])
+                for s_ in s:
+                    if 'address_7' in s_:
+                        rank_address.append(s_.split("=")[-1][:-1])
+
+if FLAGS.npu_nums == 1:
+    rank = {
+        "server_count":"1",
+        "server_list":[
+            {
+                "server_id":server[0],
+                "device":[
+                    {
+                        "device_id":"0",
+                        "device_ip":rank_address[0],
+                        "rank_id":"0"
+                    },
+                    {
+                        "device_id":"1",
+                        "device_ip":rank_address[1],
+                        "rank_id":"1"
+                    },
+                    {
+                        "device_id":"2",
+                        "device_ip":rank_address[2],
+                        "rank_id":"2"
+                    },
+                    {
+                        "device_id":"3",
+                        "device_ip":rank_address[3],
+                        "rank_id":"3"
+                    },
+                    {
+
"device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}], + "status":"completed", + "version":"1.0" + } +elif FLAGS.npu_nums == 2: + rank = { + "server_count":"2", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]} + ], + + "status":"completed", + "version":"1.0" + } + + +elif FLAGS.npu_nums == 3: + rank = { + "server_count":"3", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + 
"device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]} + ], + "status":"completed", + "version":"1.0" + } +elif FLAGS.npu_nums == 4: + rank = { + "server_count":"4", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]} + ], + "status":"completed", + "version":"1.0" + } +elif FLAGS.npu_nums == 5: + rank = { + "server_count":"5", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + 
"device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + + +elif FLAGS.npu_nums == 6: + rank = { + "server_count":"6", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + 
}, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]}, + { + "server_id":server[5], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[40], + "rank_id":"40" + }, + { + "device_id":"1", + "device_ip":rank_address[41], + "rank_id":"41" + }, + { + "device_id":"2", + "device_ip":rank_address[42], + "rank_id":"42" + }, + { + "device_id":"3", + "device_ip":rank_address[43], + "rank_id":"43" + }, + { + "device_id":"4", + "device_ip":rank_address[44], + "rank_id":"44" + }, + { + "device_id":"5", + "device_ip":rank_address[45], + "rank_id":"45" + }, + { + "device_id":"6", + "device_ip":rank_address[46], + "rank_id":"46" + }, + { + "device_id":"7", + "device_ip":rank_address[47], + "rank_id":"47" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + +elif FLAGS.npu_nums == 7: 
+ rank = { + "server_count":"7", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]}, + { + "server_id":server[5], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[40], + "rank_id":"40" + }, + { + "device_id":"1", + "device_ip":rank_address[41], + "rank_id":"41" + }, + { + 
"device_id":"2", + "device_ip":rank_address[42], + "rank_id":"42" + }, + { + "device_id":"3", + "device_ip":rank_address[43], + "rank_id":"43" + }, + { + "device_id":"4", + "device_ip":rank_address[44], + "rank_id":"44" + }, + { + "device_id":"5", + "device_ip":rank_address[45], + "rank_id":"45" + }, + { + "device_id":"6", + "device_ip":rank_address[46], + "rank_id":"46" + }, + { + "device_id":"7", + "device_ip":rank_address[47], + "rank_id":"47" + } + ]}, + { + "server_id":server[6], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[48], + "rank_id":"48" + }, + { + "device_id":"1", + "device_ip":rank_address[49], + "rank_id":"49" + }, + { + "device_id":"2", + "device_ip":rank_address[50], + "rank_id":"50" + }, + { + "device_id":"3", + "device_ip":rank_address[51], + "rank_id":"51" + }, + { + "device_id":"4", + "device_ip":rank_address[52], + "rank_id":"52" + }, + { + "device_id":"5", + "device_ip":rank_address[53], + "rank_id":"53" + }, + { + "device_id":"6", + "device_ip":rank_address[54], + "rank_id":"54" + }, + { + "device_id":"7", + "device_ip":rank_address[55], + "rank_id":"55" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + + + +elif FLAGS.npu_nums == 8: + rank = { + "server_count":"8", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + 
"device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]}, + { + "server_id":server[5], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[40], + "rank_id":"40" + }, + { + "device_id":"1", + "device_ip":rank_address[41], + "rank_id":"41" + }, + { + "device_id":"2", + "device_ip":rank_address[42], + "rank_id":"42" + }, + { + "device_id":"3", + "device_ip":rank_address[43], + "rank_id":"43" + }, + { + "device_id":"4", + "device_ip":rank_address[44], + "rank_id":"44" + }, + { + "device_id":"5", + "device_ip":rank_address[45], + "rank_id":"45" + }, + { + "device_id":"6", + "device_ip":rank_address[46], + "rank_id":"46" + }, + { + "device_id":"7", + "device_ip":rank_address[47], + "rank_id":"47" + } + ]}, + { + "server_id":server[6], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[48], + "rank_id":"48" + }, + { + "device_id":"1", + "device_ip":rank_address[49], + "rank_id":"49" + }, + { + "device_id":"2", + "device_ip":rank_address[50], + "rank_id":"50" + }, + { + "device_id":"3", + "device_ip":rank_address[51], + "rank_id":"51" + }, + { + "device_id":"4", + "device_ip":rank_address[52], + "rank_id":"52" + }, + { + "device_id":"5", + "device_ip":rank_address[53], + "rank_id":"53" + }, + { + "device_id":"6", + "device_ip":rank_address[54], + "rank_id":"54" + }, + { + "device_id":"7", + "device_ip":rank_address[55], + "rank_id":"55" + } + ]}, + { + "server_id":server[7], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[56], + "rank_id":"56" + }, + { + "device_id":"1", + "device_ip":rank_address[57], + "rank_id":"57" + }, + { + "device_id":"2", + "device_ip":rank_address[58], + "rank_id":"58" + }, + { + "device_id":"3", + "device_ip":rank_address[59], + "rank_id":"59" + }, + { + "device_id":"4", + "device_ip":rank_address[60], + "rank_id":"60" + }, + { + "device_id":"5", + "device_ip":rank_address[61], + "rank_id":"61" + }, + { + "device_id":"6", + "device_ip":rank_address[62], + "rank_id":"62" + }, + { + "device_id":"7", + "device_ip":rank_address[63], + "rank_id":"63" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + + + +with open("rank_table.json", "w") as f: + json.dump(rank, f) + + + + + + diff --git a/TensorFlow/built-in/cv/image_classification/Densenet_3D_ID0121_for_TensorFlow/test/train_performance_16p.sh b/TensorFlow/built-in/cv/image_classification/Densenet_3D_ID0121_for_TensorFlow/test/train_performance_16p.sh new file mode 100644 index 0000000000000000000000000000000000000000..2c22303b31d5b7840b66693ba46a639842e30aff --- /dev/null +++ 
b/TensorFlow/built-in/cv/image_classification/Densenet_3D_ID0121_for_TensorFlow/test/train_performance_16p.sh
@@ -0,0 +1,105 @@
+#!/bin/bash
+
+
+cur_path=`pwd`/..
+RANK_ID_START=0
+export RANK_ID=0
+export RANK_SIZE=16
+export JOB_ID=888886
+
+# Dataset path; keep it empty here, no need to modify
+data_path=""
+
+# Default log level, no need to modify
+export ASCEND_GLOBAL_LOG_LEVEL=3
+
+# Basic parameters, review and modify per model
+# Network name, same as the directory name
+Network="Densenet_3D_ID0121_for_TensorFlow"
+batch_size=2
+# Debug parameter; precision_mode needs per-model review
+autotune=False
+
+# Argument parsing, no need to modify
+for para in $*
+do
+    if [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --server_index* ]];then
+        server_index=`echo ${para#*=}`
+    elif [[ $para == --conf_path* ]];then
+        conf_path=`echo ${para#*=}`
+    fi
+done
+
+# Script that generates the rank table automatically
+rank_size=8
+nohup python3 set_ranktable.py --npu_nums=$((RANK_SIZE/rank_size)) --conf_path=$conf_path
+wait
+export RANK_TABLE_FILE=${cur_path}/test/rank_table.json
+
+# Training case information, no need to modify (must be defined before its first use below)
+BatchSize=${batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+start=$(date +%s)
+for((RANK_ID=$((rank_size*server_index));RANK_ID<$((((server_index+1))*rank_size));RANK_ID++));
+do
+    # Set environment variables; the local device index is the global rank id
+    # minus this server's rank offset
+    export RANK_ID=$RANK_ID
+    export DEVICE_INDEX=`expr ${RANK_ID} - $((rank_size*server_index))`
+    export ASCEND_DEVICE_ID=`expr ${RANK_ID} - $((rank_size*server_index))`
+    echo "DEVICE ID: $ASCEND_DEVICE_ID"
+    # Enter the training script directory, review and modify per model
+    if [ -d $cur_path/test/output/$ASCEND_DEVICE_ID ];then
+        rm -rf $cur_path/test/output/$ASCEND_DEVICE_ID
+        mkdir -p $cur_path/test/output/$ASCEND_DEVICE_ID
+    else
+        mkdir -p $cur_path/test/output/$ASCEND_DEVICE_ID
+    fi
+    cd $cur_path
+    python3 train.py -bs 2 -mn dense24 -sp dense24_correction -nc True -e 1 -r ${data_path} -per True -mul_rank_size=$RANK_SIZE -mul_device_id=$RANK_ID > $cur_path/test/output/$ASCEND_DEVICE_ID/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+end=$(date +%s)
+e2etime=$(( $end - $start ))
+step_sec=`grep -a 'epoch-patient' $cur_path/test/output/$ASCEND_DEVICE_ID/train_${ASCEND_DEVICE_ID}.log |awk 'END {print $16}'`
+ActualFPS=`awk 'BEGIN{printf "%.2f\n",'${RANK_SIZE}'*'${batch_size}'/'$step_sec'}'`
+echo "--------Final Result ----------"
+echo "Final Performance images/sec : $ActualFPS"
+echo "Final Training Duration sec : $e2etime"
+# Print results, no need to modify
+echo "------------------ Final result ------------------"
+# Extract accuracy values, review and modify per model
+grep 'patient acc:' $cur_path/test/output/$ASCEND_DEVICE_ID/train_${ASCEND_DEVICE_ID}.log|awk '{print $6}'|sed 's/,//g' >> $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_acc.txt
+# Accuracy of the last iteration, no need to modify
+train_accuracy=`awk 'END {print}' $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_acc.txt`
+
+# Print, no need to modify
+echo "Final Train Accuracy : ${train_accuracy}"
+# Summary of stability and accuracy monitoring results
+
+## Collect performance data
+# Training time of a single iteration, no need to modify
+TrainingTime=`grep "time cust:" $cur_path/test/output/$ASCEND_DEVICE_ID/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $16}'`
+#ActualFPS=`echo "scale=2;${BatchSize} / ${TrainingTime}"|bc`
+
+# Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt, review per model
+grep 'patient loss:' $cur_path/test/output/$ASCEND_DEVICE_ID/train_${ASCEND_DEVICE_ID}.log|awk '{print $3}'|sed 's/,//g' >> $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+# Loss of the last iteration, no need to modify
+ActualLoss=`awk 'END {print}' $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information into ${CaseName}.log, no need to modify
+echo "Network = ${Network}" > $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2etime}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log
\ No newline at end of file
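
The launch script above trusts set_ranktable.py to have produced a consistent rank_table.json before spawning the sixteen training processes. A minimal, stdlib-only sanity check, sketched here as a hypothetical helper that is not part of this patch, could catch a truncated or short table first:

# check_rank_table.py -- hypothetical helper, not part of this patch.
# Verifies the generated rank table before training is launched.
import json
import os
import sys

def check_rank_table(path, expected_ranks):
    with open(path) as f:
        table = json.load(f)
    # Collect every rank_id across all server blocks.
    ranks = [int(dev["rank_id"])
             for srv in table["server_list"]
             for dev in srv["device"]]
    assert table["status"] == "completed", "rank table not marked completed"
    assert sorted(ranks) == list(range(expected_ranks)), \
        "expected ranks 0..%d, got %s" % (expected_ranks - 1, sorted(ranks))

if __name__ == "__main__":
    # Usage (after RANK_TABLE_FILE is exported): python3 check_rank_table.py 16
    check_rank_table(os.environ["RANK_TABLE_FILE"], int(sys.argv[1]))
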
diff --git a/TensorFlow/built-in/cv/image_classification/EfficientNet-B4_ID0162_for_TensorFlow/efficientnet_model.py b/TensorFlow/built-in/cv/image_classification/EfficientNet-B4_ID0162_for_TensorFlow/efficientnet_model.py
index 60996d90b404064435af6034dd83090c61d3edf2..6ff2da5f748147fb7ebd286fe7fd60ee438c4545 100644
--- a/TensorFlow/built-in/cv/image_classification/EfficientNet-B4_ID0162_for_TensorFlow/efficientnet_model.py
+++ b/TensorFlow/built-in/cv/image_classification/EfficientNet-B4_ID0162_for_TensorFlow/efficientnet_model.py
@@ -37,6 +37,8 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
+from npu_bridge.estimator.npu import npu_convert_dropout
+
 import collections
 import math
 import numpy as np
diff --git a/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/efficientnet/ReduceMeanD.json b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/efficientnet/ReduceMeanD.json
new file mode 100644
index 0000000000000000000000000000000000000000..6de932d0c6b151ecb1ffca1a26d3af4e0feca412
--- /dev/null
+++ b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/efficientnet/ReduceMeanD.json
@@ -0,0 +1,17 @@
+{
+    "black-list":{
+        "to-remove":[
+        ],
+        "to-add":[
+        ]
+    },
+    "white-list":{
+        "to-remove":[
+        ],
+        "to-add":[
+            "ReduceMeanD","ReduceMean"
+        ]
+    },
+    "gray-list":{
+    }
+}
\ No newline at end of file
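
ReduceMeanD.json is a mixed-precision op list: on our reading of the CANN mixlist format, the white-list "to-add" entries mark ReduceMeanD/ReduceMean as allowed to run in float16 under allow_mix_precision, and the file is wired into training through the modify_mixlist option added to NPURunConfig in the main_npu.py hunk further below. A small stdlib-only sketch to inspect such a list:

# inspect_mixlist.py -- illustrative only; the patch itself just passes the
# file path via NPURunConfig(modify_mixlist=...) in main_npu.py below.
import json

with open("./efficientnet/ReduceMeanD.json") as f:
    mixlist = json.load(f)

# Print what each list adds or removes; for this patch only the white
# list gains entries (ReduceMeanD/ReduceMean) and the rest are empty.
for name in ("black-list", "white-list", "gray-list"):
    entry = mixlist.get(name, {})
    print(name, "add:", entry.get("to-add", []),
          "remove:", entry.get("to-remove", []))

One note on the efficientnet_model.py hunks that follow: they replace the Keras Dropout layer with npu_ops.dropout using keep_prob = 1.0 - drop_connect_rate, while upstream EfficientNet configures that final dropout layer from dropout_rate (drop_connect_rate drives stochastic depth). If the intent was to mirror the original layer, keep_prob = 1.0 - dropout_rate may be what was meant; we leave the hunks as submitted.
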
diff --git a/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/efficientnet/efficientnet_model.py b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/efficientnet/efficientnet_model.py
index 8319a3514587c8bb1fafdee192533e9a47f0019f..3f34d6a38d3a167986d4c90e5960ed8c63f07855 100644
--- a/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/efficientnet/efficientnet_model.py
+++ b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/efficientnet/efficientnet_model.py
@@ -29,7 +29,7 @@ import numpy as np
 import six
 from six.moves import xrange  # pylint: disable=redefined-builtin
 import tensorflow as tf
-
+from npu_bridge.estimator import npu_ops
 import utils
 
 GlobalParams = collections.namedtuple('GlobalParams', [
@@ -636,7 +636,8 @@ class Model(tf.keras.Model):
       self.endpoints['pooled_features'] = outputs
       if not pooled_features_only:
         if self._dropout:
-          outputs = self._dropout(outputs, training=training)
+          #outputs = self._dropout(outputs, training=training)
+          outputs = npu_ops.dropout(outputs, keep_prob = 1.0 - self._global_params.drop_connect_rate)
         self.endpoints['global_pool'] = outputs
         if self._fc:
           outputs = tf.squeeze(outputs, self._spatial_dims)
@@ -647,7 +648,8 @@ class Model(tf.keras.Model):
         self.endpoints['pooled_features'] = outputs
         if not pooled_features_only:
           if self._dropout:
-            outputs = self._dropout(outputs, training=training)
+            #outputs = self._dropout(outputs, training=training)
+            outputs = npu_ops.dropout(outputs, keep_prob = 1.0 - self._global_params.drop_connect_rate)
           self.endpoints['global_pool'] = outputs
           if self._fc:
             outputs = self._fc(outputs)
diff --git a/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/efficientnet/main_npu.py b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/efficientnet/main_npu.py
index f4b7fb74ec725bf1ea2fbecd3e8e660a669d761d..e8131ad312aedde72f252b52619b2fc0b7e71c74 100644
--- a/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/efficientnet/main_npu.py
+++ b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/efficientnet/main_npu.py
@@ -692,6 +692,7 @@ def main(unused_argv):
       session_config=estimator_config,
       model_dir=FLAGS.model_dir,
       iterations_per_loop=FLAGS.iterations_per_loop,
+      modify_mixlist='./efficientnet/ReduceMeanD.json',
       keep_checkpoint_max=5)
 
   # for NPU
diff --git a/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/set_ranktable.py b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/set_ranktable.py new file mode 100644 index 0000000000000000000000000000000000000000..216dd25a50fac45e841830d0d7c1b0e51433919d --- /dev/null +++ b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/set_ranktable.py @@ -0,0 +1,1740 @@ +import argparse +parser = argparse.ArgumentParser() +parser.add_argument('-n', '--npu_nums', type=int, default='2', help='nums of npu') +parser.add_argument('-c', '--conf_path', type=str, default='./', help='the path of server_info') +FLAGS = parser.parse_args() + +import json +import os +server = [] +server_conf = [] +server_list = ["0", "1", "2", "3", "4", "5", "6", "7"] +if os.path.isdir(FLAGS.conf_path): + for f in os.listdir(FLAGS.conf_path): + if (f.split("_")[-1]).split(".")[0] in server_list and (f.split("_")[-1]).split(".")[1] == 'info' and f.split("_")[0] == 'server': + server_conf.append(f) + + + + + + +rank_address = [] +for i in range(FLAGS.npu_nums): + for x in server_conf: + if (x.split("_")[-1]).split(".")[0] == str(i): + server.append(x.split("_")[1]) + l = FLAGS.conf_path + "/" + x + with open(l, "r") as a: + s = a.readlines() + for s_ in s: + if 'address_0' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_1' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_2' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_3' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_4' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_5' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_6' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_7' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + +if FLAGS.npu_nums == 1: + rank = { + "server_count":"1", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + 
"rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}], + "status":"completed", + "version":"1.0" + } +elif FLAGS.npu_nums == 2: + rank = { + "server_count":"2", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]} + ], + + "status":"completed", + "version":"1.0" + } + + +elif FLAGS.npu_nums == 3: + rank = { + "server_count":"3", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + 
"device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]} + ], + "status":"completed", + "version":"1.0" + } +elif FLAGS.npu_nums == 4: + rank = { + "server_count":"4", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]} + ], + "status":"completed", + "version":"1.0" + } +elif FLAGS.npu_nums == 5: + rank = { + "server_count":"5", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + 
"device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + + +elif FLAGS.npu_nums == 6: + rank = { + "server_count":"6", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + 
"device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]}, + { + "server_id":server[5], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[40], + "rank_id":"40" + }, + { + "device_id":"1", + "device_ip":rank_address[41], + "rank_id":"41" + }, + { + "device_id":"2", + "device_ip":rank_address[42], + "rank_id":"42" + }, + { + "device_id":"3", + "device_ip":rank_address[43], + "rank_id":"43" + }, + { + "device_id":"4", + "device_ip":rank_address[44], + "rank_id":"44" + }, + { + "device_id":"5", + 
"device_ip":rank_address[45], + "rank_id":"45" + }, + { + "device_id":"6", + "device_ip":rank_address[46], + "rank_id":"46" + }, + { + "device_id":"7", + "device_ip":rank_address[47], + "rank_id":"47" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + +elif FLAGS.npu_nums == 7: + rank = { + "server_count":"7", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" 
+ }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]}, + { + "server_id":server[5], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[40], + "rank_id":"40" + }, + { + "device_id":"1", + "device_ip":rank_address[41], + "rank_id":"41" + }, + { + "device_id":"2", + "device_ip":rank_address[42], + "rank_id":"42" + }, + { + "device_id":"3", + "device_ip":rank_address[43], + "rank_id":"43" + }, + { + "device_id":"4", + "device_ip":rank_address[44], + "rank_id":"44" + }, + { + "device_id":"5", + "device_ip":rank_address[45], + "rank_id":"45" + }, + { + "device_id":"6", + "device_ip":rank_address[46], + "rank_id":"46" + }, + { + "device_id":"7", + "device_ip":rank_address[47], + "rank_id":"47" + } + ]}, + { + "server_id":server[6], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[48], + "rank_id":"48" + }, + { + "device_id":"1", + "device_ip":rank_address[49], + "rank_id":"49" + }, + { + "device_id":"2", + "device_ip":rank_address[50], + "rank_id":"50" + }, + { + "device_id":"3", + "device_ip":rank_address[51], + "rank_id":"51" + }, + { + "device_id":"4", + "device_ip":rank_address[52], + "rank_id":"52" + }, + { + "device_id":"5", + "device_ip":rank_address[53], + "rank_id":"53" + }, + { + "device_id":"6", + "device_ip":rank_address[54], + "rank_id":"54" + }, + { + "device_id":"7", + "device_ip":rank_address[55], + "rank_id":"55" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + + + +elif FLAGS.npu_nums == 8: + rank = { + "server_count":"8", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + 
"device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]}, + { + "server_id":server[5], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[40], + "rank_id":"40" + }, + { + "device_id":"1", + "device_ip":rank_address[41], + "rank_id":"41" + }, + { + "device_id":"2", + "device_ip":rank_address[42], + "rank_id":"42" + }, + { + "device_id":"3", + "device_ip":rank_address[43], + "rank_id":"43" + }, + { + "device_id":"4", + "device_ip":rank_address[44], + "rank_id":"44" + }, + { + "device_id":"5", + "device_ip":rank_address[45], + "rank_id":"45" + }, + { + "device_id":"6", + "device_ip":rank_address[46], + "rank_id":"46" + }, + { + "device_id":"7", + "device_ip":rank_address[47], + "rank_id":"47" + } + ]}, + { + "server_id":server[6], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[48], + "rank_id":"48" + }, + { + "device_id":"1", + "device_ip":rank_address[49], + "rank_id":"49" + }, + { + "device_id":"2", + "device_ip":rank_address[50], + "rank_id":"50" + }, + { + "device_id":"3", + "device_ip":rank_address[51], + "rank_id":"51" + }, + { + "device_id":"4", + "device_ip":rank_address[52], + "rank_id":"52" + }, + { + "device_id":"5", + "device_ip":rank_address[53], + "rank_id":"53" + }, + { + "device_id":"6", + "device_ip":rank_address[54], + "rank_id":"54" + }, + { + "device_id":"7", + "device_ip":rank_address[55], + "rank_id":"55" + } + ]}, + { + "server_id":server[7], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[56], + "rank_id":"56" + }, + { + "device_id":"1", + "device_ip":rank_address[57], + "rank_id":"57" + }, + { + "device_id":"2", + "device_ip":rank_address[58], + "rank_id":"58" + }, + { + "device_id":"3", + "device_ip":rank_address[59], + "rank_id":"59" + }, + { + "device_id":"4", + "device_ip":rank_address[60], + "rank_id":"60" + }, + { + "device_id":"5", + "device_ip":rank_address[61], + "rank_id":"61" + }, + { + "device_id":"6", + "device_ip":rank_address[62], + "rank_id":"62" + }, + { + "device_id":"7", + "device_ip":rank_address[63], + "rank_id":"63" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + + + +with open("rank_table.json", "w") as f: + json.dump(rank, f) + + + + + + diff --git a/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/train_performance_16p.sh 
b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/train_performance_16p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..07b28dcfd26e53d57612566451d1f3cc42ee853c
--- /dev/null
+++ b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/train_performance_16p.sh
@@ -0,0 +1,216 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+# Make sure the rank table file rank_table_8p.json is placed in the configs directory at the same level as test
+#export RANK_SIZE=8
+#export RANK_TABLE_FILE=${cur_path}/../8p.json
+#export JOB_ID=10087
+RANK_ID_START=0
+
+# Dataset path; keep empty by default, no modification needed
+data_path="/npu/traindata/imagenet_TF"
+
+# Set the default log level; no modification needed
+export ASCEND_GLOBAL_LOG_LEVEL=3
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="EfficientNet_B0_ID0009_for_TensorFlow"
+# Training epochs
+train_epochs=1
+# Training batch_size
+batch_size=256
+# Training steps
+train_steps=`expr 1281167 / ${batch_size}`
+# Learning rate
+learning_rate=""
+
+# TF2.X only; no modification needed
+export NPU_LOOP_SIZE=${train_steps}
+
+# Diagnostic parameters; precision_mode needs review per model
+precision_mode="allow_mix_precision"
+# Maintenance parameters; no modification needed below
+over_dump=False
+data_dump_flag=False
+data_dump_step="10"
+profiling=False
+autotune=False
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage:./train_performance_16p.sh "
+    echo " "
+    echo "parameter explain:
+    --precision_mode           precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump                if or not over detection, default is False
+    --data_dump_flag           data dump flag, default is 0
+    --data_dump_step           data dump step, default is 10
+    --profiling                if or not profiling for performance debug, default is False
+    --autotune                 whether to enable autotune, default is False
+    --data_path                source data of training
+    -h/--help                  show help message
+    "
+    exit 1
+fi
+
+# Argument validation; no modification needed
+for para in $*
+do
+    if [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+        over_dump_path=${cur_path}/output/overflow_dump
+        mkdir -p ${over_dump_path}
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+        data_dump_path=${cur_path}/output/data_dump
+        mkdir -p ${data_dump_path}
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]]; then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --server_index* ]];then
+        server_index=`echo ${para#*=}`
+    elif [[ $para == --conf_path* ]];then
+        conf_path=`echo ${para#*=}`
+    fi
+done
+
+# Check that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+
+# Patch parameters
+sed -i "695s|./efficientnet/ReduceMeanD.json|${cur_path}/../ReduceMeanD.json|g" $cur_path/../efficientnet/main_npu.py
+
+# For autotune, first run single-P training with autotune enabled; no modification needed
+if [[ $autotune == True ]]; then
+    train_full_1p.sh --autotune=$autotune --data_path=$data_path
+    wait
+    autotune=False
+fi
+
+# Training start time; no modification needed
+start_time=$(date +%s)
+export RANK_SIZE=16
+rank_size=8
+nohup python3 set_ranktable.py --npu_nums=$((RANK_SIZE/rank_size)) --conf_path=$conf_path
+wait
+export RANK_TABLE_FILE=${cur_path}/rank_table.json
+export JOB_ID=10087
+export DEVICE_INDEX=0
+
+# Enter the training script directory; review per model
+cd $cur_path/../
+for((RANK_ID=$((rank_size*server_index));RANK_ID<$((((server_index+1))*rank_size));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $RANK_ID"
+    export RANK_ID=$RANK_ID
+    export ASCEND_DEVICE_ID=`expr ${RANK_ID} - $((rank_size*server_index))`
+    ASCEND_DEVICE_ID=`expr ${RANK_ID} - $((rank_size*server_index))`
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    fi
+
+    # Core binding; delete for models that do not need it; review per model
+    corenum=`cat /proc/cpuinfo |grep "processor"|wc -l`
+    let a=RANK_ID*${corenum}/${RANK_SIZE}
+    let b=RANK_ID+1
+    let c=b*${corenum}/${RANK_SIZE}-1
+
+    # Run the training script; the arguments passed below need no modification, others need review per model
+    #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+    nohup ${bind_core} python3.7 efficientnet/main_npu.py \
+        --data_dir=${data_path} \
+        --model_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt \
+        --mode=train_and_eval \
+        --train_batch_size=256 \
+        --train_steps=500 \
+        --iterations_per_loop=100 \
+        --steps_per_eval=31250 \
+        --eval_batch_size=128 \
+        --base_learning_rate=0.2 \
+        --model_name=efficientnet-b0 > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Restore parameters
+sed -i "695s|${cur_path}/../ReduceMeanD.json|./efficientnet/ReduceMeanD.json|g" $cur_path/../efficientnet/main_npu.py
+
+# Print results; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review per model
+FPS=`grep 'logger.py:54' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F 'ips:' '{print $2}'|awk '{print $1}'|awk 'NR>1'|awk '{sum+=$1} END {print sum/NR}'`
+#FPS=`grep 'logger.py:54' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F 'ips:' '{print $2}'|awk '{print $1}'|sort -n -r|head -1`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+
+# Output training accuracy; review per model
+#train_accuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'`
+# Print; no modification needed
+#echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Summary of stability/accuracy monitoring results
+# Training case info; no modification needed
+BatchSize=${batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+## Collect performance data
+# Throughput; no modification needed
+ActualFPS=${FPS}
+# Training time per iteration; no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*'${RANK_SIZE}'*1000/'${FPS}'}'`
+
+# Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep 'logger.py:54' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print $8}' |awk -F ":" '{print $2}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key info into ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
diff --git a/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/train_performance_1p.sh
index 037b342b51a43dae1879688557c9bc021ab7e738..943ceae25b48e18e2132ff896f8fec49a4c101f8 100644
--- a/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/train_performance_1p.sh
+++ b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/train_performance_1p.sh
@@ -92,6 +92,9 @@ if [[ $data_path == "" ]];then
     exit 1
 fi
 
+#修改参数
+sed -i "695s|./efficientnet/ReduceMeanD.json|${cur_path}/../ReduceMeanD.json|g" $cur_path/../efficientnet/main_npu.py
+
 #训练开始时间,不需要修改
 start_time=$(date +%s)
 cd $cur_path/../
@@ -117,8 +120,8 @@ do
     --model_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt \
     --mode=train \
     --train_batch_size=256 \
-    --train_steps=250 \
-    --iterations_per_loop=10 \
+    --train_steps=500 \
+    --iterations_per_loop=100 \
     --model_name=efficientnet-b0 > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
 done
 wait
@@ -127,6 +130,9 @@ wait
 end_time=$(date +%s)
 e2e_time=$(( $end_time - $start_time ))
 
+#恢复参数
+sed -i "695s|${cur_path}/../ReduceMeanD.json|./efficientnet/ReduceMeanD.json|g" $cur_path/../efficientnet/main_npu.py
+
 #结果打印,不需要修改
 echo "------------------ Final result ------------------"
 #输出性能FPS,需要模型审视修改
diff --git a/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/train_performance_8p.sh b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/train_performance_8p.sh
index 087e7104498773af4229c0a98e3a7b2ede4f5e49..8502e079e3dc5fd811164d3bd602cd2089118483 100644
--- a/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/train_performance_8p.sh
+++ b/TensorFlow/built-in/cv/image_classification/EfficientNet_B0_ID0009_for_TensorFlow/test/train_performance_8p.sh
@@ -98,6 +98,9 @@ if [[ $data_path == "" ]];then
     exit 1
 fi
 
+#修改参数
+sed -i "695s|./efficientnet/ReduceMeanD.json|${cur_path}/../ReduceMeanD.json|g" $cur_path/../efficientnet/main_npu.py
+
 #autotune时,先开启autotune执行单P训练,不需要修改
 if [[ $autotune == True ]]; then
     train_full_1p.sh --autotune=$autotune --data_path=$data_path
@@ -142,8 +145,8 @@ do
     --model_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt \
     --mode=train_and_eval \
     --train_batch_size=256 \
-    --train_steps=250 \
-    --iterations_per_loop=10 \
+    --train_steps=500 \
+    --iterations_per_loop=100 \
--steps_per_eval=31250 \ --eval_batch_size=128 \ --base_learning_rate=0.2 \ @@ -155,6 +158,9 @@ wait end_time=$(date +%s) e2e_time=$(( $end_time - $start_time )) +#恢复参数 +sed -i "695s|${cur_path}/../ReduceMeanD.json|./efficientnet/ReduceMeanD.json|g" $cur_path/../efficientnet/main_npu.py + #结果打印,不需要修改 echo "------------------ Final result ------------------" #输出性能FPS,需要模型审视修改 diff --git a/TensorFlow/built-in/cv/image_classification/Oct-ResNet_ID0251_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/built-in/cv/image_classification/Oct-ResNet_ID0251_for_TensorFlow/test/train_full_1p.sh index e9100627aea62b026ecb378f049293ae86748095..ef4ae989b3606ce99bcc5d1d19427e05b15169ed 100644 --- a/TensorFlow/built-in/cv/image_classification/Oct-ResNet_ID0251_for_TensorFlow/test/train_full_1p.sh +++ b/TensorFlow/built-in/cv/image_classification/Oct-ResNet_ID0251_for_TensorFlow/test/train_full_1p.sh @@ -155,7 +155,7 @@ FPS=`awk 'BEGIN {printf "%.2f\n",'${batch_size}'*'${batch_per_sec}'}'` echo "Final Performance images/sec : $FPS" #输出训练精度,需要模型审视修改 -train_accuracy=`grep "acc =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | tail -n 1 | awk -F " " '{print $9}'` +train_accuracy=`grep "acc =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | tail -n 1 | tr -d ','| awk -F " " '{print $9}'` #打印,不需要修改 echo "Final Train Accuracy : ${train_accuracy}" echo "E2E Training Duration sec : $e2e_time" diff --git a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/src/trainers/ReduceMeanD.json b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/src/trainers/ReduceMeanD.json new file mode 100644 index 0000000000000000000000000000000000000000..6de932d0c6b151ecb1ffca1a26d3af4e0feca412 --- /dev/null +++ b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/src/trainers/ReduceMeanD.json @@ -0,0 +1,17 @@ +{ + "black-list":{ + "to-remove":[ + ], + "to-add":[ + ] + }, + "white-list":{ + "to-remove":[ + ], + "to-add":[ + "ReduceMeanD","ReduceMean" + ] + }, + "gray-list":{ + } +} \ No newline at end of file diff --git a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/src/trainers/gpu_base_trainer.py b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/src/trainers/gpu_base_trainer.py index d42c68ec57bafe02426a8e38d0ceaef22f56a64e..9a517ee1985239e05b9d9f73578011a183916f41 100644 --- a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/src/trainers/gpu_base_trainer.py +++ b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/src/trainers/gpu_base_trainer.py @@ -100,7 +100,16 @@ class GPUBaseTrain(object): run_config = NPURunConfig(dump_config=dump_config, hcom_parallel=True, precision_mode="allow_mix_precision", save_summary_steps=0, log_step_count_steps=None, enable_data_pre_proc=True,save_checkpoints_secs=1e9, session_config=session_config, model_dir = self.config['model_dir'], iterations_per_loop=self.config['iterations_per_loop']) else: if self.config['debug'] : - run_config = NPURunConfig(hcom_parallel=True, precision_mode="allow_mix_precision", enable_data_pre_proc=True, save_checkpoints_steps=112590, session_config=session_config, model_dir = self.config['model_dir'], iterations_per_loop=self.config['iterations_per_loop'], keep_checkpoint_max=5) + run_config = NPURunConfig(hcom_parallel=True, + precision_mode="allow_mix_precision", + enable_data_pre_proc=True, + save_checkpoints_steps=112590, + session_config=session_config, + 
model_dir = self.config['model_dir'], + iterations_per_loop=self.config['iterations_per_loop'], + keep_checkpoint_max=5, + enable_small_channel=1, + modify_mixlist='./src/trainers/ReduceMeanD.json') else : run_config = NPURunConfig(hcom_parallel=True, precision_mode="allow_mix_precision", save_summary_steps=0, log_step_count_steps=None, enable_data_pre_proc=True,save_checkpoints_secs=1e9, session_config=session_config, model_dir = self.config['model_dir'], iterations_per_loop=self.config['iterations_per_loop']) diff --git a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/set_ranktable.py b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/set_ranktable.py new file mode 100644 index 0000000000000000000000000000000000000000..c25b51462c5df2325462786688d4a206ee29fb9a --- /dev/null +++ b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/set_ranktable.py @@ -0,0 +1,1740 @@ +import argparse +parser = argparse.ArgumentParser() +parser.add_argument('-n', '--npu_nums', type=int, default='2', help='nums of npu') +parser.add_argument('-c', '--conf_path', type=str, default='./', help='the path of server_info') +FLAGS = parser.parse_args() + +import json +import os +server = [] +server_conf = [] +server_list = ["0", "1", "2", "3", "4", "5", "6", "7"] +if os.path.isdir(FLAGS.conf_path): + for f in os.listdir(FLAGS.conf_path): + if (f.split("_")[-1]).split(".")[0] in server_list and (f.split("_")[-1]).split(".")[1] == 'info' and f.split("_")[0] == 'server': + server_conf.append(f) + + + + + + +rank_address = [] +for i in range(FLAGS.npu_nums): + for x in server_conf: + if (x.split("_")[-1]).split(".")[0] == str(i): + server.append(x.split("_")[1]) + l = FLAGS.conf_path + "/" + x + with open(l, "r") as a: + s = a.readlines() + for s_ in s: + if 'address_0' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_1' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_2' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_3' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_4' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_5' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_6' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_7' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + +if FLAGS.npu_nums == 1: + rank = { + "server_count":"1", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}], + "status":"completed", + "version":"1.0" + } +elif FLAGS.npu_nums == 2: + rank = { + "server_count":"2", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + 
{ + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]} + ], + + "status":"completed", + "version":"1.0" + } + + +elif FLAGS.npu_nums == 3: + rank = { + "server_count":"3", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]} + ], + "status":"completed", + "version":"1.0" + } +elif FLAGS.npu_nums == 4: + rank = { + "server_count":"4", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + 
"device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]} + ], + "status":"completed", + "version":"1.0" + } +elif FLAGS.npu_nums == 5: + rank = { + "server_count":"5", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + 
"rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + + +elif FLAGS.npu_nums == 6: + rank = { + "server_count":"6", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + 
"device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]}, + { + "server_id":server[5], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[40], + "rank_id":"40" + }, + { + "device_id":"1", + "device_ip":rank_address[41], + "rank_id":"41" + }, + { + "device_id":"2", + "device_ip":rank_address[42], + "rank_id":"42" + }, + { + "device_id":"3", + "device_ip":rank_address[43], + "rank_id":"43" + }, + { + "device_id":"4", + "device_ip":rank_address[44], + "rank_id":"44" + }, + { + "device_id":"5", + "device_ip":rank_address[45], + "rank_id":"45" + }, + { + "device_id":"6", + "device_ip":rank_address[46], + "rank_id":"46" + }, + { + "device_id":"7", + "device_ip":rank_address[47], + "rank_id":"47" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + +elif FLAGS.npu_nums == 7: + rank = { + "server_count":"7", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + 
"rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]}, + { + "server_id":server[5], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[40], + "rank_id":"40" + }, + { + "device_id":"1", + "device_ip":rank_address[41], + "rank_id":"41" + }, + { + "device_id":"2", + "device_ip":rank_address[42], + "rank_id":"42" + }, + { + "device_id":"3", + "device_ip":rank_address[43], + "rank_id":"43" + }, + { + "device_id":"4", + "device_ip":rank_address[44], + "rank_id":"44" + }, + { + "device_id":"5", + "device_ip":rank_address[45], + "rank_id":"45" + }, + { + "device_id":"6", + "device_ip":rank_address[46], + "rank_id":"46" + }, + { + "device_id":"7", + "device_ip":rank_address[47], + "rank_id":"47" + } + ]}, + { + "server_id":server[6], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[48], + "rank_id":"48" + }, + { + 
"device_id":"1", + "device_ip":rank_address[49], + "rank_id":"49" + }, + { + "device_id":"2", + "device_ip":rank_address[50], + "rank_id":"50" + }, + { + "device_id":"3", + "device_ip":rank_address[51], + "rank_id":"51" + }, + { + "device_id":"4", + "device_ip":rank_address[52], + "rank_id":"52" + }, + { + "device_id":"5", + "device_ip":rank_address[53], + "rank_id":"53" + }, + { + "device_id":"6", + "device_ip":rank_address[54], + "rank_id":"54" + }, + { + "device_id":"7", + "device_ip":rank_address[55], + "rank_id":"55" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + + + +elif FLAGS.npu_nums == 8: + rank = { + "server_count":"8", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + 
"device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]}, + { + "server_id":server[5], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[40], + "rank_id":"40" + }, + { + "device_id":"1", + "device_ip":rank_address[41], + "rank_id":"41" + }, + { + "device_id":"2", + "device_ip":rank_address[42], + "rank_id":"42" + }, + { + "device_id":"3", + "device_ip":rank_address[43], + "rank_id":"43" + }, + { + "device_id":"4", + "device_ip":rank_address[44], + "rank_id":"44" + }, + { + "device_id":"5", + "device_ip":rank_address[45], + "rank_id":"45" + }, + { + "device_id":"6", + "device_ip":rank_address[46], + "rank_id":"46" + }, + { + "device_id":"7", + "device_ip":rank_address[47], + "rank_id":"47" + } + ]}, + { + "server_id":server[6], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[48], + "rank_id":"48" + }, + { + "device_id":"1", + "device_ip":rank_address[49], + "rank_id":"49" + }, + { + "device_id":"2", + "device_ip":rank_address[50], + "rank_id":"50" + }, + { + "device_id":"3", + "device_ip":rank_address[51], + "rank_id":"51" + }, + { + "device_id":"4", + "device_ip":rank_address[52], + "rank_id":"52" + }, + { + "device_id":"5", + "device_ip":rank_address[53], + "rank_id":"53" + }, + { + "device_id":"6", + "device_ip":rank_address[54], + "rank_id":"54" + }, + { + "device_id":"7", + "device_ip":rank_address[55], + "rank_id":"55" + } + ]}, + { + "server_id":server[7], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[56], + "rank_id":"56" + }, + { + "device_id":"1", + "device_ip":rank_address[57], + "rank_id":"57" + }, + { + "device_id":"2", + "device_ip":rank_address[58], + "rank_id":"58" + }, + { + "device_id":"3", + "device_ip":rank_address[59], + "rank_id":"59" + }, + { + "device_id":"4", + "device_ip":rank_address[60], + "rank_id":"60" + }, + { + "device_id":"5", + "device_ip":rank_address[61], + "rank_id":"61" + }, + { + "device_id":"6", + "device_ip":rank_address[62], + "rank_id":"62" + }, + { + "device_id":"7", + "device_ip":rank_address[63], + "rank_id":"63" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + + + +with open("rank_table.json", "w") as f: + json.dump(rank, f) + + + + + + diff --git a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_1p.sh b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_1p.sh index 2107632c35bd6d37b43ef24bc298451c6a97249c..30fd680e6e538521e855940e1add830d8f4f591f 100644 --- a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_1p.sh +++ b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_1p.sh @@ -96,6 +96,7 @@ fi #修改参数 sed -i "50s|PATH_TO_BE_CONFIGURED|${data_path}|g" $cur_path/../src/configs/res50_256bs_1p.py sed -i "107s|PATH_TO_BE_CONFIGURED|${cur_path}/output/0/d\_solution/ckpt0|g" $cur_path/../src/configs/res50_256bs_1p.py +sed -i "112s|./src/trainers/ReduceMeanD.json|${cur_path}/../ReduceMeanD.json|g" $cur_path/../src/trainers/gpu_base_trainer.py cp data_loader.py $cur_path/../src/data_loader/resnet50/ 
 #训练开始时间,不需要修改
@@ -135,6 +136,7 @@ e2e_time=$(( $end_time - $start_time ))
 #参数改回
 sed -i "50s|${data_path}|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_256bs_1p.py
 sed -i "107s|${cur_path}/output/0/d\_solution/ckpt0|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_256bs_1p.py
+sed -i "112s|${cur_path}/../ReduceMeanD.json|./src/trainers/ReduceMeanD.json|g" $cur_path/../src/trainers/gpu_base_trainer.py
 
 #结果打印,不需要修改
 echo "------------------ Final result ------------------"
diff --git a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_8p.sh b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_8p.sh
index 024f66aaa686ce46d78c8c77e668d2555b8d80c0..35f1881cc11d8b41c6e8a52066b6807fb6d60c2e 100644
--- a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_8p.sh
+++ b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_8p.sh
@@ -101,6 +101,7 @@ fi
 #修改参数
 sed -i "50s|PATH_TO_BE_CONFIGURED|${data_path}|g" $cur_path/../src/configs/res50_256bs_8p.py
 sed -i "107s|PATH_TO_BE_CONFIGURED|${cur_path}/output/0/d\_solution/ckpt0|g" $cur_path/../src/configs/res50_256bs_8p.py
+sed -i "112s|./src/trainers/ReduceMeanD.json|${cur_path}/../ReduceMeanD.json|g" $cur_path/../src/trainers/gpu_base_trainer.py
 cp data_loader.py $cur_path/../src/data_loader/resnet50/
 
 #autotune时,先开启autotune执行单P训练,不需要修改
@@ -159,6 +160,7 @@ e2e_time=$(( $end_time - $start_time ))
 #参数改回
 sed -i "50s|${data_path}|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_256bs_8p.py
 sed -i "107s|${cur_path}/output/0/d\_solution/ckpt0|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_256bs_8p.py
+sed -i "112s|${cur_path}/../ReduceMeanD.json|./src/trainers/ReduceMeanD.json|g" $cur_path/../src/trainers/gpu_base_trainer.py
 
 #结果打印,不需要修改
 echo "------------------ Final result ------------------"
diff --git a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_hw192_16p.sh b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_hw192_16p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..e18ba26d3a76b015705a20244462cc8473204e60
--- /dev/null
+++ b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_hw192_16p.sh
@@ -0,0 +1,209 @@
+#!/bin/bash
+
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=16
+export JOB_ID=99990001
+export RANK_ID=8
+export SLOG_PRINT_TO_STDOUT=0
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Set the default log level; no modification needed
+export ASCEND_GLOBAL_LOG_LEVEL=3
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="ResNet50_ID0058_for_TensorFlow"
+# Training epochs
+train_epochs=1
+# Training batch_size
+batch_size=256
+# Training steps
+train_steps=2000
+# Learning rate
+learning_rate=
+
+# Diagnostic parameters; precision_mode needs review per model
+# Maintenance parameters; no modification needed below
+over_dump=False
+data_dump_flag=False
+data_dump_step="10"
+profiling=False
+autotune=False
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage:./train_performance_bs256_hw192_16p.sh "
+    echo " "
+    echo "parameter explain:
+    --precision_mode           precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump                if or not over detection, default is False
+    --data_dump_flag           data dump flag, default is False
+    --data_dump_step           data dump step, default is 10
+    --profiling                if or not profiling for performance debug, default is False
+    --autotune                 whether to enable autotune, default is False
+    --data_path                source data of training
+    -h/--help                  show help message
+    "
+    exit 1
+fi
+
+# Argument validation; no modification needed
+for para in $*
+do
+    if [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+        over_dump_path=${cur_path}/output/overflow_dump
+        mkdir -p ${over_dump_path}
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+        data_dump_path=${cur_path}/output/data_dump
+        mkdir -p ${data_dump_path}
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --server_index* ]];then
+        server_index=`echo ${para#*=}`
+    elif [[ $para == --conf_path* ]];then
+        conf_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]]; then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    fi
+done
+
+# Check that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+
+# Auto-generate the ranktable
+rank_size=8
+nohup python3 set_ranktable.py --npu_nums=$((RANK_SIZE/rank_size)) --conf_path=$conf_path
+wait
+export RANK_TABLE_FILE=${cur_path}/rank_table.json
+
+
+# Patch parameters
+sed -i "50s|PATH_TO_BE_CONFIGURED|${data_path}|g" $cur_path/../src/configs/res50_256bs_HW192_8p.py
+sed -i "107s|PATH_TO_BE_CONFIGURED|${cur_path}/output/0/d\_solution/ckpt0|g" $cur_path/../src/configs/res50_256bs_HW192_8p.py
+
+cp data_loader.py $cur_path/../src/data_loader/resnet50/
+# For autotune, first run single-P training with autotune enabled; no modification needed
+if [[ $autotune == True ]]; then
+    train_full_1p.sh --autotune=$autotune --data_path=$data_path
+    wait
+    autotune=False
+fi
+
+# Training start time; no modification needed
+start_time=$(date +%s)
+
+# Enter the training script directory; review per model
+cd $cur_path/../
+for((RANK_ID=$((rank_size*server_index));RANK_ID<$((((server_index+1))*rank_size));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $RANK_ID"
+    export RANK_ID=$RANK_ID
+    export DEVICE_INDEX=`expr ${RANK_ID} - $((rank_size*server_index))`
+    export ASCEND_DEVICE_ID=`expr ${RANK_ID} - $((rank_size*server_index))`
+    ASCEND_DEVICE_ID=`expr ${RANK_ID} - $((rank_size*server_index))`
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    fi
+
+    # Core binding; delete for models that do not need it; review per model
+    corenum=`cat /proc/cpuinfo |grep "processor"|wc -l`
+    let a=ASCEND_DEVICE_ID*${corenum}/8
+    let b=ASCEND_DEVICE_ID+1
+    let c=b*${corenum}/8-1
+
+    # Run the training script; the arguments passed below need no modification, others need review per model
+    #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+    nohup ${bind_core} python3.7 ${cur_path}/../src/mains/res50.py --config_file=res50_256bs_HW192_8p \
+        --max_train_steps=${train_steps} \
+        --iterations_per_loop=100 \
+        --debug=True \
+        --eval=False \
+        --model_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/d_solution/ckpt${ASCEND_DEVICE_ID} >> ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Restore parameters
+sed -i "50s|${data_path}|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_256bs_HW192_8p.py
+sed -i "107s|${cur_path}/output/0/d\_solution/ckpt0|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_256bs_HW192_8p.py
+
+# Print results; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review per model
+FPS=`cat ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | grep "FPS:" | awk -F "FPS:" '{print $2}' | awk -F " loss:" '{print $1}' | tail -n +2 | awk '{sum+=$1} END {print sum*2/NR}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+
+# Print; no modification needed
+echo "E2E Training Duration sec : $e2e_time"
+
+# Summary of stability/accuracy monitoring results
+# Training case info; no modification needed
+BatchSize=${batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p_hw192'_'perf'
+
+## Collect performance data
+# Throughput; no modification needed
+ActualFPS=${FPS}
+# Training time per iteration; no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${batch_size}'*'${RANK_SIZE}'*1000/'${FPS}'}'`
+
+# Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep "FPS:" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss: " '{print $2}' | awk -F "total" '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key info into ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
diff --git a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_hw192_1p.sh b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_hw192_1p.sh
index 9c2c2c93e845d58ae3231e4c5e29912d3105650f..b70f86f1ab0dd204be34b7ef8d2a06a143522c0a 100644
--- a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_hw192_1p.sh
+++ b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_hw192_1p.sh
@@ -96,6 +96,7 @@ fi
 #修改参数
 sed -i "50s|PATH_TO_BE_CONFIGURED|${data_path}|g" $cur_path/../src/configs/res50_256bs_HW192_1p.py
 sed -i "107s|PATH_TO_BE_CONFIGURED|${cur_path}/output/0/d\_solution/ckpt0|g" $cur_path/../src/configs/res50_256bs_HW192_1p.py
+sed -i "112s|./src/trainers/ReduceMeanD.json|${cur_path}/../ReduceMeanD.json|g" $cur_path/../src/trainers/gpu_base_trainer.py
 cp data_loader.py $cur_path/../src/data_loader/resnet50/
 
 #训练开始时间,不需要修改
@@ -135,6 +136,7 @@ e2e_time=$(( $end_time - $start_time ))
 #参数改回
 sed -i "50s|${data_path}|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_256bs_HW192_1p.py
 sed -i "107s|${cur_path}/output/0/d\_solution/ckpt0|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_256bs_HW192_1p.py
+sed -i "112s|${cur_path}/../ReduceMeanD.json|./src/trainers/ReduceMeanD.json|g" $cur_path/../src/trainers/gpu_base_trainer.py
 
 #结果打印,不需要修改
 echo "------------------ Final result ------------------"
diff --git a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_hw192_8p.sh b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_hw192_8p.sh
index 7046b5c50cd2819c96797c67e0a5ba005b6b1311..a32343d7684739180b568cbc40946fbe0a9ec01f 100644
--- a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_hw192_8p.sh
+++ b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs256_hw192_8p.sh
@@ -101,6 +101,7 @@ fi
 #修改参数
 sed -i "50s|PATH_TO_BE_CONFIGURED|${data_path}|g" $cur_path/../src/configs/res50_256bs_HW192_8p.py
 sed -i "107s|PATH_TO_BE_CONFIGURED|${cur_path}/output/0/d\_solution/ckpt0|g" $cur_path/../src/configs/res50_256bs_HW192_8p.py
+sed -i "112s|./src/trainers/ReduceMeanD.json|${cur_path}/../ReduceMeanD.json|g" $cur_path/../src/trainers/gpu_base_trainer.py
 cp data_loader.py $cur_path/../src/data_loader/resnet50/
 
 #autotune时,先开启autotune执行单P训练,不需要修改
@@ -159,6 +160,7 @@ e2e_time=$(( $end_time - $start_time ))
 #参数改回
 sed -i "50s|${data_path}|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_256bs_HW192_8p.py
 sed -i "107s|${cur_path}/output/0/d\_solution/ckpt0|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_256bs_HW192_8p.py
+sed -i "112s|${cur_path}/../ReduceMeanD.json|./src/trainers/ReduceMeanD.json|g" $cur_path/../src/trainers/gpu_base_trainer.py
 
 #结果打印,不需要修改
 echo "------------------ Final result ------------------"
diff --git a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs32_1p.sh b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs32_1p.sh
index 6c08021b499cdd6fcc6dbfefd9397ce13e0833a1..551f6d7ef7fcc7199d35e817e6247c2557bfc0af 100644
--- a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs32_1p.sh
+++ b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs32_1p.sh
@@ -96,6 +96,7 @@ fi
 #修改参数
 sed -i "50s|PATH_TO_BE_CONFIGURED|${data_path}|g" $cur_path/../src/configs/res50_32bs_1p.py
 sed -i "107s|PATH_TO_BE_CONFIGURED|${cur_path}/output/0/d\_solution/ckpt0|g" $cur_path/../src/configs/res50_32bs_1p.py
+sed -i "112s|./src/trainers/ReduceMeanD.json|${cur_path}/../ReduceMeanD.json|g" $cur_path/../src/trainers/gpu_base_trainer.py
 cp data_loader.py $cur_path/../src/data_loader/resnet50/
 
 #训练开始时间,不需要修改
@@ -135,6 +136,7 @@ e2e_time=$(( $end_time - $start_time ))
 #参数改回
 sed -i "50s|${data_path}|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_32bs_1p.py
 sed -i
"107s|${cur_path}/output/0/d\_solution/ckpt0|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_32bs_1p.py +sed -i "112s|${cur_path}/../ReduceMeanD.json|./src/trainers/ReduceMeanD.json|g" $cur_path/../src/trainers/gpu_base_trainer.py #结果打印,不需要修改 echo "------------------ Final result ------------------" diff --git a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs32_8p.sh b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs32_8p.sh index e18a92d993c952020ecf4ac43f12075e9bf9e31f..d6b1f66d5e722f0fc0b8184f926177b29c7e0354 100644 --- a/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs32_8p.sh +++ b/TensorFlow/built-in/cv/image_classification/ResNet50_ID0058_for_TensorFlow/test/train_performance_bs32_8p.sh @@ -101,6 +101,7 @@ fi #修改参数 sed -i "50s|PATH_TO_BE_CONFIGURED|${data_path}|g" $cur_path/../src/configs/res50_32bs_8p.py sed -i "107s|PATH_TO_BE_CONFIGURED|${cur_path}/output/0/d\_solution/ckpt0|g" $cur_path/../src/configs/res50_32bs_8p.py +sed -i "112s|./src/trainers/ReduceMeanD.json|${cur_path}/../ReduceMeanD.json|g" $cur_path/../src/trainers/gpu_base_trainer.py cp data_loader.py $cur_path/../src/data_loader/resnet50/ #autotune时,先开启autotune执行单P训练,不需要修改 @@ -159,6 +160,7 @@ e2e_time=$(( $end_time - $start_time )) #参数改回 sed -i "50s|${data_path}|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_32bs_8p.py sed -i "107s|${cur_path}/output/0/d\_solution/ckpt0|PATH_TO_BE_CONFIGURED|g" $cur_path/../src/configs/res50_32bs_8p.py +sed -i "112s|${cur_path}/../ReduceMeanD.json|./src/trainers/ReduceMeanD.json|g" $cur_path/../src/trainers/gpu_base_trainer.py #结果打印,不需要修改 echo "------------------ Final result ------------------" diff --git a/TensorFlow/built-in/cv/image_segmentation/UNet3D_ID0057_for_TensorFlow/main_npu.py b/TensorFlow/built-in/cv/image_segmentation/UNet3D_ID0057_for_TensorFlow/main_npu.py index f0cfc1c366126831f74ebe8e6b05ff4ba1b1454b..fd1bc90d78820c6d682984cc72b0bb4a08781f25 100644 --- a/TensorFlow/built-in/cv/image_segmentation/UNet3D_ID0057_for_TensorFlow/main_npu.py +++ b/TensorFlow/built-in/cv/image_segmentation/UNet3D_ID0057_for_TensorFlow/main_npu.py @@ -39,8 +39,8 @@ from dataset.data_loader import Dataset, CLASSES from runtime.hooks import get_hooks, ProfilingHook, TrainingHook from runtime.arguments import PARSER from runtime.setup import prepare_model_dir, build_estimator, set_flags, get_logger -#from hccl.split.api import set_split_strategy_by_idx -#set_split_strategy_by_idx([1,90,99]) +from hccl.split.api import set_split_strategy_by_idx +set_split_strategy_by_idx([1,90,99]) def parse_evaluation_results(result): data = {CLASSES[i]: result[CLASSES[i]] for i in range(len(CLASSES))} diff --git a/TensorFlow/built-in/cv/image_synthesis/DCGAN_ID2196_for_TensorFlow/web/index.html b/TensorFlow/built-in/cv/image_synthesis/DCGAN_ID2196_for_TensorFlow/web/index.html deleted file mode 100644 index 6ea2fe6158ffbc6f72e303c305ec055980f885e5..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/cv/image_synthesis/DCGAN_ID2196_for_TensorFlow/web/index.html +++ /dev/null @@ -1,427 +0,0 @@ - - - - - - - - - - - - - - - - - - - Neural Face | 프사 뉴럴 - - - - - - - - - - - - - - - - - - -
diff --git a/TensorFlow/built-in/cv/image_synthesis/DCGAN_ID2196_for_TensorFlow/web/index.html b/TensorFlow/built-in/cv/image_synthesis/DCGAN_ID2196_for_TensorFlow/web/index.html
deleted file mode 100644
index 6ea2fe6158ffbc6f72e303c305ec055980f885e5..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/cv/image_synthesis/DCGAN_ID2196_for_TensorFlow/web/index.html
+++ /dev/null
@@ -1,427 +0,0 @@
-[427 deleted lines: the "Neural Face" (프사 뉴럴) demo page; its HTML markup did not survive extraction. Recoverable content, originally given in Korean and English: Neural Face generates face images with Deep Convolutional Generative Adversarial Networks (DCGAN), developed by Facebook AI Research, and none of the faces shown are real. Generation is driven by a vector z of 100 real numbers; hovering over a generated image shows its z, clicking copies that z to the palette, and generation takes roughly 1 to 10 seconds depending on the browser. The algorithm section described adversarial learning between a Generator (a deconvolutional network) that fabricates images and a Discriminator (a CNN) that separates real images from generated ones, with z drawn from a Gaussian distribution, likened to a thief and police. The results section noted that more than 100K images were crawled, faces were cropped with the openface framework, and the model was implemented in TensorFlow and trained for two days on a GTX 980 Ti; it showed faces generated from a fixed z across early epochs, the "average" face at z = 0, and the effect of sweeping individual z dimensions from -1 to 1 (smiling, glasses, black-and-white, changing sex). The page linked to the released code and closed with a small Turing test ("click an image to see the answer"). Credit: Taehoon Kim (@carpedm20).]
diff --git a/TensorFlow/built-in/cv/image_synthesis/VAE-GAN_ID1800_for_TensorFlow/utils.py b/TensorFlow/built-in/cv/image_synthesis/VAE-GAN_ID1800_for_TensorFlow/utils.py
index 5e8fc04028a4709b590cf3ade1f753acf796ea97..2529eb3685a92725365d4a44f583adb578dc67cf 100644
--- a/TensorFlow/built-in/cv/image_synthesis/VAE-GAN_ID1800_for_TensorFlow/utils.py
+++ b/TensorFlow/built-in/cv/image_synthesis/VAE-GAN_ID1800_for_TensorFlow/utils.py
@@ -45,7 +45,8 @@ def encoder(input_tensor, output_size):
     net = layers.conv2d(net, 32, 5, stride=2)
     net = layers.conv2d(net, 64, 5, stride=2)
     net = layers.conv2d(net, 128, 5, stride=2, padding='VALID')
-    net = layers.dropout(net, keep_prob=0.9)
+    # net = layers.dropout(net, keep_prob=0.9)
+    net = npu_ops.dropout(net, keep_prob=0.9)
     net = layers.flatten(net)
     return layers.fully_connected(net, output_size, activation_fn=None)
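The VAE-GAN hunk above swaps tf.contrib's dropout for the NPU dropout kernel with the same keep_prob semantics. A minimal sketch of the call site in context; the npu_ops import path is an assumption (the diff shows only the call site), based on the usual npu_bridge layout for Ascend TensorFlow 1.15.

    # Minimal sketch (not part of the patch). The import location of npu_ops is
    # an assumption; it is conventionally provided by the npu_bridge package on
    # Ascend TensorFlow 1.15.
    from npu_bridge.estimator.npu import npu_ops  # assumed import location
    from tensorflow.contrib import layers

    def encoder_tail(net, output_size):
        # npu_ops.dropout keeps the keep_prob semantics of layers.dropout but
        # runs as an NPU-side kernel instead of the tf.contrib implementation.
        net = npu_ops.dropout(net, keep_prob=0.9)
        net = layers.flatten(net)
        return layers.fully_connected(net, output_size, activation_fn=None)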
diff --git a/TensorFlow/built-in/nlp/Albert_ID0632_for_TensorFlow/albert/albert_config/vocab.txt b/TensorFlow/built-in/nlp/Albert_ID0632_for_TensorFlow/albert/albert_config/vocab.txt
deleted file mode 100644
index ca4f9781030019ab9b253c6dcb8c7878b6dc87a5..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/nlp/Albert_ID0632_for_TensorFlow/albert/albert_config/vocab.txt
+++ /dev/null
@@ -1,21128 +0,0 @@
-[21128 deleted vocabulary entries, one WordPiece token per line: the special tokens [PAD], [unused1]..[unused99], [UNK], [CLS], [SEP] and [MASK]; ASCII punctuation, digits and letters; Greek, Cyrillic, Arabic, Thai, Tibetan, Korean jamo, Japanese kana and bopomofo characters; several thousand CJK ideographs; full-width forms; symbols and emoji; and ##-prefixed subword pieces mixing Latin fragments, numbers and dates.]
-##lax -##~7 -##moon -393 -seven -study -411 -348 -lonzo -8k -##ェ -evolution -##イフ -##kk -gs -kd -##レス -arduino -344 -b12 -##lux -arpg -##rdon -cook -##x5 -dark -five -##als -##ida -とても -sign -362 -##ちの -something -20mm -##nda -387 -##posted -fresh -tf -1870 -422 -cam -##mine -##skip -##form -##ssion -education -394 -##tee -dyson -stage -##jie -want -##night -epson -pack -あります -##ppy -テリヘル -##█ -wd -##eh -##rence -left -##lvin -golden -mhz -discovery -##trix -##n2 -loft -##uch -##dra -##sse -speed -~1 -1mdb -sorry -welcome -##urn -wave -gaga -##lmer -teddy -##160 -トラックハック -せよ -611 -##f2016 -378 -rp -##sha -rar -##あなたに -##きた -840 -holiday -##ュー -373 -074 -##vg -##nos -##rail -gartner -gi -6p -##dium -kit -488 -b3 -eco -##ろう -20g -sean -##stone -autocad -nu -##np -f16 -write -029 -m5 -##ias -images -atp -##dk -fsm -504 -1350 -ve -52kb -##xxx -##のに -##cake -414 -unit -lim -ru -1v -##ification -published -angela -16g -analytics -ak -##q -##nel -gmt -##icon -again -##₂ -##bby -ios11 -445 -かこさいます -waze -いてす -##ハ -9985 -##ust -##ティー -framework -##007 -iptv -delete -52sykb -cl -wwdc -027 -30cm -##fw -##ての -1389 -##xon -brandt -##ses -##dragon -tc -vetements -anne -monte -modern -official -##へて -##ere -##nne -##oud -もちろん -50 -etnews -##a2 -##graphy -421 -863 -##ちゃん -444 -##rtex -##てお -l2 -##gma -mount -ccd -たと -archive -morning -tan -ddos -e7 -##ホ -day4 -##ウ -gis -453 -its -495 -factory -bruce -pg -##ito -ってくたさい -guest -cdma -##lling -536 -n3 -しかし -3~4 -mega -eyes -ro -13 -women -dac -church -##jun -singapore -##facebook -6991 -starbucks -##tos -##stin -##shine -zen -##mu -tina -20℃ -1893 -##たけて -503 -465 -request -##gence -qt -##っ -1886 -347 -363 -q7 -##zzi -diary -##tore -409 -##ead -468 -cst -##osa -canada -agent -va -##jiang -##ちは -##ーク -##lam -sg -##nix -##sday -##よって -g6 -##master -bing -##zl -charlie -16 -8mm -nb40 -##ーン -thai -##ルフ -ln284ct -##itz -##2f -bonnie -##food -##lent -originals -##stro -##lts -418 -∟∣ -##bscribe -children -ntd -yesstyle -##かも -hmv -##tment -d5 -2cm -arts -sms -##pn -##я -##いい -topios9 -539 -lifestyle -virtual -##ague -xz -##deo -muji -024 -unt -##nnis -##ᅩ -faq1 -1884 -396 -##ette -fly -64㎡ -はしめまして -441 -curry -##pop -のこ -release -##← -##◆◆ -##cast -073 -ありな -500ml -##ews -5c -##stle -ios7 -##ima -787 -dog -lenovo -##r4 -roger -013 -cbs -vornado -100m -417 -##desk -##クok -##ald -1867 -9595 -2900 -##van -oil -##x -some -break -common -##jy -##lines -g7 -twice -419 -ella -nano -belle -にこ -##mes -##self -##note -jb -##ことかてきます -benz -##との -##ova -451 -save -##wing -##ますのて -kai -りは -##hua -##rect -rainer -##unge -448 -##0m -adsl -##かな -guestname -##uma -##kins -##zu -tokichoi -##price -county -##med -##mus -rmk -391 -address -vm -えて -openload -##group -##hin -##iginal -amg -urban -##oz -jobs -emi -##public -beautiful -##sch -album -##dden -##bell -jerry -works -hostel -miller -##drive -##rmin -##10 -376 -boot -828 -##370 -##fx -##cm~ -1885 -##nome -##ctionary -##oman -##lish -##cr -##hm -433 -##how -432 -francis -xi -c919 -b5 -evernote -##uc -vga -##3000 -coupe -##urg -##cca -##uality -019 -6g -れる -multi -##また -##ett -em -hey -##ani -##tax -##rma -inside -than -740 -leonnhurt -##jin -ict -れた -bird -notes -200mm -くの -##dical -##lli -result -442 -iu -ee -438 -smap -gopro -##last -yin -pure -998 -32g -けた -5kg -##dan -##rame -mama -##oot -bean -marketing -##hur -2l -bella -sync -xuite -##ground -515 -discuz -##getrelax -##ince -##bay -##5s -cj -##イス -gmat -apt -##pass -jing -##rix -c4 -rich -##とても -niusnews -##ello -bag -770 -##eting -##mobile -18 -culture -015 -##のてすか 
-377 -1020 -area -##ience -616 -details -gp -universal -silver -dit -はお -private -ddd -u11 -kanshu -##ified -fung -##nny -dx -##520 -tai -475 -023 -##fr -##lean -3s -##pin -429 -##rin -25000 -ly -rick -##bility -usb3 -banner -##baru -##gion -metal -dt -vdf -1871 -karl -qualcomm -bear -1010 -oldid -ian -jo -##tors -population -##ernel -1882 -mmorpg -##mv -##bike -603 -##© -ww -friend -##ager -exhibition -##del -##pods -fpx -structure -##free -##tings -kl -##rley -##copyright -##mma -california -3400 -orange -yoga -4l -canmake -honey -##anda -##コメント -595 -nikkie -##ルハイト -dhl -publishing -##mall -##gnet -20cm -513 -##クセス -##┅ -e88 -970 -##dog -fishbase -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##+ -##, -##- -##. -##/ -##: -##; -##< -##= -##> -##? -##@ -##[ -##\ -##] -##^ -##_ -##{ -##| -##} -##~ -##£ -##¤ -##¥ -##§ -##« -##± -##³ -##µ -##· -##¹ -##º -##» -##¼ -##ß -##æ -##÷ -##ø -##đ -##ŋ -##ɔ -##ə -##ɡ -##ʰ -##ˇ -##ˈ -##ˊ -##ˋ -##ˍ -##ː -##˙ -##˚ -##ˢ -##α -##β -##γ -##δ -##ε -##η -##θ -##ι -##κ -##λ -##μ -##ν -##ο -##π -##ρ -##ς -##σ -##τ -##υ -##φ -##χ -##ψ -##б -##в -##г -##д -##е -##ж -##з -##к -##л -##м -##н -##о -##п -##р -##с -##т -##у -##ф -##х -##ц -##ч -##ш -##ы -##ь -##і -##ا -##ب -##ة -##ت -##د -##ر -##س -##ع -##ل -##م -##ن -##ه -##و -##ي -##۩ -##ก -##ง -##น -##ม -##ย -##ร -##อ -##า -##เ -##๑ -##་ -##ღ -##ᄀ -##ᄁ -##ᄂ -##ᄃ -##ᄅ -##ᄆ -##ᄇ -##ᄈ -##ᄉ -##ᄋ -##ᄌ -##ᄎ -##ᄏ -##ᄐ -##ᄑ -##ᄒ -##ᅢ -##ᅣ -##ᅥ -##ᅦ -##ᅧ -##ᅨ -##ᅪ -##ᅬ -##ᅭ -##ᅮ -##ᅯ -##ᅲ -##ᅳ -##ᅴ -##ᆷ -##ᆸ -##ᆺ -##ᆻ -##ᗜ -##ᵃ -##ᵉ -##ᵍ -##ᵏ -##ᵐ -##ᵒ -##ᵘ -##‖ -##„ -##† -##• -##‥ -##‧ -##
 -##‰ -##′ -##″ -##‹ -##› -##※ -##‿ -##⁄ -##ⁱ -##⁺ -##ⁿ -##₁ -##₃ -##₄ -##€ -##№ -##ⅰ -##ⅱ -##ⅲ -##ⅳ -##ⅴ -##↔ -##↗ -##↘ -##⇒ -##∀ -##− -##∕ -##∙ -##√ -##∞ -##∟ -##∠ -##∣ -##∩ -##∮ -##∶ -##∼ -##∽ -##≈ -##≒ -##≡ -##≤ -##≥ -##≦ -##≧ -##≪ -##≫ -##⊙ -##⋅ -##⋈ -##⋯ -##⌒ -##① -##② -##③ -##④ -##⑤ -##⑥ -##⑦ -##⑧ -##⑨ -##⑩ -##⑴ -##⑵ -##⑶ -##⑷ -##⑸ -##⒈ -##⒉ -##⒊ -##⒋ -##ⓒ -##ⓔ -##ⓘ -##━ -##┃ -##┆ -##┊ -##┌ -##└ -##├ -##┣ -##═ -##║ -##╚ -##╞ -##╠ -##╭ -##╮ -##╯ -##╰ -##╱ -##╳ -##▂ -##▃ -##▅ -##▇ -##▉ -##▋ -##▌ -##▍ -##▎ -##□ -##▪ -##▫ -##▬ -##△ -##▶ -##► -##▽ -##◇ -##◕ -##◠ -##◢ -##◤ -##☀ -##☕ -##☞ -##☺ -##☼ -##♀ -##♂ -##♠ -##♡ -##♣ -##♦ -##♫ -##♬ -##✈ -##✔ -##✕ -##✖ -##✦ -##✨ -##✪ -##✰ -##✿ -##❀ -##➜ -##➤ -##⦿ -##、 -##。 -##〃 -##々 -##〇 -##〈 -##〉 -##《 -##》 -##「 -##」 -##『 -##』 -##【 -##】 -##〓 -##〔 -##〕 -##〖 -##〗 -##〜 -##〝 -##〞 -##ぃ -##ぇ -##ぬ -##ふ -##ほ -##む -##ゃ -##ゅ -##ゆ -##ょ -##゜ -##ゝ -##ァ -##ゥ -##エ -##ォ -##ケ -##サ -##セ -##ソ -##ッ -##ニ -##ヌ -##ネ -##ノ -##ヘ -##モ -##ャ -##ヤ -##ュ -##ユ -##ョ -##ヨ -##ワ -##ヲ -##・ -##ヽ -##ㄅ -##ㄆ -##ㄇ -##ㄉ -##ㄋ -##ㄌ -##ㄍ -##ㄎ -##ㄏ -##ㄒ -##ㄚ -##ㄛ -##ㄞ -##ㄟ -##ㄢ -##ㄤ -##ㄥ -##ㄧ -##ㄨ -##ㆍ -##㈦ -##㊣ -##㗎 -##一 -##丁 -##七 -##万 -##丈 -##三 -##上 -##下 -##不 -##与 -##丐 -##丑 -##专 -##且 -##丕 -##世 -##丘 -##丙 -##业 -##丛 -##东 -##丝 -##丞 -##丟 -##両 -##丢 -##两 -##严 -##並 -##丧 -##丨 -##个 -##丫 -##中 -##丰 -##串 -##临 -##丶 -##丸 -##丹 -##为 -##主 -##丼 -##丽 -##举 -##丿 -##乂 -##乃 -##久 -##么 -##义 -##之 -##乌 -##乍 -##乎 -##乏 -##乐 -##乒 -##乓 -##乔 -##乖 -##乗 -##乘 -##乙 -##乜 -##九 -##乞 -##也 -##习 -##乡 -##书 -##乩 -##买 -##乱 -##乳 -##乾 -##亀 -##亂 -##了 -##予 -##争 -##事 -##二 -##于 -##亏 -##云 -##互 -##五 -##井 -##亘 -##亙 -##亚 -##些 -##亜 -##亞 -##亟 -##亡 -##亢 -##交 -##亥 -##亦 -##产 -##亨 -##亩 -##享 -##京 -##亭 -##亮 -##亲 -##亳 -##亵 -##人 -##亿 -##什 -##仁 -##仃 -##仄 -##仅 -##仆 -##仇 -##今 -##介 -##仍 -##从 -##仏 -##仑 -##仓 -##仔 -##仕 -##他 -##仗 -##付 -##仙 -##仝 -##仞 -##仟 -##代 -##令 -##以 -##仨 -##仪 -##们 -##仮 -##仰 -##仲 -##件 -##价 -##任 -##份 -##仿 -##企 -##伉 -##伊 -##伍 -##伎 -##伏 -##伐 -##休 -##伕 -##众 -##优 -##伙 -##会 -##伝 -##伞 -##伟 -##传 -##伢 -##伤 -##伦 -##伪 -##伫 -##伯 -##估 -##伴 -##伶 -##伸 -##伺 -##似 -##伽 -##佃 -##但 -##佇 -##佈 -##位 -##低 -##住 -##佐 -##佑 -##体 -##佔 -##何 -##佗 -##佘 -##余 -##佚 -##佛 -##作 -##佝 -##佞 -##佟 -##你 -##佢 -##佣 -##佤 -##佥 -##佩 -##佬 -##佯 -##佰 -##佳 -##併 -##佶 -##佻 -##佼 -##使 -##侃 -##侄 -##來 -##侈 -##例 -##侍 -##侏 -##侑 -##侖 -##侗 -##供 -##依 -##侠 -##価 -##侣 -##侥 -##侦 -##侧 -##侨 -##侬 -##侮 -##侯 -##侵 -##侶 -##侷 -##便 -##係 -##促 -##俄 -##俊 -##俎 -##俏 -##俐 -##俑 -##俗 -##俘 -##俚 -##保 -##俞 -##俟 -##俠 -##信 -##俨 -##俩 -##俪 -##俬 -##俭 -##修 -##俯 -##俱 -##俳 -##俸 -##俺 -##俾 -##倆 -##倉 -##個 -##倌 -##倍 -##倏 -##們 -##倒 -##倔 -##倖 -##倘 -##候 -##倚 -##倜 -##借 -##倡 -##値 -##倦 -##倩 -##倪 -##倫 -##倬 -##倭 -##倶 -##债 -##值 -##倾 -##偃 -##假 -##偈 -##偉 -##偌 -##偎 -##偏 -##偕 -##做 -##停 -##健 -##側 -##偵 -##偶 -##偷 -##偻 -##偽 -##偿 -##傀 -##傅 -##傍 -##傑 -##傘 -##備 -##傚 -##傢 -##傣 -##傥 -##储 -##傩 -##催 -##傭 -##傲 -##傳 -##債 -##傷 -##傻 -##傾 -##僅 -##働 -##像 -##僑 -##僕 -##僖 -##僚 -##僥 -##僧 -##僭 -##僮 -##僱 -##僵 -##價 -##僻 -##儀 -##儂 -##億 -##儆 -##儉 -##儋 -##儒 -##儕 -##儘 -##償 -##儡 -##優 -##儲 -##儷 -##儼 -##儿 -##兀 -##允 -##元 -##兄 -##充 -##兆 -##兇 -##先 -##光 -##克 -##兌 -##免 -##児 -##兑 -##兒 -##兔 -##兖 -##党 -##兜 -##兢 -##入 -##內 -##全 -##兩 -##八 -##公 -##六 -##兮 -##兰 -##共 -##兲 -##关 -##兴 -##兵 -##其 -##具 -##典 -##兹 -##养 -##兼 -##兽 -##冀 -##内 -##円 -##冇 -##冈 -##冉 -##冊 -##册 -##再 -##冏 -##冒 -##冕 -##冗 -##写 -##军 -##农 -##冠 -##冢 -##冤 -##冥 -##冨 -##冪 -##冬 -##冯 -##冰 -##冲 -##决 -##况 -##冶 -##冷 -##冻 -##冼 -##冽 -##冾 -##净 -##凄 -##准 -##凇 -##凈 -##凉 -##凋 -##凌 -##凍 -##减 -##凑 -##凛 -##凜 -##凝 -##几 -##凡 -##凤 -##処 -##凪 -##凭 -##凯 -##凰 -##凱 -##凳 -##凶 -##凸 -##凹 -##出 -##击 -##函 -##凿 -##刀 -##刁 -##刃 -##分 -##切 -##刈 -##刊 -##刍 -##刎 -##刑 -##划 -##列 -##刘 
-##则 -##刚 -##创 -##初 -##删 -##判 -##別 -##刨 -##利 -##刪 -##别 -##刮 -##到 -##制 -##刷 -##券 -##刹 -##刺 -##刻 -##刽 -##剁 -##剂 -##剃 -##則 -##剉 -##削 -##剋 -##剌 -##前 -##剎 -##剐 -##剑 -##剔 -##剖 -##剛 -##剜 -##剝 -##剣 -##剤 -##剥 -##剧 -##剩 -##剪 -##副 -##割 -##創 -##剷 -##剽 -##剿 -##劃 -##劇 -##劈 -##劉 -##劊 -##劍 -##劏 -##劑 -##力 -##劝 -##办 -##功 -##加 -##务 -##劣 -##动 -##助 -##努 -##劫 -##劭 -##励 -##劲 -##劳 -##労 -##劵 -##効 -##劾 -##势 -##勁 -##勃 -##勇 -##勉 -##勋 -##勐 -##勒 -##動 -##勖 -##勘 -##務 -##勛 -##勝 -##勞 -##募 -##勢 -##勤 -##勧 -##勳 -##勵 -##勸 -##勺 -##勻 -##勾 -##勿 -##匀 -##包 -##匆 -##匈 -##匍 -##匐 -##匕 -##化 -##北 -##匙 -##匝 -##匠 -##匡 -##匣 -##匪 -##匮 -##匯 -##匱 -##匹 -##区 -##医 -##匾 -##匿 -##區 -##十 -##千 -##卅 -##升 -##午 -##卉 -##半 -##卍 -##华 -##协 -##卑 -##卒 -##卓 -##協 -##单 -##卖 -##南 -##単 -##博 -##卜 -##卞 -##卟 -##占 -##卡 -##卢 -##卤 -##卦 -##卧 -##卫 -##卮 -##卯 -##印 -##危 -##即 -##却 -##卵 -##卷 -##卸 -##卻 -##卿 -##厂 -##厄 -##厅 -##历 -##厉 -##压 -##厌 -##厕 -##厘 -##厚 -##厝 -##原 -##厢 -##厥 -##厦 -##厨 -##厩 -##厭 -##厮 -##厲 -##厳 -##去 -##县 -##叁 -##参 -##參 -##又 -##叉 -##及 -##友 -##双 -##反 -##収 -##发 -##叔 -##取 -##受 -##变 -##叙 -##叛 -##叟 -##叠 -##叡 -##叢 -##口 -##古 -##句 -##另 -##叨 -##叩 -##只 -##叫 -##召 -##叭 -##叮 -##可 -##台 -##叱 -##史 -##右 -##叵 -##叶 -##号 -##司 -##叹 -##叻 -##叼 -##叽 -##吁 -##吃 -##各 -##吆 -##合 -##吉 -##吊 -##吋 -##同 -##名 -##后 -##吏 -##吐 -##向 -##吒 -##吓 -##吕 -##吖 -##吗 -##君 -##吝 -##吞 -##吟 -##吠 -##吡 -##否 -##吧 -##吨 -##吩 -##含 -##听 -##吭 -##吮 -##启 -##吱 -##吳 -##吴 -##吵 -##吶 -##吸 -##吹 -##吻 -##吼 -##吽 -##吾 -##呀 -##呂 -##呃 -##呆 -##呈 -##告 -##呋 -##呎 -##呐 -##呓 -##呕 -##呗 -##员 -##呛 -##呜 -##呢 -##呤 -##呦 -##周 -##呱 -##呲 -##味 -##呵 -##呷 -##呸 -##呻 -##呼 -##命 -##咀 -##咁 -##咂 -##咄 -##咆 -##咋 -##和 -##咎 -##咏 -##咐 -##咒 -##咔 -##咕 -##咖 -##咗 -##咘 -##咙 -##咚 -##咛 -##咣 -##咤 -##咦 -##咧 -##咨 -##咩 -##咪 -##咫 -##咬 -##咭 -##咯 -##咱 -##咲 -##咳 -##咸 -##咻 -##咽 -##咿 -##哀 -##品 -##哂 -##哄 -##哆 -##哇 -##哈 -##哉 -##哋 -##哌 -##响 -##哎 -##哏 -##哐 -##哑 -##哒 -##哔 -##哗 -##哟 -##員 -##哥 -##哦 -##哧 -##哨 -##哩 -##哪 -##哭 -##哮 -##哲 -##哺 -##哼 -##哽 -##唁 -##唄 -##唆 -##唇 -##唉 -##唏 -##唐 -##唑 -##唔 -##唠 -##唤 -##唧 -##唬 -##售 -##唯 -##唰 -##唱 -##唳 -##唷 -##唸 -##唾 -##啃 -##啄 -##商 -##啉 -##啊 -##問 -##啓 -##啕 -##啖 -##啜 -##啞 -##啟 -##啡 -##啤 -##啥 -##啦 -##啧 -##啪 -##啫 -##啬 -##啮 -##啰 -##啱 -##啲 -##啵 -##啶 -##啷 -##啸 -##啻 -##啼 -##啾 -##喀 -##喂 -##喃 -##善 -##喆 -##喇 -##喉 -##喊 -##喋 -##喎 -##喏 -##喔 -##喘 -##喙 -##喚 -##喜 -##喝 -##喟 -##喧 -##喪 -##喫 -##喬 -##單 -##喰 -##喱 -##喲 -##喳 -##喵 -##営 -##喷 -##喹 -##喺 -##喻 -##喽 -##嗅 -##嗆 -##嗇 -##嗎 -##嗑 -##嗒 -##嗓 -##嗔 -##嗖 -##嗚 -##嗜 -##嗝 -##嗟 -##嗡 -##嗣 -##嗤 -##嗦 -##嗨 -##嗪 -##嗬 -##嗯 -##嗰 -##嗲 -##嗳 -##嗶 -##嗷 -##嗽 -##嘀 -##嘅 -##嘆 -##嘈 -##嘉 -##嘌 -##嘍 -##嘎 -##嘔 -##嘖 -##嘗 -##嘘 -##嘚 -##嘛 -##嘜 -##嘞 -##嘟 -##嘢 -##嘣 -##嘤 -##嘧 -##嘩 -##嘭 -##嘮 -##嘯 -##嘰 -##嘱 -##嘲 -##嘴 -##嘶 -##嘸 -##嘹 -##嘻 -##嘿 -##噁 -##噌 -##噎 -##噓 -##噔 -##噗 -##噙 -##噜 -##噠 -##噢 -##噤 -##器 -##噩 -##噪 -##噬 -##噱 -##噴 -##噶 -##噸 -##噹 -##噻 -##噼 -##嚀 -##嚇 -##嚎 -##嚏 -##嚐 -##嚓 -##嚕 -##嚟 -##嚣 -##嚥 -##嚨 -##嚮 -##嚴 -##嚷 -##嚼 -##囂 -##囉 -##囊 -##囍 -##囑 -##囔 -##囗 -##囚 -##四 -##囝 -##回 -##囟 -##因 -##囡 -##团 -##団 -##囤 -##囧 -##囪 -##囫 -##园 -##困 -##囱 -##囲 -##図 -##围 -##囹 -##固 -##国 -##图 -##囿 -##圃 -##圄 -##圆 -##圈 -##國 -##圍 -##圏 -##園 -##圓 -##圖 -##團 -##圜 -##土 -##圣 -##圧 -##在 -##圩 -##圭 -##地 -##圳 -##场 -##圻 -##圾 -##址 -##坂 -##均 -##坊 -##坍 -##坎 -##坏 -##坐 -##坑 -##块 -##坚 -##坛 -##坝 -##坞 -##坟 -##坠 -##坡 -##坤 -##坦 -##坨 -##坪 -##坯 -##坳 -##坵 -##坷 -##垂 -##垃 -##垄 -##型 -##垒 -##垚 -##垛 -##垠 -##垢 -##垣 -##垦 -##垩 -##垫 -##垭 -##垮 -##垵 -##埂 -##埃 -##埋 -##城 -##埔 -##埕 -##埗 -##域 -##埠 -##埤 -##埵 -##執 -##埸 -##培 -##基 -##埼 -##堀 -##堂 -##堃 -##堅 -##堆 -##堇 -##堑 -##堕 -##堙 -##堡 -##堤 -##堪 -##堯 -##堰 -##報 -##場 -##堵 -##堺 -##堿 -##塊 -##塌 -##塑 -##塔 -##塗 -##塘 -##塚 -##塞 -##塢 -##塩 -##填 -##塬 -##塭 -##塵 -##塾 -##墀 -##境 -##墅 -##墉 -##墊 -##墒 
-##墓 -##増 -##墘 -##墙 -##墜 -##增 -##墟 -##墨 -##墩 -##墮 -##墳 -##墻 -##墾 -##壁 -##壅 -##壆 -##壇 -##壊 -##壑 -##壓 -##壕 -##壘 -##壞 -##壟 -##壢 -##壤 -##壩 -##士 -##壬 -##壮 -##壯 -##声 -##売 -##壳 -##壶 -##壹 -##壺 -##壽 -##处 -##备 -##変 -##复 -##夏 -##夔 -##夕 -##外 -##夙 -##多 -##夜 -##够 -##夠 -##夢 -##夥 -##大 -##天 -##太 -##夫 -##夭 -##央 -##夯 -##失 -##头 -##夷 -##夸 -##夹 -##夺 -##夾 -##奂 -##奄 -##奇 -##奈 -##奉 -##奋 -##奎 -##奏 -##奐 -##契 -##奔 -##奕 -##奖 -##套 -##奘 -##奚 -##奠 -##奢 -##奥 -##奧 -##奪 -##奬 -##奮 -##女 -##奴 -##奶 -##奸 -##她 -##好 -##如 -##妃 -##妄 -##妆 -##妇 -##妈 -##妊 -##妍 -##妒 -##妓 -##妖 -##妘 -##妙 -##妝 -##妞 -##妣 -##妤 -##妥 -##妨 -##妩 -##妪 -##妮 -##妲 -##妳 -##妹 -##妻 -##妾 -##姆 -##姉 -##姊 -##始 -##姍 -##姐 -##姑 -##姒 -##姓 -##委 -##姗 -##姚 -##姜 -##姝 -##姣 -##姥 -##姦 -##姨 -##姪 -##姫 -##姬 -##姹 -##姻 -##姿 -##威 -##娃 -##娄 -##娅 -##娆 -##娇 -##娉 -##娑 -##娓 -##娘 -##娛 -##娜 -##娟 -##娠 -##娣 -##娥 -##娩 -##娱 -##娲 -##娴 -##娶 -##娼 -##婀 -##婁 -##婆 -##婉 -##婊 -##婕 -##婚 -##婢 -##婦 -##婧 -##婪 -##婭 -##婴 -##婵 -##婶 -##婷 -##婺 -##婿 -##媒 -##媚 -##媛 -##媞 -##媧 -##媲 -##媳 -##媽 -##媾 -##嫁 -##嫂 -##嫉 -##嫌 -##嫑 -##嫔 -##嫖 -##嫘 -##嫚 -##嫡 -##嫣 -##嫦 -##嫩 -##嫲 -##嫵 -##嫻 -##嬅 -##嬉 -##嬌 -##嬗 -##嬛 -##嬢 -##嬤 -##嬪 -##嬰 -##嬴 -##嬷 -##嬸 -##嬿 -##孀 -##孃 -##子 -##孑 -##孔 -##孕 -##孖 -##字 -##存 -##孙 -##孚 -##孛 -##孜 -##孝 -##孟 -##孢 -##季 -##孤 -##学 -##孩 -##孪 -##孫 -##孬 -##孰 -##孱 -##孳 -##孵 -##學 -##孺 -##孽 -##孿 -##宁 -##它 -##宅 -##宇 -##守 -##安 -##宋 -##完 -##宏 -##宓 -##宕 -##宗 -##官 -##宙 -##定 -##宛 -##宜 -##宝 -##实 -##実 -##宠 -##审 -##客 -##宣 -##室 -##宥 -##宦 -##宪 -##宫 -##宮 -##宰 -##害 -##宴 -##宵 -##家 -##宸 -##容 -##宽 -##宾 -##宿 -##寂 -##寄 -##寅 -##密 -##寇 -##富 -##寐 -##寒 -##寓 -##寛 -##寝 -##寞 -##察 -##寡 -##寢 -##寥 -##實 -##寧 -##寨 -##審 -##寫 -##寬 -##寮 -##寰 -##寵 -##寶 -##寸 -##对 -##寺 -##寻 -##导 -##対 -##寿 -##封 -##専 -##射 -##将 -##將 -##專 -##尉 -##尊 -##尋 -##對 -##導 -##小 -##少 -##尔 -##尕 -##尖 -##尘 -##尚 -##尝 -##尤 -##尧 -##尬 -##就 -##尴 -##尷 -##尸 -##尹 -##尺 -##尻 -##尼 -##尽 -##尾 -##尿 -##局 -##屁 -##层 -##屄 -##居 -##屆 -##屈 -##屉 -##届 -##屋 -##屌 -##屍 -##屎 -##屏 -##屐 -##屑 -##展 -##屜 -##属 -##屠 -##屡 -##屢 -##層 -##履 -##屬 -##屯 -##山 -##屹 -##屿 -##岀 -##岁 -##岂 -##岌 -##岐 -##岑 -##岔 -##岖 -##岗 -##岘 -##岙 -##岚 -##岛 -##岡 -##岩 -##岫 -##岬 -##岭 -##岱 -##岳 -##岷 -##岸 -##峇 -##峋 -##峒 -##峙 -##峡 -##峤 -##峥 -##峦 -##峨 -##峪 -##峭 -##峯 -##峰 -##峴 -##島 -##峻 -##峽 -##崁 -##崂 -##崆 -##崇 -##崎 -##崑 -##崔 -##崖 -##崗 -##崙 -##崛 -##崧 -##崩 -##崭 -##崴 -##崽 -##嵇 -##嵊 -##嵋 -##嵌 -##嵐 -##嵘 -##嵩 -##嵬 -##嵯 -##嶂 -##嶄 -##嶇 -##嶋 -##嶙 -##嶺 -##嶼 -##嶽 -##巅 -##巍 -##巒 -##巔 -##巖 -##川 -##州 -##巡 -##巢 -##工 -##左 -##巧 -##巨 -##巩 -##巫 -##差 -##己 -##已 -##巳 -##巴 -##巷 -##巻 -##巽 -##巾 -##巿 -##币 -##市 -##布 -##帅 -##帆 -##师 -##希 -##帐 -##帑 -##帕 -##帖 -##帘 -##帚 -##帛 -##帜 -##帝 -##帥 -##带 -##帧 -##師 -##席 -##帮 -##帯 -##帰 -##帳 -##帶 -##帷 -##常 -##帼 -##帽 -##幀 -##幂 -##幄 -##幅 -##幌 -##幔 -##幕 -##幟 -##幡 -##幢 -##幣 -##幫 -##干 -##平 -##年 -##并 -##幸 -##幹 -##幺 -##幻 -##幼 -##幽 -##幾 -##广 -##庁 -##広 -##庄 -##庆 -##庇 -##床 -##序 -##庐 -##库 -##应 -##底 -##庖 -##店 -##庙 -##庚 -##府 -##庞 -##废 -##庠 -##度 -##座 -##庫 -##庭 -##庵 -##庶 -##康 -##庸 -##庹 -##庾 -##廁 -##廂 -##廃 -##廈 -##廉 -##廊 -##廓 -##廖 -##廚 -##廝 -##廟 -##廠 -##廢 -##廣 -##廬 -##廳 -##延 -##廷 -##建 -##廿 -##开 -##弁 -##异 -##弃 -##弄 -##弈 -##弊 -##弋 -##式 -##弑 -##弒 -##弓 -##弔 -##引 -##弗 -##弘 -##弛 -##弟 -##张 -##弥 -##弦 -##弧 -##弩 -##弭 -##弯 -##弱 -##張 -##強 -##弹 -##强 -##弼 -##弾 -##彅 -##彆 -##彈 -##彌 -##彎 -##归 -##当 -##录 -##彗 -##彙 -##彝 -##形 -##彤 -##彥 -##彦 -##彧 -##彩 -##彪 -##彫 -##彬 -##彭 -##彰 -##影 -##彷 -##役 -##彻 -##彼 -##彿 -##往 -##征 -##径 -##待 -##徇 -##很 -##徉 -##徊 -##律 -##後 -##徐 -##徑 -##徒 -##従 -##徕 -##得 -##徘 -##徙 -##徜 -##從 -##徠 -##御 -##徨 -##復 -##循 -##徬 -##微 -##徳 -##徴 -##徵 -##德 -##徹 -##徼 -##徽 -##心 -##必 -##忆 -##忌 -##忍 -##忏 -##忐 -##忑 -##忒 -##忖 -##志 -##忘 -##忙 -##応 -##忠 -##忡 -##忤 -##忧 -##忪 -##快 -##忱 -##念 -##忻 -##忽 -##忿 -##怀 
-##态 -##怂 -##怅 -##怆 -##怎 -##怏 -##怒 -##怔 -##怕 -##怖 -##怙 -##怜 -##思 -##怠 -##怡 -##急 -##怦 -##性 -##怨 -##怪 -##怯 -##怵 -##总 -##怼 -##恁 -##恃 -##恆 -##恋 -##恍 -##恐 -##恒 -##恕 -##恙 -##恚 -##恢 -##恣 -##恤 -##恥 -##恨 -##恩 -##恪 -##恫 -##恬 -##恭 -##息 -##恰 -##恳 -##恵 -##恶 -##恸 -##恺 -##恻 -##恼 -##恿 -##悄 -##悅 -##悉 -##悌 -##悍 -##悔 -##悖 -##悚 -##悟 -##悠 -##患 -##悦 -##您 -##悩 -##悪 -##悬 -##悯 -##悱 -##悲 -##悴 -##悵 -##悶 -##悸 -##悻 -##悼 -##悽 -##情 -##惆 -##惇 -##惊 -##惋 -##惑 -##惕 -##惘 -##惚 -##惜 -##惟 -##惠 -##惡 -##惦 -##惧 -##惨 -##惩 -##惫 -##惬 -##惭 -##惮 -##惯 -##惰 -##惱 -##想 -##惴 -##惶 -##惹 -##惺 -##愁 -##愆 -##愈 -##愉 -##愍 -##意 -##愕 -##愚 -##愛 -##愜 -##感 -##愣 -##愤 -##愧 -##愫 -##愷 -##愿 -##慄 -##慈 -##態 -##慌 -##慎 -##慑 -##慕 -##慘 -##慚 -##慟 -##慢 -##慣 -##慧 -##慨 -##慫 -##慮 -##慰 -##慳 -##慵 -##慶 -##慷 -##慾 -##憂 -##憊 -##憋 -##憎 -##憐 -##憑 -##憔 -##憚 -##憤 -##憧 -##憨 -##憩 -##憫 -##憬 -##憲 -##憶 -##憾 -##懂 -##懇 -##懈 -##應 -##懊 -##懋 -##懑 -##懒 -##懦 -##懲 -##懵 -##懶 -##懷 -##懸 -##懺 -##懼 -##懾 -##懿 -##戀 -##戈 -##戊 -##戌 -##戍 -##戎 -##戏 -##成 -##我 -##戒 -##戕 -##或 -##战 -##戚 -##戛 -##戟 -##戡 -##戦 -##截 -##戬 -##戮 -##戰 -##戲 -##戳 -##戴 -##戶 -##户 -##戸 -##戻 -##戾 -##房 -##所 -##扁 -##扇 -##扈 -##扉 -##手 -##才 -##扎 -##扑 -##扒 -##打 -##扔 -##払 -##托 -##扛 -##扣 -##扦 -##执 -##扩 -##扪 -##扫 -##扬 -##扭 -##扮 -##扯 -##扰 -##扱 -##扳 -##扶 -##批 -##扼 -##找 -##承 -##技 -##抄 -##抉 -##把 -##抑 -##抒 -##抓 -##投 -##抖 -##抗 -##折 -##抚 -##抛 -##抜 -##択 -##抟 -##抠 -##抡 -##抢 -##护 -##报 -##抨 -##披 -##抬 -##抱 -##抵 -##抹 -##押 -##抽 -##抿 -##拂 -##拄 -##担 -##拆 -##拇 -##拈 -##拉 -##拋 -##拌 -##拍 -##拎 -##拐 -##拒 -##拓 -##拔 -##拖 -##拗 -##拘 -##拙 -##拚 -##招 -##拜 -##拟 -##拡 -##拢 -##拣 -##拥 -##拦 -##拧 -##拨 -##择 -##括 -##拭 -##拮 -##拯 -##拱 -##拳 -##拴 -##拷 -##拼 -##拽 -##拾 -##拿 -##持 -##挂 -##指 -##挈 -##按 -##挎 -##挑 -##挖 -##挙 -##挚 -##挛 -##挝 -##挞 -##挟 -##挠 -##挡 -##挣 -##挤 -##挥 -##挨 -##挪 -##挫 -##振 -##挲 -##挹 -##挺 -##挽 -##挾 -##捂 -##捅 -##捆 -##捉 -##捋 -##捌 -##捍 -##捎 -##捏 -##捐 -##捕 -##捞 -##损 -##捡 -##换 -##捣 -##捧 -##捨 -##捩 -##据 -##捱 -##捲 -##捶 -##捷 -##捺 -##捻 -##掀 -##掂 -##掃 -##掇 -##授 -##掉 -##掌 -##掏 -##掐 -##排 -##掖 -##掘 -##掙 -##掛 -##掠 -##採 -##探 -##掣 -##接 -##控 -##推 -##掩 -##措 -##掬 -##掰 -##掲 -##掳 -##掴 -##掷 -##掸 -##掺 -##揀 -##揃 -##揄 -##揆 -##揉 -##揍 -##描 -##提 -##插 -##揖 -##揚 -##換 -##握 -##揣 -##揩 -##揪 -##揭 -##揮 -##援 -##揶 -##揸 -##揹 -##揽 -##搀 -##搁 -##搂 -##搅 -##損 -##搏 -##搐 -##搓 -##搔 -##搖 -##搗 -##搜 -##搞 -##搡 -##搪 -##搬 -##搭 -##搵 -##搶 -##携 -##搽 -##摀 -##摁 -##摄 -##摆 -##摇 -##摈 -##摊 -##摒 -##摔 -##摘 -##摞 -##摟 -##摧 -##摩 -##摯 -##摳 -##摸 -##摹 -##摺 -##摻 -##撂 -##撃 -##撅 -##撇 -##撈 -##撐 -##撑 -##撒 -##撓 -##撕 -##撚 -##撞 -##撤 -##撥 -##撩 -##撫 -##撬 -##播 -##撮 -##撰 -##撲 -##撵 -##撷 -##撸 -##撻 -##撼 -##撿 -##擀 -##擁 -##擂 -##擄 -##擅 -##擇 -##擊 -##擋 -##操 -##擎 -##擒 -##擔 -##擘 -##據 -##擞 -##擠 -##擡 -##擢 -##擦 -##擬 -##擰 -##擱 -##擲 -##擴 -##擷 -##擺 -##擼 -##擾 -##攀 -##攏 -##攒 -##攔 -##攘 -##攙 -##攜 -##攝 -##攞 -##攢 -##攣 -##攤 -##攥 -##攪 -##攫 -##攬 -##支 -##收 -##攸 -##改 -##攻 -##放 -##政 -##故 -##效 -##敌 -##敍 -##敎 -##敏 -##救 -##敕 -##敖 -##敗 -##敘 -##教 -##敛 -##敝 -##敞 -##敢 -##散 -##敦 -##敬 -##数 -##敲 -##整 -##敵 -##敷 -##數 -##斂 -##斃 -##文 -##斋 -##斌 -##斎 -##斐 -##斑 -##斓 -##斗 -##料 -##斛 -##斜 -##斟 -##斡 -##斤 -##斥 -##斧 -##斩 -##斫 -##斬 -##断 -##斯 -##新 -##斷 -##方 -##於 -##施 -##旁 -##旃 -##旅 -##旋 -##旌 -##旎 -##族 -##旖 -##旗 -##无 -##既 -##日 -##旦 -##旧 -##旨 -##早 -##旬 -##旭 -##旮 -##旱 -##时 -##旷 -##旺 -##旻 -##昀 -##昂 -##昆 -##昇 -##昉 -##昊 -##昌 -##明 -##昏 -##易 -##昔 -##昕 -##昙 -##星 -##映 -##春 -##昧 -##昨 -##昭 -##是 -##昱 -##昴 -##昵 -##昶 -##昼 -##显 -##晁 -##時 -##晃 -##晉 -##晋 -##晌 -##晏 -##晒 -##晓 -##晔 -##晕 -##晖 -##晗 -##晚 -##晝 -##晞 -##晟 -##晤 -##晦 -##晨 -##晩 -##普 -##景 -##晰 -##晴 -##晶 -##晷 -##智 -##晾 -##暂 -##暄 -##暇 -##暈 -##暉 -##暌 -##暐 -##暑 -##暖 -##暗 -##暝 -##暢 -##暧 -##暨 -##暫 -##暮 -##暱 -##暴 -##暸 -##暹 -##曄 -##曆 -##曇 -##曉 -##曖 -##曙 -##曜 -##曝 -##曠 -##曦 -##曬 -##曰 
-##曲 -##曳 -##更 -##書 -##曹 -##曼 -##曾 -##替 -##最 -##會 -##月 -##有 -##朋 -##服 -##朐 -##朔 -##朕 -##朗 -##望 -##朝 -##期 -##朦 -##朧 -##木 -##未 -##末 -##本 -##札 -##朮 -##术 -##朱 -##朴 -##朵 -##机 -##朽 -##杀 -##杂 -##权 -##杆 -##杈 -##杉 -##李 -##杏 -##材 -##村 -##杓 -##杖 -##杜 -##杞 -##束 -##杠 -##条 -##来 -##杨 -##杭 -##杯 -##杰 -##東 -##杳 -##杵 -##杷 -##杼 -##松 -##板 -##极 -##构 -##枇 -##枉 -##枋 -##析 -##枕 -##林 -##枚 -##果 -##枝 -##枢 -##枣 -##枪 -##枫 -##枭 -##枯 -##枰 -##枱 -##枳 -##架 -##枷 -##枸 -##柄 -##柏 -##某 -##柑 -##柒 -##染 -##柔 -##柘 -##柚 -##柜 -##柞 -##柠 -##柢 -##查 -##柩 -##柬 -##柯 -##柱 -##柳 -##柴 -##柵 -##査 -##柿 -##栀 -##栃 -##栄 -##栅 -##标 -##栈 -##栉 -##栋 -##栎 -##栏 -##树 -##栓 -##栖 -##栗 -##校 -##栩 -##株 -##样 -##核 -##根 -##格 -##栽 -##栾 -##桀 -##桁 -##桂 -##桃 -##桅 -##框 -##案 -##桉 -##桌 -##桎 -##桐 -##桑 -##桓 -##桔 -##桜 -##桠 -##桡 -##桢 -##档 -##桥 -##桦 -##桧 -##桨 -##桩 -##桶 -##桿 -##梁 -##梅 -##梆 -##梏 -##梓 -##梗 -##條 -##梟 -##梢 -##梦 -##梧 -##梨 -##梭 -##梯 -##械 -##梳 -##梵 -##梶 -##检 -##棂 -##棄 -##棉 -##棋 -##棍 -##棒 -##棕 -##棗 -##棘 -##棚 -##棟 -##棠 -##棣 -##棧 -##森 -##棱 -##棲 -##棵 -##棹 -##棺 -##椁 -##椅 -##椋 -##植 -##椎 -##椒 -##検 -##椪 -##椭 -##椰 -##椹 -##椽 -##椿 -##楂 -##楊 -##楓 -##楔 -##楚 -##楝 -##楞 -##楠 -##楣 -##楨 -##楫 -##業 -##楮 -##極 -##楷 -##楸 -##楹 -##楼 -##楽 -##概 -##榄 -##榆 -##榈 -##榉 -##榔 -##榕 -##榖 -##榛 -##榜 -##榨 -##榫 -##榭 -##榮 -##榱 -##榴 -##榷 -##榻 -##槁 -##槃 -##構 -##槌 -##槍 -##槎 -##槐 -##槓 -##様 -##槛 -##槟 -##槤 -##槭 -##槲 -##槳 -##槻 -##槽 -##槿 -##樁 -##樂 -##樊 -##樑 -##樓 -##標 -##樞 -##樟 -##模 -##樣 -##権 -##横 -##樫 -##樯 -##樱 -##樵 -##樸 -##樹 -##樺 -##樽 -##樾 -##橄 -##橇 -##橋 -##橐 -##橘 -##橙 -##機 -##橡 -##橢 -##橫 -##橱 -##橹 -##橼 -##檀 -##檄 -##檎 -##檐 -##檔 -##檗 -##檜 -##檢 -##檬 -##檯 -##檳 -##檸 -##檻 -##櫃 -##櫚 -##櫛 -##櫥 -##櫸 -##櫻 -##欄 -##權 -##欒 -##欖 -##欠 -##次 -##欢 -##欣 -##欧 -##欲 -##欸 -##欺 -##欽 -##款 -##歆 -##歇 -##歉 -##歌 -##歎 -##歐 -##歓 -##歙 -##歛 -##歡 -##止 -##正 -##此 -##步 -##武 -##歧 -##歩 -##歪 -##歯 -##歲 -##歳 -##歴 -##歷 -##歸 -##歹 -##死 -##歼 -##殁 -##殃 -##殆 -##殇 -##殉 -##殊 -##残 -##殒 -##殓 -##殖 -##殘 -##殞 -##殡 -##殤 -##殭 -##殯 -##殲 -##殴 -##段 -##殷 -##殺 -##殼 -##殿 -##毀 -##毁 -##毂 -##毅 -##毆 -##毋 -##母 -##毎 -##每 -##毒 -##毓 -##比 -##毕 -##毗 -##毘 -##毙 -##毛 -##毡 -##毫 -##毯 -##毽 -##氈 -##氏 -##氐 -##民 -##氓 -##气 -##氖 -##気 -##氙 -##氛 -##氟 -##氡 -##氢 -##氣 -##氤 -##氦 -##氧 -##氨 -##氪 -##氫 -##氮 -##氯 -##氰 -##氲 -##水 -##氷 -##永 -##氹 -##氾 -##汀 -##汁 -##求 -##汆 -##汇 -##汉 -##汎 -##汐 -##汕 -##汗 -##汙 -##汛 -##汝 -##汞 -##江 -##池 -##污 -##汤 -##汨 -##汩 -##汪 -##汰 -##汲 -##汴 -##汶 -##汹 -##決 -##汽 -##汾 -##沁 -##沂 -##沃 -##沅 -##沈 -##沉 -##沌 -##沏 -##沐 -##沒 -##沓 -##沖 -##沙 -##沛 -##沟 -##没 -##沢 -##沣 -##沥 -##沦 -##沧 -##沪 -##沫 -##沭 -##沮 -##沱 -##河 -##沸 -##油 -##治 -##沼 -##沽 -##沾 -##沿 -##況 -##泄 -##泉 -##泊 -##泌 -##泓 -##法 -##泗 -##泛 -##泞 -##泠 -##泡 -##波 -##泣 -##泥 -##注 -##泪 -##泫 -##泮 -##泯 -##泰 -##泱 -##泳 -##泵 -##泷 -##泸 -##泻 -##泼 -##泽 -##泾 -##洁 -##洄 -##洋 -##洒 -##洗 -##洙 -##洛 -##洞 -##津 -##洩 -##洪 -##洮 -##洱 -##洲 -##洵 -##洶 -##洸 -##洹 -##活 -##洼 -##洽 -##派 -##流 -##浃 -##浄 -##浅 -##浆 -##浇 -##浊 -##测 -##济 -##浏 -##浑 -##浒 -##浓 -##浔 -##浙 -##浚 -##浜 -##浣 -##浦 -##浩 -##浪 -##浬 -##浮 -##浯 -##浴 -##海 -##浸 -##涂 -##涅 -##涇 -##消 -##涉 -##涌 -##涎 -##涓 -##涔 -##涕 -##涙 -##涛 -##涝 -##涞 -##涟 -##涠 -##涡 -##涣 -##涤 -##润 -##涧 -##涨 -##涩 -##涪 -##涮 -##涯 -##液 -##涵 -##涸 -##涼 -##涿 -##淀 -##淄 -##淅 -##淆 -##淇 -##淋 -##淌 -##淑 -##淒 -##淖 -##淘 -##淙 -##淚 -##淞 -##淡 -##淤 -##淦 -##淨 -##淩 -##淪 -##淫 -##淬 -##淮 -##深 -##淳 -##淵 -##混 -##淹 -##淺 -##添 -##淼 -##清 -##済 -##渉 -##渊 -##渋 -##渍 -##渎 -##渐 -##渔 -##渗 -##渙 -##渚 -##減 -##渝 -##渠 -##渡 -##渣 -##渤 -##渥 -##渦 -##温 -##測 -##渭 -##港 -##渲 -##渴 -##游 -##渺 -##渾 -##湃 -##湄 -##湊 -##湍 -##湖 -##湘 -##湛 -##湟 -##湧 -##湫 -##湮 -##湯 -##湳 -##湾 -##湿 -##満 -##溃 -##溅 -##溉 -##溏 -##源 -##準 -##溜 -##溝 -##溟 -##溢 -##溥 -##溧 -##溪 -##溫 -##溯 -##溱 -##溴 -##溶 -##溺 -##溼 -##滁 -##滂 -##滄 -##滅 -##滇 -##滋 -##滌 -##滑 -##滓 
-##滔 -##滕 -##滙 -##滚 -##滝 -##滞 -##滟 -##满 -##滢 -##滤 -##滥 -##滦 -##滨 -##滩 -##滬 -##滯 -##滲 -##滴 -##滷 -##滸 -##滾 -##滿 -##漁 -##漂 -##漆 -##漉 -##漏 -##漓 -##演 -##漕 -##漠 -##漢 -##漣 -##漩 -##漪 -##漫 -##漬 -##漯 -##漱 -##漲 -##漳 -##漸 -##漾 -##漿 -##潆 -##潇 -##潋 -##潍 -##潑 -##潔 -##潘 -##潛 -##潜 -##潞 -##潟 -##潢 -##潤 -##潦 -##潧 -##潭 -##潮 -##潰 -##潴 -##潸 -##潺 -##潼 -##澀 -##澄 -##澆 -##澈 -##澍 -##澎 -##澗 -##澜 -##澡 -##澤 -##澧 -##澱 -##澳 -##澹 -##激 -##濁 -##濂 -##濃 -##濑 -##濒 -##濕 -##濘 -##濛 -##濟 -##濠 -##濡 -##濤 -##濫 -##濬 -##濮 -##濯 -##濱 -##濺 -##濾 -##瀅 -##瀆 -##瀉 -##瀋 -##瀏 -##瀑 -##瀕 -##瀘 -##瀚 -##瀛 -##瀝 -##瀞 -##瀟 -##瀧 -##瀨 -##瀬 -##瀰 -##瀾 -##灌 -##灏 -##灑 -##灘 -##灝 -##灞 -##灣 -##火 -##灬 -##灭 -##灯 -##灰 -##灵 -##灶 -##灸 -##灼 -##災 -##灾 -##灿 -##炀 -##炁 -##炅 -##炉 -##炊 -##炎 -##炒 -##炔 -##炕 -##炖 -##炙 -##炜 -##炫 -##炬 -##炭 -##炮 -##炯 -##炳 -##炷 -##炸 -##点 -##為 -##炼 -##炽 -##烁 -##烂 -##烃 -##烈 -##烊 -##烏 -##烘 -##烙 -##烛 -##烟 -##烤 -##烦 -##烧 -##烨 -##烩 -##烫 -##烬 -##热 -##烯 -##烷 -##烹 -##烽 -##焉 -##焊 -##焕 -##焖 -##焗 -##焘 -##焙 -##焚 -##焜 -##無 -##焦 -##焯 -##焰 -##焱 -##然 -##焼 -##煅 -##煉 -##煊 -##煌 -##煎 -##煒 -##煖 -##煙 -##煜 -##煞 -##煤 -##煥 -##煦 -##照 -##煨 -##煩 -##煮 -##煲 -##煸 -##煽 -##熄 -##熊 -##熏 -##熒 -##熔 -##熙 -##熟 -##熠 -##熨 -##熬 -##熱 -##熵 -##熹 -##熾 -##燁 -##燃 -##燄 -##燈 -##燉 -##燊 -##燎 -##燒 -##燔 -##燕 -##燙 -##燜 -##營 -##燥 -##燦 -##燧 -##燭 -##燮 -##燴 -##燻 -##燼 -##燿 -##爆 -##爍 -##爐 -##爛 -##爪 -##爬 -##爭 -##爰 -##爱 -##爲 -##爵 -##父 -##爷 -##爸 -##爹 -##爺 -##爻 -##爽 -##爾 -##牆 -##片 -##版 -##牌 -##牍 -##牒 -##牙 -##牛 -##牝 -##牟 -##牠 -##牡 -##牢 -##牦 -##牧 -##物 -##牯 -##牲 -##牴 -##牵 -##特 -##牺 -##牽 -##犀 -##犁 -##犄 -##犊 -##犍 -##犒 -##犢 -##犧 -##犬 -##犯 -##状 -##犷 -##犸 -##犹 -##狀 -##狂 -##狄 -##狈 -##狎 -##狐 -##狒 -##狗 -##狙 -##狞 -##狠 -##狡 -##狩 -##独 -##狭 -##狮 -##狰 -##狱 -##狸 -##狹 -##狼 -##狽 -##猎 -##猕 -##猖 -##猗 -##猙 -##猛 -##猜 -##猝 -##猥 -##猩 -##猪 -##猫 -##猬 -##献 -##猴 -##猶 -##猷 -##猾 -##猿 -##獄 -##獅 -##獎 -##獐 -##獒 -##獗 -##獠 -##獣 -##獨 -##獭 -##獰 -##獲 -##獵 -##獷 -##獸 -##獺 -##獻 -##獼 -##獾 -##玄 -##率 -##玉 -##王 -##玑 -##玖 -##玛 -##玟 -##玠 -##玥 -##玩 -##玫 -##玮 -##环 -##现 -##玲 -##玳 -##玷 -##玺 -##玻 -##珀 -##珂 -##珅 -##珈 -##珉 -##珊 -##珍 -##珏 -##珐 -##珑 -##珙 -##珞 -##珠 -##珣 -##珥 -##珩 -##珪 -##班 -##珮 -##珲 -##珺 -##現 -##球 -##琅 -##理 -##琇 -##琉 -##琊 -##琍 -##琏 -##琐 -##琛 -##琢 -##琥 -##琦 -##琨 -##琪 -##琬 -##琮 -##琰 -##琲 -##琳 -##琴 -##琵 -##琶 -##琺 -##琼 -##瑀 -##瑁 -##瑄 -##瑋 -##瑕 -##瑗 -##瑙 -##瑚 -##瑛 -##瑜 -##瑞 -##瑟 -##瑠 -##瑣 -##瑤 -##瑩 -##瑪 -##瑯 -##瑰 -##瑶 -##瑾 -##璀 -##璁 -##璃 -##璇 -##璉 -##璋 -##璎 -##璐 -##璜 -##璞 -##璟 -##璧 -##璨 -##環 -##璽 -##璿 -##瓊 -##瓏 -##瓒 -##瓜 -##瓢 -##瓣 -##瓤 -##瓦 -##瓮 -##瓯 -##瓴 -##瓶 -##瓷 -##甄 -##甌 -##甕 -##甘 -##甙 -##甚 -##甜 -##生 -##產 -##産 -##甥 -##甦 -##用 -##甩 -##甫 -##甬 -##甭 -##甯 -##田 -##由 -##甲 -##申 -##电 -##男 -##甸 -##町 -##画 -##甾 -##畀 -##畅 -##界 -##畏 -##畑 -##畔 -##留 -##畜 -##畝 -##畢 -##略 -##畦 -##番 -##畫 -##異 -##畲 -##畳 -##畴 -##當 -##畸 -##畹 -##畿 -##疆 -##疇 -##疊 -##疏 -##疑 -##疔 -##疖 -##疗 -##疙 -##疚 -##疝 -##疟 -##疡 -##疣 -##疤 -##疥 -##疫 -##疮 -##疯 -##疱 -##疲 -##疳 -##疵 -##疸 -##疹 -##疼 -##疽 -##疾 -##痂 -##病 -##症 -##痈 -##痉 -##痊 -##痍 -##痒 -##痔 -##痕 -##痘 -##痙 -##痛 -##痞 -##痠 -##痢 -##痣 -##痤 -##痧 -##痨 -##痪 -##痫 -##痰 -##痱 -##痴 -##痹 -##痺 -##痼 -##痿 -##瘀 -##瘁 -##瘋 -##瘍 -##瘓 -##瘘 -##瘙 -##瘟 -##瘠 -##瘡 -##瘢 -##瘤 -##瘦 -##瘧 -##瘩 -##瘪 -##瘫 -##瘴 -##瘸 -##瘾 -##療 -##癇 -##癌 -##癒 -##癖 -##癜 -##癞 -##癡 -##癢 -##癣 -##癥 -##癫 -##癬 -##癮 -##癱 -##癲 -##癸 -##発 -##登 -##發 -##白 -##百 -##皂 -##的 -##皆 -##皇 -##皈 -##皋 -##皎 -##皑 -##皓 -##皖 -##皙 -##皚 -##皮 -##皰 -##皱 -##皴 -##皺 -##皿 -##盂 -##盃 -##盅 -##盆 -##盈 -##益 -##盎 -##盏 -##盐 -##监 -##盒 -##盔 -##盖 -##盗 -##盘 -##盛 -##盜 -##盞 -##盟 -##盡 -##監 -##盤 -##盥 -##盧 -##盪 -##目 -##盯 -##盱 -##盲 -##直 -##相 -##盹 -##盼 -##盾 -##省 -##眈 -##眉 -##看 -##県 -##眙 -##眞 -##真 -##眠 -##眦 -##眨 -##眩 -##眯 -##眶 -##眷 -##眸 -##眺 -##眼 -##眾 -##着 -##睁 -##睇 
-##睏 -##睐 -##睑 -##睛 -##睜 -##睞 -##睡 -##睢 -##督 -##睥 -##睦 -##睨 -##睪 -##睫 -##睬 -##睹 -##睽 -##睾 -##睿 -##瞄 -##瞅 -##瞇 -##瞋 -##瞌 -##瞎 -##瞑 -##瞒 -##瞓 -##瞞 -##瞟 -##瞠 -##瞥 -##瞧 -##瞩 -##瞪 -##瞬 -##瞭 -##瞰 -##瞳 -##瞻 -##瞼 -##瞿 -##矇 -##矍 -##矗 -##矚 -##矛 -##矜 -##矢 -##矣 -##知 -##矩 -##矫 -##短 -##矮 -##矯 -##石 -##矶 -##矽 -##矾 -##矿 -##码 -##砂 -##砌 -##砍 -##砒 -##研 -##砖 -##砗 -##砚 -##砝 -##砣 -##砥 -##砧 -##砭 -##砰 -##砲 -##破 -##砷 -##砸 -##砺 -##砼 -##砾 -##础 -##硅 -##硐 -##硒 -##硕 -##硝 -##硫 -##硬 -##确 -##硯 -##硼 -##碁 -##碇 -##碉 -##碌 -##碍 -##碎 -##碑 -##碓 -##碗 -##碘 -##碚 -##碛 -##碟 -##碣 -##碧 -##碩 -##碰 -##碱 -##碳 -##碴 -##確 -##碼 -##碾 -##磁 -##磅 -##磊 -##磋 -##磐 -##磕 -##磚 -##磡 -##磨 -##磬 -##磯 -##磲 -##磷 -##磺 -##礁 -##礎 -##礙 -##礡 -##礦 -##礪 -##礫 -##礴 -##示 -##礼 -##社 -##祀 -##祁 -##祂 -##祇 -##祈 -##祉 -##祎 -##祐 -##祕 -##祖 -##祗 -##祚 -##祛 -##祜 -##祝 -##神 -##祟 -##祠 -##祢 -##祥 -##票 -##祭 -##祯 -##祷 -##祸 -##祺 -##祿 -##禀 -##禁 -##禄 -##禅 -##禍 -##禎 -##福 -##禛 -##禦 -##禧 -##禪 -##禮 -##禱 -##禹 -##禺 -##离 -##禽 -##禾 -##禿 -##秀 -##私 -##秃 -##秆 -##秉 -##秋 -##种 -##科 -##秒 -##秘 -##租 -##秣 -##秤 -##秦 -##秧 -##秩 -##秭 -##积 -##称 -##秸 -##移 -##秽 -##稀 -##稅 -##程 -##稍 -##税 -##稔 -##稗 -##稚 -##稜 -##稞 -##稟 -##稠 -##稣 -##種 -##稱 -##稲 -##稳 -##稷 -##稹 -##稻 -##稼 -##稽 -##稿 -##穀 -##穂 -##穆 -##穌 -##積 -##穎 -##穗 -##穢 -##穩 -##穫 -##穴 -##究 -##穷 -##穹 -##空 -##穿 -##突 -##窃 -##窄 -##窈 -##窍 -##窑 -##窒 -##窓 -##窕 -##窖 -##窗 -##窘 -##窜 -##窝 -##窟 -##窠 -##窥 -##窦 -##窨 -##窩 -##窪 -##窮 -##窯 -##窺 -##窿 -##竄 -##竅 -##竇 -##竊 -##立 -##竖 -##站 -##竜 -##竞 -##竟 -##章 -##竣 -##童 -##竭 -##端 -##競 -##竹 -##竺 -##竽 -##竿 -##笃 -##笆 -##笈 -##笋 -##笏 -##笑 -##笔 -##笙 -##笛 -##笞 -##笠 -##符 -##笨 -##第 -##笹 -##笺 -##笼 -##筆 -##等 -##筊 -##筋 -##筍 -##筏 -##筐 -##筑 -##筒 -##答 -##策 -##筛 -##筝 -##筠 -##筱 -##筲 -##筵 -##筷 -##筹 -##签 -##简 -##箇 -##箋 -##箍 -##箏 -##箐 -##箔 -##箕 -##算 -##箝 -##管 -##箩 -##箫 -##箭 -##箱 -##箴 -##箸 -##節 -##篁 -##範 -##篆 -##篇 -##築 -##篑 -##篓 -##篙 -##篝 -##篠 -##篡 -##篤 -##篩 -##篪 -##篮 -##篱 -##篷 -##簇 -##簌 -##簍 -##簡 -##簦 -##簧 -##簪 -##簫 -##簷 -##簸 -##簽 -##簾 -##簿 -##籁 -##籃 -##籌 -##籍 -##籐 -##籟 -##籠 -##籤 -##籬 -##籮 -##籲 -##米 -##类 -##籼 -##籽 -##粄 -##粉 -##粑 -##粒 -##粕 -##粗 -##粘 -##粟 -##粤 -##粥 -##粧 -##粪 -##粮 -##粱 -##粲 -##粳 -##粵 -##粹 -##粼 -##粽 -##精 -##粿 -##糅 -##糊 -##糍 -##糕 -##糖 -##糗 -##糙 -##糜 -##糞 -##糟 -##糠 -##糧 -##糬 -##糯 -##糰 -##糸 -##系 -##糾 -##紀 -##紂 -##約 -##紅 -##紉 -##紊 -##紋 -##納 -##紐 -##紓 -##純 -##紗 -##紘 -##紙 -##級 -##紛 -##紜 -##素 -##紡 -##索 -##紧 -##紫 -##紮 -##累 -##細 -##紳 -##紹 -##紺 -##終 -##絃 -##組 -##絆 -##経 -##結 -##絕 -##絞 -##絡 -##絢 -##給 -##絨 -##絮 -##統 -##絲 -##絳 -##絵 -##絶 -##絹 -##綁 -##綏 -##綑 -##經 -##継 -##続 -##綜 -##綠 -##綢 -##綦 -##綫 -##綬 -##維 -##綱 -##網 -##綴 -##綵 -##綸 -##綺 -##綻 -##綽 -##綾 -##綿 -##緊 -##緋 -##総 -##緑 -##緒 -##緘 -##線 -##緝 -##緞 -##締 -##緣 -##編 -##緩 -##緬 -##緯 -##練 -##緹 -##緻 -##縁 -##縄 -##縈 -##縛 -##縝 -##縣 -##縫 -##縮 -##縱 -##縴 -##縷 -##總 -##績 -##繁 -##繃 -##繆 -##繇 -##繋 -##織 -##繕 -##繚 -##繞 -##繡 -##繩 -##繪 -##繫 -##繭 -##繳 -##繹 -##繼 -##繽 -##纂 -##續 -##纍 -##纏 -##纓 -##纔 -##纖 -##纜 -##纠 -##红 -##纣 -##纤 -##约 -##级 -##纨 -##纪 -##纫 -##纬 -##纭 -##纯 -##纰 -##纱 -##纲 -##纳 -##纵 -##纶 -##纷 -##纸 -##纹 -##纺 -##纽 -##纾 -##线 -##绀 -##练 -##组 -##绅 -##细 -##织 -##终 -##绊 -##绍 -##绎 -##经 -##绑 -##绒 -##结 -##绔 -##绕 -##绘 -##给 -##绚 -##绛 -##络 -##绝 -##绞 -##统 -##绡 -##绢 -##绣 -##绥 -##绦 -##继 -##绩 -##绪 -##绫 -##续 -##绮 -##绯 -##绰 -##绳 -##维 -##绵 -##绶 -##绷 -##绸 -##绻 -##综 -##绽 -##绾 -##绿 -##缀 -##缄 -##缅 -##缆 -##缇 -##缈 -##缉 -##缎 -##缓 -##缔 -##缕 -##编 -##缘 -##缙 -##缚 -##缜 -##缝 -##缠 -##缢 -##缤 -##缥 -##缨 -##缩 -##缪 -##缭 -##缮 -##缰 -##缱 -##缴 -##缸 -##缺 -##缽 -##罂 -##罄 -##罌 -##罐 -##网 -##罔 -##罕 -##罗 -##罚 -##罡 -##罢 -##罩 -##罪 -##置 -##罰 -##署 -##罵 -##罷 -##罹 -##羁 -##羅 -##羈 -##羊 -##羌 -##美 -##羔 -##羚 -##羞 -##羟 -##羡 -##羣 -##群 -##羥 -##羧 -##羨 -##義 -##羯 -##羲 -##羸 -##羹 -##羽 -##羿 -##翁 -##翅 -##翊 
-##翌 -##翎 -##習 -##翔 -##翘 -##翟 -##翠 -##翡 -##翦 -##翩 -##翰 -##翱 -##翳 -##翹 -##翻 -##翼 -##耀 -##老 -##考 -##耄 -##者 -##耆 -##耋 -##而 -##耍 -##耐 -##耒 -##耕 -##耗 -##耘 -##耙 -##耦 -##耨 -##耳 -##耶 -##耷 -##耸 -##耻 -##耽 -##耿 -##聂 -##聆 -##聊 -##聋 -##职 -##聒 -##联 -##聖 -##聘 -##聚 -##聞 -##聪 -##聯 -##聰 -##聲 -##聳 -##聴 -##聶 -##職 -##聽 -##聾 -##聿 -##肃 -##肄 -##肅 -##肆 -##肇 -##肉 -##肋 -##肌 -##肏 -##肓 -##肖 -##肘 -##肚 -##肛 -##肝 -##肠 -##股 -##肢 -##肤 -##肥 -##肩 -##肪 -##肮 -##肯 -##肱 -##育 -##肴 -##肺 -##肽 -##肾 -##肿 -##胀 -##胁 -##胃 -##胄 -##胆 -##背 -##胍 -##胎 -##胖 -##胚 -##胛 -##胜 -##胝 -##胞 -##胡 -##胤 -##胥 -##胧 -##胫 -##胭 -##胯 -##胰 -##胱 -##胳 -##胴 -##胶 -##胸 -##胺 -##能 -##脂 -##脅 -##脆 -##脇 -##脈 -##脉 -##脊 -##脍 -##脏 -##脐 -##脑 -##脓 -##脖 -##脘 -##脚 -##脛 -##脣 -##脩 -##脫 -##脯 -##脱 -##脲 -##脳 -##脸 -##脹 -##脾 -##腆 -##腈 -##腊 -##腋 -##腌 -##腎 -##腐 -##腑 -##腓 -##腔 -##腕 -##腥 -##腦 -##腩 -##腫 -##腭 -##腮 -##腰 -##腱 -##腳 -##腴 -##腸 -##腹 -##腺 -##腻 -##腼 -##腾 -##腿 -##膀 -##膈 -##膊 -##膏 -##膑 -##膘 -##膚 -##膛 -##膜 -##膝 -##膠 -##膦 -##膨 -##膩 -##膳 -##膺 -##膻 -##膽 -##膾 -##膿 -##臀 -##臂 -##臃 -##臆 -##臉 -##臊 -##臍 -##臓 -##臘 -##臟 -##臣 -##臥 -##臧 -##臨 -##自 -##臬 -##臭 -##至 -##致 -##臺 -##臻 -##臼 -##臾 -##舀 -##舂 -##舅 -##舆 -##與 -##興 -##舉 -##舊 -##舌 -##舍 -##舎 -##舐 -##舒 -##舔 -##舖 -##舗 -##舛 -##舜 -##舞 -##舟 -##航 -##舫 -##般 -##舰 -##舱 -##舵 -##舶 -##舷 -##舸 -##船 -##舺 -##舾 -##艇 -##艋 -##艘 -##艙 -##艦 -##艮 -##良 -##艰 -##艱 -##色 -##艳 -##艷 -##艹 -##艺 -##艾 -##节 -##芃 -##芈 -##芊 -##芋 -##芍 -##芎 -##芒 -##芙 -##芜 -##芝 -##芡 -##芥 -##芦 -##芩 -##芪 -##芫 -##芬 -##芭 -##芮 -##芯 -##花 -##芳 -##芷 -##芸 -##芹 -##芻 -##芽 -##芾 -##苁 -##苄 -##苇 -##苋 -##苍 -##苏 -##苑 -##苒 -##苓 -##苔 -##苕 -##苗 -##苛 -##苜 -##苞 -##苟 -##苡 -##苣 -##若 -##苦 -##苫 -##苯 -##英 -##苷 -##苹 -##苻 -##茁 -##茂 -##范 -##茄 -##茅 -##茉 -##茎 -##茏 -##茗 -##茜 -##茧 -##茨 -##茫 -##茬 -##茭 -##茯 -##茱 -##茲 -##茴 -##茵 -##茶 -##茸 -##茹 -##茼 -##荀 -##荃 -##荆 -##草 -##荊 -##荏 -##荐 -##荒 -##荔 -##荖 -##荘 -##荚 -##荞 -##荟 -##荠 -##荡 -##荣 -##荤 -##荥 -##荧 -##荨 -##荪 -##荫 -##药 -##荳 -##荷 -##荸 -##荻 -##荼 -##荽 -##莅 -##莆 -##莉 -##莊 -##莎 -##莒 -##莓 -##莖 -##莘 -##莞 -##莠 -##莢 -##莧 -##莪 -##莫 -##莱 -##莲 -##莴 -##获 -##莹 -##莺 -##莽 -##莿 -##菀 -##菁 -##菅 -##菇 -##菈 -##菊 -##菌 -##菏 -##菓 -##菖 -##菘 -##菜 -##菟 -##菠 -##菡 -##菩 -##華 -##菱 -##菲 -##菸 -##菽 -##萁 -##萃 -##萄 -##萊 -##萋 -##萌 -##萍 -##萎 -##萘 -##萝 -##萤 -##营 -##萦 -##萧 -##萨 -##萩 -##萬 -##萱 -##萵 -##萸 -##萼 -##落 -##葆 -##葉 -##著 -##葚 -##葛 -##葡 -##董 -##葦 -##葩 -##葫 -##葬 -##葭 -##葯 -##葱 -##葳 -##葵 -##葷 -##葺 -##蒂 -##蒋 -##蒐 -##蒔 -##蒙 -##蒜 -##蒞 -##蒟 -##蒡 -##蒨 -##蒲 -##蒸 -##蒹 -##蒻 -##蒼 -##蒿 -##蓁 -##蓄 -##蓆 -##蓉 -##蓋 -##蓑 -##蓓 -##蓖 -##蓝 -##蓟 -##蓦 -##蓬 -##蓮 -##蓼 -##蓿 -##蔑 -##蔓 -##蔔 -##蔗 -##蔘 -##蔚 -##蔡 -##蔣 -##蔥 -##蔫 -##蔬 -##蔭 -##蔵 -##蔷 -##蔺 -##蔻 -##蔼 -##蔽 -##蕁 -##蕃 -##蕈 -##蕉 -##蕊 -##蕎 -##蕙 -##蕤 -##蕨 -##蕩 -##蕪 -##蕭 -##蕲 -##蕴 -##蕻 -##蕾 -##薄 -##薅 -##薇 -##薈 -##薊 -##薏 -##薑 -##薔 -##薙 -##薛 -##薦 -##薨 -##薩 -##薪 -##薬 -##薯 -##薰 -##薹 -##藉 -##藍 -##藏 -##藐 -##藓 -##藕 -##藜 -##藝 -##藤 -##藥 -##藩 -##藹 -##藻 -##藿 -##蘆 -##蘇 -##蘊 -##蘋 -##蘑 -##蘚 -##蘭 -##蘸 -##蘼 -##蘿 -##虎 -##虏 -##虐 -##虑 -##虔 -##處 -##虚 -##虛 -##虜 -##虞 -##號 -##虢 -##虧 -##虫 -##虬 -##虱 -##虹 -##虻 -##虽 -##虾 -##蚀 -##蚁 -##蚂 -##蚊 -##蚌 -##蚓 -##蚕 -##蚜 -##蚝 -##蚣 -##蚤 -##蚩 -##蚪 -##蚯 -##蚱 -##蚵 -##蛀 -##蛆 -##蛇 -##蛊 -##蛋 -##蛎 -##蛐 -##蛔 -##蛙 -##蛛 -##蛟 -##蛤 -##蛭 -##蛮 -##蛰 -##蛳 -##蛹 -##蛻 -##蛾 -##蜀 -##蜂 -##蜃 -##蜆 -##蜇 -##蜈 -##蜊 -##蜍 -##蜒 -##蜓 -##蜕 -##蜗 -##蜘 -##蜚 -##蜜 -##蜡 -##蜢 -##蜥 -##蜱 -##蜴 -##蜷 -##蜻 -##蜿 -##蝇 -##蝈 -##蝉 -##蝌 -##蝎 -##蝕 -##蝗 -##蝙 -##蝟 -##蝠 -##蝦 -##蝨 -##蝴 -##蝶 -##蝸 -##蝼 -##螂 -##螃 -##融 -##螞 -##螢 -##螨 -##螯 -##螳 -##螺 -##蟀 -##蟄 -##蟆 -##蟋 -##蟎 -##蟑 -##蟒 -##蟠 -##蟬 -##蟲 -##蟹 -##蟻 -##蟾 -##蠅 -##蠍 -##蠔 -##蠕 -##蠛 -##蠟 -##蠡 -##蠢 -##蠣 -##蠱 -##蠶 -##蠹 -##蠻 -##血 -##衄 -##衅 -##衆 -##行 -##衍 -##術 -##衔 -##街 -##衙 -##衛 -##衝 -##衞 -##衡 -##衢 -##衣 
-##补 -##表 -##衩 -##衫 -##衬 -##衮 -##衰 -##衲 -##衷 -##衹 -##衾 -##衿 -##袁 -##袂 -##袄 -##袅 -##袈 -##袋 -##袍 -##袒 -##袖 -##袜 -##袞 -##袤 -##袪 -##被 -##袭 -##袱 -##裁 -##裂 -##装 -##裆 -##裊 -##裏 -##裔 -##裕 -##裘 -##裙 -##補 -##裝 -##裟 -##裡 -##裤 -##裨 -##裱 -##裳 -##裴 -##裸 -##裹 -##製 -##裾 -##褂 -##複 -##褐 -##褒 -##褓 -##褔 -##褚 -##褥 -##褪 -##褫 -##褲 -##褶 -##褻 -##襁 -##襄 -##襟 -##襠 -##襪 -##襬 -##襯 -##襲 -##西 -##要 -##覃 -##覆 -##覇 -##見 -##規 -##覓 -##視 -##覚 -##覦 -##覧 -##親 -##覬 -##観 -##覷 -##覺 -##覽 -##觀 -##见 -##观 -##规 -##觅 -##视 -##览 -##觉 -##觊 -##觎 -##觐 -##觑 -##角 -##觞 -##解 -##觥 -##触 -##觸 -##言 -##訂 -##計 -##訊 -##討 -##訓 -##訕 -##訖 -##託 -##記 -##訛 -##訝 -##訟 -##訣 -##訥 -##訪 -##設 -##許 -##訳 -##訴 -##訶 -##診 -##註 -##証 -##詆 -##詐 -##詔 -##評 -##詛 -##詞 -##詠 -##詡 -##詢 -##詣 -##試 -##詩 -##詫 -##詬 -##詭 -##詮 -##詰 -##話 -##該 -##詳 -##詹 -##詼 -##誅 -##誇 -##誉 -##誌 -##認 -##誓 -##誕 -##誘 -##語 -##誠 -##誡 -##誣 -##誤 -##誥 -##誦 -##誨 -##說 -##説 -##読 -##誰 -##課 -##誹 -##誼 -##調 -##諄 -##談 -##請 -##諏 -##諒 -##論 -##諗 -##諜 -##諡 -##諦 -##諧 -##諫 -##諭 -##諮 -##諱 -##諳 -##諷 -##諸 -##諺 -##諾 -##謀 -##謁 -##謂 -##謄 -##謊 -##謎 -##謐 -##謔 -##謗 -##謙 -##講 -##謝 -##謠 -##謨 -##謬 -##謹 -##謾 -##譁 -##證 -##譎 -##譏 -##識 -##譙 -##譚 -##譜 -##警 -##譬 -##譯 -##議 -##譲 -##譴 -##護 -##譽 -##讀 -##變 -##讓 -##讚 -##讞 -##计 -##订 -##认 -##讥 -##讧 -##讨 -##让 -##讪 -##讫 -##训 -##议 -##讯 -##记 -##讲 -##讳 -##讴 -##讶 -##讷 -##许 -##讹 -##论 -##讼 -##讽 -##设 -##访 -##诀 -##证 -##诃 -##评 -##诅 -##识 -##诈 -##诉 -##诊 -##诋 -##词 -##诏 -##译 -##试 -##诗 -##诘 -##诙 -##诚 -##诛 -##话 -##诞 -##诟 -##诠 -##诡 -##询 -##诣 -##诤 -##该 -##详 -##诧 -##诩 -##诫 -##诬 -##语 -##误 -##诰 -##诱 -##诲 -##说 -##诵 -##诶 -##请 -##诸 -##诺 -##读 -##诽 -##课 -##诿 -##谀 -##谁 -##调 -##谄 -##谅 -##谆 -##谈 -##谊 -##谋 -##谌 -##谍 -##谎 -##谏 -##谐 -##谑 -##谒 -##谓 -##谔 -##谕 -##谗 -##谘 -##谙 -##谚 -##谛 -##谜 -##谟 -##谢 -##谣 -##谤 -##谥 -##谦 -##谧 -##谨 -##谩 -##谪 -##谬 -##谭 -##谯 -##谱 -##谲 -##谴 -##谶 -##谷 -##豁 -##豆 -##豇 -##豈 -##豉 -##豊 -##豌 -##豎 -##豐 -##豔 -##豚 -##象 -##豢 -##豪 -##豫 -##豬 -##豹 -##豺 -##貂 -##貅 -##貌 -##貓 -##貔 -##貘 -##貝 -##貞 -##負 -##財 -##貢 -##貧 -##貨 -##販 -##貪 -##貫 -##責 -##貯 -##貰 -##貳 -##貴 -##貶 -##買 -##貸 -##費 -##貼 -##貽 -##貿 -##賀 -##賁 -##賂 -##賃 -##賄 -##資 -##賈 -##賊 -##賑 -##賓 -##賜 -##賞 -##賠 -##賡 -##賢 -##賣 -##賤 -##賦 -##質 -##賬 -##賭 -##賴 -##賺 -##購 -##賽 -##贅 -##贈 -##贊 -##贍 -##贏 -##贓 -##贖 -##贛 -##贝 -##贞 -##负 -##贡 -##财 -##责 -##贤 -##败 -##账 -##货 -##质 -##贩 -##贪 -##贫 -##贬 -##购 -##贮 -##贯 -##贰 -##贱 -##贲 -##贴 -##贵 -##贷 -##贸 -##费 -##贺 -##贻 -##贼 -##贾 -##贿 -##赁 -##赂 -##赃 -##资 -##赅 -##赈 -##赊 -##赋 -##赌 -##赎 -##赏 -##赐 -##赓 -##赔 -##赖 -##赘 -##赚 -##赛 -##赝 -##赞 -##赠 -##赡 -##赢 -##赣 -##赤 -##赦 -##赧 -##赫 -##赭 -##走 -##赳 -##赴 -##赵 -##赶 -##起 -##趁 -##超 -##越 -##趋 -##趕 -##趙 -##趟 -##趣 -##趨 -##足 -##趴 -##趵 -##趸 -##趺 -##趾 -##跃 -##跄 -##跆 -##跋 -##跌 -##跎 -##跑 -##跖 -##跚 -##跛 -##距 -##跟 -##跡 -##跤 -##跨 -##跩 -##跪 -##路 -##跳 -##践 -##跷 -##跹 -##跺 -##跻 -##踉 -##踊 -##踌 -##踏 -##踐 -##踝 -##踞 -##踟 -##踢 -##踩 -##踪 -##踮 -##踱 -##踴 -##踵 -##踹 -##蹂 -##蹄 -##蹇 -##蹈 -##蹉 -##蹊 -##蹋 -##蹑 -##蹒 -##蹙 -##蹟 -##蹣 -##蹤 -##蹦 -##蹩 -##蹬 -##蹭 -##蹲 -##蹴 -##蹶 -##蹺 -##蹼 -##蹿 -##躁 -##躇 -##躉 -##躊 -##躋 -##躍 -##躏 -##躪 -##身 -##躬 -##躯 -##躲 -##躺 -##軀 -##車 -##軋 -##軌 -##軍 -##軒 -##軟 -##転 -##軸 -##軼 -##軽 -##軾 -##較 -##載 -##輒 -##輓 -##輔 -##輕 -##輛 -##輝 -##輟 -##輩 -##輪 -##輯 -##輸 -##輻 -##輾 -##輿 -##轄 -##轅 -##轆 -##轉 -##轍 -##轎 -##轟 -##车 -##轧 -##轨 -##轩 -##转 -##轭 -##轮 -##软 -##轰 -##轲 -##轴 -##轶 -##轻 -##轼 -##载 -##轿 -##较 -##辄 -##辅 -##辆 -##辇 -##辈 -##辉 -##辊 -##辍 -##辐 -##辑 -##输 -##辕 -##辖 -##辗 -##辘 -##辙 -##辛 -##辜 -##辞 -##辟 -##辣 -##辦 -##辨 -##辩 -##辫 -##辭 -##辮 -##辯 -##辰 -##辱 -##農 -##边 -##辺 -##辻 -##込 -##辽 -##达 -##迁 -##迂 -##迄 -##迅 -##过 -##迈 -##迎 -##运 -##近 -##返 -##还 -##这 -##进 -##远 -##违 -##连 -##迟 -##迢 -##迤 -##迥 -##迦 -##迩 -##迪 -##迫 -##迭 -##述 -##迴 -##迷 -##迸 -##迹 -##迺 -##追 -##退 -##送 
-##适 -##逃 -##逅 -##逆 -##选 -##逊 -##逍 -##透 -##逐 -##递 -##途 -##逕 -##逗 -##這 -##通 -##逛 -##逝 -##逞 -##速 -##造 -##逢 -##連 -##逮 -##週 -##進 -##逵 -##逶 -##逸 -##逻 -##逼 -##逾 -##遁 -##遂 -##遅 -##遇 -##遊 -##運 -##遍 -##過 -##遏 -##遐 -##遑 -##遒 -##道 -##達 -##違 -##遗 -##遙 -##遛 -##遜 -##遞 -##遠 -##遢 -##遣 -##遥 -##遨 -##適 -##遭 -##遮 -##遲 -##遴 -##遵 -##遶 -##遷 -##選 -##遺 -##遼 -##遽 -##避 -##邀 -##邁 -##邂 -##邃 -##還 -##邇 -##邈 -##邊 -##邋 -##邏 -##邑 -##邓 -##邕 -##邛 -##邝 -##邢 -##那 -##邦 -##邨 -##邪 -##邬 -##邮 -##邯 -##邰 -##邱 -##邳 -##邵 -##邸 -##邹 -##邺 -##邻 -##郁 -##郅 -##郊 -##郎 -##郑 -##郜 -##郝 -##郡 -##郢 -##郤 -##郦 -##郧 -##部 -##郫 -##郭 -##郴 -##郵 -##郷 -##郸 -##都 -##鄂 -##鄉 -##鄒 -##鄔 -##鄙 -##鄞 -##鄢 -##鄧 -##鄭 -##鄰 -##鄱 -##鄲 -##鄺 -##酉 -##酊 -##酋 -##酌 -##配 -##酐 -##酒 -##酗 -##酚 -##酝 -##酢 -##酣 -##酥 -##酩 -##酪 -##酬 -##酮 -##酯 -##酰 -##酱 -##酵 -##酶 -##酷 -##酸 -##酿 -##醃 -##醇 -##醉 -##醋 -##醍 -##醐 -##醒 -##醚 -##醛 -##醜 -##醞 -##醣 -##醪 -##醫 -##醬 -##醮 -##醯 -##醴 -##醺 -##釀 -##釁 -##采 -##釉 -##释 -##釋 -##里 -##重 -##野 -##量 -##釐 -##金 -##釗 -##釘 -##釜 -##針 -##釣 -##釦 -##釧 -##釵 -##鈀 -##鈉 -##鈍 -##鈎 -##鈔 -##鈕 -##鈞 -##鈣 -##鈦 -##鈪 -##鈴 -##鈺 -##鈾 -##鉀 -##鉄 -##鉅 -##鉉 -##鉑 -##鉗 -##鉚 -##鉛 -##鉤 -##鉴 -##鉻 -##銀 -##銃 -##銅 -##銑 -##銓 -##銖 -##銘 -##銜 -##銬 -##銭 -##銮 -##銳 -##銷 -##銹 -##鋁 -##鋅 -##鋒 -##鋤 -##鋪 -##鋰 -##鋸 -##鋼 -##錄 -##錐 -##錘 -##錚 -##錠 -##錢 -##錦 -##錨 -##錫 -##錮 -##錯 -##録 -##錳 -##錶 -##鍊 -##鍋 -##鍍 -##鍛 -##鍥 -##鍰 -##鍵 -##鍺 -##鍾 -##鎂 -##鎊 -##鎌 -##鎏 -##鎔 -##鎖 -##鎗 -##鎚 -##鎧 -##鎬 -##鎮 -##鎳 -##鏈 -##鏖 -##鏗 -##鏘 -##鏞 -##鏟 -##鏡 -##鏢 -##鏤 -##鏽 -##鐘 -##鐮 -##鐲 -##鐳 -##鐵 -##鐸 -##鐺 -##鑄 -##鑊 -##鑑 -##鑒 -##鑣 -##鑫 -##鑰 -##鑲 -##鑼 -##鑽 -##鑾 -##鑿 -##针 -##钉 -##钊 -##钎 -##钏 -##钒 -##钓 -##钗 -##钙 -##钛 -##钜 -##钝 -##钞 -##钟 -##钠 -##钡 -##钢 -##钣 -##钤 -##钥 -##钦 -##钧 -##钨 -##钩 -##钮 -##钯 -##钰 -##钱 -##钳 -##钴 -##钵 -##钺 -##钻 -##钼 -##钾 -##钿 -##铀 -##铁 -##铂 -##铃 -##铄 -##铅 -##铆 -##铉 -##铎 -##铐 -##铛 -##铜 -##铝 -##铠 -##铡 -##铢 -##铣 -##铤 -##铨 -##铩 -##铬 -##铭 -##铮 -##铰 -##铲 -##铵 -##银 -##铸 -##铺 -##链 -##铿 -##销 -##锁 -##锂 -##锄 -##锅 -##锆 -##锈 -##锉 -##锋 -##锌 -##锏 -##锐 -##锑 -##错 -##锚 -##锟 -##锡 -##锢 -##锣 -##锤 -##锥 -##锦 -##锭 -##键 -##锯 -##锰 -##锲 -##锵 -##锹 -##锺 -##锻 -##镀 -##镁 -##镂 -##镇 -##镉 -##镌 -##镍 -##镐 -##镑 -##镕 -##镖 -##镗 -##镛 -##镜 -##镣 -##镭 -##镯 -##镰 -##镳 -##镶 -##長 -##长 -##門 -##閃 -##閉 -##開 -##閎 -##閏 -##閑 -##閒 -##間 -##閔 -##閘 -##閡 -##関 -##閣 -##閥 -##閨 -##閩 -##閱 -##閲 -##閹 -##閻 -##閾 -##闆 -##闇 -##闊 -##闌 -##闍 -##闔 -##闕 -##闖 -##闘 -##關 -##闡 -##闢 -##门 -##闪 -##闫 -##闭 -##问 -##闯 -##闰 -##闲 -##间 -##闵 -##闷 -##闸 -##闹 -##闺 -##闻 -##闽 -##闾 -##阀 -##阁 -##阂 -##阅 -##阆 -##阇 -##阈 -##阉 -##阎 -##阐 -##阑 -##阔 -##阕 -##阖 -##阙 -##阚 -##阜 -##队 -##阡 -##阪 -##阮 -##阱 -##防 -##阳 -##阴 -##阵 -##阶 -##阻 -##阿 -##陀 -##陂 -##附 -##际 -##陆 -##陇 -##陈 -##陋 -##陌 -##降 -##限 -##陕 -##陛 -##陝 -##陞 -##陟 -##陡 -##院 -##陣 -##除 -##陨 -##险 -##陪 -##陰 -##陲 -##陳 -##陵 -##陶 -##陷 -##陸 -##険 -##陽 -##隅 -##隆 -##隈 -##隊 -##隋 -##隍 -##階 -##随 -##隐 -##隔 -##隕 -##隘 -##隙 -##際 -##障 -##隠 -##隣 -##隧 -##隨 -##險 -##隱 -##隴 -##隶 -##隸 -##隻 -##隼 -##隽 -##难 -##雀 -##雁 -##雄 -##雅 -##集 -##雇 -##雉 -##雋 -##雌 -##雍 -##雎 -##雏 -##雑 -##雒 -##雕 -##雖 -##雙 -##雛 -##雜 -##雞 -##離 -##難 -##雨 -##雪 -##雯 -##雰 -##雲 -##雳 -##零 -##雷 -##雹 -##電 -##雾 -##需 -##霁 -##霄 -##霆 -##震 -##霈 -##霉 -##霊 -##霍 -##霎 -##霏 -##霑 -##霓 -##霖 -##霜 -##霞 -##霧 -##霭 -##霰 -##露 -##霸 -##霹 -##霽 -##霾 -##靂 -##靄 -##靈 -##青 -##靓 -##靖 -##静 -##靚 -##靛 -##靜 -##非 -##靠 -##靡 -##面 -##靥 -##靦 -##革 -##靳 -##靴 -##靶 -##靼 -##鞅 -##鞋 -##鞍 -##鞏 -##鞑 -##鞘 -##鞠 -##鞣 -##鞦 -##鞭 -##韆 -##韋 -##韌 -##韓 -##韜 -##韦 -##韧 -##韩 -##韬 -##韭 -##音 -##韵 -##韶 -##韻 -##響 -##頁 -##頂 -##頃 -##項 -##順 -##須 -##頌 -##預 -##頑 -##頒 -##頓 -##頗 -##領 -##頜 -##頡 -##頤 -##頫 -##頭 -##頰 -##頷 -##頸 -##頹 -##頻 -##頼 -##顆 -##題 -##額 -##顎 -##顏 -##顔 -##願 -##顛 -##類 -##顧 -##顫 -##顯 -##顱 -##顴 -##页 -##顶 -##顷 
-##项 -##顺 -##须 -##顼 -##顽 -##顾 -##顿 -##颁 -##颂 -##预 -##颅 -##领 -##颇 -##颈 -##颉 -##颊 -##颌 -##颍 -##颐 -##频 -##颓 -##颔 -##颖 -##颗 -##题 -##颚 -##颛 -##颜 -##额 -##颞 -##颠 -##颡 -##颢 -##颤 -##颦 -##颧 -##風 -##颯 -##颱 -##颳 -##颶 -##颼 -##飄 -##飆 -##风 -##飒 -##飓 -##飕 -##飘 -##飙 -##飚 -##飛 -##飞 -##食 -##飢 -##飨 -##飩 -##飪 -##飯 -##飲 -##飼 -##飽 -##飾 -##餃 -##餅 -##餉 -##養 -##餌 -##餐 -##餒 -##餓 -##餘 -##餚 -##餛 -##餞 -##餡 -##館 -##餮 -##餵 -##餾 -##饅 -##饈 -##饋 -##饌 -##饍 -##饑 -##饒 -##饕 -##饗 -##饞 -##饥 -##饨 -##饪 -##饬 -##饭 -##饮 -##饯 -##饰 -##饱 -##饲 -##饴 -##饵 -##饶 -##饷 -##饺 -##饼 -##饽 -##饿 -##馀 -##馁 -##馄 -##馅 -##馆 -##馈 -##馋 -##馍 -##馏 -##馒 -##馔 -##首 -##馗 -##香 -##馥 -##馨 -##馬 -##馭 -##馮 -##馳 -##馴 -##駁 -##駄 -##駅 -##駆 -##駐 -##駒 -##駕 -##駛 -##駝 -##駭 -##駱 -##駿 -##騁 -##騎 -##騏 -##験 -##騙 -##騨 -##騰 -##騷 -##驀 -##驅 -##驊 -##驍 -##驒 -##驕 -##驗 -##驚 -##驛 -##驟 -##驢 -##驥 -##马 -##驭 -##驮 -##驯 -##驰 -##驱 -##驳 -##驴 -##驶 -##驷 -##驸 -##驹 -##驻 -##驼 -##驾 -##驿 -##骁 -##骂 -##骄 -##骅 -##骆 -##骇 -##骈 -##骊 -##骋 -##验 -##骏 -##骐 -##骑 -##骗 -##骚 -##骛 -##骜 -##骞 -##骠 -##骡 -##骤 -##骥 -##骧 -##骨 -##骯 -##骰 -##骶 -##骷 -##骸 -##骼 -##髂 -##髅 -##髋 -##髏 -##髒 -##髓 -##體 -##髖 -##高 -##髦 -##髪 -##髮 -##髯 -##髻 -##鬃 -##鬆 -##鬍 -##鬓 -##鬚 -##鬟 -##鬢 -##鬣 -##鬥 -##鬧 -##鬱 -##鬼 -##魁 -##魂 -##魄 -##魅 -##魇 -##魍 -##魏 -##魔 -##魘 -##魚 -##魯 -##魷 -##鮑 -##鮨 -##鮪 -##鮭 -##鮮 -##鯉 -##鯊 -##鯖 -##鯛 -##鯨 -##鯰 -##鯽 -##鰍 -##鰓 -##鰭 -##鰲 -##鰻 -##鰾 -##鱈 -##鱉 -##鱔 -##鱗 -##鱷 -##鱸 -##鱼 -##鱿 -##鲁 -##鲈 -##鲍 -##鲑 -##鲛 -##鲜 -##鲟 -##鲢 -##鲤 -##鲨 -##鲫 -##鲱 -##鲲 -##鲶 -##鲷 -##鲸 -##鳃 -##鳄 -##鳅 -##鳌 -##鳍 -##鳕 -##鳖 -##鳗 -##鳝 -##鳞 -##鳥 -##鳩 -##鳳 -##鳴 -##鳶 -##鴉 -##鴕 -##鴛 -##鴦 -##鴨 -##鴻 -##鴿 -##鵑 -##鵜 -##鵝 -##鵡 -##鵬 -##鵰 -##鵲 -##鶘 -##鶩 -##鶯 -##鶴 -##鷗 -##鷲 -##鷹 -##鷺 -##鸚 -##鸞 -##鸟 -##鸠 -##鸡 -##鸢 -##鸣 -##鸥 -##鸦 -##鸨 -##鸪 -##鸭 -##鸯 -##鸳 -##鸵 -##鸽 -##鸾 -##鸿 -##鹂 -##鹃 -##鹄 -##鹅 -##鹈 -##鹉 -##鹊 -##鹌 -##鹏 -##鹑 -##鹕 -##鹘 -##鹜 -##鹞 -##鹤 -##鹦 -##鹧 -##鹫 -##鹭 -##鹰 -##鹳 -##鹵 -##鹹 -##鹼 -##鹽 -##鹿 -##麂 -##麋 -##麒 -##麓 -##麗 -##麝 -##麟 -##麥 -##麦 -##麩 -##麴 -##麵 -##麸 -##麺 -##麻 -##麼 -##麽 -##麾 -##黃 -##黄 -##黍 -##黎 -##黏 -##黑 -##黒 -##黔 -##默 -##黛 -##黜 -##黝 -##點 -##黠 -##黨 -##黯 -##黴 -##鼋 -##鼎 -##鼐 -##鼓 -##鼠 -##鼬 -##鼹 -##鼻 -##鼾 -##齁 -##齊 -##齋 -##齐 -##齒 -##齡 -##齢 -##齣 -##齦 -##齿 -##龄 -##龅 -##龈 -##龊 -##龋 -##龌 -##龍 -##龐 -##龔 -##龕 -##龙 -##龚 -##龛 -##龜 -##龟 -##︰ -##︱ -##︶ -##︿ -##﹁ -##﹂ -##﹍ -##﹏ -##﹐ -##﹑ -##﹒ -##﹔ -##﹕ -##﹖ -##﹗ -##﹙ -##﹚ -##﹝ -##﹞ -##﹡ -##﹣ -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##, -##- -##. -##/ -##: -##; -##< -##? 
-##@ -##[ -##\ -##] -##^ -##_ -##` -##f -##h -##j -##u -##w -##z -##{ -##} -##。 -##「 -##」 -##、 -##・ -##ッ -##ー -##イ -##ク -##シ -##ス -##ト -##ノ -##フ -##ラ -##ル -##ン -##゙ -##゚ -## ̄ -##¥ -##👍 -##🔥 -##😂 -##😎 diff --git a/TensorFlow/built-in/nlp/Albert_ZH_for_TensorFlow/albert_config/vocab.txt b/TensorFlow/built-in/nlp/Albert_ZH_for_TensorFlow/albert_config/vocab.txt deleted file mode 100644 index ca4f9781030019ab9b253c6dcb8c7878b6dc87a5..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/nlp/Albert_ZH_for_TensorFlow/albert_config/vocab.txt +++ /dev/null @@ -1,21128 +0,0 @@ -[PAD] -[unused1] -[unused2] -[unused3] -[unused4] -[unused5] -[unused6] -[unused7] -[unused8] -[unused9] -[unused10] -[unused11] -[unused12] -[unused13] -[unused14] -[unused15] -[unused16] -[unused17] -[unused18] -[unused19] -[unused20] -[unused21] -[unused22] -[unused23] -[unused24] -[unused25] -[unused26] -[unused27] -[unused28] -[unused29] -[unused30] -[unused31] -[unused32] -[unused33] -[unused34] -[unused35] -[unused36] -[unused37] -[unused38] -[unused39] -[unused40] -[unused41] -[unused42] -[unused43] -[unused44] -[unused45] -[unused46] -[unused47] -[unused48] -[unused49] -[unused50] -[unused51] -[unused52] -[unused53] -[unused54] -[unused55] -[unused56] -[unused57] -[unused58] -[unused59] -[unused60] -[unused61] -[unused62] -[unused63] -[unused64] -[unused65] -[unused66] -[unused67] -[unused68] -[unused69] -[unused70] -[unused71] -[unused72] -[unused73] -[unused74] -[unused75] -[unused76] -[unused77] -[unused78] -[unused79] -[unused80] -[unused81] -[unused82] -[unused83] -[unused84] -[unused85] -[unused86] -[unused87] -[unused88] -[unused89] -[unused90] -[unused91] -[unused92] -[unused93] -[unused94] -[unused95] -[unused96] -[unused97] -[unused98] -[unused99] -[UNK] -[CLS] -[SEP] -[MASK] - - -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -: -; -< -= -> -? -@ -[ -\ -] -^ -_ -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -{ -| -} -~ -£ -¤ -¥ -§ -© -« -® -° -± -² -³ -µ -· -¹ -º -» -¼ -× -ß -æ -÷ -ø -đ -ŋ -ɔ -ə -ɡ -ʰ -ˇ -ˈ -ˊ -ˋ -ˍ -ː -˙ -˚ -ˢ -α -β -γ -δ -ε -η -θ -ι -κ -λ -μ -ν -ο -π -ρ -ς -σ -τ -υ -φ -χ -ψ -ω -а -б -в -г -д -е -ж -з -и -к -л -м -н -о -п -р -с -т -у -ф -х -ц -ч -ш -ы -ь -я -і -ا -ب -ة -ت -د -ر -س -ع -ل -م -ن -ه -و -ي -۩ -ก -ง -น -ม -ย -ร -อ -า -เ -๑ -་ -ღ -ᄀ -ᄁ -ᄂ -ᄃ -ᄅ -ᄆ -ᄇ -ᄈ -ᄉ -ᄋ -ᄌ -ᄎ -ᄏ -ᄐ -ᄑ -ᄒ -ᅡ -ᅢ -ᅣ -ᅥ -ᅦ -ᅧ -ᅨ -ᅩ -ᅪ -ᅬ -ᅭ -ᅮ -ᅯ -ᅲ -ᅳ -ᅴ -ᅵ -ᆨ -ᆫ -ᆯ -ᆷ -ᆸ -ᆺ -ᆻ -ᆼ -ᗜ -ᵃ -ᵉ -ᵍ -ᵏ -ᵐ -ᵒ -ᵘ -‖ -„ -† -• -‥ -‧ -
 -‰ -′ -″ -‹ -› -※ -‿ -⁄ -ⁱ -⁺ -ⁿ -₁ -₂ -₃ -₄ -€ -℃ -№ -™ -ⅰ -ⅱ -ⅲ -ⅳ -ⅴ -← -↑ -→ -↓ -↔ -↗ -↘ -⇒ -∀ -− -∕ -∙ -√ -∞ -∟ -∠ -∣ -∥ -∩ -∮ -∶ -∼ -∽ -≈ -≒ -≡ -≤ -≥ -≦ -≧ -≪ -≫ -⊙ -⋅ -⋈ -⋯ -⌒ -① -② -③ -④ -⑤ -⑥ -⑦ -⑧ -⑨ -⑩ -⑴ -⑵ -⑶ -⑷ -⑸ -⒈ -⒉ -⒊ -⒋ -ⓒ -ⓔ -ⓘ -─ -━ -│ -┃ -┅ -┆ -┊ -┌ -└ -├ -┣ -═ -║ -╚ -╞ -╠ -╭ -╮ -╯ -╰ -╱ -╳ -▂ -▃ -▅ -▇ -█ -▉ -▋ -▌ -▍ -▎ -■ -□ -▪ -▫ -▬ -▲ -△ -▶ -► -▼ -▽ -◆ -◇ -○ -◎ -● -◕ -◠ -◢ -◤ -☀ -★ -☆ -☕ -☞ -☺ -☼ -♀ -♂ -♠ -♡ -♣ -♥ -♦ -♪ -♫ -♬ -✈ -✔ -✕ -✖ -✦ -✨ -✪ -✰ -✿ -❀ -❤ -➜ -➤ -⦿ -、 -。 -〃 -々 -〇 -〈 -〉 -《 -》 -「 -」 -『 -』 -【 -】 -〓 -〔 -〕 -〖 -〗 -〜 -〝 -〞 -ぁ -あ -ぃ -い -う -ぇ -え -お -か -き -く -け -こ -さ -し -す -せ -そ -た -ち -っ -つ -て -と -な -に -ぬ -ね -の -は -ひ -ふ -へ -ほ -ま -み -む -め -も -ゃ -や -ゅ -ゆ -ょ -よ -ら -り -る -れ -ろ -わ -を -ん -゜ -ゝ -ァ -ア -ィ -イ -ゥ -ウ -ェ -エ -ォ -オ -カ -キ -ク -ケ -コ -サ -シ -ス -セ -ソ -タ -チ -ッ -ツ -テ -ト -ナ -ニ -ヌ -ネ -ノ -ハ -ヒ -フ -ヘ -ホ -マ -ミ -ム -メ -モ -ャ -ヤ -ュ -ユ -ョ -ヨ -ラ -リ -ル -レ -ロ -ワ -ヲ -ン -ヶ -・ -ー -ヽ -ㄅ -ㄆ -ㄇ -ㄉ -ㄋ -ㄌ -ㄍ -ㄎ -ㄏ -ㄒ -ㄚ -ㄛ -ㄞ -ㄟ -ㄢ -ㄤ -ㄥ -ㄧ -ㄨ -ㆍ -㈦ -㊣ -㎡ -㗎 -一 -丁 -七 -万 -丈 -三 -上 -下 -不 -与 -丐 -丑 -专 -且 -丕 -世 -丘 -丙 -业 -丛 -东 -丝 -丞 -丟 -両 -丢 -两 -严 -並 -丧 -丨 -个 -丫 -中 -丰 -串 -临 -丶 -丸 -丹 -为 -主 -丼 -丽 -举 -丿 -乂 -乃 -久 -么 -义 -之 -乌 -乍 -乎 -乏 -乐 -乒 -乓 -乔 -乖 -乗 -乘 -乙 -乜 -九 -乞 -也 -习 -乡 -书 -乩 -买 -乱 -乳 -乾 -亀 -亂 -了 -予 -争 -事 -二 -于 -亏 -云 -互 -五 -井 -亘 -亙 -亚 -些 -亜 -亞 -亟 -亡 -亢 -交 -亥 -亦 -产 -亨 -亩 -享 -京 -亭 -亮 -亲 -亳 -亵 -人 -亿 -什 -仁 -仃 -仄 -仅 -仆 -仇 -今 -介 -仍 -从 -仏 -仑 -仓 -仔 -仕 -他 -仗 -付 -仙 -仝 -仞 -仟 -代 -令 -以 -仨 -仪 -们 -仮 -仰 -仲 -件 -价 -任 -份 -仿 -企 -伉 -伊 -伍 -伎 -伏 -伐 -休 -伕 -众 -优 -伙 -会 -伝 -伞 -伟 -传 -伢 -伤 -伦 -伪 -伫 -伯 -估 -伴 -伶 -伸 -伺 -似 -伽 -佃 -但 -佇 -佈 -位 -低 -住 -佐 -佑 -体 -佔 -何 -佗 -佘 -余 -佚 -佛 -作 -佝 -佞 -佟 -你 -佢 -佣 -佤 -佥 -佩 -佬 -佯 -佰 -佳 -併 -佶 -佻 -佼 -使 -侃 -侄 -來 -侈 -例 -侍 -侏 -侑 -侖 -侗 -供 -依 -侠 -価 -侣 -侥 -侦 -侧 -侨 -侬 -侮 -侯 -侵 -侶 -侷 -便 -係 -促 -俄 -俊 -俎 -俏 -俐 -俑 -俗 -俘 -俚 -保 -俞 -俟 -俠 -信 -俨 -俩 -俪 -俬 -俭 -修 -俯 -俱 -俳 -俸 -俺 -俾 -倆 -倉 -個 -倌 -倍 -倏 -們 -倒 -倔 -倖 -倘 -候 -倚 -倜 -借 -倡 -値 -倦 -倩 -倪 -倫 -倬 -倭 -倶 -债 -值 -倾 -偃 -假 -偈 -偉 -偌 -偎 -偏 -偕 -做 -停 -健 -側 -偵 -偶 -偷 -偻 -偽 -偿 -傀 -傅 -傍 -傑 -傘 -備 -傚 -傢 -傣 -傥 -储 -傩 -催 -傭 -傲 -傳 -債 -傷 -傻 -傾 -僅 -働 -像 -僑 -僕 -僖 -僚 -僥 -僧 -僭 -僮 -僱 -僵 -價 -僻 -儀 -儂 -億 -儆 -儉 -儋 -儒 -儕 -儘 -償 -儡 -優 -儲 -儷 -儼 -儿 -兀 -允 -元 -兄 -充 -兆 -兇 -先 -光 -克 -兌 -免 -児 -兑 -兒 -兔 -兖 -党 -兜 -兢 -入 -內 -全 -兩 -八 -公 -六 -兮 -兰 -共 -兲 -关 -兴 -兵 -其 -具 -典 -兹 -养 -兼 -兽 -冀 -内 -円 -冇 -冈 -冉 -冊 -册 -再 -冏 -冒 -冕 -冗 -写 -军 -农 -冠 -冢 -冤 -冥 -冨 -冪 -冬 -冯 -冰 -冲 -决 -况 -冶 -冷 -冻 -冼 -冽 -冾 -净 -凄 -准 -凇 -凈 -凉 -凋 -凌 -凍 -减 -凑 -凛 -凜 -凝 -几 -凡 -凤 -処 -凪 -凭 -凯 -凰 -凱 -凳 -凶 -凸 -凹 -出 -击 -函 -凿 -刀 -刁 -刃 -分 -切 -刈 -刊 -刍 -刎 -刑 -划 -列 -刘 -则 -刚 -创 -初 -删 -判 -別 -刨 -利 -刪 -别 -刮 -到 -制 -刷 -券 -刹 -刺 -刻 -刽 -剁 -剂 -剃 -則 -剉 -削 -剋 -剌 -前 -剎 -剐 -剑 -剔 -剖 -剛 -剜 -剝 -剣 -剤 -剥 -剧 -剩 -剪 -副 -割 -創 -剷 -剽 -剿 -劃 -劇 -劈 -劉 -劊 -劍 -劏 -劑 -力 -劝 -办 -功 -加 -务 -劣 -动 -助 -努 -劫 -劭 -励 -劲 -劳 -労 -劵 -効 -劾 -势 -勁 -勃 -勇 -勉 -勋 -勐 -勒 -動 -勖 -勘 -務 -勛 -勝 -勞 -募 -勢 -勤 -勧 -勳 -勵 -勸 -勺 -勻 -勾 -勿 -匀 -包 -匆 -匈 -匍 -匐 -匕 -化 -北 -匙 -匝 -匠 -匡 -匣 -匪 -匮 -匯 -匱 -匹 -区 -医 -匾 -匿 -區 -十 -千 -卅 -升 -午 -卉 -半 -卍 -华 -协 -卑 -卒 -卓 -協 -单 -卖 -南 -単 -博 -卜 -卞 -卟 -占 -卡 -卢 -卤 -卦 -卧 -卫 -卮 -卯 -印 -危 -即 -却 -卵 -卷 -卸 -卻 -卿 -厂 -厄 -厅 -历 -厉 -压 -厌 -厕 -厘 -厚 -厝 -原 -厢 -厥 -厦 -厨 -厩 -厭 -厮 -厲 -厳 -去 -县 -叁 -参 -參 -又 -叉 -及 -友 -双 -反 -収 -发 -叔 -取 -受 -变 -叙 -叛 -叟 -叠 -叡 -叢 -口 -古 -句 -另 -叨 -叩 -只 -叫 -召 -叭 -叮 -可 -台 -叱 -史 -右 -叵 -叶 -号 -司 -叹 -叻 -叼 -叽 -吁 -吃 -各 -吆 -合 -吉 -吊 -吋 -同 -名 -后 -吏 -吐 -向 -吒 -吓 -吕 -吖 -吗 -君 -吝 -吞 -吟 -吠 -吡 -否 -吧 -吨 -吩 -含 -听 -吭 -吮 -启 -吱 -吳 -吴 -吵 -吶 -吸 -吹 -吻 -吼 -吽 -吾 -呀 -呂 -呃 -呆 -呈 -告 -呋 -呎 -呐 -呓 -呕 -呗 -员 -呛 -呜 -呢 -呤 -呦 -周 -呱 -呲 -味 -呵 -呷 -呸 -呻 -呼 -命 -咀 -咁 -咂 -咄 -咆 -咋 -和 -咎 -咏 -咐 -咒 -咔 -咕 -咖 -咗 -咘 -咙 -咚 -咛 -咣 -咤 -咦 -咧 -咨 -咩 -咪 -咫 -咬 -咭 -咯 -咱 -咲 -咳 -咸 -咻 -咽 -咿 -哀 -品 -哂 -哄 -哆 -哇 -哈 -哉 -哋 -哌 -响 -哎 -哏 -哐 -哑 -哒 -哔 -哗 -哟 -員 -哥 -哦 -哧 -哨 -哩 -哪 -哭 -哮 
-哲 -哺 -哼 -哽 -唁 -唄 -唆 -唇 -唉 -唏 -唐 -唑 -唔 -唠 -唤 -唧 -唬 -售 -唯 -唰 -唱 -唳 -唷 -唸 -唾 -啃 -啄 -商 -啉 -啊 -問 -啓 -啕 -啖 -啜 -啞 -啟 -啡 -啤 -啥 -啦 -啧 -啪 -啫 -啬 -啮 -啰 -啱 -啲 -啵 -啶 -啷 -啸 -啻 -啼 -啾 -喀 -喂 -喃 -善 -喆 -喇 -喉 -喊 -喋 -喎 -喏 -喔 -喘 -喙 -喚 -喜 -喝 -喟 -喧 -喪 -喫 -喬 -單 -喰 -喱 -喲 -喳 -喵 -営 -喷 -喹 -喺 -喻 -喽 -嗅 -嗆 -嗇 -嗎 -嗑 -嗒 -嗓 -嗔 -嗖 -嗚 -嗜 -嗝 -嗟 -嗡 -嗣 -嗤 -嗦 -嗨 -嗪 -嗬 -嗯 -嗰 -嗲 -嗳 -嗶 -嗷 -嗽 -嘀 -嘅 -嘆 -嘈 -嘉 -嘌 -嘍 -嘎 -嘔 -嘖 -嘗 -嘘 -嘚 -嘛 -嘜 -嘞 -嘟 -嘢 -嘣 -嘤 -嘧 -嘩 -嘭 -嘮 -嘯 -嘰 -嘱 -嘲 -嘴 -嘶 -嘸 -嘹 -嘻 -嘿 -噁 -噌 -噎 -噓 -噔 -噗 -噙 -噜 -噠 -噢 -噤 -器 -噩 -噪 -噬 -噱 -噴 -噶 -噸 -噹 -噻 -噼 -嚀 -嚇 -嚎 -嚏 -嚐 -嚓 -嚕 -嚟 -嚣 -嚥 -嚨 -嚮 -嚴 -嚷 -嚼 -囂 -囉 -囊 -囍 -囑 -囔 -囗 -囚 -四 -囝 -回 -囟 -因 -囡 -团 -団 -囤 -囧 -囪 -囫 -园 -困 -囱 -囲 -図 -围 -囹 -固 -国 -图 -囿 -圃 -圄 -圆 -圈 -國 -圍 -圏 -園 -圓 -圖 -團 -圜 -土 -圣 -圧 -在 -圩 -圭 -地 -圳 -场 -圻 -圾 -址 -坂 -均 -坊 -坍 -坎 -坏 -坐 -坑 -块 -坚 -坛 -坝 -坞 -坟 -坠 -坡 -坤 -坦 -坨 -坪 -坯 -坳 -坵 -坷 -垂 -垃 -垄 -型 -垒 -垚 -垛 -垠 -垢 -垣 -垦 -垩 -垫 -垭 -垮 -垵 -埂 -埃 -埋 -城 -埔 -埕 -埗 -域 -埠 -埤 -埵 -執 -埸 -培 -基 -埼 -堀 -堂 -堃 -堅 -堆 -堇 -堑 -堕 -堙 -堡 -堤 -堪 -堯 -堰 -報 -場 -堵 -堺 -堿 -塊 -塌 -塑 -塔 -塗 -塘 -塚 -塞 -塢 -塩 -填 -塬 -塭 -塵 -塾 -墀 -境 -墅 -墉 -墊 -墒 -墓 -増 -墘 -墙 -墜 -增 -墟 -墨 -墩 -墮 -墳 -墻 -墾 -壁 -壅 -壆 -壇 -壊 -壑 -壓 -壕 -壘 -壞 -壟 -壢 -壤 -壩 -士 -壬 -壮 -壯 -声 -売 -壳 -壶 -壹 -壺 -壽 -处 -备 -変 -复 -夏 -夔 -夕 -外 -夙 -多 -夜 -够 -夠 -夢 -夥 -大 -天 -太 -夫 -夭 -央 -夯 -失 -头 -夷 -夸 -夹 -夺 -夾 -奂 -奄 -奇 -奈 -奉 -奋 -奎 -奏 -奐 -契 -奔 -奕 -奖 -套 -奘 -奚 -奠 -奢 -奥 -奧 -奪 -奬 -奮 -女 -奴 -奶 -奸 -她 -好 -如 -妃 -妄 -妆 -妇 -妈 -妊 -妍 -妒 -妓 -妖 -妘 -妙 -妝 -妞 -妣 -妤 -妥 -妨 -妩 -妪 -妮 -妲 -妳 -妹 -妻 -妾 -姆 -姉 -姊 -始 -姍 -姐 -姑 -姒 -姓 -委 -姗 -姚 -姜 -姝 -姣 -姥 -姦 -姨 -姪 -姫 -姬 -姹 -姻 -姿 -威 -娃 -娄 -娅 -娆 -娇 -娉 -娑 -娓 -娘 -娛 -娜 -娟 -娠 -娣 -娥 -娩 -娱 -娲 -娴 -娶 -娼 -婀 -婁 -婆 -婉 -婊 -婕 -婚 -婢 -婦 -婧 -婪 -婭 -婴 -婵 -婶 -婷 -婺 -婿 -媒 -媚 -媛 -媞 -媧 -媲 -媳 -媽 -媾 -嫁 -嫂 -嫉 -嫌 -嫑 -嫔 -嫖 -嫘 -嫚 -嫡 -嫣 -嫦 -嫩 -嫲 -嫵 -嫻 -嬅 -嬉 -嬌 -嬗 -嬛 -嬢 -嬤 -嬪 -嬰 -嬴 -嬷 -嬸 -嬿 -孀 -孃 -子 -孑 -孔 -孕 -孖 -字 -存 -孙 -孚 -孛 -孜 -孝 -孟 -孢 -季 -孤 -学 -孩 -孪 -孫 -孬 -孰 -孱 -孳 -孵 -學 -孺 -孽 -孿 -宁 -它 -宅 -宇 -守 -安 -宋 -完 -宏 -宓 -宕 -宗 -官 -宙 -定 -宛 -宜 -宝 -实 -実 -宠 -审 -客 -宣 -室 -宥 -宦 -宪 -宫 -宮 -宰 -害 -宴 -宵 -家 -宸 -容 -宽 -宾 -宿 -寂 -寄 -寅 -密 -寇 -富 -寐 -寒 -寓 -寛 -寝 -寞 -察 -寡 -寢 -寥 -實 -寧 -寨 -審 -寫 -寬 -寮 -寰 -寵 -寶 -寸 -对 -寺 -寻 -导 -対 -寿 -封 -専 -射 -将 -將 -專 -尉 -尊 -尋 -對 -導 -小 -少 -尔 -尕 -尖 -尘 -尚 -尝 -尤 -尧 -尬 -就 -尴 -尷 -尸 -尹 -尺 -尻 -尼 -尽 -尾 -尿 -局 -屁 -层 -屄 -居 -屆 -屈 -屉 -届 -屋 -屌 -屍 -屎 -屏 -屐 -屑 -展 -屜 -属 -屠 -屡 -屢 -層 -履 -屬 -屯 -山 -屹 -屿 -岀 -岁 -岂 -岌 -岐 -岑 -岔 -岖 -岗 -岘 -岙 -岚 -岛 -岡 -岩 -岫 -岬 -岭 -岱 -岳 -岷 -岸 -峇 -峋 -峒 -峙 -峡 -峤 -峥 -峦 -峨 -峪 -峭 -峯 -峰 -峴 -島 -峻 -峽 -崁 -崂 -崆 -崇 -崎 -崑 -崔 -崖 -崗 -崙 -崛 -崧 -崩 -崭 -崴 -崽 -嵇 -嵊 -嵋 -嵌 -嵐 -嵘 -嵩 -嵬 -嵯 -嶂 -嶄 -嶇 -嶋 -嶙 -嶺 -嶼 -嶽 -巅 -巍 -巒 -巔 -巖 -川 -州 -巡 -巢 -工 -左 -巧 -巨 -巩 -巫 -差 -己 -已 -巳 -巴 -巷 -巻 -巽 -巾 -巿 -币 -市 -布 -帅 -帆 -师 -希 -帐 -帑 -帕 -帖 -帘 -帚 -帛 -帜 -帝 -帥 -带 -帧 -師 -席 -帮 -帯 -帰 -帳 -帶 -帷 -常 -帼 -帽 -幀 -幂 -幄 -幅 -幌 -幔 -幕 -幟 -幡 -幢 -幣 -幫 -干 -平 -年 -并 -幸 -幹 -幺 -幻 -幼 -幽 -幾 -广 -庁 -広 -庄 -庆 -庇 -床 -序 -庐 -库 -应 -底 -庖 -店 -庙 -庚 -府 -庞 -废 -庠 -度 -座 -庫 -庭 -庵 -庶 -康 -庸 -庹 -庾 -廁 -廂 -廃 -廈 -廉 -廊 -廓 -廖 -廚 -廝 -廟 -廠 -廢 -廣 -廬 -廳 -延 -廷 -建 -廿 -开 -弁 -异 -弃 -弄 -弈 -弊 -弋 -式 -弑 -弒 -弓 -弔 -引 -弗 -弘 -弛 -弟 -张 -弥 -弦 -弧 -弩 -弭 -弯 -弱 -張 -強 -弹 -强 -弼 -弾 -彅 -彆 -彈 -彌 -彎 -归 -当 -录 -彗 -彙 -彝 -形 -彤 -彥 -彦 -彧 -彩 -彪 -彫 -彬 -彭 -彰 -影 -彷 -役 -彻 -彼 -彿 -往 -征 -径 -待 -徇 -很 -徉 -徊 -律 -後 -徐 -徑 -徒 -従 -徕 -得 -徘 -徙 -徜 -從 -徠 -御 -徨 -復 -循 -徬 -微 -徳 -徴 -徵 -德 -徹 -徼 -徽 -心 -必 -忆 -忌 -忍 -忏 -忐 -忑 -忒 -忖 -志 -忘 -忙 -応 -忠 -忡 -忤 -忧 -忪 -快 -忱 -念 -忻 -忽 -忿 -怀 -态 -怂 -怅 -怆 -怎 -怏 -怒 -怔 -怕 -怖 -怙 -怜 -思 -怠 -怡 -急 -怦 -性 -怨 -怪 -怯 -怵 -总 -怼 -恁 -恃 -恆 -恋 -恍 -恐 -恒 -恕 -恙 -恚 -恢 -恣 -恤 -恥 -恨 -恩 -恪 -恫 -恬 -恭 -息 -恰 -恳 -恵 -恶 -恸 -恺 -恻 -恼 -恿 -悄 -悅 -悉 -悌 -悍 -悔 -悖 -悚 -悟 -悠 -患 -悦 -您 -悩 -悪 -悬 -悯 -悱 -悲 -悴 -悵 -悶 -悸 -悻 -悼 -悽 -情 -惆 -惇 -惊 -惋 -惑 -惕 -惘 -惚 -惜 -惟 -惠 -惡 -惦 -惧 -惨 -惩 -惫 -惬 -惭 -惮 -惯 -惰 -惱 -想 -惴 -惶 -惹 -惺 -愁 -愆 -愈 -愉 -愍 -意 -愕 -愚 -愛 -愜 -感 -愣 -愤 -愧 -愫 -愷 -愿 -慄 -慈 -態 -慌 -慎 -慑 -慕 -慘 -慚 
-慟 -慢 -慣 -慧 -慨 -慫 -慮 -慰 -慳 -慵 -慶 -慷 -慾 -憂 -憊 -憋 -憎 -憐 -憑 -憔 -憚 -憤 -憧 -憨 -憩 -憫 -憬 -憲 -憶 -憾 -懂 -懇 -懈 -應 -懊 -懋 -懑 -懒 -懦 -懲 -懵 -懶 -懷 -懸 -懺 -懼 -懾 -懿 -戀 -戈 -戊 -戌 -戍 -戎 -戏 -成 -我 -戒 -戕 -或 -战 -戚 -戛 -戟 -戡 -戦 -截 -戬 -戮 -戰 -戲 -戳 -戴 -戶 -户 -戸 -戻 -戾 -房 -所 -扁 -扇 -扈 -扉 -手 -才 -扎 -扑 -扒 -打 -扔 -払 -托 -扛 -扣 -扦 -执 -扩 -扪 -扫 -扬 -扭 -扮 -扯 -扰 -扱 -扳 -扶 -批 -扼 -找 -承 -技 -抄 -抉 -把 -抑 -抒 -抓 -投 -抖 -抗 -折 -抚 -抛 -抜 -択 -抟 -抠 -抡 -抢 -护 -报 -抨 -披 -抬 -抱 -抵 -抹 -押 -抽 -抿 -拂 -拄 -担 -拆 -拇 -拈 -拉 -拋 -拌 -拍 -拎 -拐 -拒 -拓 -拔 -拖 -拗 -拘 -拙 -拚 -招 -拜 -拟 -拡 -拢 -拣 -拥 -拦 -拧 -拨 -择 -括 -拭 -拮 -拯 -拱 -拳 -拴 -拷 -拼 -拽 -拾 -拿 -持 -挂 -指 -挈 -按 -挎 -挑 -挖 -挙 -挚 -挛 -挝 -挞 -挟 -挠 -挡 -挣 -挤 -挥 -挨 -挪 -挫 -振 -挲 -挹 -挺 -挽 -挾 -捂 -捅 -捆 -捉 -捋 -捌 -捍 -捎 -捏 -捐 -捕 -捞 -损 -捡 -换 -捣 -捧 -捨 -捩 -据 -捱 -捲 -捶 -捷 -捺 -捻 -掀 -掂 -掃 -掇 -授 -掉 -掌 -掏 -掐 -排 -掖 -掘 -掙 -掛 -掠 -採 -探 -掣 -接 -控 -推 -掩 -措 -掬 -掰 -掲 -掳 -掴 -掷 -掸 -掺 -揀 -揃 -揄 -揆 -揉 -揍 -描 -提 -插 -揖 -揚 -換 -握 -揣 -揩 -揪 -揭 -揮 -援 -揶 -揸 -揹 -揽 -搀 -搁 -搂 -搅 -損 -搏 -搐 -搓 -搔 -搖 -搗 -搜 -搞 -搡 -搪 -搬 -搭 -搵 -搶 -携 -搽 -摀 -摁 -摄 -摆 -摇 -摈 -摊 -摒 -摔 -摘 -摞 -摟 -摧 -摩 -摯 -摳 -摸 -摹 -摺 -摻 -撂 -撃 -撅 -撇 -撈 -撐 -撑 -撒 -撓 -撕 -撚 -撞 -撤 -撥 -撩 -撫 -撬 -播 -撮 -撰 -撲 -撵 -撷 -撸 -撻 -撼 -撿 -擀 -擁 -擂 -擄 -擅 -擇 -擊 -擋 -操 -擎 -擒 -擔 -擘 -據 -擞 -擠 -擡 -擢 -擦 -擬 -擰 -擱 -擲 -擴 -擷 -擺 -擼 -擾 -攀 -攏 -攒 -攔 -攘 -攙 -攜 -攝 -攞 -攢 -攣 -攤 -攥 -攪 -攫 -攬 -支 -收 -攸 -改 -攻 -放 -政 -故 -效 -敌 -敍 -敎 -敏 -救 -敕 -敖 -敗 -敘 -教 -敛 -敝 -敞 -敢 -散 -敦 -敬 -数 -敲 -整 -敵 -敷 -數 -斂 -斃 -文 -斋 -斌 -斎 -斐 -斑 -斓 -斗 -料 -斛 -斜 -斟 -斡 -斤 -斥 -斧 -斩 -斫 -斬 -断 -斯 -新 -斷 -方 -於 -施 -旁 -旃 -旅 -旋 -旌 -旎 -族 -旖 -旗 -无 -既 -日 -旦 -旧 -旨 -早 -旬 -旭 -旮 -旱 -时 -旷 -旺 -旻 -昀 -昂 -昆 -昇 -昉 -昊 -昌 -明 -昏 -易 -昔 -昕 -昙 -星 -映 -春 -昧 -昨 -昭 -是 -昱 -昴 -昵 -昶 -昼 -显 -晁 -時 -晃 -晉 -晋 -晌 -晏 -晒 -晓 -晔 -晕 -晖 -晗 -晚 -晝 -晞 -晟 -晤 -晦 -晨 -晩 -普 -景 -晰 -晴 -晶 -晷 -智 -晾 -暂 -暄 -暇 -暈 -暉 -暌 -暐 -暑 -暖 -暗 -暝 -暢 -暧 -暨 -暫 -暮 -暱 -暴 -暸 -暹 -曄 -曆 -曇 -曉 -曖 -曙 -曜 -曝 -曠 -曦 -曬 -曰 -曲 -曳 -更 -書 -曹 -曼 -曾 -替 -最 -會 -月 -有 -朋 -服 -朐 -朔 -朕 -朗 -望 -朝 -期 -朦 -朧 -木 -未 -末 -本 -札 -朮 -术 -朱 -朴 -朵 -机 -朽 -杀 -杂 -权 -杆 -杈 -杉 -李 -杏 -材 -村 -杓 -杖 -杜 -杞 -束 -杠 -条 -来 -杨 -杭 -杯 -杰 -東 -杳 -杵 -杷 -杼 -松 -板 -极 -构 -枇 -枉 -枋 -析 -枕 -林 -枚 -果 -枝 -枢 -枣 -枪 -枫 -枭 -枯 -枰 -枱 -枳 -架 -枷 -枸 -柄 -柏 -某 -柑 -柒 -染 -柔 -柘 -柚 -柜 -柞 -柠 -柢 -查 -柩 -柬 -柯 -柱 -柳 -柴 -柵 -査 -柿 -栀 -栃 -栄 -栅 -标 -栈 -栉 -栋 -栎 -栏 -树 -栓 -栖 -栗 -校 -栩 -株 -样 -核 -根 -格 -栽 -栾 -桀 -桁 -桂 -桃 -桅 -框 -案 -桉 -桌 -桎 -桐 -桑 -桓 -桔 -桜 -桠 -桡 -桢 -档 -桥 -桦 -桧 -桨 -桩 -桶 -桿 -梁 -梅 -梆 -梏 -梓 -梗 -條 -梟 -梢 -梦 -梧 -梨 -梭 -梯 -械 -梳 -梵 -梶 -检 -棂 -棄 -棉 -棋 -棍 -棒 -棕 -棗 -棘 -棚 -棟 -棠 -棣 -棧 -森 -棱 -棲 -棵 -棹 -棺 -椁 -椅 -椋 -植 -椎 -椒 -検 -椪 -椭 -椰 -椹 -椽 -椿 -楂 -楊 -楓 -楔 -楚 -楝 -楞 -楠 -楣 -楨 -楫 -業 -楮 -極 -楷 -楸 -楹 -楼 -楽 -概 -榄 -榆 -榈 -榉 -榔 -榕 -榖 -榛 -榜 -榨 -榫 -榭 -榮 -榱 -榴 -榷 -榻 -槁 -槃 -構 -槌 -槍 -槎 -槐 -槓 -様 -槛 -槟 -槤 -槭 -槲 -槳 -槻 -槽 -槿 -樁 -樂 -樊 -樑 -樓 -標 -樞 -樟 -模 -樣 -権 -横 -樫 -樯 -樱 -樵 -樸 -樹 -樺 -樽 -樾 -橄 -橇 -橋 -橐 -橘 -橙 -機 -橡 -橢 -橫 -橱 -橹 -橼 -檀 -檄 -檎 -檐 -檔 -檗 -檜 -檢 -檬 -檯 -檳 -檸 -檻 -櫃 -櫚 -櫛 -櫥 -櫸 -櫻 -欄 -權 -欒 -欖 -欠 -次 -欢 -欣 -欧 -欲 -欸 -欺 -欽 -款 -歆 -歇 -歉 -歌 -歎 -歐 -歓 -歙 -歛 -歡 -止 -正 -此 -步 -武 -歧 -歩 -歪 -歯 -歲 -歳 -歴 -歷 -歸 -歹 -死 -歼 -殁 -殃 -殆 -殇 -殉 -殊 -残 -殒 -殓 -殖 -殘 -殞 -殡 -殤 -殭 -殯 -殲 -殴 -段 -殷 -殺 -殼 -殿 -毀 -毁 -毂 -毅 -毆 -毋 -母 -毎 -每 -毒 -毓 -比 -毕 -毗 -毘 -毙 -毛 -毡 -毫 -毯 -毽 -氈 -氏 -氐 -民 -氓 -气 -氖 -気 -氙 -氛 -氟 -氡 -氢 -氣 -氤 -氦 -氧 -氨 -氪 -氫 -氮 -氯 -氰 -氲 -水 -氷 -永 -氹 -氾 -汀 -汁 -求 -汆 -汇 -汉 -汎 -汐 -汕 -汗 -汙 -汛 -汝 -汞 -江 -池 -污 -汤 -汨 -汩 -汪 -汰 -汲 -汴 -汶 -汹 -決 -汽 -汾 -沁 -沂 -沃 -沅 -沈 -沉 -沌 -沏 -沐 -沒 -沓 -沖 -沙 -沛 -沟 -没 -沢 -沣 -沥 -沦 -沧 -沪 -沫 -沭 -沮 -沱 -河 -沸 -油 -治 -沼 -沽 -沾 -沿 -況 -泄 -泉 -泊 -泌 -泓 -法 -泗 -泛 -泞 -泠 -泡 -波 -泣 -泥 -注 -泪 -泫 -泮 -泯 -泰 -泱 -泳 -泵 -泷 -泸 -泻 -泼 -泽 -泾 -洁 -洄 -洋 -洒 -洗 -洙 -洛 -洞 -津 -洩 -洪 -洮 -洱 -洲 -洵 -洶 -洸 -洹 -活 -洼 -洽 -派 -流 -浃 -浄 -浅 -浆 -浇 -浊 -测 -济 -浏 -浑 -浒 -浓 -浔 -浙 -浚 -浜 -浣 -浦 -浩 -浪 -浬 -浮 -浯 -浴 -海 -浸 -涂 -涅 -涇 -消 -涉 -涌 -涎 -涓 -涔 -涕 -涙 -涛 -涝 -涞 -涟 -涠 -涡 -涣 -涤 -润 -涧 -涨 -涩 -涪 -涮 -涯 -液 -涵 -涸 -涼 -涿 -淀 -淄 -淅 
-淆 -淇 -淋 -淌 -淑 -淒 -淖 -淘 -淙 -淚 -淞 -淡 -淤 -淦 -淨 -淩 -淪 -淫 -淬 -淮 -深 -淳 -淵 -混 -淹 -淺 -添 -淼 -清 -済 -渉 -渊 -渋 -渍 -渎 -渐 -渔 -渗 -渙 -渚 -減 -渝 -渠 -渡 -渣 -渤 -渥 -渦 -温 -測 -渭 -港 -渲 -渴 -游 -渺 -渾 -湃 -湄 -湊 -湍 -湖 -湘 -湛 -湟 -湧 -湫 -湮 -湯 -湳 -湾 -湿 -満 -溃 -溅 -溉 -溏 -源 -準 -溜 -溝 -溟 -溢 -溥 -溧 -溪 -溫 -溯 -溱 -溴 -溶 -溺 -溼 -滁 -滂 -滄 -滅 -滇 -滋 -滌 -滑 -滓 -滔 -滕 -滙 -滚 -滝 -滞 -滟 -满 -滢 -滤 -滥 -滦 -滨 -滩 -滬 -滯 -滲 -滴 -滷 -滸 -滾 -滿 -漁 -漂 -漆 -漉 -漏 -漓 -演 -漕 -漠 -漢 -漣 -漩 -漪 -漫 -漬 -漯 -漱 -漲 -漳 -漸 -漾 -漿 -潆 -潇 -潋 -潍 -潑 -潔 -潘 -潛 -潜 -潞 -潟 -潢 -潤 -潦 -潧 -潭 -潮 -潰 -潴 -潸 -潺 -潼 -澀 -澄 -澆 -澈 -澍 -澎 -澗 -澜 -澡 -澤 -澧 -澱 -澳 -澹 -激 -濁 -濂 -濃 -濑 -濒 -濕 -濘 -濛 -濟 -濠 -濡 -濤 -濫 -濬 -濮 -濯 -濱 -濺 -濾 -瀅 -瀆 -瀉 -瀋 -瀏 -瀑 -瀕 -瀘 -瀚 -瀛 -瀝 -瀞 -瀟 -瀧 -瀨 -瀬 -瀰 -瀾 -灌 -灏 -灑 -灘 -灝 -灞 -灣 -火 -灬 -灭 -灯 -灰 -灵 -灶 -灸 -灼 -災 -灾 -灿 -炀 -炁 -炅 -炉 -炊 -炎 -炒 -炔 -炕 -炖 -炙 -炜 -炫 -炬 -炭 -炮 -炯 -炳 -炷 -炸 -点 -為 -炼 -炽 -烁 -烂 -烃 -烈 -烊 -烏 -烘 -烙 -烛 -烟 -烤 -烦 -烧 -烨 -烩 -烫 -烬 -热 -烯 -烷 -烹 -烽 -焉 -焊 -焕 -焖 -焗 -焘 -焙 -焚 -焜 -無 -焦 -焯 -焰 -焱 -然 -焼 -煅 -煉 -煊 -煌 -煎 -煒 -煖 -煙 -煜 -煞 -煤 -煥 -煦 -照 -煨 -煩 -煮 -煲 -煸 -煽 -熄 -熊 -熏 -熒 -熔 -熙 -熟 -熠 -熨 -熬 -熱 -熵 -熹 -熾 -燁 -燃 -燄 -燈 -燉 -燊 -燎 -燒 -燔 -燕 -燙 -燜 -營 -燥 -燦 -燧 -燭 -燮 -燴 -燻 -燼 -燿 -爆 -爍 -爐 -爛 -爪 -爬 -爭 -爰 -爱 -爲 -爵 -父 -爷 -爸 -爹 -爺 -爻 -爽 -爾 -牆 -片 -版 -牌 -牍 -牒 -牙 -牛 -牝 -牟 -牠 -牡 -牢 -牦 -牧 -物 -牯 -牲 -牴 -牵 -特 -牺 -牽 -犀 -犁 -犄 -犊 -犍 -犒 -犢 -犧 -犬 -犯 -状 -犷 -犸 -犹 -狀 -狂 -狄 -狈 -狎 -狐 -狒 -狗 -狙 -狞 -狠 -狡 -狩 -独 -狭 -狮 -狰 -狱 -狸 -狹 -狼 -狽 -猎 -猕 -猖 -猗 -猙 -猛 -猜 -猝 -猥 -猩 -猪 -猫 -猬 -献 -猴 -猶 -猷 -猾 -猿 -獄 -獅 -獎 -獐 -獒 -獗 -獠 -獣 -獨 -獭 -獰 -獲 -獵 -獷 -獸 -獺 -獻 -獼 -獾 -玄 -率 -玉 -王 -玑 -玖 -玛 -玟 -玠 -玥 -玩 -玫 -玮 -环 -现 -玲 -玳 -玷 -玺 -玻 -珀 -珂 -珅 -珈 -珉 -珊 -珍 -珏 -珐 -珑 -珙 -珞 -珠 -珣 -珥 -珩 -珪 -班 -珮 -珲 -珺 -現 -球 -琅 -理 -琇 -琉 -琊 -琍 -琏 -琐 -琛 -琢 -琥 -琦 -琨 -琪 -琬 -琮 -琰 -琲 -琳 -琴 -琵 -琶 -琺 -琼 -瑀 -瑁 -瑄 -瑋 -瑕 -瑗 -瑙 -瑚 -瑛 -瑜 -瑞 -瑟 -瑠 -瑣 -瑤 -瑩 -瑪 -瑯 -瑰 -瑶 -瑾 -璀 -璁 -璃 -璇 -璉 -璋 -璎 -璐 -璜 -璞 -璟 -璧 -璨 -環 -璽 -璿 -瓊 -瓏 -瓒 -瓜 -瓢 -瓣 -瓤 -瓦 -瓮 -瓯 -瓴 -瓶 -瓷 -甄 -甌 -甕 -甘 -甙 -甚 -甜 -生 -產 -産 -甥 -甦 -用 -甩 -甫 -甬 -甭 -甯 -田 -由 -甲 -申 -电 -男 -甸 -町 -画 -甾 -畀 -畅 -界 -畏 -畑 -畔 -留 -畜 -畝 -畢 -略 -畦 -番 -畫 -異 -畲 -畳 -畴 -當 -畸 -畹 -畿 -疆 -疇 -疊 -疏 -疑 -疔 -疖 -疗 -疙 -疚 -疝 -疟 -疡 -疣 -疤 -疥 -疫 -疮 -疯 -疱 -疲 -疳 -疵 -疸 -疹 -疼 -疽 -疾 -痂 -病 -症 -痈 -痉 -痊 -痍 -痒 -痔 -痕 -痘 -痙 -痛 -痞 -痠 -痢 -痣 -痤 -痧 -痨 -痪 -痫 -痰 -痱 -痴 -痹 -痺 -痼 -痿 -瘀 -瘁 -瘋 -瘍 -瘓 -瘘 -瘙 -瘟 -瘠 -瘡 -瘢 -瘤 -瘦 -瘧 -瘩 -瘪 -瘫 -瘴 -瘸 -瘾 -療 -癇 -癌 -癒 -癖 -癜 -癞 -癡 -癢 -癣 -癥 -癫 -癬 -癮 -癱 -癲 -癸 -発 -登 -發 -白 -百 -皂 -的 -皆 -皇 -皈 -皋 -皎 -皑 -皓 -皖 -皙 -皚 -皮 -皰 -皱 -皴 -皺 -皿 -盂 -盃 -盅 -盆 -盈 -益 -盎 -盏 -盐 -监 -盒 -盔 -盖 -盗 -盘 -盛 -盜 -盞 -盟 -盡 -監 -盤 -盥 -盧 -盪 -目 -盯 -盱 -盲 -直 -相 -盹 -盼 -盾 -省 -眈 -眉 -看 -県 -眙 -眞 -真 -眠 -眦 -眨 -眩 -眯 -眶 -眷 -眸 -眺 -眼 -眾 -着 -睁 -睇 -睏 -睐 -睑 -睛 -睜 -睞 -睡 -睢 -督 -睥 -睦 -睨 -睪 -睫 -睬 -睹 -睽 -睾 -睿 -瞄 -瞅 -瞇 -瞋 -瞌 -瞎 -瞑 -瞒 -瞓 -瞞 -瞟 -瞠 -瞥 -瞧 -瞩 -瞪 -瞬 -瞭 -瞰 -瞳 -瞻 -瞼 -瞿 -矇 -矍 -矗 -矚 -矛 -矜 -矢 -矣 -知 -矩 -矫 -短 -矮 -矯 -石 -矶 -矽 -矾 -矿 -码 -砂 -砌 -砍 -砒 -研 -砖 -砗 -砚 -砝 -砣 -砥 -砧 -砭 -砰 -砲 -破 -砷 -砸 -砺 -砼 -砾 -础 -硅 -硐 -硒 -硕 -硝 -硫 -硬 -确 -硯 -硼 -碁 -碇 -碉 -碌 -碍 -碎 -碑 -碓 -碗 -碘 -碚 -碛 -碟 -碣 -碧 -碩 -碰 -碱 -碳 -碴 -確 -碼 -碾 -磁 -磅 -磊 -磋 -磐 -磕 -磚 -磡 -磨 -磬 -磯 -磲 -磷 -磺 -礁 -礎 -礙 -礡 -礦 -礪 -礫 -礴 -示 -礼 -社 -祀 -祁 -祂 -祇 -祈 -祉 -祎 -祐 -祕 -祖 -祗 -祚 -祛 -祜 -祝 -神 -祟 -祠 -祢 -祥 -票 -祭 -祯 -祷 -祸 -祺 -祿 -禀 -禁 -禄 -禅 -禍 -禎 -福 -禛 -禦 -禧 -禪 -禮 -禱 -禹 -禺 -离 -禽 -禾 -禿 -秀 -私 -秃 -秆 -秉 -秋 -种 -科 -秒 -秘 -租 -秣 -秤 -秦 -秧 -秩 -秭 -积 -称 -秸 -移 -秽 -稀 -稅 -程 -稍 -税 -稔 -稗 -稚 -稜 -稞 -稟 -稠 -稣 -種 -稱 -稲 -稳 -稷 -稹 -稻 -稼 -稽 -稿 -穀 -穂 -穆 -穌 -積 -穎 -穗 -穢 -穩 -穫 -穴 -究 -穷 -穹 -空 -穿 -突 -窃 -窄 -窈 -窍 -窑 -窒 -窓 -窕 -窖 -窗 -窘 -窜 -窝 -窟 -窠 -窥 -窦 -窨 -窩 -窪 -窮 -窯 -窺 -窿 -竄 -竅 -竇 -竊 -立 -竖 -站 -竜 -竞 -竟 -章 -竣 -童 -竭 -端 -競 -竹 -竺 -竽 -竿 -笃 -笆 -笈 -笋 -笏 -笑 -笔 -笙 -笛 -笞 -笠 -符 -笨 -第 -笹 -笺 -笼 -筆 -等 -筊 -筋 -筍 -筏 -筐 -筑 -筒 -答 -策 -筛 -筝 -筠 -筱 -筲 -筵 -筷 -筹 -签 -简 -箇 -箋 -箍 -箏 -箐 -箔 -箕 -算 -箝 -管 -箩 -箫 -箭 -箱 -箴 -箸 -節 -篁 -範 -篆 -篇 -築 -篑 -篓 -篙 -篝 -篠 -篡 -篤 -篩 -篪 -篮 -篱 -篷 -簇 -簌 -簍 -簡 -簦 -簧 
-簪 -簫 -簷 -簸 -簽 -簾 -簿 -籁 -籃 -籌 -籍 -籐 -籟 -籠 -籤 -籬 -籮 -籲 -米 -类 -籼 -籽 -粄 -粉 -粑 -粒 -粕 -粗 -粘 -粟 -粤 -粥 -粧 -粪 -粮 -粱 -粲 -粳 -粵 -粹 -粼 -粽 -精 -粿 -糅 -糊 -糍 -糕 -糖 -糗 -糙 -糜 -糞 -糟 -糠 -糧 -糬 -糯 -糰 -糸 -系 -糾 -紀 -紂 -約 -紅 -紉 -紊 -紋 -納 -紐 -紓 -純 -紗 -紘 -紙 -級 -紛 -紜 -素 -紡 -索 -紧 -紫 -紮 -累 -細 -紳 -紹 -紺 -終 -絃 -組 -絆 -経 -結 -絕 -絞 -絡 -絢 -給 -絨 -絮 -統 -絲 -絳 -絵 -絶 -絹 -綁 -綏 -綑 -經 -継 -続 -綜 -綠 -綢 -綦 -綫 -綬 -維 -綱 -網 -綴 -綵 -綸 -綺 -綻 -綽 -綾 -綿 -緊 -緋 -総 -緑 -緒 -緘 -線 -緝 -緞 -締 -緣 -編 -緩 -緬 -緯 -練 -緹 -緻 -縁 -縄 -縈 -縛 -縝 -縣 -縫 -縮 -縱 -縴 -縷 -總 -績 -繁 -繃 -繆 -繇 -繋 -織 -繕 -繚 -繞 -繡 -繩 -繪 -繫 -繭 -繳 -繹 -繼 -繽 -纂 -續 -纍 -纏 -纓 -纔 -纖 -纜 -纠 -红 -纣 -纤 -约 -级 -纨 -纪 -纫 -纬 -纭 -纯 -纰 -纱 -纲 -纳 -纵 -纶 -纷 -纸 -纹 -纺 -纽 -纾 -线 -绀 -练 -组 -绅 -细 -织 -终 -绊 -绍 -绎 -经 -绑 -绒 -结 -绔 -绕 -绘 -给 -绚 -绛 -络 -绝 -绞 -统 -绡 -绢 -绣 -绥 -绦 -继 -绩 -绪 -绫 -续 -绮 -绯 -绰 -绳 -维 -绵 -绶 -绷 -绸 -绻 -综 -绽 -绾 -绿 -缀 -缄 -缅 -缆 -缇 -缈 -缉 -缎 -缓 -缔 -缕 -编 -缘 -缙 -缚 -缜 -缝 -缠 -缢 -缤 -缥 -缨 -缩 -缪 -缭 -缮 -缰 -缱 -缴 -缸 -缺 -缽 -罂 -罄 -罌 -罐 -网 -罔 -罕 -罗 -罚 -罡 -罢 -罩 -罪 -置 -罰 -署 -罵 -罷 -罹 -羁 -羅 -羈 -羊 -羌 -美 -羔 -羚 -羞 -羟 -羡 -羣 -群 -羥 -羧 -羨 -義 -羯 -羲 -羸 -羹 -羽 -羿 -翁 -翅 -翊 -翌 -翎 -習 -翔 -翘 -翟 -翠 -翡 -翦 -翩 -翰 -翱 -翳 -翹 -翻 -翼 -耀 -老 -考 -耄 -者 -耆 -耋 -而 -耍 -耐 -耒 -耕 -耗 -耘 -耙 -耦 -耨 -耳 -耶 -耷 -耸 -耻 -耽 -耿 -聂 -聆 -聊 -聋 -职 -聒 -联 -聖 -聘 -聚 -聞 -聪 -聯 -聰 -聲 -聳 -聴 -聶 -職 -聽 -聾 -聿 -肃 -肄 -肅 -肆 -肇 -肉 -肋 -肌 -肏 -肓 -肖 -肘 -肚 -肛 -肝 -肠 -股 -肢 -肤 -肥 -肩 -肪 -肮 -肯 -肱 -育 -肴 -肺 -肽 -肾 -肿 -胀 -胁 -胃 -胄 -胆 -背 -胍 -胎 -胖 -胚 -胛 -胜 -胝 -胞 -胡 -胤 -胥 -胧 -胫 -胭 -胯 -胰 -胱 -胳 -胴 -胶 -胸 -胺 -能 -脂 -脅 -脆 -脇 -脈 -脉 -脊 -脍 -脏 -脐 -脑 -脓 -脖 -脘 -脚 -脛 -脣 -脩 -脫 -脯 -脱 -脲 -脳 -脸 -脹 -脾 -腆 -腈 -腊 -腋 -腌 -腎 -腐 -腑 -腓 -腔 -腕 -腥 -腦 -腩 -腫 -腭 -腮 -腰 -腱 -腳 -腴 -腸 -腹 -腺 -腻 -腼 -腾 -腿 -膀 -膈 -膊 -膏 -膑 -膘 -膚 -膛 -膜 -膝 -膠 -膦 -膨 -膩 -膳 -膺 -膻 -膽 -膾 -膿 -臀 -臂 -臃 -臆 -臉 -臊 -臍 -臓 -臘 -臟 -臣 -臥 -臧 -臨 -自 -臬 -臭 -至 -致 -臺 -臻 -臼 -臾 -舀 -舂 -舅 -舆 -與 -興 -舉 -舊 -舌 -舍 -舎 -舐 -舒 -舔 -舖 -舗 -舛 -舜 -舞 -舟 -航 -舫 -般 -舰 -舱 -舵 -舶 -舷 -舸 -船 -舺 -舾 -艇 -艋 -艘 -艙 -艦 -艮 -良 -艰 -艱 -色 -艳 -艷 -艹 -艺 -艾 -节 -芃 -芈 -芊 -芋 -芍 -芎 -芒 -芙 -芜 -芝 -芡 -芥 -芦 -芩 -芪 -芫 -芬 -芭 -芮 -芯 -花 -芳 -芷 -芸 -芹 -芻 -芽 -芾 -苁 -苄 -苇 -苋 -苍 -苏 -苑 -苒 -苓 -苔 -苕 -苗 -苛 -苜 -苞 -苟 -苡 -苣 -若 -苦 -苫 -苯 -英 -苷 -苹 -苻 -茁 -茂 -范 -茄 -茅 -茉 -茎 -茏 -茗 -茜 -茧 -茨 -茫 -茬 -茭 -茯 -茱 -茲 -茴 -茵 -茶 -茸 -茹 -茼 -荀 -荃 -荆 -草 -荊 -荏 -荐 -荒 -荔 -荖 -荘 -荚 -荞 -荟 -荠 -荡 -荣 -荤 -荥 -荧 -荨 -荪 -荫 -药 -荳 -荷 -荸 -荻 -荼 -荽 -莅 -莆 -莉 -莊 -莎 -莒 -莓 -莖 -莘 -莞 -莠 -莢 -莧 -莪 -莫 -莱 -莲 -莴 -获 -莹 -莺 -莽 -莿 -菀 -菁 -菅 -菇 -菈 -菊 -菌 -菏 -菓 -菖 -菘 -菜 -菟 -菠 -菡 -菩 -華 -菱 -菲 -菸 -菽 -萁 -萃 -萄 -萊 -萋 -萌 -萍 -萎 -萘 -萝 -萤 -营 -萦 -萧 -萨 -萩 -萬 -萱 -萵 -萸 -萼 -落 -葆 -葉 -著 -葚 -葛 -葡 -董 -葦 -葩 -葫 -葬 -葭 -葯 -葱 -葳 -葵 -葷 -葺 -蒂 -蒋 -蒐 -蒔 -蒙 -蒜 -蒞 -蒟 -蒡 -蒨 -蒲 -蒸 -蒹 -蒻 -蒼 -蒿 -蓁 -蓄 -蓆 -蓉 -蓋 -蓑 -蓓 -蓖 -蓝 -蓟 -蓦 -蓬 -蓮 -蓼 -蓿 -蔑 -蔓 -蔔 -蔗 -蔘 -蔚 -蔡 -蔣 -蔥 -蔫 -蔬 -蔭 -蔵 -蔷 -蔺 -蔻 -蔼 -蔽 -蕁 -蕃 -蕈 -蕉 -蕊 -蕎 -蕙 -蕤 -蕨 -蕩 -蕪 -蕭 -蕲 -蕴 -蕻 -蕾 -薄 -薅 -薇 -薈 -薊 -薏 -薑 -薔 -薙 -薛 -薦 -薨 -薩 -薪 -薬 -薯 -薰 -薹 -藉 -藍 -藏 -藐 -藓 -藕 -藜 -藝 -藤 -藥 -藩 -藹 -藻 -藿 -蘆 -蘇 -蘊 -蘋 -蘑 -蘚 -蘭 -蘸 -蘼 -蘿 -虎 -虏 -虐 -虑 -虔 -處 -虚 -虛 -虜 -虞 -號 -虢 -虧 -虫 -虬 -虱 -虹 -虻 -虽 -虾 -蚀 -蚁 -蚂 -蚊 -蚌 -蚓 -蚕 -蚜 -蚝 -蚣 -蚤 -蚩 -蚪 -蚯 -蚱 -蚵 -蛀 -蛆 -蛇 -蛊 -蛋 -蛎 -蛐 -蛔 -蛙 -蛛 -蛟 -蛤 -蛭 -蛮 -蛰 -蛳 -蛹 -蛻 -蛾 -蜀 -蜂 -蜃 -蜆 -蜇 -蜈 -蜊 -蜍 -蜒 -蜓 -蜕 -蜗 -蜘 -蜚 -蜜 -蜡 -蜢 -蜥 -蜱 -蜴 -蜷 -蜻 -蜿 -蝇 -蝈 -蝉 -蝌 -蝎 -蝕 -蝗 -蝙 -蝟 -蝠 -蝦 -蝨 -蝴 -蝶 -蝸 -蝼 -螂 -螃 -融 -螞 -螢 -螨 -螯 -螳 -螺 -蟀 -蟄 -蟆 -蟋 -蟎 -蟑 -蟒 -蟠 -蟬 -蟲 -蟹 -蟻 -蟾 -蠅 -蠍 -蠔 -蠕 -蠛 -蠟 -蠡 -蠢 -蠣 -蠱 -蠶 -蠹 -蠻 -血 -衄 -衅 -衆 -行 -衍 -術 -衔 -街 -衙 -衛 -衝 -衞 -衡 -衢 -衣 -补 -表 -衩 -衫 -衬 -衮 -衰 -衲 -衷 -衹 -衾 -衿 -袁 -袂 -袄 -袅 -袈 -袋 -袍 -袒 -袖 -袜 -袞 -袤 -袪 -被 -袭 -袱 -裁 -裂 -装 -裆 -裊 -裏 -裔 -裕 -裘 -裙 -補 -裝 -裟 -裡 -裤 -裨 -裱 -裳 -裴 -裸 -裹 -製 -裾 -褂 -複 -褐 -褒 -褓 -褔 -褚 -褥 -褪 -褫 -褲 -褶 -褻 -襁 -襄 -襟 -襠 -襪 -襬 -襯 -襲 -西 -要 -覃 -覆 -覇 -見 -規 -覓 -視 -覚 -覦 -覧 -親 -覬 -観 -覷 -覺 -覽 -觀 -见 -观 -规 -觅 -视 -览 -觉 -觊 -觎 -觐 -觑 -角 -觞 -解 -觥 -触 -觸 -言 -訂 -計 -訊 -討 -訓 -訕 -訖 -託 -記 -訛 -訝 -訟 -訣 -訥 -訪 -設 -許 -訳 -訴 -訶 -診 -註 -証 -詆 -詐 -詔 
-評 -詛 -詞 -詠 -詡 -詢 -詣 -試 -詩 -詫 -詬 -詭 -詮 -詰 -話 -該 -詳 -詹 -詼 -誅 -誇 -誉 -誌 -認 -誓 -誕 -誘 -語 -誠 -誡 -誣 -誤 -誥 -誦 -誨 -說 -説 -読 -誰 -課 -誹 -誼 -調 -諄 -談 -請 -諏 -諒 -論 -諗 -諜 -諡 -諦 -諧 -諫 -諭 -諮 -諱 -諳 -諷 -諸 -諺 -諾 -謀 -謁 -謂 -謄 -謊 -謎 -謐 -謔 -謗 -謙 -講 -謝 -謠 -謨 -謬 -謹 -謾 -譁 -證 -譎 -譏 -識 -譙 -譚 -譜 -警 -譬 -譯 -議 -譲 -譴 -護 -譽 -讀 -變 -讓 -讚 -讞 -计 -订 -认 -讥 -讧 -讨 -让 -讪 -讫 -训 -议 -讯 -记 -讲 -讳 -讴 -讶 -讷 -许 -讹 -论 -讼 -讽 -设 -访 -诀 -证 -诃 -评 -诅 -识 -诈 -诉 -诊 -诋 -词 -诏 -译 -试 -诗 -诘 -诙 -诚 -诛 -话 -诞 -诟 -诠 -诡 -询 -诣 -诤 -该 -详 -诧 -诩 -诫 -诬 -语 -误 -诰 -诱 -诲 -说 -诵 -诶 -请 -诸 -诺 -读 -诽 -课 -诿 -谀 -谁 -调 -谄 -谅 -谆 -谈 -谊 -谋 -谌 -谍 -谎 -谏 -谐 -谑 -谒 -谓 -谔 -谕 -谗 -谘 -谙 -谚 -谛 -谜 -谟 -谢 -谣 -谤 -谥 -谦 -谧 -谨 -谩 -谪 -谬 -谭 -谯 -谱 -谲 -谴 -谶 -谷 -豁 -豆 -豇 -豈 -豉 -豊 -豌 -豎 -豐 -豔 -豚 -象 -豢 -豪 -豫 -豬 -豹 -豺 -貂 -貅 -貌 -貓 -貔 -貘 -貝 -貞 -負 -財 -貢 -貧 -貨 -販 -貪 -貫 -責 -貯 -貰 -貳 -貴 -貶 -買 -貸 -費 -貼 -貽 -貿 -賀 -賁 -賂 -賃 -賄 -資 -賈 -賊 -賑 -賓 -賜 -賞 -賠 -賡 -賢 -賣 -賤 -賦 -質 -賬 -賭 -賴 -賺 -購 -賽 -贅 -贈 -贊 -贍 -贏 -贓 -贖 -贛 -贝 -贞 -负 -贡 -财 -责 -贤 -败 -账 -货 -质 -贩 -贪 -贫 -贬 -购 -贮 -贯 -贰 -贱 -贲 -贴 -贵 -贷 -贸 -费 -贺 -贻 -贼 -贾 -贿 -赁 -赂 -赃 -资 -赅 -赈 -赊 -赋 -赌 -赎 -赏 -赐 -赓 -赔 -赖 -赘 -赚 -赛 -赝 -赞 -赠 -赡 -赢 -赣 -赤 -赦 -赧 -赫 -赭 -走 -赳 -赴 -赵 -赶 -起 -趁 -超 -越 -趋 -趕 -趙 -趟 -趣 -趨 -足 -趴 -趵 -趸 -趺 -趾 -跃 -跄 -跆 -跋 -跌 -跎 -跑 -跖 -跚 -跛 -距 -跟 -跡 -跤 -跨 -跩 -跪 -路 -跳 -践 -跷 -跹 -跺 -跻 -踉 -踊 -踌 -踏 -踐 -踝 -踞 -踟 -踢 -踩 -踪 -踮 -踱 -踴 -踵 -踹 -蹂 -蹄 -蹇 -蹈 -蹉 -蹊 -蹋 -蹑 -蹒 -蹙 -蹟 -蹣 -蹤 -蹦 -蹩 -蹬 -蹭 -蹲 -蹴 -蹶 -蹺 -蹼 -蹿 -躁 -躇 -躉 -躊 -躋 -躍 -躏 -躪 -身 -躬 -躯 -躲 -躺 -軀 -車 -軋 -軌 -軍 -軒 -軟 -転 -軸 -軼 -軽 -軾 -較 -載 -輒 -輓 -輔 -輕 -輛 -輝 -輟 -輩 -輪 -輯 -輸 -輻 -輾 -輿 -轄 -轅 -轆 -轉 -轍 -轎 -轟 -车 -轧 -轨 -轩 -转 -轭 -轮 -软 -轰 -轲 -轴 -轶 -轻 -轼 -载 -轿 -较 -辄 -辅 -辆 -辇 -辈 -辉 -辊 -辍 -辐 -辑 -输 -辕 -辖 -辗 -辘 -辙 -辛 -辜 -辞 -辟 -辣 -辦 -辨 -辩 -辫 -辭 -辮 -辯 -辰 -辱 -農 -边 -辺 -辻 -込 -辽 -达 -迁 -迂 -迄 -迅 -过 -迈 -迎 -运 -近 -返 -还 -这 -进 -远 -违 -连 -迟 -迢 -迤 -迥 -迦 -迩 -迪 -迫 -迭 -述 -迴 -迷 -迸 -迹 -迺 -追 -退 -送 -适 -逃 -逅 -逆 -选 -逊 -逍 -透 -逐 -递 -途 -逕 -逗 -這 -通 -逛 -逝 -逞 -速 -造 -逢 -連 -逮 -週 -進 -逵 -逶 -逸 -逻 -逼 -逾 -遁 -遂 -遅 -遇 -遊 -運 -遍 -過 -遏 -遐 -遑 -遒 -道 -達 -違 -遗 -遙 -遛 -遜 -遞 -遠 -遢 -遣 -遥 -遨 -適 -遭 -遮 -遲 -遴 -遵 -遶 -遷 -選 -遺 -遼 -遽 -避 -邀 -邁 -邂 -邃 -還 -邇 -邈 -邊 -邋 -邏 -邑 -邓 -邕 -邛 -邝 -邢 -那 -邦 -邨 -邪 -邬 -邮 -邯 -邰 -邱 -邳 -邵 -邸 -邹 -邺 -邻 -郁 -郅 -郊 -郎 -郑 -郜 -郝 -郡 -郢 -郤 -郦 -郧 -部 -郫 -郭 -郴 -郵 -郷 -郸 -都 -鄂 -鄉 -鄒 -鄔 -鄙 -鄞 -鄢 -鄧 -鄭 -鄰 -鄱 -鄲 -鄺 -酉 -酊 -酋 -酌 -配 -酐 -酒 -酗 -酚 -酝 -酢 -酣 -酥 -酩 -酪 -酬 -酮 -酯 -酰 -酱 -酵 -酶 -酷 -酸 -酿 -醃 -醇 -醉 -醋 -醍 -醐 -醒 -醚 -醛 -醜 -醞 -醣 -醪 -醫 -醬 -醮 -醯 -醴 -醺 -釀 -釁 -采 -釉 -释 -釋 -里 -重 -野 -量 -釐 -金 -釗 -釘 -釜 -針 -釣 -釦 -釧 -釵 -鈀 -鈉 -鈍 -鈎 -鈔 -鈕 -鈞 -鈣 -鈦 -鈪 -鈴 -鈺 -鈾 -鉀 -鉄 -鉅 -鉉 -鉑 -鉗 -鉚 -鉛 -鉤 -鉴 -鉻 -銀 -銃 -銅 -銑 -銓 -銖 -銘 -銜 -銬 -銭 -銮 -銳 -銷 -銹 -鋁 -鋅 -鋒 -鋤 -鋪 -鋰 -鋸 -鋼 -錄 -錐 -錘 -錚 -錠 -錢 -錦 -錨 -錫 -錮 -錯 -録 -錳 -錶 -鍊 -鍋 -鍍 -鍛 -鍥 -鍰 -鍵 -鍺 -鍾 -鎂 -鎊 -鎌 -鎏 -鎔 -鎖 -鎗 -鎚 -鎧 -鎬 -鎮 -鎳 -鏈 -鏖 -鏗 -鏘 -鏞 -鏟 -鏡 -鏢 -鏤 -鏽 -鐘 -鐮 -鐲 -鐳 -鐵 -鐸 -鐺 -鑄 -鑊 -鑑 -鑒 -鑣 -鑫 -鑰 -鑲 -鑼 -鑽 -鑾 -鑿 -针 -钉 -钊 -钎 -钏 -钒 -钓 -钗 -钙 -钛 -钜 -钝 -钞 -钟 -钠 -钡 -钢 -钣 -钤 -钥 -钦 -钧 -钨 -钩 -钮 -钯 -钰 -钱 -钳 -钴 -钵 -钺 -钻 -钼 -钾 -钿 -铀 -铁 -铂 -铃 -铄 -铅 -铆 -铉 -铎 -铐 -铛 -铜 -铝 -铠 -铡 -铢 -铣 -铤 -铨 -铩 -铬 -铭 -铮 -铰 -铲 -铵 -银 -铸 -铺 -链 -铿 -销 -锁 -锂 -锄 -锅 -锆 -锈 -锉 -锋 -锌 -锏 -锐 -锑 -错 -锚 -锟 -锡 -锢 -锣 -锤 -锥 -锦 -锭 -键 -锯 -锰 -锲 -锵 -锹 -锺 -锻 -镀 -镁 -镂 -镇 -镉 -镌 -镍 -镐 -镑 -镕 -镖 -镗 -镛 -镜 -镣 -镭 -镯 -镰 -镳 -镶 -長 -长 -門 -閃 -閉 -開 -閎 -閏 -閑 -閒 -間 -閔 -閘 -閡 -関 -閣 -閥 -閨 -閩 -閱 -閲 -閹 -閻 -閾 -闆 -闇 -闊 -闌 -闍 -闔 -闕 -闖 -闘 -關 -闡 -闢 -门 -闪 -闫 -闭 -问 -闯 -闰 -闲 -间 -闵 -闷 -闸 -闹 -闺 -闻 -闽 -闾 -阀 -阁 -阂 -阅 -阆 -阇 -阈 -阉 -阎 -阐 -阑 -阔 -阕 -阖 -阙 -阚 -阜 -队 -阡 -阪 -阮 -阱 -防 -阳 -阴 -阵 -阶 -阻 -阿 -陀 -陂 -附 -际 -陆 -陇 -陈 -陋 -陌 -降 -限 -陕 -陛 -陝 -陞 -陟 -陡 -院 -陣 -除 -陨 -险 -陪 -陰 -陲 -陳 -陵 -陶 -陷 -陸 -険 -陽 -隅 -隆 -隈 -隊 -隋 -隍 -階 -随 -隐 -隔 -隕 -隘 -隙 -際 -障 -隠 -隣 -隧 -隨 -險 -隱 -隴 -隶 -隸 -隻 -隼 -隽 -难 -雀 -雁 -雄 -雅 -集 -雇 -雉 -雋 -雌 -雍 -雎 -雏 -雑 -雒 -雕 -雖 -雙 -雛 -雜 -雞 -離 -難 -雨 -雪 -雯 -雰 -雲 -雳 -零 -雷 -雹 -電 -雾 -需 -霁 -霄 -霆 -震 -霈 -霉 -霊 -霍 
-霎 -霏 -霑 -霓 -霖 -霜 -霞 -霧 -霭 -霰 -露 -霸 -霹 -霽 -霾 -靂 -靄 -靈 -青 -靓 -靖 -静 -靚 -靛 -靜 -非 -靠 -靡 -面 -靥 -靦 -革 -靳 -靴 -靶 -靼 -鞅 -鞋 -鞍 -鞏 -鞑 -鞘 -鞠 -鞣 -鞦 -鞭 -韆 -韋 -韌 -韓 -韜 -韦 -韧 -韩 -韬 -韭 -音 -韵 -韶 -韻 -響 -頁 -頂 -頃 -項 -順 -須 -頌 -預 -頑 -頒 -頓 -頗 -領 -頜 -頡 -頤 -頫 -頭 -頰 -頷 -頸 -頹 -頻 -頼 -顆 -題 -額 -顎 -顏 -顔 -願 -顛 -類 -顧 -顫 -顯 -顱 -顴 -页 -顶 -顷 -项 -顺 -须 -顼 -顽 -顾 -顿 -颁 -颂 -预 -颅 -领 -颇 -颈 -颉 -颊 -颌 -颍 -颐 -频 -颓 -颔 -颖 -颗 -题 -颚 -颛 -颜 -额 -颞 -颠 -颡 -颢 -颤 -颦 -颧 -風 -颯 -颱 -颳 -颶 -颼 -飄 -飆 -风 -飒 -飓 -飕 -飘 -飙 -飚 -飛 -飞 -食 -飢 -飨 -飩 -飪 -飯 -飲 -飼 -飽 -飾 -餃 -餅 -餉 -養 -餌 -餐 -餒 -餓 -餘 -餚 -餛 -餞 -餡 -館 -餮 -餵 -餾 -饅 -饈 -饋 -饌 -饍 -饑 -饒 -饕 -饗 -饞 -饥 -饨 -饪 -饬 -饭 -饮 -饯 -饰 -饱 -饲 -饴 -饵 -饶 -饷 -饺 -饼 -饽 -饿 -馀 -馁 -馄 -馅 -馆 -馈 -馋 -馍 -馏 -馒 -馔 -首 -馗 -香 -馥 -馨 -馬 -馭 -馮 -馳 -馴 -駁 -駄 -駅 -駆 -駐 -駒 -駕 -駛 -駝 -駭 -駱 -駿 -騁 -騎 -騏 -験 -騙 -騨 -騰 -騷 -驀 -驅 -驊 -驍 -驒 -驕 -驗 -驚 -驛 -驟 -驢 -驥 -马 -驭 -驮 -驯 -驰 -驱 -驳 -驴 -驶 -驷 -驸 -驹 -驻 -驼 -驾 -驿 -骁 -骂 -骄 -骅 -骆 -骇 -骈 -骊 -骋 -验 -骏 -骐 -骑 -骗 -骚 -骛 -骜 -骞 -骠 -骡 -骤 -骥 -骧 -骨 -骯 -骰 -骶 -骷 -骸 -骼 -髂 -髅 -髋 -髏 -髒 -髓 -體 -髖 -高 -髦 -髪 -髮 -髯 -髻 -鬃 -鬆 -鬍 -鬓 -鬚 -鬟 -鬢 -鬣 -鬥 -鬧 -鬱 -鬼 -魁 -魂 -魄 -魅 -魇 -魍 -魏 -魔 -魘 -魚 -魯 -魷 -鮑 -鮨 -鮪 -鮭 -鮮 -鯉 -鯊 -鯖 -鯛 -鯨 -鯰 -鯽 -鰍 -鰓 -鰭 -鰲 -鰻 -鰾 -鱈 -鱉 -鱔 -鱗 -鱷 -鱸 -鱼 -鱿 -鲁 -鲈 -鲍 -鲑 -鲛 -鲜 -鲟 -鲢 -鲤 -鲨 -鲫 -鲱 -鲲 -鲶 -鲷 -鲸 -鳃 -鳄 -鳅 -鳌 -鳍 -鳕 -鳖 -鳗 -鳝 -鳞 -鳥 -鳩 -鳳 -鳴 -鳶 -鴉 -鴕 -鴛 -鴦 -鴨 -鴻 -鴿 -鵑 -鵜 -鵝 -鵡 -鵬 -鵰 -鵲 -鶘 -鶩 -鶯 -鶴 -鷗 -鷲 -鷹 -鷺 -鸚 -鸞 -鸟 -鸠 -鸡 -鸢 -鸣 -鸥 -鸦 -鸨 -鸪 -鸭 -鸯 -鸳 -鸵 -鸽 -鸾 -鸿 -鹂 -鹃 -鹄 -鹅 -鹈 -鹉 -鹊 -鹌 -鹏 -鹑 -鹕 -鹘 -鹜 -鹞 -鹤 -鹦 -鹧 -鹫 -鹭 -鹰 -鹳 -鹵 -鹹 -鹼 -鹽 -鹿 -麂 -麋 -麒 -麓 -麗 -麝 -麟 -麥 -麦 -麩 -麴 -麵 -麸 -麺 -麻 -麼 -麽 -麾 -黃 -黄 -黍 -黎 -黏 -黑 -黒 -黔 -默 -黛 -黜 -黝 -點 -黠 -黨 -黯 -黴 -鼋 -鼎 -鼐 -鼓 -鼠 -鼬 -鼹 -鼻 -鼾 -齁 -齊 -齋 -齐 -齒 -齡 -齢 -齣 -齦 -齿 -龄 -龅 -龈 -龊 -龋 -龌 -龍 -龐 -龔 -龕 -龙 -龚 -龛 -龜 -龟 -︰ -︱ -︶ -︿ -﹁ -﹂ -﹍ -﹏ -﹐ -﹑ -﹒ -﹔ -﹕ -﹖ -﹗ -﹙ -﹚ -﹝ -﹞ -﹡ -﹣ -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -: -; -< -= -> -? -@ -[ -\ -] -^ -_ -` -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -{ -| -} -~ -。 -「 -」 -、 -・ -ッ -ー -イ -ク -シ -ス -ト -ノ -フ -ラ -ル -ン -゙ -゚ - ̄ -¥ -👍 -🔥 -😂 -😎 -... 
-yam -10 -2017 -12 -11 -2016 -20 -30 -15 -06 -lofter -##s -2015 -by -16 -14 -18 -13 -24 -17 -2014 -21 -##0 -22 -19 -25 -23 -com -100 -00 -05 -2013 -##a -03 -09 -08 -28 -##2 -50 -01 -04 -##1 -27 -02 -2012 -##3 -26 -##e -07 -##8 -##5 -##6 -##4 -##9 -##7 -29 -2011 -40 -##t -2010 -##o -##d -##i -2009 -##n -app -www -the -##m -31 -##c -##l -##y -##r -##g -2008 -60 -http -200 -qq -##p -80 -##f -google -pixnet -90 -cookies -tripadvisor -500 -##er -##k -35 -##h -facebook -2007 -2000 -70 -##b -of -##x -##u -45 -300 -iphone -32 -1000 -2006 -48 -ip -36 -in -38 -3d -##w -##ing -55 -ctrip -##on -##v -33 -##の -to -34 -400 -id -2005 -it -37 -windows -llc -top -99 -42 -39 -000 -led -at -##an -41 -51 -52 -46 -49 -43 -53 -44 -##z -android -58 -and -59 -2004 -56 -vr -##か -5000 -2003 -47 -blogthis -twitter -54 -##le -150 -ok -2018 -57 -75 -cn -no -ios -##in -##mm -##00 -800 -on -te -3000 -65 -2001 -360 -95 -ig -lv -120 -##ng -##を -##us -##に -pc -てす -── -600 -##te -85 -2002 -88 -##ed -html -ncc -wifi -email -64 -blog -is -##10 -##て -mail -online -##al -dvd -##ic -studio -##は -##℃ -##ia -##と -line -vip -72 -##q -98 -##ce -##en -for -##is -##ra -##es -##j -usb -net -cp -1999 -asia -4g -##cm -diy -new -3c -##お -ta -66 -language -vs -apple -tw -86 -web -##ne -ipad -62 -you -##re -101 -68 -##tion -ps -de -bt -pony -atm -##2017 -1998 -67 -##ch -ceo -##or -go -##na -av -pro -cafe -96 -pinterest -97 -63 -pixstyleme3c -##ta -more -said -##2016 -1997 -mp3 -700 -##ll -nba -jun -##20 -92 -tv -1995 -pm -61 -76 -nbsp -250 -##ie -linux -##ma -cd -110 -hd -##17 -78 -##ion -77 -6000 -am -##th -##st -94 -##se -##et -69 -180 -gdp -my -105 -81 -abc -89 -flash -79 -one -93 -1990 -1996 -##ck -gps -##も -##ly -web885 -106 -2020 -91 -##ge -4000 -1500 -xd -boss -isbn -1994 -org -##ry -me -love -##11 -0fork -73 -##12 -3g -##ter -##ar -71 -82 -##la -hotel -130 -1970 -pk -83 -87 -140 -ie -##os -##30 -##el -74 -##50 -seo -cpu -##ml -p2p -84 -may -##る -sun -tue -internet -cc -posted -youtube -##at -##ン -##man -ii -##ル -##15 -abs -nt -pdf -yahoo -ago -1980 -##it -news -mac -104 -##てす -##me -##り -java -1992 -spa -##de -##nt -hk -all -plus -la -1993 -##mb -##16 -##ve -west -##da -160 -air -##い -##ps -から -##to -1989 -logo -htc -php -https -fi -momo -##son -sat -##ke -##80 -ebd -suv -wi -day -apk -##88 -##um -mv -galaxy -wiki -or -brake -##ス -1200 -する -this -1991 -mon -##こ -❤2017 -po -##ない -javascript -life -home -june -##ss -system -900 -##ー -##0 -pp -1988 -world -fb -4k -br -##as -ic -ai -leonardo -safari -##60 -live -free -xx -wed -win7 -kiehl -##co -lg -o2o -##go -us -235 -1949 -mm -しい -vfm -kanye -##90 -##2015 -##id -jr -##ey -123 -rss -##sa -##ro -##am -##no -thu -fri -350 -##sh -##ki -103 -comments -name -##のて -##pe -##ine -max -1987 -8000 -uber -##mi -##ton -wordpress -office -1986 -1985 -##ment -107 -bd -win10 -##ld -##li -gmail -bb -dior -##rs -##ri -##rd -##ます -up -cad -##® -dr -して -read -##21 -をお -##io -##99 -url -1984 -pvc -paypal -show -policy -##40 -##ty -##18 -with -##★ -##01 -txt -102 -##ba -dna -from -post -mini -ar -taiwan -john -##ga -privacy -agoda -##13 -##ny -word -##24 -##22 -##by -##ur -##hz -1982 -##ang -265 -cookie -netscape -108 -##ka -##~ -##ad -house -share -note -ibm -code -hello -nike -sim -survey -##016 -1979 -1950 -wikia -##32 -##017 -5g -cbc -##tor -##kg -1983 -##rt -##14 -campaign -store -2500 -os -##ct -##ts -##° -170 -api -##ns -365 -excel -##な -##ao -##ら -##し -~~ -##nd -university -163 -には -518 -##70 -##ya -##il -##25 -pierre -ipo -0020 -897 -##23 -hotels -##ian -のお -125 -years -6606 -##ers -##26 -high 
-##day -time -##ay -bug -##line -##く -##す -##be -xp -talk2yam -yamservice -10000 -coco -##dy -sony -##ies -1978 -microsoft -david -people -##ha -1960 -instagram -intel -その -##ot -iso -1981 -##va -115 -##mo -##land -xxx -man -co -ltxsw -##ation -baby -220 -##pa -##ol -1945 -7000 -tag -450 -##ue -msn -##31 -oppo -##ト -##ca -control -##om -st -chrome -##ure -##ん -be -##き -lol -##19 -した -##bo -240 -lady -##100 -##way -##から -4600 -##ko -##do -##un -4s -corporation -168 -##ni -herme -##28 -cp -978 -##up -##06 -ui -##ds -ppt -admin -three -します -bbc -re -128 -##48 -ca -##015 -##35 -hp -##ee -tpp -##た -##ive -×× -root -##cc -##ました -##ble -##ity -adobe -park -114 -et -oled -city -##ex -##ler -##ap -china -##book -20000 -view -##ice -global -##km -your -hong -##mg -out -##ms -ng -ebay -##29 -menu -ubuntu -##cy -rom -##view -open -ktv -do -server -##lo -if -english -##ね -##5 -##oo -1600 -##02 -step1 -kong -club -135 -july -inc -1976 -mr -hi -##net -touch -##ls -##ii -michael -lcd -##05 -##33 -phone -james -step2 -1300 -ios9 -##box -dc -##2 -##ley -samsung -111 -280 -pokemon -css -##ent -##les -いいえ -##1 -s8 -atom -play -bmw -##said -sa -etf -ctrl -♥yoyo♥ -##55 -2025 -##2014 -##66 -adidas -amazon -1958 -##ber -##ner -visa -##77 -##der -1800 -connectivity -##hi -firefox -109 -118 -hr -so -style -mark -pop -ol -skip -1975 -as -##27 -##ir -##61 -190 -mba -##う -##ai -le -##ver -1900 -cafe2017 -lte -super -113 -129 -##ron -amd -like -##☆ -are -##ster -we -##sk -paul -data -international -##ft -longchamp -ssd -good -##ート -##ti -reply -##my -↓↓↓ -apr -star -##ker -source -136 -js -112 -get -force -photo -##one -126 -##2013 -##ow -link -bbs -1972 -goods -##lin -python -119 -##ip -game -##ics -##ません -blue -##● -520 -##45 -page -itunes -##03 -1955 -260 -1968 -gt -gif -618 -##ff -##47 -group -くたさい -about -bar -ganji -##nce -music -lee -not -1977 -1971 -1973 -##per -an -faq -comment -##って -days -##ock -116 -##bs -1974 -1969 -v1 -player -1956 -xbox -sql -fm -f1 -139 -##ah -210 -##lv -##mp -##000 -melody -1957 -##3 -550 -17life -199 -1966 -xml -market -##au -##71 -999 -##04 -what -gl -##95 -##age -tips -##68 -book -##ting -mysql -can -1959 -230 -##ung -wonderland -watch -10℃ -##ction -9000 -mar -mobile -1946 -1962 -article -##db -part -▲top -party -って -1967 -1964 -1948 -##07 -##ore -##op -この -dj -##78 -##38 -010 -main -225 -1965 -##ong -art -320 -ad -134 -020 -##73 -117 -pm2 -japan -228 -##08 -ts -1963 -##ica -der -sm -##36 -2019 -##wa -ct -##7 -##や -##64 -1937 -homemesh -search -##85 -##れは -##tv -##di -macbook -##9 -##くたさい -service -##♥ -type -った -750 -##ier -##si -##75 -##います -##ok -best -##ット -goris -lock -##った -cf -3m -big -##ut -ftp -carol -##vi -10 -1961 -happy -sd -##ac -122 -anti -pe -cnn -iii -1920 -138 -##ラ -1940 -esp -jan -tags -##98 -##51 -august -vol -##86 -154 -##™ -##fs -##れ -##sion -design -ac -##ム -press -jordan -ppp -that -key -check -##6 -##tt -##㎡ -1080p -##lt -power -##42 -1952 -##bc -vivi -##ック -he -133 -121 -jpg -##rry -201 -175 -3500 -1947 -nb -##ted -##rn -しています -1954 -usd -##t00 -master -##ンク -001 -model -##58 -al -##09 -1953 -##34 -ram -goo -ても -##ui -127 -1930 -red -##ary -rpg -item -##pm -##41 -270 -##za -project -##2012 -hot -td -blogabstract -##ger -##62 -650 -##44 -gr2 -##します -##m -black -electronic -nfc -year -asus -また -html5 -cindy -##hd -m3 -132 -esc -##od -booking -##53 -fed -tvb -##81 -##ina -mit -165 -##いる -chan -192 -distribution -next -になる -peter -bios -steam -cm -1941 -にも -pk10 -##ix -##65 -##91 -dec -nasa -##ana -icecat -00z -b1 -will -##46 -li -se -##ji -##み -##ard -oct 
-##ain -jp -##ze -##bi -cio -##56 -smart -h5 -##39 -##port -curve -vpn -##nm -##dia -utc -##あり -12345678910 -##52 -rmvb -chanel -a4 -miss -##and -##im -media -who -##63 -she -girl -5s -124 -vera -##して -class -vivo -king -##フ -##ei -national -ab -1951 -5cm -888 -145 -ipod -ap -1100 -5mm -211 -ms -2756 -##69 -mp4 -msci -##po -##89 -131 -mg -index -380 -##bit -##out -##zz -##97 -##67 -158 -apec -##8 -photoshop -opec -¥799 -ては -##96 -##tes -##ast -2g -○○ -##ール -¥2899 -##ling -##よ -##ory -1938 -##ical -kitty -content -##43 -step3 -##cn -win8 -155 -vc -1400 -iphone7 -robert -##した -tcl -137 -beauty -##87 -en -dollars -##ys -##oc -step -pay -yy -a1 -##2011 -##lly -##ks -##♪ -1939 -188 -download -1944 -sep -exe -ph -います -school -gb -center -pr -street -##board -uv -##37 -##lan -winrar -##que -##ua -##com -1942 -1936 -480 -gpu -##4 -ettoday -fu -tom -##54 -##ren -##via -149 -##72 -b2b -144 -##79 -##tch -rose -arm -mb -##49 -##ial -##nn -nvidia -step4 -mvp -00㎡ -york -156 -##イ -how -cpi -591 -2765 -gov -kg -joe -##xx -mandy -pa -##ser -copyright -fashion -1935 -don -##け -ecu -##ist -##art -erp -wap -have -##lm -talk -##ek -##ning -##if -ch -##ite -video -1943 -cs -san -iot -look -##84 -##2010 -##ku -october -##ux -trump -##hs -##ide -box -141 -first -##ins -april -##ight -##83 -185 -angel -protected -aa -151 -162 -x1 -m2 -##fe -##× -##ho -size -143 -min -ofo -fun -gomaji -ex -hdmi -food -dns -march -chris -kevin -##のか -##lla -##pp -##ec -ag -ems -6s -720p -##rm -##ham -off -##92 -asp -team -fandom -ed -299 -▌♥ -##ell -info -されています -##82 -sina -4066 -161 -##able -##ctor -330 -399 -315 -dll -rights -ltd -idc -jul -3kg -1927 -142 -ma -surface -##76 -##ク -~~~ -304 -mall -eps -146 -green -##59 -map -space -donald -v2 -sodu -##light -1931 -148 -1700 -まて -310 -reserved -htm -##han -##57 -2d -178 -mod -##ise -##tions -152 -ti -##shi -doc -1933 -icp -055 -wang -##ram -shopping -aug -##pi -##well -now -wam -b2 -からお -##hu -236 -1928 -##gb -266 -f2 -##93 -153 -mix -##ef -##uan -bwl -##plus -##res -core -##ess -tea -5℃ -hktvmall -nhk -##ate -list -##ese -301 -feb -4m -inn -ての -nov -159 -12345 -daniel -##ci -pass -##bet -##nk -coffee -202 -ssl -airbnb -##ute -fbi -woshipm -skype -ea -cg -sp -##fc -##www -yes -edge -alt -007 -##94 -fpga -##ght -##gs -iso9001 -さい -##ile -##wood -##uo -image -lin -icon -american -##em -1932 -set -says -##king -##tive -blogger -##74 -なと -256 -147 -##ox -##zy -##red -##ium -##lf -nokia -claire -##リ -##ding -november -lohas -##500 -##tic -##マ -##cs -##ある -##che -##ire -##gy -##ult -db -january -win -##カ -166 -road -ptt -##ま -##つ -198 -##fa -##mer -anna -pchome -はい -udn -ef -420 -##time -##tte -2030 -##ア -g20 -white -かかります -1929 -308 -garden -eleven -di -##おります -chen -309b -777 -172 -young -cosplay -ちてない -4500 -bat -##123 -##tra -##ては -kindle -npc -steve -etc -##ern -##| -call -xperia -ces -travel -sk -s7 -##ous -1934 -##int -みいたたけます -183 -edu -file -cho -qr -##car -##our -186 -##ant -##d -eric -1914 -rends -##jo -##する -mastercard -##2000 -kb -##min -290 -##ino -vista -##ris -##ud -jack -2400 -##set -169 -pos -1912 -##her -##ou -taipei -しく -205 -beta -##ませんか -232 -##fi -express -255 -body -##ill -aphojoy -user -december -meiki -##ick -tweet -richard -##av -##ᆫ -iphone6 -##dd -ちてすか -views -##mark -321 -pd -##00 -times -##▲ -level -##ash -10g -point -5l -##ome -208 -koreanmall -##ak -george -q2 -206 -wma -tcp -##200 -スタッフ -full -mlb -##lle -##watch -tm -run -179 -911 -smith -business -##und -1919 -color -##tal -222 -171 -##less -moon -4399 -##rl -update -pcb -shop -499 -157 -little -なし 
-end -##mhz -van -dsp -easy -660 -##house -##key -history -##o -oh -##001 -##hy -##web -oem -let -was -##2009 -##gg -review -##wan -182 -##°c -203 -uc -title -##val -united -233 -2021 -##ons -doi -trivago -overdope -sbs -##ance -##ち -grand -special -573032185 -imf -216 -wx17house -##so -##ーム -audi -##he -london -william -##rp -##ake -science -beach -cfa -amp -ps4 -880 -##800 -##link -##hp -crm -ferragamo -bell -make -##eng -195 -under -zh -photos -2300 -##style -##ント -via -176 -da -##gi -company -i7 -##ray -thomas -370 -ufo -i5 -##max -plc -ben -back -research -8g -173 -mike -##pc -##ッフ -september -189 -##ace -vps -february -167 -pantos -wp -lisa -1921 -★★ -jquery -night -long -offer -##berg -##news -1911 -##いて -ray -fks -wto -せます -over -164 -340 -##all -##rus -1924 -##888 -##works -blogtitle -loftpermalink -##→ -187 -martin -test -ling -km -##め -15000 -fda -v3 -##ja -##ロ -wedding -かある -outlet -family -##ea -をこ -##top -story -##ness -salvatore -##lu -204 -swift -215 -room -している -oracle -##ul -1925 -sam -b2c -week -pi -rock -##のは -##a -##けと -##ean -##300 -##gle -cctv -after -chinese -##back -powered -x2 -##tan -1918 -##nes -##イン -canon -only -181 -##zi -##las -say -##oe -184 -##sd -221 -##bot -##world -##zo -sky -made -top100 -just -1926 -pmi -802 -234 -gap -##vr -177 -les -174 -▲topoct -ball -vogue -vi -ing -ofweek -cos -##list -##ort -▲topmay -##なら -##lon -として -last -##tc -##of -##bus -##gen -real -eva -##コ -a3 -nas -##lie -##ria -##coin -##bt -▲topapr -his -212 -cat -nata -vive -health -⋯⋯ -drive -sir -▲topmar -du -cup -##カー -##ook -##よう -##sy -alex -msg -tour -しました -3ce -##word -193 -ebooks -r8 -block -318 -##より -2200 -nice -pvp -207 -months -1905 -rewards -##ther -1917 -0800 -##xi -##チ -##sc -micro -850 -gg -blogfp -op -1922 -daily -m1 -264 -true -##bb -ml -##tar -##のお -##ky -anthony -196 -253 -##yo -state -218 -##ara -##aa -##rc -##tz -##ston -より -gear -##eo -##ade -ge -see -1923 -##win -##ura -ss -heart -##den -##ita -down -##sm -el -png -2100 -610 -rakuten -whatsapp -bay -dream -add -##use -680 -311 -pad -gucci -mpv -##ode -##fo -island -▲topjun -##▼ -223 -jason -214 -chicago -##❤ -しの -##hone -io -##れる -##ことか -sogo -be2 -##ology -990 -cloud -vcd -##con -2~3 -##ford -##joy -##kb -##こさいます -##rade -but -##ach -docker -##ful -rfid -ul -##ase -hit -ford -##star -580 -##○ -11 -a2 -sdk -reading -edited -##are -cmos -##mc -238 -siri -light -##ella -##ため -bloomberg -##read -pizza -##ison -jimmy -##vm -college -node -journal -ba -18k -##play -245 -##cer -20 -magic -##yu -191 -jump -288 -tt -##ings -asr -##lia -3200 -step5 -network -##cd -mc -いします -1234 -pixstyleme -273 -##600 -2800 -money -★★★★★ -1280 -12 -430 -bl -みの -act -##tus -tokyo -##rial -##life -emba -##ae -saas -tcs -##rk -##wang -summer -##sp -ko -##ving -390 -premium -##その -netflix -##ヒ -uk -mt -##lton -right -frank -two -209 -える -##ple -##cal -021 -##んな -##sen -##ville -hold -nexus -dd -##ius -てお -##mah -##なく -tila -zero -820 -ce -##tin -resort -##ws -charles -old -p10 -5d -report -##360 -##ru -##には -bus -vans -lt -##est -pv -##レ -links -rebecca -##ツ -##dm -azure -##365 -きな -limited -bit -4gb -##mon -1910 -moto -##eam -213 -1913 -var -eos -なとの -226 -blogspot -された -699 -e3 -dos -dm -fc -##ments -##ik -##kw -boy -##bin -##ata -960 -er -##せ -219 -##vin -##tu -##ula -194 -##∥ -station -##ろ -##ature -835 -files -zara -hdr -top10 -nature -950 -magazine -s6 -marriott -##シ -avira -case -##っと -tab -##ran -tony -##home -oculus -im -##ral -jean -saint -cry -307 -rosie -##force -##ini -ice -##bert -のある -##nder -##mber -pet -2600 -##◆ -plurk 
-▲topdec -##sis -00kg -▲topnov -720 -##ence -tim -##ω -##nc -##ても -##name -log -ips -great -ikea -malaysia -unix -##イト -3600 -##ncy -##nie -12000 -akb48 -##ye -##oid -404 -##chi -##いた -oa -xuehai -##1000 -##orm -##rf -275 -さん -##ware -##リー -980 -ho -##pro -text -##era -560 -bob -227 -##ub -##2008 -8891 -scp -avi -##zen -2022 -mi -wu -museum -qvod -apache -lake -jcb -▲topaug -★★★ -ni -##hr -hill -302 -ne -weibo -490 -ruby -##ーシ -##ヶ -##row -4d -▲topjul -iv -##ish -github -306 -mate -312 -##スト -##lot -##ane -andrew -のハイト -##tina -t1 -rf -ed2k -##vel -##900 -way -final -りの -ns -5a -705 -197 -##メ -sweet -bytes -##ene -▲topjan -231 -##cker -##2007 -##px -100g -topapp -229 -helpapp -rs -low -14k -g4g -care -630 -ldquo -あり -##fork -leave -rm -edition -##gan -##zon -##qq -▲topsep -##google -##ism -gold -224 -explorer -##zer -toyota -category -select -visual -##labels -restaurant -##md -posts -s1 -##ico -もっと -angelababy -123456 -217 -sports -s3 -mbc -1915 -してくたさい -shell -x86 -candy -##new -kbs -face -xl -470 -##here -4a -swissinfo -v8 -▲topfeb -dram -##ual -##vice -3a -##wer -sport -q1 -ios10 -public -int -card -##c -ep -au -rt -##れた -1080 -bill -##mll -kim -30 -460 -wan -##uk -##ミ -x3 -298 -0t -scott -##ming -239 -e5 -##3d -h7n9 -worldcat -brown -##あります -##vo -##led -##580 -##ax -249 -410 -##ert -paris -##~6 -polo -925 -##lr -599 -##ナ -capital -##hing -bank -cv -1g -##chat -##s -##たい -adc -##ule -2m -##e -digital -hotmail -268 -##pad -870 -bbq -quot -##ring -before -wali -##まて -mcu -2k -2b -という -costco -316 -north -333 -switch -##city -##p -philips -##mann -management -panasonic -##cl -##vd -##ping -##rge -alice -##lk -##ましょう -css3 -##ney -vision -alpha -##ular -##400 -##tter -lz -にお -##ありません -mode -gre -1916 -pci -##tm -237 -1~2 -##yan -##そ -について -##let -##キ -work -war -coach -ah -mary -##ᅵ -huang -##pt -a8 -pt -follow -##berry -1895 -##ew -a5 -ghost -##ション -##wn -##og -south -##code -girls -##rid -action -villa -git -r11 -table -games -##cket -error -##anonymoussaid -##ag -here -##ame -##gc -qa -##■ -##lis -gmp -##gin -vmalife -##cher -yu -wedding -##tis -demo -dragon -530 -soho -social -bye -##rant -river -orz -acer -325 -##↑ -##ース -##ats -261 -del -##ven -440 -ups -##ように -##ター -305 -value -macd -yougou -##dn -661 -##ano -ll -##urt -##rent -continue -script -##wen -##ect -paper -263 -319 -shift -##chel -##フト -##cat -258 -x5 -fox -243 -##さん -car -aaa -##blog -loading -##yn -##tp -kuso -799 -si -sns -イカせるテンマ -ヒンクテンマ3 -rmb -vdc -forest -central -prime -help -ultra -##rmb -##ような -241 -square -688 -##しい -のないフロクに -##field -##reen -##ors -##ju -c1 -start -510 -##air -##map -cdn -##wo -cba -stephen -m8 -100km -##get -opera -##base -##ood -vsa -com™ -##aw -##ail -251 -なのて -count -t2 -##ᅡ -##een -2700 -hop -##gp -vsc -tree -##eg -##ose -816 -285 -##ories -##shop -alphago -v4 -1909 -simon -##ᆼ -fluke62max -zip -スホンサー -##sta -louis -cr -bas -##~10 -bc -##yer -hadoop -##ube -##wi -1906 -0755 -hola -##low -place -centre -5v -d3 -##fer -252 -##750 -##media -281 -540 -0l -exchange -262 -series -##ハー -##san -eb -##bank -##k -q3 -##nge -##mail -take -##lp -259 -1888 -client -east -cache -event -vincent -##ールを -きを -##nse -sui -855 -adchoice -##и -##stry -##なたの -246 -##zone -ga -apps -sea -##ab -248 -cisco -##タ -##rner -kymco -##care -dha -##pu -##yi -minkoff -royal -p1 -への -annie -269 -collection -kpi -playstation -257 -になります -866 -bh -##bar -queen -505 -radio -1904 -andy -armani -##xy -manager -iherb -##ery -##share -spring -raid -johnson -1908 -##ob -volvo -hall -##ball -v6 -our -taylor -##hk -bi -242 -##cp 
-kate -bo -water -technology -##rie -サイトは -277 -##ona -##sl -hpv -303 -gtx -hip -rdquo -jayz -stone -##lex -##rum -namespace -##やり -620 -##ale -##atic -des -##erson -##ql -##ves -##type -enter -##この -##てきます -d2 -##168 -##mix -##bian -との -a9 -jj -ky -##lc -access -movie -##hc -リストに -tower -##ration -##mit -ます -##nch -ua -tel -prefix -##o2 -1907 -##point -1901 -ott -~10 -##http -##ury -baidu -##ink -member -##logy -bigbang -nownews -##js -##shot -##tb -##こと -247 -eba -##tics -##lus -ける -v5 -spark -##ama -there -##ions -god -##lls -##down -hiv -##ress -burberry -day2 -##kv -◆◆ -jeff -related -film -edit -joseph -283 -##ark -cx -32gb -order -g9 -30000 -##ans -##tty -s5 -##bee -かあります -thread -xr -buy -sh -005 -land -spotify -mx -##ari -276 -##verse -×email -sf -why -##ことて -244 -7headlines -nego -sunny -dom -exo -401 -666 -positioning -fit -rgb -##tton -278 -kiss -alexa -adam -lp -みリストを -##g -mp -##ties -##llow -amy -##du -np -002 -institute -271 -##rth -##lar -2345 -590 -##des -sidebar -15 -imax -site -##cky -##kit -##ime -##009 -season -323 -##fun -##ンター -##ひ -gogoro -a7 -pu -lily -fire -twd600 -##ッセーシを -いて -##vis -30ml -##cture -##をお -information -##オ -close -friday -##くれる -yi -nick -てすか -##tta -##tel -6500 -##lock -cbd -economy -254 -かお -267 -tinker -double -375 -8gb -voice -##app -oops -channel -today -985 -##right -raw -xyz -##+ -jim -edm -##cent -7500 -supreme -814 -ds -##its -##asia -dropbox -##てすか -##tti -books -272 -100ml -##tle -##ller -##ken -##more -##boy -sex -309 -##dom -t3 -##ider -##なります -##unch -1903 -810 -feel -5500 -##かった -##put -により -s2 -mo -##gh -men -ka -amoled -div -##tr -##n1 -port -howard -##tags -ken -dnf -##nus -adsense -##а -ide -##へ -buff -thunder -##town -##ique -has -##body -auto -pin -##erry -tee -てした -295 -number -##the -##013 -object -psp -cool -udnbkk -16gb -##mic -miui -##tro -most -r2 -##alk -##nity -1880 -±0 -##いました -428 -s4 -law -version -##oa -n1 -sgs -docomo -##tf -##ack -henry -fc2 -##ded -##sco -##014 -##rite -286 -0mm -linkedin -##ada -##now -wii -##ndy -ucbug -##◎ -sputniknews -legalminer -##ika -##xp -2gb -##bu -q10 -oo -b6 -come -##rman -cheese -ming -maker -##gm -nikon -##fig -ppi -kelly -##ります -jchere -てきます -ted -md -003 -fgo -tech -##tto -dan -soc -##gl -##len -hair -earth -640 -521 -img -##pper -##a1 -##てきる -##ロク -acca -##ition -##ference -suite -##ig -outlook -##mond -##cation -398 -##pr -279 -101vip -358 -##999 -282 -64gb -3800 -345 -airport -##over -284 -##おり -jones -##ith -lab -##su -##いるのて -co2 -town -piece -##llo -no1 -vmware -24h -##qi -focus -reader -##admin -##ora -tb -false -##log -1898 -know -lan -838 -##ces -f4 -##ume -motel -stop -##oper -na -flickr -netcomponents -##af -##─ -pose -williams -local -##ound -##cg -##site -##iko -いお -274 -5m -gsm -con -##ath -1902 -friends -##hip -cell -317 -##rey -780 -cream -##cks -012 -##dp -facebooktwitterpinterestgoogle -sso -324 -shtml -song -swiss -##mw -##キンク -lumia -xdd -string -tiffany -522 -marc -られた -insee -russell -sc -dell -##ations -ok -camera -289 -##vs -##flow -##late -classic -287 -##nter -stay -g1 -mtv -512 -##ever -##lab -##nger -qe -sata -ryan -d1 -50ml -cms -##cing -su -292 -3300 -editor -296 -##nap -security -sunday -association -##ens -##700 -##bra -acg -##かり -sofascore -とは -mkv -##ign -jonathan -gary -build -labels -##oto -tesla -moba -qi -gohappy -general -ajax -1024 -##かる -サイト -society -##test -##urs -wps -fedora -##ich -mozilla -328 -##480 -##dr -usa -urn -##lina -##r -grace -##die -##try -##ader -1250 -##なり -elle -570 -##chen -##ᆯ -price -##ten -uhz -##ough -eq -##hen 
-states -push -session -balance -wow -506 -##cus -##py -when -##ward -##ep -34e -wong -library -prada -##サイト -##cle -running -##ree -313 -ck -date -q4 -##ctive -##ool -##> -mk -##ira -##163 -388 -die -secret -rq -dota -buffet -は1ヶ -e6 -##ez -pan -368 -ha -##card -##cha -2a -##さ -alan -day3 -eye -f3 -##end -france -keep -adi -rna -tvbs -##ala -solo -nova -##え -##tail -##ょう -support -##ries -##なる -##ved -base -copy -iis -fps -##ways -hero -hgih -profile -fish -mu -ssh -entertainment -chang -##wd -click -cake -##ond -pre -##tom -kic -pixel -##ov -##fl -product -6a -##pd -dear -##gate -es -yumi -audio -##² -##sky -echo -bin -where -##ture -329 -##ape -find -sap -isis -##なと -nand -##101 -##load -##ream -band -a6 -525 -never -##post -festival -50cm -##we -555 -guide -314 -zenfone -##ike -335 -gd -forum -jessica -strong -alexander -##ould -software -allen -##ious -program -360° -else -lohasthree -##gar -することかてきます -please -##れます -rc -##ggle -##ric -bim -50000 -##own -eclipse -355 -brian -3ds -##side -061 -361 -##other -##ける -##tech -##ator -485 -engine -##ged -##t -plaza -##fit -cia -ngo -westbrook -shi -tbs -50mm -##みませんか -sci -291 -reuters -##ily -contextlink -##hn -af -##cil -bridge -very -##cel -1890 -cambridge -##ize -15g -##aid -##data -790 -frm -##head -award -butler -##sun -meta -##mar -america -ps3 -puma -pmid -##すか -lc -670 -kitchen -##lic -オーフン5 -きなしソフトサーヒス -そして -day1 -future -★★★★ -##text -##page -##rris -pm1 -##ket -fans -##っています -1001 -christian -bot -kids -trackback -##hai -c3 -display -##hl -n2 -1896 -idea -さんも -##sent -airmail -##ug -##men -pwm -けます -028 -##lution -369 -852 -awards -schemas -354 -asics -wikipedia -font -##tional -##vy -c2 -293 -##れている -##dget -##ein -っている -contact -pepper -スキル -339 -##~5 -294 -##uel -##ument -730 -##hang -みてす -q5 -##sue -rain -##ndi -wei -swatch -##cept -わせ -331 -popular -##ste -##tag -p2 -501 -trc -1899 -##west -##live -justin -honda -ping -messenger -##rap -v9 -543 -##とは -unity -appqq -はすへて -025 -leo -##tone -##テ -##ass -uniqlo -##010 -502 -her -jane -memory -moneydj -##tical -human -12306 -していると -##m2 -coc -miacare -##mn -tmt -##core -vim -kk -##may -fan -target -use -too -338 -435 -2050 -867 -737 -fast -##2c -services -##ope -omega -energy -##わ -pinkoi -1a -##なから -##rain -jackson -##ement -##シャンルの -374 -366 -そんな -p9 -rd -##ᆨ -1111 -##tier -##vic -zone -##│ -385 -690 -dl -isofix -cpa -m4 -322 -kimi -めて -davis -##lay -lulu -##uck -050 -weeks -qs -##hop -920 -##n -ae -##ear -~5 -eia -405 -##fly -korea -jpeg -boost -##ship -small -##リア -1860 -eur -297 -425 -valley -##iel -simple -##ude -rn -k2 -##ena -されます -non -patrick -しているから -##ナー -feed -5757 -30g -process -well -qqmei -##thing -they -aws -lu -pink -##ters -##kin -または -board -##vertisement -wine -##ien -unicode -##dge -r1 -359 -##tant -いを -##twitter -##3c -cool1 -される -##れて -##l -isp -##012 -standard -45㎡2 -402 -##150 -matt -##fu -326 -##iner -googlemsn -pixnetfacebookyahoo -##ラン -x7 -886 -##uce -メーカー -sao -##ev -##きました -##file -9678 -403 -xddd -shirt -6l -##rio -##hat -3mm -givenchy -ya -bang -##lio -monday -crystal -ロクイン -##abc -336 -head -890 -ubuntuforumwikilinuxpastechat -##vc -##~20 -##rity -cnc -7866 -ipv6 -null -1897 -##ost -yang -imsean -tiger -##fet -##ンス -352 -##= -dji -327 -ji -maria -##come -##んて -foundation -3100 -##beth -##なった -1m -601 -active -##aft -##don -3p -sr -349 -emma -##khz -living -415 -353 -1889 -341 -709 -457 -sas -x6 -##face -pptv -x4 -##mate -han -sophie -##jing -337 -fifa -##mand -other -sale -inwedding -##gn -てきちゃいます -##mmy -##pmlast -bad -nana -nbc -してみてくたさいね 
-なとはお -##wu -##かあります -##あ -note7 -single -##340 -せからこ -してくたさい♪この -しにはとんとんワークケートを -するとあなたにもっとマッチした -ならワークケートへ -もみつかっちゃうかも -ワークケートの -##bel -window -##dio -##ht -union -age -382 -14 -##ivity -##y -コメント -domain -neo -##isa -##lter -5k -f5 -steven -##cts -powerpoint -tft -self -g2 -ft -##テル -zol -##act -mwc -381 -343 -もう -nbapop -408 -てある -eds -ace -##room -previous -author -tomtom -il -##ets -hu -financial -☆☆☆ -っています -bp -5t -chi -1gb -##hg -fairmont -cross -008 -gay -h2 -function -##けて -356 -also -1b -625 -##ータ -##raph -1894 -3~5 -##ils -i3 -334 -avenue -##host -による -##bon -##tsu -message -navigation -50g -fintech -h6 -##ことを -8cm -##ject -##vas -##firm -credit -##wf -xxxx -form -##nor -##space -huawei -plan -json -sbl -##dc -machine -921 -392 -wish -##120 -##sol -windows7 -edward -##ために -development -washington -##nsis -lo -818 -##sio -##ym -##bor -planet -##~8 -##wt -ieee -gpa -##めて -camp -ann -gm -##tw -##oka -connect -##rss -##work -##atus -wall -chicken -soul -2mm -##times -fa -##ather -##cord -009 -##eep -hitachi -gui -harry -##pan -e1 -disney -##press -##ーション -wind -386 -frigidaire -##tl -liu -hsu -332 -basic -von -ev -いた -てきる -スホンサーサイト -learning -##ull -expedia -archives -change -##wei -santa -cut -ins -6gb -turbo -brand -cf1 -508 -004 -return -747 -##rip -h1 -##nis -##をこ -128gb -##にお -3t -application -しており -emc -rx -##oon -384 -quick -412 -15058 -wilson -wing -chapter -##bug -beyond -##cms -##dar -##oh -zoom -e2 -trip -sb -##nba -rcep -342 -aspx -ci -080 -gc -gnu -める -##count -advanced -dance -dv -##url -##ging -367 -8591 -am09 -shadow -battle -346 -##i -##cia -##という -emily -##のてす -##tation -host -ff -techorz -sars -##mini -##mporary -##ering -nc -4200 -798 -##next -cma -##mbps -##gas -##ift -##dot -##ィ -455 -##~17 -amana -##りの -426 -##ros -ir -00㎡1 -##eet -##ible -##↓ -710 -ˋ▽ˊ -##aka -dcs -iq -##v -l1 -##lor -maggie -##011 -##iu -588 -##~1 -830 -##gt -1tb -articles -create -##burg -##iki -database -fantasy -##rex -##cam -dlc -dean -##you -hard -path -gaming -victoria -maps -cb -##lee -##itor -overchicstoretvhome -systems -##xt -416 -p3 -sarah -760 -##nan -407 -486 -x9 -install -second -626 -##ann -##ph -##rcle -##nic -860 -##nar -ec -##とう -768 -metro -chocolate -##rian -~4 -##table -##しています -skin -##sn -395 -mountain -##0mm -inparadise -6m -7x24 -ib -4800 -##jia -eeworld -creative -g5 -g3 -357 -parker -ecfa -village -からの -18000 -sylvia -サーヒス -hbl -##ques -##onsored -##x2 -##きます -##v4 -##tein -ie6 -383 -##stack -389 -ver -##ads -##baby -sound -bbe -##110 -##lone -##uid -ads -022 -gundam -351 -thinkpad -006 -scrum -match -##ave -mems -##470 -##oy -##なりました -##talk -glass -lamigo -span -##eme -job -##a5 -jay -wade -kde -498 -##lace -ocean -tvg -##covery -##r3 -##ners -##rea -junior -think -##aine -cover -##ision -##sia -↓↓ -##bow -msi -413 -458 -406 -##love -711 -801 -soft -z2 -##pl -456 -1840 -mobil -mind -##uy -427 -nginx -##oi -めた -##rr -6221 -##mple -##sson -##ーシてす -371 -##nts -91tv -comhd -crv3000 -##uard -1868 -397 -deep -lost -field -gallery -##bia -rate -spf -redis -traction -930 -icloud -011 -なら -fe -jose -372 -##tory -into -sohu -fx -899 -379 -kicstart2 -##hia -すく -##~3 -##sit -ra -24 -##walk -##xure -500g -##pact -pacific -xa -natural -carlo -##250 -##walker -1850 -##can -cto -gigi -516 -##サー -pen -##hoo -ob -matlab -##b -##yy -13913459 -##iti -mango -##bbs -sense -c5 -oxford -##ニア -walker -jennifer -##ola -course -##bre -701 -##pus -##rder -lucky -075 -##ぁ -ivy -なお -##nia -sotheby -side -##ugh -joy -##orage -##ush -##bat -##dt -364 -r9 -##2d -##gio -511 -country -wear 
-##lax -##~7 -##moon -393 -seven -study -411 -348 -lonzo -8k -##ェ -evolution -##イフ -##kk -gs -kd -##レス -arduino -344 -b12 -##lux -arpg -##rdon -cook -##x5 -dark -five -##als -##ida -とても -sign -362 -##ちの -something -20mm -##nda -387 -##posted -fresh -tf -1870 -422 -cam -##mine -##skip -##form -##ssion -education -394 -##tee -dyson -stage -##jie -want -##night -epson -pack -あります -##ppy -テリヘル -##█ -wd -##eh -##rence -left -##lvin -golden -mhz -discovery -##trix -##n2 -loft -##uch -##dra -##sse -speed -~1 -1mdb -sorry -welcome -##urn -wave -gaga -##lmer -teddy -##160 -トラックハック -せよ -611 -##f2016 -378 -rp -##sha -rar -##あなたに -##きた -840 -holiday -##ュー -373 -074 -##vg -##nos -##rail -gartner -gi -6p -##dium -kit -488 -b3 -eco -##ろう -20g -sean -##stone -autocad -nu -##np -f16 -write -029 -m5 -##ias -images -atp -##dk -fsm -504 -1350 -ve -52kb -##xxx -##のに -##cake -414 -unit -lim -ru -1v -##ification -published -angela -16g -analytics -ak -##q -##nel -gmt -##icon -again -##₂ -##bby -ios11 -445 -かこさいます -waze -いてす -##ハ -9985 -##ust -##ティー -framework -##007 -iptv -delete -52sykb -cl -wwdc -027 -30cm -##fw -##ての -1389 -##xon -brandt -##ses -##dragon -tc -vetements -anne -monte -modern -official -##へて -##ere -##nne -##oud -もちろん -50 -etnews -##a2 -##graphy -421 -863 -##ちゃん -444 -##rtex -##てお -l2 -##gma -mount -ccd -たと -archive -morning -tan -ddos -e7 -##ホ -day4 -##ウ -gis -453 -its -495 -factory -bruce -pg -##ito -ってくたさい -guest -cdma -##lling -536 -n3 -しかし -3~4 -mega -eyes -ro -13 -women -dac -church -##jun -singapore -##facebook -6991 -starbucks -##tos -##stin -##shine -zen -##mu -tina -20℃ -1893 -##たけて -503 -465 -request -##gence -qt -##っ -1886 -347 -363 -q7 -##zzi -diary -##tore -409 -##ead -468 -cst -##osa -canada -agent -va -##jiang -##ちは -##ーク -##lam -sg -##nix -##sday -##よって -g6 -##master -bing -##zl -charlie -16 -8mm -nb40 -##ーン -thai -##ルフ -ln284ct -##itz -##2f -bonnie -##food -##lent -originals -##stro -##lts -418 -∟∣ -##bscribe -children -ntd -yesstyle -##かも -hmv -##tment -d5 -2cm -arts -sms -##pn -##я -##いい -topios9 -539 -lifestyle -virtual -##ague -xz -##deo -muji -024 -unt -##nnis -##ᅩ -faq1 -1884 -396 -##ette -fly -64㎡ -はしめまして -441 -curry -##pop -のこ -release -##← -##◆◆ -##cast -073 -ありな -500ml -##ews -5c -##stle -ios7 -##ima -787 -dog -lenovo -##r4 -roger -013 -cbs -vornado -100m -417 -##desk -##クok -##ald -1867 -9595 -2900 -##van -oil -##x -some -break -common -##jy -##lines -g7 -twice -419 -ella -nano -belle -にこ -##mes -##self -##note -jb -##ことかてきます -benz -##との -##ova -451 -save -##wing -##ますのて -kai -りは -##hua -##rect -rainer -##unge -448 -##0m -adsl -##かな -guestname -##uma -##kins -##zu -tokichoi -##price -county -##med -##mus -rmk -391 -address -vm -えて -openload -##group -##hin -##iginal -amg -urban -##oz -jobs -emi -##public -beautiful -##sch -album -##dden -##bell -jerry -works -hostel -miller -##drive -##rmin -##10 -376 -boot -828 -##370 -##fx -##cm~ -1885 -##nome -##ctionary -##oman -##lish -##cr -##hm -433 -##how -432 -francis -xi -c919 -b5 -evernote -##uc -vga -##3000 -coupe -##urg -##cca -##uality -019 -6g -れる -multi -##また -##ett -em -hey -##ani -##tax -##rma -inside -than -740 -leonnhurt -##jin -ict -れた -bird -notes -200mm -くの -##dical -##lli -result -442 -iu -ee -438 -smap -gopro -##last -yin -pure -998 -32g -けた -5kg -##dan -##rame -mama -##oot -bean -marketing -##hur -2l -bella -sync -xuite -##ground -515 -discuz -##getrelax -##ince -##bay -##5s -cj -##イス -gmat -apt -##pass -jing -##rix -c4 -rich -##とても -niusnews -##ello -bag -770 -##eting -##mobile -18 -culture -015 -##のてすか 
-377 -1020 -area -##ience -616 -details -gp -universal -silver -dit -はお -private -ddd -u11 -kanshu -##ified -fung -##nny -dx -##520 -tai -475 -023 -##fr -##lean -3s -##pin -429 -##rin -25000 -ly -rick -##bility -usb3 -banner -##baru -##gion -metal -dt -vdf -1871 -karl -qualcomm -bear -1010 -oldid -ian -jo -##tors -population -##ernel -1882 -mmorpg -##mv -##bike -603 -##© -ww -friend -##ager -exhibition -##del -##pods -fpx -structure -##free -##tings -kl -##rley -##copyright -##mma -california -3400 -orange -yoga -4l -canmake -honey -##anda -##コメント -595 -nikkie -##ルハイト -dhl -publishing -##mall -##gnet -20cm -513 -##クセス -##┅ -e88 -970 -##dog -fishbase -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##+ -##, -##- -##. -##/ -##: -##; -##< -##= -##> -##? -##@ -##[ -##\ -##] -##^ -##_ -##{ -##| -##} -##~ -##£ -##¤ -##¥ -##§ -##« -##± -##³ -##µ -##· -##¹ -##º -##» -##¼ -##ß -##æ -##÷ -##ø -##đ -##ŋ -##ɔ -##ə -##ɡ -##ʰ -##ˇ -##ˈ -##ˊ -##ˋ -##ˍ -##ː -##˙ -##˚ -##ˢ -##α -##β -##γ -##δ -##ε -##η -##θ -##ι -##κ -##λ -##μ -##ν -##ο -##π -##ρ -##ς -##σ -##τ -##υ -##φ -##χ -##ψ -##б -##в -##г -##д -##е -##ж -##з -##к -##л -##м -##н -##о -##п -##р -##с -##т -##у -##ф -##х -##ц -##ч -##ш -##ы -##ь -##і -##ا -##ب -##ة -##ت -##د -##ر -##س -##ع -##ل -##م -##ن -##ه -##و -##ي -##۩ -##ก -##ง -##น -##ม -##ย -##ร -##อ -##า -##เ -##๑ -##་ -##ღ -##ᄀ -##ᄁ -##ᄂ -##ᄃ -##ᄅ -##ᄆ -##ᄇ -##ᄈ -##ᄉ -##ᄋ -##ᄌ -##ᄎ -##ᄏ -##ᄐ -##ᄑ -##ᄒ -##ᅢ -##ᅣ -##ᅥ -##ᅦ -##ᅧ -##ᅨ -##ᅪ -##ᅬ -##ᅭ -##ᅮ -##ᅯ -##ᅲ -##ᅳ -##ᅴ -##ᆷ -##ᆸ -##ᆺ -##ᆻ -##ᗜ -##ᵃ -##ᵉ -##ᵍ -##ᵏ -##ᵐ -##ᵒ -##ᵘ -##‖ -##„ -##† -##• -##‥ -##‧ -##
 -##‰ -##′ -##″ -##‹ -##› -##※ -##‿ -##⁄ -##ⁱ -##⁺ -##ⁿ -##₁ -##₃ -##₄ -##€ -##№ -##ⅰ -##ⅱ -##ⅲ -##ⅳ -##ⅴ -##↔ -##↗ -##↘ -##⇒ -##∀ -##− -##∕ -##∙ -##√ -##∞ -##∟ -##∠ -##∣ -##∩ -##∮ -##∶ -##∼ -##∽ -##≈ -##≒ -##≡ -##≤ -##≥ -##≦ -##≧ -##≪ -##≫ -##⊙ -##⋅ -##⋈ -##⋯ -##⌒ -##① -##② -##③ -##④ -##⑤ -##⑥ -##⑦ -##⑧ -##⑨ -##⑩ -##⑴ -##⑵ -##⑶ -##⑷ -##⑸ -##⒈ -##⒉ -##⒊ -##⒋ -##ⓒ -##ⓔ -##ⓘ -##━ -##┃ -##┆ -##┊ -##┌ -##└ -##├ -##┣ -##═ -##║ -##╚ -##╞ -##╠ -##╭ -##╮ -##╯ -##╰ -##╱ -##╳ -##▂ -##▃ -##▅ -##▇ -##▉ -##▋ -##▌ -##▍ -##▎ -##□ -##▪ -##▫ -##▬ -##△ -##▶ -##► -##▽ -##◇ -##◕ -##◠ -##◢ -##◤ -##☀ -##☕ -##☞ -##☺ -##☼ -##♀ -##♂ -##♠ -##♡ -##♣ -##♦ -##♫ -##♬ -##✈ -##✔ -##✕ -##✖ -##✦ -##✨ -##✪ -##✰ -##✿ -##❀ -##➜ -##➤ -##⦿ -##、 -##。 -##〃 -##々 -##〇 -##〈 -##〉 -##《 -##》 -##「 -##」 -##『 -##』 -##【 -##】 -##〓 -##〔 -##〕 -##〖 -##〗 -##〜 -##〝 -##〞 -##ぃ -##ぇ -##ぬ -##ふ -##ほ -##む -##ゃ -##ゅ -##ゆ -##ょ -##゜ -##ゝ -##ァ -##ゥ -##エ -##ォ -##ケ -##サ -##セ -##ソ -##ッ -##ニ -##ヌ -##ネ -##ノ -##ヘ -##モ -##ャ -##ヤ -##ュ -##ユ -##ョ -##ヨ -##ワ -##ヲ -##・ -##ヽ -##ㄅ -##ㄆ -##ㄇ -##ㄉ -##ㄋ -##ㄌ -##ㄍ -##ㄎ -##ㄏ -##ㄒ -##ㄚ -##ㄛ -##ㄞ -##ㄟ -##ㄢ -##ㄤ -##ㄥ -##ㄧ -##ㄨ -##ㆍ -##㈦ -##㊣ -##㗎 -##一 -##丁 -##七 -##万 -##丈 -##三 -##上 -##下 -##不 -##与 -##丐 -##丑 -##专 -##且 -##丕 -##世 -##丘 -##丙 -##业 -##丛 -##东 -##丝 -##丞 -##丟 -##両 -##丢 -##两 -##严 -##並 -##丧 -##丨 -##个 -##丫 -##中 -##丰 -##串 -##临 -##丶 -##丸 -##丹 -##为 -##主 -##丼 -##丽 -##举 -##丿 -##乂 -##乃 -##久 -##么 -##义 -##之 -##乌 -##乍 -##乎 -##乏 -##乐 -##乒 -##乓 -##乔 -##乖 -##乗 -##乘 -##乙 -##乜 -##九 -##乞 -##也 -##习 -##乡 -##书 -##乩 -##买 -##乱 -##乳 -##乾 -##亀 -##亂 -##了 -##予 -##争 -##事 -##二 -##于 -##亏 -##云 -##互 -##五 -##井 -##亘 -##亙 -##亚 -##些 -##亜 -##亞 -##亟 -##亡 -##亢 -##交 -##亥 -##亦 -##产 -##亨 -##亩 -##享 -##京 -##亭 -##亮 -##亲 -##亳 -##亵 -##人 -##亿 -##什 -##仁 -##仃 -##仄 -##仅 -##仆 -##仇 -##今 -##介 -##仍 -##从 -##仏 -##仑 -##仓 -##仔 -##仕 -##他 -##仗 -##付 -##仙 -##仝 -##仞 -##仟 -##代 -##令 -##以 -##仨 -##仪 -##们 -##仮 -##仰 -##仲 -##件 -##价 -##任 -##份 -##仿 -##企 -##伉 -##伊 -##伍 -##伎 -##伏 -##伐 -##休 -##伕 -##众 -##优 -##伙 -##会 -##伝 -##伞 -##伟 -##传 -##伢 -##伤 -##伦 -##伪 -##伫 -##伯 -##估 -##伴 -##伶 -##伸 -##伺 -##似 -##伽 -##佃 -##但 -##佇 -##佈 -##位 -##低 -##住 -##佐 -##佑 -##体 -##佔 -##何 -##佗 -##佘 -##余 -##佚 -##佛 -##作 -##佝 -##佞 -##佟 -##你 -##佢 -##佣 -##佤 -##佥 -##佩 -##佬 -##佯 -##佰 -##佳 -##併 -##佶 -##佻 -##佼 -##使 -##侃 -##侄 -##來 -##侈 -##例 -##侍 -##侏 -##侑 -##侖 -##侗 -##供 -##依 -##侠 -##価 -##侣 -##侥 -##侦 -##侧 -##侨 -##侬 -##侮 -##侯 -##侵 -##侶 -##侷 -##便 -##係 -##促 -##俄 -##俊 -##俎 -##俏 -##俐 -##俑 -##俗 -##俘 -##俚 -##保 -##俞 -##俟 -##俠 -##信 -##俨 -##俩 -##俪 -##俬 -##俭 -##修 -##俯 -##俱 -##俳 -##俸 -##俺 -##俾 -##倆 -##倉 -##個 -##倌 -##倍 -##倏 -##們 -##倒 -##倔 -##倖 -##倘 -##候 -##倚 -##倜 -##借 -##倡 -##値 -##倦 -##倩 -##倪 -##倫 -##倬 -##倭 -##倶 -##债 -##值 -##倾 -##偃 -##假 -##偈 -##偉 -##偌 -##偎 -##偏 -##偕 -##做 -##停 -##健 -##側 -##偵 -##偶 -##偷 -##偻 -##偽 -##偿 -##傀 -##傅 -##傍 -##傑 -##傘 -##備 -##傚 -##傢 -##傣 -##傥 -##储 -##傩 -##催 -##傭 -##傲 -##傳 -##債 -##傷 -##傻 -##傾 -##僅 -##働 -##像 -##僑 -##僕 -##僖 -##僚 -##僥 -##僧 -##僭 -##僮 -##僱 -##僵 -##價 -##僻 -##儀 -##儂 -##億 -##儆 -##儉 -##儋 -##儒 -##儕 -##儘 -##償 -##儡 -##優 -##儲 -##儷 -##儼 -##儿 -##兀 -##允 -##元 -##兄 -##充 -##兆 -##兇 -##先 -##光 -##克 -##兌 -##免 -##児 -##兑 -##兒 -##兔 -##兖 -##党 -##兜 -##兢 -##入 -##內 -##全 -##兩 -##八 -##公 -##六 -##兮 -##兰 -##共 -##兲 -##关 -##兴 -##兵 -##其 -##具 -##典 -##兹 -##养 -##兼 -##兽 -##冀 -##内 -##円 -##冇 -##冈 -##冉 -##冊 -##册 -##再 -##冏 -##冒 -##冕 -##冗 -##写 -##军 -##农 -##冠 -##冢 -##冤 -##冥 -##冨 -##冪 -##冬 -##冯 -##冰 -##冲 -##决 -##况 -##冶 -##冷 -##冻 -##冼 -##冽 -##冾 -##净 -##凄 -##准 -##凇 -##凈 -##凉 -##凋 -##凌 -##凍 -##减 -##凑 -##凛 -##凜 -##凝 -##几 -##凡 -##凤 -##処 -##凪 -##凭 -##凯 -##凰 -##凱 -##凳 -##凶 -##凸 -##凹 -##出 -##击 -##函 -##凿 -##刀 -##刁 -##刃 -##分 -##切 -##刈 -##刊 -##刍 -##刎 -##刑 -##划 -##列 -##刘 
-##则 -##刚 -##创 -##初 -##删 -##判 -##別 -##刨 -##利 -##刪 -##别 -##刮 -##到 -##制 -##刷 -##券 -##刹 -##刺 -##刻 -##刽 -##剁 -##剂 -##剃 -##則 -##剉 -##削 -##剋 -##剌 -##前 -##剎 -##剐 -##剑 -##剔 -##剖 -##剛 -##剜 -##剝 -##剣 -##剤 -##剥 -##剧 -##剩 -##剪 -##副 -##割 -##創 -##剷 -##剽 -##剿 -##劃 -##劇 -##劈 -##劉 -##劊 -##劍 -##劏 -##劑 -##力 -##劝 -##办 -##功 -##加 -##务 -##劣 -##动 -##助 -##努 -##劫 -##劭 -##励 -##劲 -##劳 -##労 -##劵 -##効 -##劾 -##势 -##勁 -##勃 -##勇 -##勉 -##勋 -##勐 -##勒 -##動 -##勖 -##勘 -##務 -##勛 -##勝 -##勞 -##募 -##勢 -##勤 -##勧 -##勳 -##勵 -##勸 -##勺 -##勻 -##勾 -##勿 -##匀 -##包 -##匆 -##匈 -##匍 -##匐 -##匕 -##化 -##北 -##匙 -##匝 -##匠 -##匡 -##匣 -##匪 -##匮 -##匯 -##匱 -##匹 -##区 -##医 -##匾 -##匿 -##區 -##十 -##千 -##卅 -##升 -##午 -##卉 -##半 -##卍 -##华 -##协 -##卑 -##卒 -##卓 -##協 -##单 -##卖 -##南 -##単 -##博 -##卜 -##卞 -##卟 -##占 -##卡 -##卢 -##卤 -##卦 -##卧 -##卫 -##卮 -##卯 -##印 -##危 -##即 -##却 -##卵 -##卷 -##卸 -##卻 -##卿 -##厂 -##厄 -##厅 -##历 -##厉 -##压 -##厌 -##厕 -##厘 -##厚 -##厝 -##原 -##厢 -##厥 -##厦 -##厨 -##厩 -##厭 -##厮 -##厲 -##厳 -##去 -##县 -##叁 -##参 -##參 -##又 -##叉 -##及 -##友 -##双 -##反 -##収 -##发 -##叔 -##取 -##受 -##变 -##叙 -##叛 -##叟 -##叠 -##叡 -##叢 -##口 -##古 -##句 -##另 -##叨 -##叩 -##只 -##叫 -##召 -##叭 -##叮 -##可 -##台 -##叱 -##史 -##右 -##叵 -##叶 -##号 -##司 -##叹 -##叻 -##叼 -##叽 -##吁 -##吃 -##各 -##吆 -##合 -##吉 -##吊 -##吋 -##同 -##名 -##后 -##吏 -##吐 -##向 -##吒 -##吓 -##吕 -##吖 -##吗 -##君 -##吝 -##吞 -##吟 -##吠 -##吡 -##否 -##吧 -##吨 -##吩 -##含 -##听 -##吭 -##吮 -##启 -##吱 -##吳 -##吴 -##吵 -##吶 -##吸 -##吹 -##吻 -##吼 -##吽 -##吾 -##呀 -##呂 -##呃 -##呆 -##呈 -##告 -##呋 -##呎 -##呐 -##呓 -##呕 -##呗 -##员 -##呛 -##呜 -##呢 -##呤 -##呦 -##周 -##呱 -##呲 -##味 -##呵 -##呷 -##呸 -##呻 -##呼 -##命 -##咀 -##咁 -##咂 -##咄 -##咆 -##咋 -##和 -##咎 -##咏 -##咐 -##咒 -##咔 -##咕 -##咖 -##咗 -##咘 -##咙 -##咚 -##咛 -##咣 -##咤 -##咦 -##咧 -##咨 -##咩 -##咪 -##咫 -##咬 -##咭 -##咯 -##咱 -##咲 -##咳 -##咸 -##咻 -##咽 -##咿 -##哀 -##品 -##哂 -##哄 -##哆 -##哇 -##哈 -##哉 -##哋 -##哌 -##响 -##哎 -##哏 -##哐 -##哑 -##哒 -##哔 -##哗 -##哟 -##員 -##哥 -##哦 -##哧 -##哨 -##哩 -##哪 -##哭 -##哮 -##哲 -##哺 -##哼 -##哽 -##唁 -##唄 -##唆 -##唇 -##唉 -##唏 -##唐 -##唑 -##唔 -##唠 -##唤 -##唧 -##唬 -##售 -##唯 -##唰 -##唱 -##唳 -##唷 -##唸 -##唾 -##啃 -##啄 -##商 -##啉 -##啊 -##問 -##啓 -##啕 -##啖 -##啜 -##啞 -##啟 -##啡 -##啤 -##啥 -##啦 -##啧 -##啪 -##啫 -##啬 -##啮 -##啰 -##啱 -##啲 -##啵 -##啶 -##啷 -##啸 -##啻 -##啼 -##啾 -##喀 -##喂 -##喃 -##善 -##喆 -##喇 -##喉 -##喊 -##喋 -##喎 -##喏 -##喔 -##喘 -##喙 -##喚 -##喜 -##喝 -##喟 -##喧 -##喪 -##喫 -##喬 -##單 -##喰 -##喱 -##喲 -##喳 -##喵 -##営 -##喷 -##喹 -##喺 -##喻 -##喽 -##嗅 -##嗆 -##嗇 -##嗎 -##嗑 -##嗒 -##嗓 -##嗔 -##嗖 -##嗚 -##嗜 -##嗝 -##嗟 -##嗡 -##嗣 -##嗤 -##嗦 -##嗨 -##嗪 -##嗬 -##嗯 -##嗰 -##嗲 -##嗳 -##嗶 -##嗷 -##嗽 -##嘀 -##嘅 -##嘆 -##嘈 -##嘉 -##嘌 -##嘍 -##嘎 -##嘔 -##嘖 -##嘗 -##嘘 -##嘚 -##嘛 -##嘜 -##嘞 -##嘟 -##嘢 -##嘣 -##嘤 -##嘧 -##嘩 -##嘭 -##嘮 -##嘯 -##嘰 -##嘱 -##嘲 -##嘴 -##嘶 -##嘸 -##嘹 -##嘻 -##嘿 -##噁 -##噌 -##噎 -##噓 -##噔 -##噗 -##噙 -##噜 -##噠 -##噢 -##噤 -##器 -##噩 -##噪 -##噬 -##噱 -##噴 -##噶 -##噸 -##噹 -##噻 -##噼 -##嚀 -##嚇 -##嚎 -##嚏 -##嚐 -##嚓 -##嚕 -##嚟 -##嚣 -##嚥 -##嚨 -##嚮 -##嚴 -##嚷 -##嚼 -##囂 -##囉 -##囊 -##囍 -##囑 -##囔 -##囗 -##囚 -##四 -##囝 -##回 -##囟 -##因 -##囡 -##团 -##団 -##囤 -##囧 -##囪 -##囫 -##园 -##困 -##囱 -##囲 -##図 -##围 -##囹 -##固 -##国 -##图 -##囿 -##圃 -##圄 -##圆 -##圈 -##國 -##圍 -##圏 -##園 -##圓 -##圖 -##團 -##圜 -##土 -##圣 -##圧 -##在 -##圩 -##圭 -##地 -##圳 -##场 -##圻 -##圾 -##址 -##坂 -##均 -##坊 -##坍 -##坎 -##坏 -##坐 -##坑 -##块 -##坚 -##坛 -##坝 -##坞 -##坟 -##坠 -##坡 -##坤 -##坦 -##坨 -##坪 -##坯 -##坳 -##坵 -##坷 -##垂 -##垃 -##垄 -##型 -##垒 -##垚 -##垛 -##垠 -##垢 -##垣 -##垦 -##垩 -##垫 -##垭 -##垮 -##垵 -##埂 -##埃 -##埋 -##城 -##埔 -##埕 -##埗 -##域 -##埠 -##埤 -##埵 -##執 -##埸 -##培 -##基 -##埼 -##堀 -##堂 -##堃 -##堅 -##堆 -##堇 -##堑 -##堕 -##堙 -##堡 -##堤 -##堪 -##堯 -##堰 -##報 -##場 -##堵 -##堺 -##堿 -##塊 -##塌 -##塑 -##塔 -##塗 -##塘 -##塚 -##塞 -##塢 -##塩 -##填 -##塬 -##塭 -##塵 -##塾 -##墀 -##境 -##墅 -##墉 -##墊 -##墒 
-##墓 -##増 -##墘 -##墙 -##墜 -##增 -##墟 -##墨 -##墩 -##墮 -##墳 -##墻 -##墾 -##壁 -##壅 -##壆 -##壇 -##壊 -##壑 -##壓 -##壕 -##壘 -##壞 -##壟 -##壢 -##壤 -##壩 -##士 -##壬 -##壮 -##壯 -##声 -##売 -##壳 -##壶 -##壹 -##壺 -##壽 -##处 -##备 -##変 -##复 -##夏 -##夔 -##夕 -##外 -##夙 -##多 -##夜 -##够 -##夠 -##夢 -##夥 -##大 -##天 -##太 -##夫 -##夭 -##央 -##夯 -##失 -##头 -##夷 -##夸 -##夹 -##夺 -##夾 -##奂 -##奄 -##奇 -##奈 -##奉 -##奋 -##奎 -##奏 -##奐 -##契 -##奔 -##奕 -##奖 -##套 -##奘 -##奚 -##奠 -##奢 -##奥 -##奧 -##奪 -##奬 -##奮 -##女 -##奴 -##奶 -##奸 -##她 -##好 -##如 -##妃 -##妄 -##妆 -##妇 -##妈 -##妊 -##妍 -##妒 -##妓 -##妖 -##妘 -##妙 -##妝 -##妞 -##妣 -##妤 -##妥 -##妨 -##妩 -##妪 -##妮 -##妲 -##妳 -##妹 -##妻 -##妾 -##姆 -##姉 -##姊 -##始 -##姍 -##姐 -##姑 -##姒 -##姓 -##委 -##姗 -##姚 -##姜 -##姝 -##姣 -##姥 -##姦 -##姨 -##姪 -##姫 -##姬 -##姹 -##姻 -##姿 -##威 -##娃 -##娄 -##娅 -##娆 -##娇 -##娉 -##娑 -##娓 -##娘 -##娛 -##娜 -##娟 -##娠 -##娣 -##娥 -##娩 -##娱 -##娲 -##娴 -##娶 -##娼 -##婀 -##婁 -##婆 -##婉 -##婊 -##婕 -##婚 -##婢 -##婦 -##婧 -##婪 -##婭 -##婴 -##婵 -##婶 -##婷 -##婺 -##婿 -##媒 -##媚 -##媛 -##媞 -##媧 -##媲 -##媳 -##媽 -##媾 -##嫁 -##嫂 -##嫉 -##嫌 -##嫑 -##嫔 -##嫖 -##嫘 -##嫚 -##嫡 -##嫣 -##嫦 -##嫩 -##嫲 -##嫵 -##嫻 -##嬅 -##嬉 -##嬌 -##嬗 -##嬛 -##嬢 -##嬤 -##嬪 -##嬰 -##嬴 -##嬷 -##嬸 -##嬿 -##孀 -##孃 -##子 -##孑 -##孔 -##孕 -##孖 -##字 -##存 -##孙 -##孚 -##孛 -##孜 -##孝 -##孟 -##孢 -##季 -##孤 -##学 -##孩 -##孪 -##孫 -##孬 -##孰 -##孱 -##孳 -##孵 -##學 -##孺 -##孽 -##孿 -##宁 -##它 -##宅 -##宇 -##守 -##安 -##宋 -##完 -##宏 -##宓 -##宕 -##宗 -##官 -##宙 -##定 -##宛 -##宜 -##宝 -##实 -##実 -##宠 -##审 -##客 -##宣 -##室 -##宥 -##宦 -##宪 -##宫 -##宮 -##宰 -##害 -##宴 -##宵 -##家 -##宸 -##容 -##宽 -##宾 -##宿 -##寂 -##寄 -##寅 -##密 -##寇 -##富 -##寐 -##寒 -##寓 -##寛 -##寝 -##寞 -##察 -##寡 -##寢 -##寥 -##實 -##寧 -##寨 -##審 -##寫 -##寬 -##寮 -##寰 -##寵 -##寶 -##寸 -##对 -##寺 -##寻 -##导 -##対 -##寿 -##封 -##専 -##射 -##将 -##將 -##專 -##尉 -##尊 -##尋 -##對 -##導 -##小 -##少 -##尔 -##尕 -##尖 -##尘 -##尚 -##尝 -##尤 -##尧 -##尬 -##就 -##尴 -##尷 -##尸 -##尹 -##尺 -##尻 -##尼 -##尽 -##尾 -##尿 -##局 -##屁 -##层 -##屄 -##居 -##屆 -##屈 -##屉 -##届 -##屋 -##屌 -##屍 -##屎 -##屏 -##屐 -##屑 -##展 -##屜 -##属 -##屠 -##屡 -##屢 -##層 -##履 -##屬 -##屯 -##山 -##屹 -##屿 -##岀 -##岁 -##岂 -##岌 -##岐 -##岑 -##岔 -##岖 -##岗 -##岘 -##岙 -##岚 -##岛 -##岡 -##岩 -##岫 -##岬 -##岭 -##岱 -##岳 -##岷 -##岸 -##峇 -##峋 -##峒 -##峙 -##峡 -##峤 -##峥 -##峦 -##峨 -##峪 -##峭 -##峯 -##峰 -##峴 -##島 -##峻 -##峽 -##崁 -##崂 -##崆 -##崇 -##崎 -##崑 -##崔 -##崖 -##崗 -##崙 -##崛 -##崧 -##崩 -##崭 -##崴 -##崽 -##嵇 -##嵊 -##嵋 -##嵌 -##嵐 -##嵘 -##嵩 -##嵬 -##嵯 -##嶂 -##嶄 -##嶇 -##嶋 -##嶙 -##嶺 -##嶼 -##嶽 -##巅 -##巍 -##巒 -##巔 -##巖 -##川 -##州 -##巡 -##巢 -##工 -##左 -##巧 -##巨 -##巩 -##巫 -##差 -##己 -##已 -##巳 -##巴 -##巷 -##巻 -##巽 -##巾 -##巿 -##币 -##市 -##布 -##帅 -##帆 -##师 -##希 -##帐 -##帑 -##帕 -##帖 -##帘 -##帚 -##帛 -##帜 -##帝 -##帥 -##带 -##帧 -##師 -##席 -##帮 -##帯 -##帰 -##帳 -##帶 -##帷 -##常 -##帼 -##帽 -##幀 -##幂 -##幄 -##幅 -##幌 -##幔 -##幕 -##幟 -##幡 -##幢 -##幣 -##幫 -##干 -##平 -##年 -##并 -##幸 -##幹 -##幺 -##幻 -##幼 -##幽 -##幾 -##广 -##庁 -##広 -##庄 -##庆 -##庇 -##床 -##序 -##庐 -##库 -##应 -##底 -##庖 -##店 -##庙 -##庚 -##府 -##庞 -##废 -##庠 -##度 -##座 -##庫 -##庭 -##庵 -##庶 -##康 -##庸 -##庹 -##庾 -##廁 -##廂 -##廃 -##廈 -##廉 -##廊 -##廓 -##廖 -##廚 -##廝 -##廟 -##廠 -##廢 -##廣 -##廬 -##廳 -##延 -##廷 -##建 -##廿 -##开 -##弁 -##异 -##弃 -##弄 -##弈 -##弊 -##弋 -##式 -##弑 -##弒 -##弓 -##弔 -##引 -##弗 -##弘 -##弛 -##弟 -##张 -##弥 -##弦 -##弧 -##弩 -##弭 -##弯 -##弱 -##張 -##強 -##弹 -##强 -##弼 -##弾 -##彅 -##彆 -##彈 -##彌 -##彎 -##归 -##当 -##录 -##彗 -##彙 -##彝 -##形 -##彤 -##彥 -##彦 -##彧 -##彩 -##彪 -##彫 -##彬 -##彭 -##彰 -##影 -##彷 -##役 -##彻 -##彼 -##彿 -##往 -##征 -##径 -##待 -##徇 -##很 -##徉 -##徊 -##律 -##後 -##徐 -##徑 -##徒 -##従 -##徕 -##得 -##徘 -##徙 -##徜 -##從 -##徠 -##御 -##徨 -##復 -##循 -##徬 -##微 -##徳 -##徴 -##徵 -##德 -##徹 -##徼 -##徽 -##心 -##必 -##忆 -##忌 -##忍 -##忏 -##忐 -##忑 -##忒 -##忖 -##志 -##忘 -##忙 -##応 -##忠 -##忡 -##忤 -##忧 -##忪 -##快 -##忱 -##念 -##忻 -##忽 -##忿 -##怀 
-##态 -##怂 -##怅 -##怆 -##怎 -##怏 -##怒 -##怔 -##怕 -##怖 -##怙 -##怜 -##思 -##怠 -##怡 -##急 -##怦 -##性 -##怨 -##怪 -##怯 -##怵 -##总 -##怼 -##恁 -##恃 -##恆 -##恋 -##恍 -##恐 -##恒 -##恕 -##恙 -##恚 -##恢 -##恣 -##恤 -##恥 -##恨 -##恩 -##恪 -##恫 -##恬 -##恭 -##息 -##恰 -##恳 -##恵 -##恶 -##恸 -##恺 -##恻 -##恼 -##恿 -##悄 -##悅 -##悉 -##悌 -##悍 -##悔 -##悖 -##悚 -##悟 -##悠 -##患 -##悦 -##您 -##悩 -##悪 -##悬 -##悯 -##悱 -##悲 -##悴 -##悵 -##悶 -##悸 -##悻 -##悼 -##悽 -##情 -##惆 -##惇 -##惊 -##惋 -##惑 -##惕 -##惘 -##惚 -##惜 -##惟 -##惠 -##惡 -##惦 -##惧 -##惨 -##惩 -##惫 -##惬 -##惭 -##惮 -##惯 -##惰 -##惱 -##想 -##惴 -##惶 -##惹 -##惺 -##愁 -##愆 -##愈 -##愉 -##愍 -##意 -##愕 -##愚 -##愛 -##愜 -##感 -##愣 -##愤 -##愧 -##愫 -##愷 -##愿 -##慄 -##慈 -##態 -##慌 -##慎 -##慑 -##慕 -##慘 -##慚 -##慟 -##慢 -##慣 -##慧 -##慨 -##慫 -##慮 -##慰 -##慳 -##慵 -##慶 -##慷 -##慾 -##憂 -##憊 -##憋 -##憎 -##憐 -##憑 -##憔 -##憚 -##憤 -##憧 -##憨 -##憩 -##憫 -##憬 -##憲 -##憶 -##憾 -##懂 -##懇 -##懈 -##應 -##懊 -##懋 -##懑 -##懒 -##懦 -##懲 -##懵 -##懶 -##懷 -##懸 -##懺 -##懼 -##懾 -##懿 -##戀 -##戈 -##戊 -##戌 -##戍 -##戎 -##戏 -##成 -##我 -##戒 -##戕 -##或 -##战 -##戚 -##戛 -##戟 -##戡 -##戦 -##截 -##戬 -##戮 -##戰 -##戲 -##戳 -##戴 -##戶 -##户 -##戸 -##戻 -##戾 -##房 -##所 -##扁 -##扇 -##扈 -##扉 -##手 -##才 -##扎 -##扑 -##扒 -##打 -##扔 -##払 -##托 -##扛 -##扣 -##扦 -##执 -##扩 -##扪 -##扫 -##扬 -##扭 -##扮 -##扯 -##扰 -##扱 -##扳 -##扶 -##批 -##扼 -##找 -##承 -##技 -##抄 -##抉 -##把 -##抑 -##抒 -##抓 -##投 -##抖 -##抗 -##折 -##抚 -##抛 -##抜 -##択 -##抟 -##抠 -##抡 -##抢 -##护 -##报 -##抨 -##披 -##抬 -##抱 -##抵 -##抹 -##押 -##抽 -##抿 -##拂 -##拄 -##担 -##拆 -##拇 -##拈 -##拉 -##拋 -##拌 -##拍 -##拎 -##拐 -##拒 -##拓 -##拔 -##拖 -##拗 -##拘 -##拙 -##拚 -##招 -##拜 -##拟 -##拡 -##拢 -##拣 -##拥 -##拦 -##拧 -##拨 -##择 -##括 -##拭 -##拮 -##拯 -##拱 -##拳 -##拴 -##拷 -##拼 -##拽 -##拾 -##拿 -##持 -##挂 -##指 -##挈 -##按 -##挎 -##挑 -##挖 -##挙 -##挚 -##挛 -##挝 -##挞 -##挟 -##挠 -##挡 -##挣 -##挤 -##挥 -##挨 -##挪 -##挫 -##振 -##挲 -##挹 -##挺 -##挽 -##挾 -##捂 -##捅 -##捆 -##捉 -##捋 -##捌 -##捍 -##捎 -##捏 -##捐 -##捕 -##捞 -##损 -##捡 -##换 -##捣 -##捧 -##捨 -##捩 -##据 -##捱 -##捲 -##捶 -##捷 -##捺 -##捻 -##掀 -##掂 -##掃 -##掇 -##授 -##掉 -##掌 -##掏 -##掐 -##排 -##掖 -##掘 -##掙 -##掛 -##掠 -##採 -##探 -##掣 -##接 -##控 -##推 -##掩 -##措 -##掬 -##掰 -##掲 -##掳 -##掴 -##掷 -##掸 -##掺 -##揀 -##揃 -##揄 -##揆 -##揉 -##揍 -##描 -##提 -##插 -##揖 -##揚 -##換 -##握 -##揣 -##揩 -##揪 -##揭 -##揮 -##援 -##揶 -##揸 -##揹 -##揽 -##搀 -##搁 -##搂 -##搅 -##損 -##搏 -##搐 -##搓 -##搔 -##搖 -##搗 -##搜 -##搞 -##搡 -##搪 -##搬 -##搭 -##搵 -##搶 -##携 -##搽 -##摀 -##摁 -##摄 -##摆 -##摇 -##摈 -##摊 -##摒 -##摔 -##摘 -##摞 -##摟 -##摧 -##摩 -##摯 -##摳 -##摸 -##摹 -##摺 -##摻 -##撂 -##撃 -##撅 -##撇 -##撈 -##撐 -##撑 -##撒 -##撓 -##撕 -##撚 -##撞 -##撤 -##撥 -##撩 -##撫 -##撬 -##播 -##撮 -##撰 -##撲 -##撵 -##撷 -##撸 -##撻 -##撼 -##撿 -##擀 -##擁 -##擂 -##擄 -##擅 -##擇 -##擊 -##擋 -##操 -##擎 -##擒 -##擔 -##擘 -##據 -##擞 -##擠 -##擡 -##擢 -##擦 -##擬 -##擰 -##擱 -##擲 -##擴 -##擷 -##擺 -##擼 -##擾 -##攀 -##攏 -##攒 -##攔 -##攘 -##攙 -##攜 -##攝 -##攞 -##攢 -##攣 -##攤 -##攥 -##攪 -##攫 -##攬 -##支 -##收 -##攸 -##改 -##攻 -##放 -##政 -##故 -##效 -##敌 -##敍 -##敎 -##敏 -##救 -##敕 -##敖 -##敗 -##敘 -##教 -##敛 -##敝 -##敞 -##敢 -##散 -##敦 -##敬 -##数 -##敲 -##整 -##敵 -##敷 -##數 -##斂 -##斃 -##文 -##斋 -##斌 -##斎 -##斐 -##斑 -##斓 -##斗 -##料 -##斛 -##斜 -##斟 -##斡 -##斤 -##斥 -##斧 -##斩 -##斫 -##斬 -##断 -##斯 -##新 -##斷 -##方 -##於 -##施 -##旁 -##旃 -##旅 -##旋 -##旌 -##旎 -##族 -##旖 -##旗 -##无 -##既 -##日 -##旦 -##旧 -##旨 -##早 -##旬 -##旭 -##旮 -##旱 -##时 -##旷 -##旺 -##旻 -##昀 -##昂 -##昆 -##昇 -##昉 -##昊 -##昌 -##明 -##昏 -##易 -##昔 -##昕 -##昙 -##星 -##映 -##春 -##昧 -##昨 -##昭 -##是 -##昱 -##昴 -##昵 -##昶 -##昼 -##显 -##晁 -##時 -##晃 -##晉 -##晋 -##晌 -##晏 -##晒 -##晓 -##晔 -##晕 -##晖 -##晗 -##晚 -##晝 -##晞 -##晟 -##晤 -##晦 -##晨 -##晩 -##普 -##景 -##晰 -##晴 -##晶 -##晷 -##智 -##晾 -##暂 -##暄 -##暇 -##暈 -##暉 -##暌 -##暐 -##暑 -##暖 -##暗 -##暝 -##暢 -##暧 -##暨 -##暫 -##暮 -##暱 -##暴 -##暸 -##暹 -##曄 -##曆 -##曇 -##曉 -##曖 -##曙 -##曜 -##曝 -##曠 -##曦 -##曬 -##曰 
-##曲 -##曳 -##更 -##書 -##曹 -##曼 -##曾 -##替 -##最 -##會 -##月 -##有 -##朋 -##服 -##朐 -##朔 -##朕 -##朗 -##望 -##朝 -##期 -##朦 -##朧 -##木 -##未 -##末 -##本 -##札 -##朮 -##术 -##朱 -##朴 -##朵 -##机 -##朽 -##杀 -##杂 -##权 -##杆 -##杈 -##杉 -##李 -##杏 -##材 -##村 -##杓 -##杖 -##杜 -##杞 -##束 -##杠 -##条 -##来 -##杨 -##杭 -##杯 -##杰 -##東 -##杳 -##杵 -##杷 -##杼 -##松 -##板 -##极 -##构 -##枇 -##枉 -##枋 -##析 -##枕 -##林 -##枚 -##果 -##枝 -##枢 -##枣 -##枪 -##枫 -##枭 -##枯 -##枰 -##枱 -##枳 -##架 -##枷 -##枸 -##柄 -##柏 -##某 -##柑 -##柒 -##染 -##柔 -##柘 -##柚 -##柜 -##柞 -##柠 -##柢 -##查 -##柩 -##柬 -##柯 -##柱 -##柳 -##柴 -##柵 -##査 -##柿 -##栀 -##栃 -##栄 -##栅 -##标 -##栈 -##栉 -##栋 -##栎 -##栏 -##树 -##栓 -##栖 -##栗 -##校 -##栩 -##株 -##样 -##核 -##根 -##格 -##栽 -##栾 -##桀 -##桁 -##桂 -##桃 -##桅 -##框 -##案 -##桉 -##桌 -##桎 -##桐 -##桑 -##桓 -##桔 -##桜 -##桠 -##桡 -##桢 -##档 -##桥 -##桦 -##桧 -##桨 -##桩 -##桶 -##桿 -##梁 -##梅 -##梆 -##梏 -##梓 -##梗 -##條 -##梟 -##梢 -##梦 -##梧 -##梨 -##梭 -##梯 -##械 -##梳 -##梵 -##梶 -##检 -##棂 -##棄 -##棉 -##棋 -##棍 -##棒 -##棕 -##棗 -##棘 -##棚 -##棟 -##棠 -##棣 -##棧 -##森 -##棱 -##棲 -##棵 -##棹 -##棺 -##椁 -##椅 -##椋 -##植 -##椎 -##椒 -##検 -##椪 -##椭 -##椰 -##椹 -##椽 -##椿 -##楂 -##楊 -##楓 -##楔 -##楚 -##楝 -##楞 -##楠 -##楣 -##楨 -##楫 -##業 -##楮 -##極 -##楷 -##楸 -##楹 -##楼 -##楽 -##概 -##榄 -##榆 -##榈 -##榉 -##榔 -##榕 -##榖 -##榛 -##榜 -##榨 -##榫 -##榭 -##榮 -##榱 -##榴 -##榷 -##榻 -##槁 -##槃 -##構 -##槌 -##槍 -##槎 -##槐 -##槓 -##様 -##槛 -##槟 -##槤 -##槭 -##槲 -##槳 -##槻 -##槽 -##槿 -##樁 -##樂 -##樊 -##樑 -##樓 -##標 -##樞 -##樟 -##模 -##樣 -##権 -##横 -##樫 -##樯 -##樱 -##樵 -##樸 -##樹 -##樺 -##樽 -##樾 -##橄 -##橇 -##橋 -##橐 -##橘 -##橙 -##機 -##橡 -##橢 -##橫 -##橱 -##橹 -##橼 -##檀 -##檄 -##檎 -##檐 -##檔 -##檗 -##檜 -##檢 -##檬 -##檯 -##檳 -##檸 -##檻 -##櫃 -##櫚 -##櫛 -##櫥 -##櫸 -##櫻 -##欄 -##權 -##欒 -##欖 -##欠 -##次 -##欢 -##欣 -##欧 -##欲 -##欸 -##欺 -##欽 -##款 -##歆 -##歇 -##歉 -##歌 -##歎 -##歐 -##歓 -##歙 -##歛 -##歡 -##止 -##正 -##此 -##步 -##武 -##歧 -##歩 -##歪 -##歯 -##歲 -##歳 -##歴 -##歷 -##歸 -##歹 -##死 -##歼 -##殁 -##殃 -##殆 -##殇 -##殉 -##殊 -##残 -##殒 -##殓 -##殖 -##殘 -##殞 -##殡 -##殤 -##殭 -##殯 -##殲 -##殴 -##段 -##殷 -##殺 -##殼 -##殿 -##毀 -##毁 -##毂 -##毅 -##毆 -##毋 -##母 -##毎 -##每 -##毒 -##毓 -##比 -##毕 -##毗 -##毘 -##毙 -##毛 -##毡 -##毫 -##毯 -##毽 -##氈 -##氏 -##氐 -##民 -##氓 -##气 -##氖 -##気 -##氙 -##氛 -##氟 -##氡 -##氢 -##氣 -##氤 -##氦 -##氧 -##氨 -##氪 -##氫 -##氮 -##氯 -##氰 -##氲 -##水 -##氷 -##永 -##氹 -##氾 -##汀 -##汁 -##求 -##汆 -##汇 -##汉 -##汎 -##汐 -##汕 -##汗 -##汙 -##汛 -##汝 -##汞 -##江 -##池 -##污 -##汤 -##汨 -##汩 -##汪 -##汰 -##汲 -##汴 -##汶 -##汹 -##決 -##汽 -##汾 -##沁 -##沂 -##沃 -##沅 -##沈 -##沉 -##沌 -##沏 -##沐 -##沒 -##沓 -##沖 -##沙 -##沛 -##沟 -##没 -##沢 -##沣 -##沥 -##沦 -##沧 -##沪 -##沫 -##沭 -##沮 -##沱 -##河 -##沸 -##油 -##治 -##沼 -##沽 -##沾 -##沿 -##況 -##泄 -##泉 -##泊 -##泌 -##泓 -##法 -##泗 -##泛 -##泞 -##泠 -##泡 -##波 -##泣 -##泥 -##注 -##泪 -##泫 -##泮 -##泯 -##泰 -##泱 -##泳 -##泵 -##泷 -##泸 -##泻 -##泼 -##泽 -##泾 -##洁 -##洄 -##洋 -##洒 -##洗 -##洙 -##洛 -##洞 -##津 -##洩 -##洪 -##洮 -##洱 -##洲 -##洵 -##洶 -##洸 -##洹 -##活 -##洼 -##洽 -##派 -##流 -##浃 -##浄 -##浅 -##浆 -##浇 -##浊 -##测 -##济 -##浏 -##浑 -##浒 -##浓 -##浔 -##浙 -##浚 -##浜 -##浣 -##浦 -##浩 -##浪 -##浬 -##浮 -##浯 -##浴 -##海 -##浸 -##涂 -##涅 -##涇 -##消 -##涉 -##涌 -##涎 -##涓 -##涔 -##涕 -##涙 -##涛 -##涝 -##涞 -##涟 -##涠 -##涡 -##涣 -##涤 -##润 -##涧 -##涨 -##涩 -##涪 -##涮 -##涯 -##液 -##涵 -##涸 -##涼 -##涿 -##淀 -##淄 -##淅 -##淆 -##淇 -##淋 -##淌 -##淑 -##淒 -##淖 -##淘 -##淙 -##淚 -##淞 -##淡 -##淤 -##淦 -##淨 -##淩 -##淪 -##淫 -##淬 -##淮 -##深 -##淳 -##淵 -##混 -##淹 -##淺 -##添 -##淼 -##清 -##済 -##渉 -##渊 -##渋 -##渍 -##渎 -##渐 -##渔 -##渗 -##渙 -##渚 -##減 -##渝 -##渠 -##渡 -##渣 -##渤 -##渥 -##渦 -##温 -##測 -##渭 -##港 -##渲 -##渴 -##游 -##渺 -##渾 -##湃 -##湄 -##湊 -##湍 -##湖 -##湘 -##湛 -##湟 -##湧 -##湫 -##湮 -##湯 -##湳 -##湾 -##湿 -##満 -##溃 -##溅 -##溉 -##溏 -##源 -##準 -##溜 -##溝 -##溟 -##溢 -##溥 -##溧 -##溪 -##溫 -##溯 -##溱 -##溴 -##溶 -##溺 -##溼 -##滁 -##滂 -##滄 -##滅 -##滇 -##滋 -##滌 -##滑 -##滓 
-##滔 -##滕 -##滙 -##滚 -##滝 -##滞 -##滟 -##满 -##滢 -##滤 -##滥 -##滦 -##滨 -##滩 -##滬 -##滯 -##滲 -##滴 -##滷 -##滸 -##滾 -##滿 -##漁 -##漂 -##漆 -##漉 -##漏 -##漓 -##演 -##漕 -##漠 -##漢 -##漣 -##漩 -##漪 -##漫 -##漬 -##漯 -##漱 -##漲 -##漳 -##漸 -##漾 -##漿 -##潆 -##潇 -##潋 -##潍 -##潑 -##潔 -##潘 -##潛 -##潜 -##潞 -##潟 -##潢 -##潤 -##潦 -##潧 -##潭 -##潮 -##潰 -##潴 -##潸 -##潺 -##潼 -##澀 -##澄 -##澆 -##澈 -##澍 -##澎 -##澗 -##澜 -##澡 -##澤 -##澧 -##澱 -##澳 -##澹 -##激 -##濁 -##濂 -##濃 -##濑 -##濒 -##濕 -##濘 -##濛 -##濟 -##濠 -##濡 -##濤 -##濫 -##濬 -##濮 -##濯 -##濱 -##濺 -##濾 -##瀅 -##瀆 -##瀉 -##瀋 -##瀏 -##瀑 -##瀕 -##瀘 -##瀚 -##瀛 -##瀝 -##瀞 -##瀟 -##瀧 -##瀨 -##瀬 -##瀰 -##瀾 -##灌 -##灏 -##灑 -##灘 -##灝 -##灞 -##灣 -##火 -##灬 -##灭 -##灯 -##灰 -##灵 -##灶 -##灸 -##灼 -##災 -##灾 -##灿 -##炀 -##炁 -##炅 -##炉 -##炊 -##炎 -##炒 -##炔 -##炕 -##炖 -##炙 -##炜 -##炫 -##炬 -##炭 -##炮 -##炯 -##炳 -##炷 -##炸 -##点 -##為 -##炼 -##炽 -##烁 -##烂 -##烃 -##烈 -##烊 -##烏 -##烘 -##烙 -##烛 -##烟 -##烤 -##烦 -##烧 -##烨 -##烩 -##烫 -##烬 -##热 -##烯 -##烷 -##烹 -##烽 -##焉 -##焊 -##焕 -##焖 -##焗 -##焘 -##焙 -##焚 -##焜 -##無 -##焦 -##焯 -##焰 -##焱 -##然 -##焼 -##煅 -##煉 -##煊 -##煌 -##煎 -##煒 -##煖 -##煙 -##煜 -##煞 -##煤 -##煥 -##煦 -##照 -##煨 -##煩 -##煮 -##煲 -##煸 -##煽 -##熄 -##熊 -##熏 -##熒 -##熔 -##熙 -##熟 -##熠 -##熨 -##熬 -##熱 -##熵 -##熹 -##熾 -##燁 -##燃 -##燄 -##燈 -##燉 -##燊 -##燎 -##燒 -##燔 -##燕 -##燙 -##燜 -##營 -##燥 -##燦 -##燧 -##燭 -##燮 -##燴 -##燻 -##燼 -##燿 -##爆 -##爍 -##爐 -##爛 -##爪 -##爬 -##爭 -##爰 -##爱 -##爲 -##爵 -##父 -##爷 -##爸 -##爹 -##爺 -##爻 -##爽 -##爾 -##牆 -##片 -##版 -##牌 -##牍 -##牒 -##牙 -##牛 -##牝 -##牟 -##牠 -##牡 -##牢 -##牦 -##牧 -##物 -##牯 -##牲 -##牴 -##牵 -##特 -##牺 -##牽 -##犀 -##犁 -##犄 -##犊 -##犍 -##犒 -##犢 -##犧 -##犬 -##犯 -##状 -##犷 -##犸 -##犹 -##狀 -##狂 -##狄 -##狈 -##狎 -##狐 -##狒 -##狗 -##狙 -##狞 -##狠 -##狡 -##狩 -##独 -##狭 -##狮 -##狰 -##狱 -##狸 -##狹 -##狼 -##狽 -##猎 -##猕 -##猖 -##猗 -##猙 -##猛 -##猜 -##猝 -##猥 -##猩 -##猪 -##猫 -##猬 -##献 -##猴 -##猶 -##猷 -##猾 -##猿 -##獄 -##獅 -##獎 -##獐 -##獒 -##獗 -##獠 -##獣 -##獨 -##獭 -##獰 -##獲 -##獵 -##獷 -##獸 -##獺 -##獻 -##獼 -##獾 -##玄 -##率 -##玉 -##王 -##玑 -##玖 -##玛 -##玟 -##玠 -##玥 -##玩 -##玫 -##玮 -##环 -##现 -##玲 -##玳 -##玷 -##玺 -##玻 -##珀 -##珂 -##珅 -##珈 -##珉 -##珊 -##珍 -##珏 -##珐 -##珑 -##珙 -##珞 -##珠 -##珣 -##珥 -##珩 -##珪 -##班 -##珮 -##珲 -##珺 -##現 -##球 -##琅 -##理 -##琇 -##琉 -##琊 -##琍 -##琏 -##琐 -##琛 -##琢 -##琥 -##琦 -##琨 -##琪 -##琬 -##琮 -##琰 -##琲 -##琳 -##琴 -##琵 -##琶 -##琺 -##琼 -##瑀 -##瑁 -##瑄 -##瑋 -##瑕 -##瑗 -##瑙 -##瑚 -##瑛 -##瑜 -##瑞 -##瑟 -##瑠 -##瑣 -##瑤 -##瑩 -##瑪 -##瑯 -##瑰 -##瑶 -##瑾 -##璀 -##璁 -##璃 -##璇 -##璉 -##璋 -##璎 -##璐 -##璜 -##璞 -##璟 -##璧 -##璨 -##環 -##璽 -##璿 -##瓊 -##瓏 -##瓒 -##瓜 -##瓢 -##瓣 -##瓤 -##瓦 -##瓮 -##瓯 -##瓴 -##瓶 -##瓷 -##甄 -##甌 -##甕 -##甘 -##甙 -##甚 -##甜 -##生 -##產 -##産 -##甥 -##甦 -##用 -##甩 -##甫 -##甬 -##甭 -##甯 -##田 -##由 -##甲 -##申 -##电 -##男 -##甸 -##町 -##画 -##甾 -##畀 -##畅 -##界 -##畏 -##畑 -##畔 -##留 -##畜 -##畝 -##畢 -##略 -##畦 -##番 -##畫 -##異 -##畲 -##畳 -##畴 -##當 -##畸 -##畹 -##畿 -##疆 -##疇 -##疊 -##疏 -##疑 -##疔 -##疖 -##疗 -##疙 -##疚 -##疝 -##疟 -##疡 -##疣 -##疤 -##疥 -##疫 -##疮 -##疯 -##疱 -##疲 -##疳 -##疵 -##疸 -##疹 -##疼 -##疽 -##疾 -##痂 -##病 -##症 -##痈 -##痉 -##痊 -##痍 -##痒 -##痔 -##痕 -##痘 -##痙 -##痛 -##痞 -##痠 -##痢 -##痣 -##痤 -##痧 -##痨 -##痪 -##痫 -##痰 -##痱 -##痴 -##痹 -##痺 -##痼 -##痿 -##瘀 -##瘁 -##瘋 -##瘍 -##瘓 -##瘘 -##瘙 -##瘟 -##瘠 -##瘡 -##瘢 -##瘤 -##瘦 -##瘧 -##瘩 -##瘪 -##瘫 -##瘴 -##瘸 -##瘾 -##療 -##癇 -##癌 -##癒 -##癖 -##癜 -##癞 -##癡 -##癢 -##癣 -##癥 -##癫 -##癬 -##癮 -##癱 -##癲 -##癸 -##発 -##登 -##發 -##白 -##百 -##皂 -##的 -##皆 -##皇 -##皈 -##皋 -##皎 -##皑 -##皓 -##皖 -##皙 -##皚 -##皮 -##皰 -##皱 -##皴 -##皺 -##皿 -##盂 -##盃 -##盅 -##盆 -##盈 -##益 -##盎 -##盏 -##盐 -##监 -##盒 -##盔 -##盖 -##盗 -##盘 -##盛 -##盜 -##盞 -##盟 -##盡 -##監 -##盤 -##盥 -##盧 -##盪 -##目 -##盯 -##盱 -##盲 -##直 -##相 -##盹 -##盼 -##盾 -##省 -##眈 -##眉 -##看 -##県 -##眙 -##眞 -##真 -##眠 -##眦 -##眨 -##眩 -##眯 -##眶 -##眷 -##眸 -##眺 -##眼 -##眾 -##着 -##睁 -##睇 
-##睏 -##睐 -##睑 -##睛 -##睜 -##睞 -##睡 -##睢 -##督 -##睥 -##睦 -##睨 -##睪 -##睫 -##睬 -##睹 -##睽 -##睾 -##睿 -##瞄 -##瞅 -##瞇 -##瞋 -##瞌 -##瞎 -##瞑 -##瞒 -##瞓 -##瞞 -##瞟 -##瞠 -##瞥 -##瞧 -##瞩 -##瞪 -##瞬 -##瞭 -##瞰 -##瞳 -##瞻 -##瞼 -##瞿 -##矇 -##矍 -##矗 -##矚 -##矛 -##矜 -##矢 -##矣 -##知 -##矩 -##矫 -##短 -##矮 -##矯 -##石 -##矶 -##矽 -##矾 -##矿 -##码 -##砂 -##砌 -##砍 -##砒 -##研 -##砖 -##砗 -##砚 -##砝 -##砣 -##砥 -##砧 -##砭 -##砰 -##砲 -##破 -##砷 -##砸 -##砺 -##砼 -##砾 -##础 -##硅 -##硐 -##硒 -##硕 -##硝 -##硫 -##硬 -##确 -##硯 -##硼 -##碁 -##碇 -##碉 -##碌 -##碍 -##碎 -##碑 -##碓 -##碗 -##碘 -##碚 -##碛 -##碟 -##碣 -##碧 -##碩 -##碰 -##碱 -##碳 -##碴 -##確 -##碼 -##碾 -##磁 -##磅 -##磊 -##磋 -##磐 -##磕 -##磚 -##磡 -##磨 -##磬 -##磯 -##磲 -##磷 -##磺 -##礁 -##礎 -##礙 -##礡 -##礦 -##礪 -##礫 -##礴 -##示 -##礼 -##社 -##祀 -##祁 -##祂 -##祇 -##祈 -##祉 -##祎 -##祐 -##祕 -##祖 -##祗 -##祚 -##祛 -##祜 -##祝 -##神 -##祟 -##祠 -##祢 -##祥 -##票 -##祭 -##祯 -##祷 -##祸 -##祺 -##祿 -##禀 -##禁 -##禄 -##禅 -##禍 -##禎 -##福 -##禛 -##禦 -##禧 -##禪 -##禮 -##禱 -##禹 -##禺 -##离 -##禽 -##禾 -##禿 -##秀 -##私 -##秃 -##秆 -##秉 -##秋 -##种 -##科 -##秒 -##秘 -##租 -##秣 -##秤 -##秦 -##秧 -##秩 -##秭 -##积 -##称 -##秸 -##移 -##秽 -##稀 -##稅 -##程 -##稍 -##税 -##稔 -##稗 -##稚 -##稜 -##稞 -##稟 -##稠 -##稣 -##種 -##稱 -##稲 -##稳 -##稷 -##稹 -##稻 -##稼 -##稽 -##稿 -##穀 -##穂 -##穆 -##穌 -##積 -##穎 -##穗 -##穢 -##穩 -##穫 -##穴 -##究 -##穷 -##穹 -##空 -##穿 -##突 -##窃 -##窄 -##窈 -##窍 -##窑 -##窒 -##窓 -##窕 -##窖 -##窗 -##窘 -##窜 -##窝 -##窟 -##窠 -##窥 -##窦 -##窨 -##窩 -##窪 -##窮 -##窯 -##窺 -##窿 -##竄 -##竅 -##竇 -##竊 -##立 -##竖 -##站 -##竜 -##竞 -##竟 -##章 -##竣 -##童 -##竭 -##端 -##競 -##竹 -##竺 -##竽 -##竿 -##笃 -##笆 -##笈 -##笋 -##笏 -##笑 -##笔 -##笙 -##笛 -##笞 -##笠 -##符 -##笨 -##第 -##笹 -##笺 -##笼 -##筆 -##等 -##筊 -##筋 -##筍 -##筏 -##筐 -##筑 -##筒 -##答 -##策 -##筛 -##筝 -##筠 -##筱 -##筲 -##筵 -##筷 -##筹 -##签 -##简 -##箇 -##箋 -##箍 -##箏 -##箐 -##箔 -##箕 -##算 -##箝 -##管 -##箩 -##箫 -##箭 -##箱 -##箴 -##箸 -##節 -##篁 -##範 -##篆 -##篇 -##築 -##篑 -##篓 -##篙 -##篝 -##篠 -##篡 -##篤 -##篩 -##篪 -##篮 -##篱 -##篷 -##簇 -##簌 -##簍 -##簡 -##簦 -##簧 -##簪 -##簫 -##簷 -##簸 -##簽 -##簾 -##簿 -##籁 -##籃 -##籌 -##籍 -##籐 -##籟 -##籠 -##籤 -##籬 -##籮 -##籲 -##米 -##类 -##籼 -##籽 -##粄 -##粉 -##粑 -##粒 -##粕 -##粗 -##粘 -##粟 -##粤 -##粥 -##粧 -##粪 -##粮 -##粱 -##粲 -##粳 -##粵 -##粹 -##粼 -##粽 -##精 -##粿 -##糅 -##糊 -##糍 -##糕 -##糖 -##糗 -##糙 -##糜 -##糞 -##糟 -##糠 -##糧 -##糬 -##糯 -##糰 -##糸 -##系 -##糾 -##紀 -##紂 -##約 -##紅 -##紉 -##紊 -##紋 -##納 -##紐 -##紓 -##純 -##紗 -##紘 -##紙 -##級 -##紛 -##紜 -##素 -##紡 -##索 -##紧 -##紫 -##紮 -##累 -##細 -##紳 -##紹 -##紺 -##終 -##絃 -##組 -##絆 -##経 -##結 -##絕 -##絞 -##絡 -##絢 -##給 -##絨 -##絮 -##統 -##絲 -##絳 -##絵 -##絶 -##絹 -##綁 -##綏 -##綑 -##經 -##継 -##続 -##綜 -##綠 -##綢 -##綦 -##綫 -##綬 -##維 -##綱 -##網 -##綴 -##綵 -##綸 -##綺 -##綻 -##綽 -##綾 -##綿 -##緊 -##緋 -##総 -##緑 -##緒 -##緘 -##線 -##緝 -##緞 -##締 -##緣 -##編 -##緩 -##緬 -##緯 -##練 -##緹 -##緻 -##縁 -##縄 -##縈 -##縛 -##縝 -##縣 -##縫 -##縮 -##縱 -##縴 -##縷 -##總 -##績 -##繁 -##繃 -##繆 -##繇 -##繋 -##織 -##繕 -##繚 -##繞 -##繡 -##繩 -##繪 -##繫 -##繭 -##繳 -##繹 -##繼 -##繽 -##纂 -##續 -##纍 -##纏 -##纓 -##纔 -##纖 -##纜 -##纠 -##红 -##纣 -##纤 -##约 -##级 -##纨 -##纪 -##纫 -##纬 -##纭 -##纯 -##纰 -##纱 -##纲 -##纳 -##纵 -##纶 -##纷 -##纸 -##纹 -##纺 -##纽 -##纾 -##线 -##绀 -##练 -##组 -##绅 -##细 -##织 -##终 -##绊 -##绍 -##绎 -##经 -##绑 -##绒 -##结 -##绔 -##绕 -##绘 -##给 -##绚 -##绛 -##络 -##绝 -##绞 -##统 -##绡 -##绢 -##绣 -##绥 -##绦 -##继 -##绩 -##绪 -##绫 -##续 -##绮 -##绯 -##绰 -##绳 -##维 -##绵 -##绶 -##绷 -##绸 -##绻 -##综 -##绽 -##绾 -##绿 -##缀 -##缄 -##缅 -##缆 -##缇 -##缈 -##缉 -##缎 -##缓 -##缔 -##缕 -##编 -##缘 -##缙 -##缚 -##缜 -##缝 -##缠 -##缢 -##缤 -##缥 -##缨 -##缩 -##缪 -##缭 -##缮 -##缰 -##缱 -##缴 -##缸 -##缺 -##缽 -##罂 -##罄 -##罌 -##罐 -##网 -##罔 -##罕 -##罗 -##罚 -##罡 -##罢 -##罩 -##罪 -##置 -##罰 -##署 -##罵 -##罷 -##罹 -##羁 -##羅 -##羈 -##羊 -##羌 -##美 -##羔 -##羚 -##羞 -##羟 -##羡 -##羣 -##群 -##羥 -##羧 -##羨 -##義 -##羯 -##羲 -##羸 -##羹 -##羽 -##羿 -##翁 -##翅 -##翊 
-##翌 -##翎 -##習 -##翔 -##翘 -##翟 -##翠 -##翡 -##翦 -##翩 -##翰 -##翱 -##翳 -##翹 -##翻 -##翼 -##耀 -##老 -##考 -##耄 -##者 -##耆 -##耋 -##而 -##耍 -##耐 -##耒 -##耕 -##耗 -##耘 -##耙 -##耦 -##耨 -##耳 -##耶 -##耷 -##耸 -##耻 -##耽 -##耿 -##聂 -##聆 -##聊 -##聋 -##职 -##聒 -##联 -##聖 -##聘 -##聚 -##聞 -##聪 -##聯 -##聰 -##聲 -##聳 -##聴 -##聶 -##職 -##聽 -##聾 -##聿 -##肃 -##肄 -##肅 -##肆 -##肇 -##肉 -##肋 -##肌 -##肏 -##肓 -##肖 -##肘 -##肚 -##肛 -##肝 -##肠 -##股 -##肢 -##肤 -##肥 -##肩 -##肪 -##肮 -##肯 -##肱 -##育 -##肴 -##肺 -##肽 -##肾 -##肿 -##胀 -##胁 -##胃 -##胄 -##胆 -##背 -##胍 -##胎 -##胖 -##胚 -##胛 -##胜 -##胝 -##胞 -##胡 -##胤 -##胥 -##胧 -##胫 -##胭 -##胯 -##胰 -##胱 -##胳 -##胴 -##胶 -##胸 -##胺 -##能 -##脂 -##脅 -##脆 -##脇 -##脈 -##脉 -##脊 -##脍 -##脏 -##脐 -##脑 -##脓 -##脖 -##脘 -##脚 -##脛 -##脣 -##脩 -##脫 -##脯 -##脱 -##脲 -##脳 -##脸 -##脹 -##脾 -##腆 -##腈 -##腊 -##腋 -##腌 -##腎 -##腐 -##腑 -##腓 -##腔 -##腕 -##腥 -##腦 -##腩 -##腫 -##腭 -##腮 -##腰 -##腱 -##腳 -##腴 -##腸 -##腹 -##腺 -##腻 -##腼 -##腾 -##腿 -##膀 -##膈 -##膊 -##膏 -##膑 -##膘 -##膚 -##膛 -##膜 -##膝 -##膠 -##膦 -##膨 -##膩 -##膳 -##膺 -##膻 -##膽 -##膾 -##膿 -##臀 -##臂 -##臃 -##臆 -##臉 -##臊 -##臍 -##臓 -##臘 -##臟 -##臣 -##臥 -##臧 -##臨 -##自 -##臬 -##臭 -##至 -##致 -##臺 -##臻 -##臼 -##臾 -##舀 -##舂 -##舅 -##舆 -##與 -##興 -##舉 -##舊 -##舌 -##舍 -##舎 -##舐 -##舒 -##舔 -##舖 -##舗 -##舛 -##舜 -##舞 -##舟 -##航 -##舫 -##般 -##舰 -##舱 -##舵 -##舶 -##舷 -##舸 -##船 -##舺 -##舾 -##艇 -##艋 -##艘 -##艙 -##艦 -##艮 -##良 -##艰 -##艱 -##色 -##艳 -##艷 -##艹 -##艺 -##艾 -##节 -##芃 -##芈 -##芊 -##芋 -##芍 -##芎 -##芒 -##芙 -##芜 -##芝 -##芡 -##芥 -##芦 -##芩 -##芪 -##芫 -##芬 -##芭 -##芮 -##芯 -##花 -##芳 -##芷 -##芸 -##芹 -##芻 -##芽 -##芾 -##苁 -##苄 -##苇 -##苋 -##苍 -##苏 -##苑 -##苒 -##苓 -##苔 -##苕 -##苗 -##苛 -##苜 -##苞 -##苟 -##苡 -##苣 -##若 -##苦 -##苫 -##苯 -##英 -##苷 -##苹 -##苻 -##茁 -##茂 -##范 -##茄 -##茅 -##茉 -##茎 -##茏 -##茗 -##茜 -##茧 -##茨 -##茫 -##茬 -##茭 -##茯 -##茱 -##茲 -##茴 -##茵 -##茶 -##茸 -##茹 -##茼 -##荀 -##荃 -##荆 -##草 -##荊 -##荏 -##荐 -##荒 -##荔 -##荖 -##荘 -##荚 -##荞 -##荟 -##荠 -##荡 -##荣 -##荤 -##荥 -##荧 -##荨 -##荪 -##荫 -##药 -##荳 -##荷 -##荸 -##荻 -##荼 -##荽 -##莅 -##莆 -##莉 -##莊 -##莎 -##莒 -##莓 -##莖 -##莘 -##莞 -##莠 -##莢 -##莧 -##莪 -##莫 -##莱 -##莲 -##莴 -##获 -##莹 -##莺 -##莽 -##莿 -##菀 -##菁 -##菅 -##菇 -##菈 -##菊 -##菌 -##菏 -##菓 -##菖 -##菘 -##菜 -##菟 -##菠 -##菡 -##菩 -##華 -##菱 -##菲 -##菸 -##菽 -##萁 -##萃 -##萄 -##萊 -##萋 -##萌 -##萍 -##萎 -##萘 -##萝 -##萤 -##营 -##萦 -##萧 -##萨 -##萩 -##萬 -##萱 -##萵 -##萸 -##萼 -##落 -##葆 -##葉 -##著 -##葚 -##葛 -##葡 -##董 -##葦 -##葩 -##葫 -##葬 -##葭 -##葯 -##葱 -##葳 -##葵 -##葷 -##葺 -##蒂 -##蒋 -##蒐 -##蒔 -##蒙 -##蒜 -##蒞 -##蒟 -##蒡 -##蒨 -##蒲 -##蒸 -##蒹 -##蒻 -##蒼 -##蒿 -##蓁 -##蓄 -##蓆 -##蓉 -##蓋 -##蓑 -##蓓 -##蓖 -##蓝 -##蓟 -##蓦 -##蓬 -##蓮 -##蓼 -##蓿 -##蔑 -##蔓 -##蔔 -##蔗 -##蔘 -##蔚 -##蔡 -##蔣 -##蔥 -##蔫 -##蔬 -##蔭 -##蔵 -##蔷 -##蔺 -##蔻 -##蔼 -##蔽 -##蕁 -##蕃 -##蕈 -##蕉 -##蕊 -##蕎 -##蕙 -##蕤 -##蕨 -##蕩 -##蕪 -##蕭 -##蕲 -##蕴 -##蕻 -##蕾 -##薄 -##薅 -##薇 -##薈 -##薊 -##薏 -##薑 -##薔 -##薙 -##薛 -##薦 -##薨 -##薩 -##薪 -##薬 -##薯 -##薰 -##薹 -##藉 -##藍 -##藏 -##藐 -##藓 -##藕 -##藜 -##藝 -##藤 -##藥 -##藩 -##藹 -##藻 -##藿 -##蘆 -##蘇 -##蘊 -##蘋 -##蘑 -##蘚 -##蘭 -##蘸 -##蘼 -##蘿 -##虎 -##虏 -##虐 -##虑 -##虔 -##處 -##虚 -##虛 -##虜 -##虞 -##號 -##虢 -##虧 -##虫 -##虬 -##虱 -##虹 -##虻 -##虽 -##虾 -##蚀 -##蚁 -##蚂 -##蚊 -##蚌 -##蚓 -##蚕 -##蚜 -##蚝 -##蚣 -##蚤 -##蚩 -##蚪 -##蚯 -##蚱 -##蚵 -##蛀 -##蛆 -##蛇 -##蛊 -##蛋 -##蛎 -##蛐 -##蛔 -##蛙 -##蛛 -##蛟 -##蛤 -##蛭 -##蛮 -##蛰 -##蛳 -##蛹 -##蛻 -##蛾 -##蜀 -##蜂 -##蜃 -##蜆 -##蜇 -##蜈 -##蜊 -##蜍 -##蜒 -##蜓 -##蜕 -##蜗 -##蜘 -##蜚 -##蜜 -##蜡 -##蜢 -##蜥 -##蜱 -##蜴 -##蜷 -##蜻 -##蜿 -##蝇 -##蝈 -##蝉 -##蝌 -##蝎 -##蝕 -##蝗 -##蝙 -##蝟 -##蝠 -##蝦 -##蝨 -##蝴 -##蝶 -##蝸 -##蝼 -##螂 -##螃 -##融 -##螞 -##螢 -##螨 -##螯 -##螳 -##螺 -##蟀 -##蟄 -##蟆 -##蟋 -##蟎 -##蟑 -##蟒 -##蟠 -##蟬 -##蟲 -##蟹 -##蟻 -##蟾 -##蠅 -##蠍 -##蠔 -##蠕 -##蠛 -##蠟 -##蠡 -##蠢 -##蠣 -##蠱 -##蠶 -##蠹 -##蠻 -##血 -##衄 -##衅 -##衆 -##行 -##衍 -##術 -##衔 -##街 -##衙 -##衛 -##衝 -##衞 -##衡 -##衢 -##衣 
-##补 -##表 -##衩 -##衫 -##衬 -##衮 -##衰 -##衲 -##衷 -##衹 -##衾 -##衿 -##袁 -##袂 -##袄 -##袅 -##袈 -##袋 -##袍 -##袒 -##袖 -##袜 -##袞 -##袤 -##袪 -##被 -##袭 -##袱 -##裁 -##裂 -##装 -##裆 -##裊 -##裏 -##裔 -##裕 -##裘 -##裙 -##補 -##裝 -##裟 -##裡 -##裤 -##裨 -##裱 -##裳 -##裴 -##裸 -##裹 -##製 -##裾 -##褂 -##複 -##褐 -##褒 -##褓 -##褔 -##褚 -##褥 -##褪 -##褫 -##褲 -##褶 -##褻 -##襁 -##襄 -##襟 -##襠 -##襪 -##襬 -##襯 -##襲 -##西 -##要 -##覃 -##覆 -##覇 -##見 -##規 -##覓 -##視 -##覚 -##覦 -##覧 -##親 -##覬 -##観 -##覷 -##覺 -##覽 -##觀 -##见 -##观 -##规 -##觅 -##视 -##览 -##觉 -##觊 -##觎 -##觐 -##觑 -##角 -##觞 -##解 -##觥 -##触 -##觸 -##言 -##訂 -##計 -##訊 -##討 -##訓 -##訕 -##訖 -##託 -##記 -##訛 -##訝 -##訟 -##訣 -##訥 -##訪 -##設 -##許 -##訳 -##訴 -##訶 -##診 -##註 -##証 -##詆 -##詐 -##詔 -##評 -##詛 -##詞 -##詠 -##詡 -##詢 -##詣 -##試 -##詩 -##詫 -##詬 -##詭 -##詮 -##詰 -##話 -##該 -##詳 -##詹 -##詼 -##誅 -##誇 -##誉 -##誌 -##認 -##誓 -##誕 -##誘 -##語 -##誠 -##誡 -##誣 -##誤 -##誥 -##誦 -##誨 -##說 -##説 -##読 -##誰 -##課 -##誹 -##誼 -##調 -##諄 -##談 -##請 -##諏 -##諒 -##論 -##諗 -##諜 -##諡 -##諦 -##諧 -##諫 -##諭 -##諮 -##諱 -##諳 -##諷 -##諸 -##諺 -##諾 -##謀 -##謁 -##謂 -##謄 -##謊 -##謎 -##謐 -##謔 -##謗 -##謙 -##講 -##謝 -##謠 -##謨 -##謬 -##謹 -##謾 -##譁 -##證 -##譎 -##譏 -##識 -##譙 -##譚 -##譜 -##警 -##譬 -##譯 -##議 -##譲 -##譴 -##護 -##譽 -##讀 -##變 -##讓 -##讚 -##讞 -##计 -##订 -##认 -##讥 -##讧 -##讨 -##让 -##讪 -##讫 -##训 -##议 -##讯 -##记 -##讲 -##讳 -##讴 -##讶 -##讷 -##许 -##讹 -##论 -##讼 -##讽 -##设 -##访 -##诀 -##证 -##诃 -##评 -##诅 -##识 -##诈 -##诉 -##诊 -##诋 -##词 -##诏 -##译 -##试 -##诗 -##诘 -##诙 -##诚 -##诛 -##话 -##诞 -##诟 -##诠 -##诡 -##询 -##诣 -##诤 -##该 -##详 -##诧 -##诩 -##诫 -##诬 -##语 -##误 -##诰 -##诱 -##诲 -##说 -##诵 -##诶 -##请 -##诸 -##诺 -##读 -##诽 -##课 -##诿 -##谀 -##谁 -##调 -##谄 -##谅 -##谆 -##谈 -##谊 -##谋 -##谌 -##谍 -##谎 -##谏 -##谐 -##谑 -##谒 -##谓 -##谔 -##谕 -##谗 -##谘 -##谙 -##谚 -##谛 -##谜 -##谟 -##谢 -##谣 -##谤 -##谥 -##谦 -##谧 -##谨 -##谩 -##谪 -##谬 -##谭 -##谯 -##谱 -##谲 -##谴 -##谶 -##谷 -##豁 -##豆 -##豇 -##豈 -##豉 -##豊 -##豌 -##豎 -##豐 -##豔 -##豚 -##象 -##豢 -##豪 -##豫 -##豬 -##豹 -##豺 -##貂 -##貅 -##貌 -##貓 -##貔 -##貘 -##貝 -##貞 -##負 -##財 -##貢 -##貧 -##貨 -##販 -##貪 -##貫 -##責 -##貯 -##貰 -##貳 -##貴 -##貶 -##買 -##貸 -##費 -##貼 -##貽 -##貿 -##賀 -##賁 -##賂 -##賃 -##賄 -##資 -##賈 -##賊 -##賑 -##賓 -##賜 -##賞 -##賠 -##賡 -##賢 -##賣 -##賤 -##賦 -##質 -##賬 -##賭 -##賴 -##賺 -##購 -##賽 -##贅 -##贈 -##贊 -##贍 -##贏 -##贓 -##贖 -##贛 -##贝 -##贞 -##负 -##贡 -##财 -##责 -##贤 -##败 -##账 -##货 -##质 -##贩 -##贪 -##贫 -##贬 -##购 -##贮 -##贯 -##贰 -##贱 -##贲 -##贴 -##贵 -##贷 -##贸 -##费 -##贺 -##贻 -##贼 -##贾 -##贿 -##赁 -##赂 -##赃 -##资 -##赅 -##赈 -##赊 -##赋 -##赌 -##赎 -##赏 -##赐 -##赓 -##赔 -##赖 -##赘 -##赚 -##赛 -##赝 -##赞 -##赠 -##赡 -##赢 -##赣 -##赤 -##赦 -##赧 -##赫 -##赭 -##走 -##赳 -##赴 -##赵 -##赶 -##起 -##趁 -##超 -##越 -##趋 -##趕 -##趙 -##趟 -##趣 -##趨 -##足 -##趴 -##趵 -##趸 -##趺 -##趾 -##跃 -##跄 -##跆 -##跋 -##跌 -##跎 -##跑 -##跖 -##跚 -##跛 -##距 -##跟 -##跡 -##跤 -##跨 -##跩 -##跪 -##路 -##跳 -##践 -##跷 -##跹 -##跺 -##跻 -##踉 -##踊 -##踌 -##踏 -##踐 -##踝 -##踞 -##踟 -##踢 -##踩 -##踪 -##踮 -##踱 -##踴 -##踵 -##踹 -##蹂 -##蹄 -##蹇 -##蹈 -##蹉 -##蹊 -##蹋 -##蹑 -##蹒 -##蹙 -##蹟 -##蹣 -##蹤 -##蹦 -##蹩 -##蹬 -##蹭 -##蹲 -##蹴 -##蹶 -##蹺 -##蹼 -##蹿 -##躁 -##躇 -##躉 -##躊 -##躋 -##躍 -##躏 -##躪 -##身 -##躬 -##躯 -##躲 -##躺 -##軀 -##車 -##軋 -##軌 -##軍 -##軒 -##軟 -##転 -##軸 -##軼 -##軽 -##軾 -##較 -##載 -##輒 -##輓 -##輔 -##輕 -##輛 -##輝 -##輟 -##輩 -##輪 -##輯 -##輸 -##輻 -##輾 -##輿 -##轄 -##轅 -##轆 -##轉 -##轍 -##轎 -##轟 -##车 -##轧 -##轨 -##轩 -##转 -##轭 -##轮 -##软 -##轰 -##轲 -##轴 -##轶 -##轻 -##轼 -##载 -##轿 -##较 -##辄 -##辅 -##辆 -##辇 -##辈 -##辉 -##辊 -##辍 -##辐 -##辑 -##输 -##辕 -##辖 -##辗 -##辘 -##辙 -##辛 -##辜 -##辞 -##辟 -##辣 -##辦 -##辨 -##辩 -##辫 -##辭 -##辮 -##辯 -##辰 -##辱 -##農 -##边 -##辺 -##辻 -##込 -##辽 -##达 -##迁 -##迂 -##迄 -##迅 -##过 -##迈 -##迎 -##运 -##近 -##返 -##还 -##这 -##进 -##远 -##违 -##连 -##迟 -##迢 -##迤 -##迥 -##迦 -##迩 -##迪 -##迫 -##迭 -##述 -##迴 -##迷 -##迸 -##迹 -##迺 -##追 -##退 -##送 
-##适 -##逃 -##逅 -##逆 -##选 -##逊 -##逍 -##透 -##逐 -##递 -##途 -##逕 -##逗 -##這 -##通 -##逛 -##逝 -##逞 -##速 -##造 -##逢 -##連 -##逮 -##週 -##進 -##逵 -##逶 -##逸 -##逻 -##逼 -##逾 -##遁 -##遂 -##遅 -##遇 -##遊 -##運 -##遍 -##過 -##遏 -##遐 -##遑 -##遒 -##道 -##達 -##違 -##遗 -##遙 -##遛 -##遜 -##遞 -##遠 -##遢 -##遣 -##遥 -##遨 -##適 -##遭 -##遮 -##遲 -##遴 -##遵 -##遶 -##遷 -##選 -##遺 -##遼 -##遽 -##避 -##邀 -##邁 -##邂 -##邃 -##還 -##邇 -##邈 -##邊 -##邋 -##邏 -##邑 -##邓 -##邕 -##邛 -##邝 -##邢 -##那 -##邦 -##邨 -##邪 -##邬 -##邮 -##邯 -##邰 -##邱 -##邳 -##邵 -##邸 -##邹 -##邺 -##邻 -##郁 -##郅 -##郊 -##郎 -##郑 -##郜 -##郝 -##郡 -##郢 -##郤 -##郦 -##郧 -##部 -##郫 -##郭 -##郴 -##郵 -##郷 -##郸 -##都 -##鄂 -##鄉 -##鄒 -##鄔 -##鄙 -##鄞 -##鄢 -##鄧 -##鄭 -##鄰 -##鄱 -##鄲 -##鄺 -##酉 -##酊 -##酋 -##酌 -##配 -##酐 -##酒 -##酗 -##酚 -##酝 -##酢 -##酣 -##酥 -##酩 -##酪 -##酬 -##酮 -##酯 -##酰 -##酱 -##酵 -##酶 -##酷 -##酸 -##酿 -##醃 -##醇 -##醉 -##醋 -##醍 -##醐 -##醒 -##醚 -##醛 -##醜 -##醞 -##醣 -##醪 -##醫 -##醬 -##醮 -##醯 -##醴 -##醺 -##釀 -##釁 -##采 -##釉 -##释 -##釋 -##里 -##重 -##野 -##量 -##釐 -##金 -##釗 -##釘 -##釜 -##針 -##釣 -##釦 -##釧 -##釵 -##鈀 -##鈉 -##鈍 -##鈎 -##鈔 -##鈕 -##鈞 -##鈣 -##鈦 -##鈪 -##鈴 -##鈺 -##鈾 -##鉀 -##鉄 -##鉅 -##鉉 -##鉑 -##鉗 -##鉚 -##鉛 -##鉤 -##鉴 -##鉻 -##銀 -##銃 -##銅 -##銑 -##銓 -##銖 -##銘 -##銜 -##銬 -##銭 -##銮 -##銳 -##銷 -##銹 -##鋁 -##鋅 -##鋒 -##鋤 -##鋪 -##鋰 -##鋸 -##鋼 -##錄 -##錐 -##錘 -##錚 -##錠 -##錢 -##錦 -##錨 -##錫 -##錮 -##錯 -##録 -##錳 -##錶 -##鍊 -##鍋 -##鍍 -##鍛 -##鍥 -##鍰 -##鍵 -##鍺 -##鍾 -##鎂 -##鎊 -##鎌 -##鎏 -##鎔 -##鎖 -##鎗 -##鎚 -##鎧 -##鎬 -##鎮 -##鎳 -##鏈 -##鏖 -##鏗 -##鏘 -##鏞 -##鏟 -##鏡 -##鏢 -##鏤 -##鏽 -##鐘 -##鐮 -##鐲 -##鐳 -##鐵 -##鐸 -##鐺 -##鑄 -##鑊 -##鑑 -##鑒 -##鑣 -##鑫 -##鑰 -##鑲 -##鑼 -##鑽 -##鑾 -##鑿 -##针 -##钉 -##钊 -##钎 -##钏 -##钒 -##钓 -##钗 -##钙 -##钛 -##钜 -##钝 -##钞 -##钟 -##钠 -##钡 -##钢 -##钣 -##钤 -##钥 -##钦 -##钧 -##钨 -##钩 -##钮 -##钯 -##钰 -##钱 -##钳 -##钴 -##钵 -##钺 -##钻 -##钼 -##钾 -##钿 -##铀 -##铁 -##铂 -##铃 -##铄 -##铅 -##铆 -##铉 -##铎 -##铐 -##铛 -##铜 -##铝 -##铠 -##铡 -##铢 -##铣 -##铤 -##铨 -##铩 -##铬 -##铭 -##铮 -##铰 -##铲 -##铵 -##银 -##铸 -##铺 -##链 -##铿 -##销 -##锁 -##锂 -##锄 -##锅 -##锆 -##锈 -##锉 -##锋 -##锌 -##锏 -##锐 -##锑 -##错 -##锚 -##锟 -##锡 -##锢 -##锣 -##锤 -##锥 -##锦 -##锭 -##键 -##锯 -##锰 -##锲 -##锵 -##锹 -##锺 -##锻 -##镀 -##镁 -##镂 -##镇 -##镉 -##镌 -##镍 -##镐 -##镑 -##镕 -##镖 -##镗 -##镛 -##镜 -##镣 -##镭 -##镯 -##镰 -##镳 -##镶 -##長 -##长 -##門 -##閃 -##閉 -##開 -##閎 -##閏 -##閑 -##閒 -##間 -##閔 -##閘 -##閡 -##関 -##閣 -##閥 -##閨 -##閩 -##閱 -##閲 -##閹 -##閻 -##閾 -##闆 -##闇 -##闊 -##闌 -##闍 -##闔 -##闕 -##闖 -##闘 -##關 -##闡 -##闢 -##门 -##闪 -##闫 -##闭 -##问 -##闯 -##闰 -##闲 -##间 -##闵 -##闷 -##闸 -##闹 -##闺 -##闻 -##闽 -##闾 -##阀 -##阁 -##阂 -##阅 -##阆 -##阇 -##阈 -##阉 -##阎 -##阐 -##阑 -##阔 -##阕 -##阖 -##阙 -##阚 -##阜 -##队 -##阡 -##阪 -##阮 -##阱 -##防 -##阳 -##阴 -##阵 -##阶 -##阻 -##阿 -##陀 -##陂 -##附 -##际 -##陆 -##陇 -##陈 -##陋 -##陌 -##降 -##限 -##陕 -##陛 -##陝 -##陞 -##陟 -##陡 -##院 -##陣 -##除 -##陨 -##险 -##陪 -##陰 -##陲 -##陳 -##陵 -##陶 -##陷 -##陸 -##険 -##陽 -##隅 -##隆 -##隈 -##隊 -##隋 -##隍 -##階 -##随 -##隐 -##隔 -##隕 -##隘 -##隙 -##際 -##障 -##隠 -##隣 -##隧 -##隨 -##險 -##隱 -##隴 -##隶 -##隸 -##隻 -##隼 -##隽 -##难 -##雀 -##雁 -##雄 -##雅 -##集 -##雇 -##雉 -##雋 -##雌 -##雍 -##雎 -##雏 -##雑 -##雒 -##雕 -##雖 -##雙 -##雛 -##雜 -##雞 -##離 -##難 -##雨 -##雪 -##雯 -##雰 -##雲 -##雳 -##零 -##雷 -##雹 -##電 -##雾 -##需 -##霁 -##霄 -##霆 -##震 -##霈 -##霉 -##霊 -##霍 -##霎 -##霏 -##霑 -##霓 -##霖 -##霜 -##霞 -##霧 -##霭 -##霰 -##露 -##霸 -##霹 -##霽 -##霾 -##靂 -##靄 -##靈 -##青 -##靓 -##靖 -##静 -##靚 -##靛 -##靜 -##非 -##靠 -##靡 -##面 -##靥 -##靦 -##革 -##靳 -##靴 -##靶 -##靼 -##鞅 -##鞋 -##鞍 -##鞏 -##鞑 -##鞘 -##鞠 -##鞣 -##鞦 -##鞭 -##韆 -##韋 -##韌 -##韓 -##韜 -##韦 -##韧 -##韩 -##韬 -##韭 -##音 -##韵 -##韶 -##韻 -##響 -##頁 -##頂 -##頃 -##項 -##順 -##須 -##頌 -##預 -##頑 -##頒 -##頓 -##頗 -##領 -##頜 -##頡 -##頤 -##頫 -##頭 -##頰 -##頷 -##頸 -##頹 -##頻 -##頼 -##顆 -##題 -##額 -##顎 -##顏 -##顔 -##願 -##顛 -##類 -##顧 -##顫 -##顯 -##顱 -##顴 -##页 -##顶 -##顷 
-##项 -##顺 -##须 -##顼 -##顽 -##顾 -##顿 -##颁 -##颂 -##预 -##颅 -##领 -##颇 -##颈 -##颉 -##颊 -##颌 -##颍 -##颐 -##频 -##颓 -##颔 -##颖 -##颗 -##题 -##颚 -##颛 -##颜 -##额 -##颞 -##颠 -##颡 -##颢 -##颤 -##颦 -##颧 -##風 -##颯 -##颱 -##颳 -##颶 -##颼 -##飄 -##飆 -##风 -##飒 -##飓 -##飕 -##飘 -##飙 -##飚 -##飛 -##飞 -##食 -##飢 -##飨 -##飩 -##飪 -##飯 -##飲 -##飼 -##飽 -##飾 -##餃 -##餅 -##餉 -##養 -##餌 -##餐 -##餒 -##餓 -##餘 -##餚 -##餛 -##餞 -##餡 -##館 -##餮 -##餵 -##餾 -##饅 -##饈 -##饋 -##饌 -##饍 -##饑 -##饒 -##饕 -##饗 -##饞 -##饥 -##饨 -##饪 -##饬 -##饭 -##饮 -##饯 -##饰 -##饱 -##饲 -##饴 -##饵 -##饶 -##饷 -##饺 -##饼 -##饽 -##饿 -##馀 -##馁 -##馄 -##馅 -##馆 -##馈 -##馋 -##馍 -##馏 -##馒 -##馔 -##首 -##馗 -##香 -##馥 -##馨 -##馬 -##馭 -##馮 -##馳 -##馴 -##駁 -##駄 -##駅 -##駆 -##駐 -##駒 -##駕 -##駛 -##駝 -##駭 -##駱 -##駿 -##騁 -##騎 -##騏 -##験 -##騙 -##騨 -##騰 -##騷 -##驀 -##驅 -##驊 -##驍 -##驒 -##驕 -##驗 -##驚 -##驛 -##驟 -##驢 -##驥 -##马 -##驭 -##驮 -##驯 -##驰 -##驱 -##驳 -##驴 -##驶 -##驷 -##驸 -##驹 -##驻 -##驼 -##驾 -##驿 -##骁 -##骂 -##骄 -##骅 -##骆 -##骇 -##骈 -##骊 -##骋 -##验 -##骏 -##骐 -##骑 -##骗 -##骚 -##骛 -##骜 -##骞 -##骠 -##骡 -##骤 -##骥 -##骧 -##骨 -##骯 -##骰 -##骶 -##骷 -##骸 -##骼 -##髂 -##髅 -##髋 -##髏 -##髒 -##髓 -##體 -##髖 -##高 -##髦 -##髪 -##髮 -##髯 -##髻 -##鬃 -##鬆 -##鬍 -##鬓 -##鬚 -##鬟 -##鬢 -##鬣 -##鬥 -##鬧 -##鬱 -##鬼 -##魁 -##魂 -##魄 -##魅 -##魇 -##魍 -##魏 -##魔 -##魘 -##魚 -##魯 -##魷 -##鮑 -##鮨 -##鮪 -##鮭 -##鮮 -##鯉 -##鯊 -##鯖 -##鯛 -##鯨 -##鯰 -##鯽 -##鰍 -##鰓 -##鰭 -##鰲 -##鰻 -##鰾 -##鱈 -##鱉 -##鱔 -##鱗 -##鱷 -##鱸 -##鱼 -##鱿 -##鲁 -##鲈 -##鲍 -##鲑 -##鲛 -##鲜 -##鲟 -##鲢 -##鲤 -##鲨 -##鲫 -##鲱 -##鲲 -##鲶 -##鲷 -##鲸 -##鳃 -##鳄 -##鳅 -##鳌 -##鳍 -##鳕 -##鳖 -##鳗 -##鳝 -##鳞 -##鳥 -##鳩 -##鳳 -##鳴 -##鳶 -##鴉 -##鴕 -##鴛 -##鴦 -##鴨 -##鴻 -##鴿 -##鵑 -##鵜 -##鵝 -##鵡 -##鵬 -##鵰 -##鵲 -##鶘 -##鶩 -##鶯 -##鶴 -##鷗 -##鷲 -##鷹 -##鷺 -##鸚 -##鸞 -##鸟 -##鸠 -##鸡 -##鸢 -##鸣 -##鸥 -##鸦 -##鸨 -##鸪 -##鸭 -##鸯 -##鸳 -##鸵 -##鸽 -##鸾 -##鸿 -##鹂 -##鹃 -##鹄 -##鹅 -##鹈 -##鹉 -##鹊 -##鹌 -##鹏 -##鹑 -##鹕 -##鹘 -##鹜 -##鹞 -##鹤 -##鹦 -##鹧 -##鹫 -##鹭 -##鹰 -##鹳 -##鹵 -##鹹 -##鹼 -##鹽 -##鹿 -##麂 -##麋 -##麒 -##麓 -##麗 -##麝 -##麟 -##麥 -##麦 -##麩 -##麴 -##麵 -##麸 -##麺 -##麻 -##麼 -##麽 -##麾 -##黃 -##黄 -##黍 -##黎 -##黏 -##黑 -##黒 -##黔 -##默 -##黛 -##黜 -##黝 -##點 -##黠 -##黨 -##黯 -##黴 -##鼋 -##鼎 -##鼐 -##鼓 -##鼠 -##鼬 -##鼹 -##鼻 -##鼾 -##齁 -##齊 -##齋 -##齐 -##齒 -##齡 -##齢 -##齣 -##齦 -##齿 -##龄 -##龅 -##龈 -##龊 -##龋 -##龌 -##龍 -##龐 -##龔 -##龕 -##龙 -##龚 -##龛 -##龜 -##龟 -##︰ -##︱ -##︶ -##︿ -##﹁ -##﹂ -##﹍ -##﹏ -##﹐ -##﹑ -##﹒ -##﹔ -##﹕ -##﹖ -##﹗ -##﹙ -##﹚ -##﹝ -##﹞ -##﹡ -##﹣ -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##, -##- -##. -##/ -##: -##; -##< -##? 
-##@ -##[ -##\ -##] -##^ -##_ -##` -##f -##h -##j -##u -##w -##z -##{ -##} -##。 -##「 -##」 -##、 -##・ -##ッ -##ー -##イ -##ク -##シ -##ス -##ト -##ノ -##フ -##ラ -##ル -##ン -##゙ -##゚ -## ̄ -##¥ -##👍 -##🔥 -##😂 -##😎 diff --git a/TensorFlow/built-in/nlp/Bert-NER_ID0797_for_TensorFlow/cased_L-12_H-768_A-12/download_from_bert_first.txt b/TensorFlow/built-in/nlp/Bert-NER_ID0797_for_TensorFlow/cased_L-12_H-768_A-12/download_from_bert_first.txt deleted file mode 100644 index 2ea941cc79a6f3d7985ca6991ef4f67dad62af04..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/nlp/Bert-NER_ID0797_for_TensorFlow/cased_L-12_H-768_A-12/download_from_bert_first.txt +++ /dev/null @@ -1,28996 +0,0 @@ -[PAD] -[unused1] -[unused2] -[unused3] -[unused4] -[unused5] -[unused6] -[unused7] -[unused8] -[unused9] -[unused10] -[unused11] -[unused12] -[unused13] -[unused14] -[unused15] -[unused16] -[unused17] -[unused18] -[unused19] -[unused20] -[unused21] -[unused22] -[unused23] -[unused24] -[unused25] -[unused26] -[unused27] -[unused28] -[unused29] -[unused30] -[unused31] -[unused32] -[unused33] -[unused34] -[unused35] -[unused36] -[unused37] -[unused38] -[unused39] -[unused40] -[unused41] -[unused42] -[unused43] -[unused44] -[unused45] -[unused46] -[unused47] -[unused48] -[unused49] -[unused50] -[unused51] -[unused52] -[unused53] -[unused54] -[unused55] -[unused56] -[unused57] -[unused58] -[unused59] -[unused60] -[unused61] -[unused62] -[unused63] -[unused64] -[unused65] -[unused66] -[unused67] -[unused68] -[unused69] -[unused70] -[unused71] -[unused72] -[unused73] -[unused74] -[unused75] -[unused76] -[unused77] -[unused78] -[unused79] -[unused80] -[unused81] -[unused82] -[unused83] -[unused84] -[unused85] -[unused86] -[unused87] -[unused88] -[unused89] -[unused90] -[unused91] -[unused92] -[unused93] -[unused94] -[unused95] -[unused96] -[unused97] -[unused98] -[unused99] -[UNK] -[CLS] -[SEP] -[MASK] -[unused100] -[unused101] -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -: -; -< -= -> -? 
-@ -A -B -C -D -E -F -G -H -I -J -K -L -M -N -O -P -Q -R -S -T -U -V -W -X -Y -Z -[ -\ -] -^ -_ -` -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -{ -| -} -~ -¡ -¢ -£ -¥ -§ -¨ -© -ª -« -¬ -® -° -± -² -³ -´ -µ -¶ -· -¹ -º -» -¼ -½ -¾ -¿ -À -Á - -Ä -Å -Æ -Ç -È -É -Í -Î -Ñ -Ó -Ö -× -Ø -Ú -Ü -Þ -ß -à -á -â -ã -ä -å -æ -ç -è -é -ê -ë -ì -í -î -ï -ð -ñ -ò -ó -ô -õ -ö -÷ -ø -ù -ú -û -ü -ý -þ -ÿ -Ā -ā -ă -ą -Ć -ć -Č -č -ď -Đ -đ -ē -ė -ę -ě -ğ -ġ -Ħ -ħ -ĩ -Ī -ī -İ -ı -ļ -Ľ -ľ -Ł -ł -ń -ņ -ň -ŋ -Ō -ō -ŏ -ő -Œ -œ -ř -Ś -ś -Ş -ş -Š -š -Ţ -ţ -ť -ũ -ū -ŭ -ů -ű -ų -ŵ -ŷ -ź -Ż -ż -Ž -ž -Ə -ƒ -ơ -ư -ǎ -ǐ -ǒ -ǔ -ǫ -Ș -ș -Ț -ț -ɐ -ɑ -ɔ -ɕ -ə -ɛ -ɡ -ɣ -ɨ -ɪ -ɲ -ɾ -ʀ -ʁ -ʂ -ʃ -ʊ -ʋ -ʌ -ʐ -ʑ -ʒ -ʔ -ʰ -ʲ -ʳ -ʷ -ʻ -ʼ -ʾ -ʿ -ˈ -ː -ˡ -ˢ -ˣ -́ -̃ -̍ -̯ -͡ -Α -Β -Γ -Δ -Ε -Η -Θ -Ι -Κ -Λ -Μ -Ν -Ο -Π -Σ -Τ -Φ -Χ -Ψ -Ω -ά -έ -ή -ί -α -β -γ -δ -ε -ζ -η -θ -ι -κ -λ -μ -ν -ξ -ο -π -ρ -ς -σ -τ -υ -φ -χ -ψ -ω -ό -ύ -ώ -І -Ј -А -Б -В -Г -Д -Е -Ж -З -И -К -Л -М -Н -О -П -Р -С -Т -У -Ф -Х -Ц -Ч -Ш -Э -Ю -Я -а -б -в -г -д -е -ж -з -и -й -к -л -м -н -о -п -р -с -т -у -ф -х -ц -ч -ш -щ -ъ -ы -ь -э -ю -я -ё -і -ї -ј -њ -ћ -Ա -Հ -ա -ե -ի -կ -մ -յ -ն -ո -ս -տ -ր -ւ -ְ -ִ -ֵ -ֶ -ַ -ָ -ֹ -ּ -א -ב -ג -ד -ה -ו -ז -ח -ט -י -כ -ל -ם -מ -ן -נ -ס -ע -פ -צ -ק -ר -ש -ת -، -ء -آ -أ -إ -ئ -ا -ب -ة -ت -ث -ج -ح -خ -د -ذ -ر -ز -س -ش -ص -ض -ط -ظ -ع -غ -ف -ق -ك -ل -م -ن -ه -و -ى -ي -َ -ِ -ٹ -پ -چ -ک -گ -ہ -ی -ے -ं -आ -क -ग -च -ज -ण -त -द -ध -न -प -ब -भ -म -य -र -ल -व -श -ष -स -ह -ा -ि -ी -ु -े -ो -् -। -॥ -আ -ই -এ -ও -ক -খ -গ -চ -ছ -জ -ট -ত -থ -দ -ধ -ন -প -ব -ম -য -র -ল -শ -স -হ -় -া -ি -ী -ু -ে -ো -্ -য় -க -த -ப -ம -ய -ர -ல -வ -ா -ி -ு -் -ร -་ -ག -ང -ད -ན -བ -མ -ར -ལ -ས -ི -ུ -ེ -ོ -ა -ე -ი -ლ -ნ -ო -რ -ს -ᴬ -ᴵ -ᵀ -ᵃ -ᵇ -ᵈ -ᵉ -ᵍ -ᵏ -ᵐ -ᵒ -ᵖ -ᵗ -ᵘ -ᵢ -ᵣ -ᵤ -ᵥ -ᶜ -ᶠ -ḍ -Ḥ -ḥ -Ḩ -ḩ -ḳ -ṃ -ṅ -ṇ -ṛ -ṣ -ṭ -ạ -ả -ấ -ầ -ẩ -ậ -ắ -ế -ề -ể -ễ -ệ -ị -ọ -ố -ồ -ổ -ộ -ớ -ờ -ợ -ụ -ủ -ứ -ừ -ử -ữ -ự -ỳ -ỹ -ἀ -ἐ -ὁ -ὐ -ὰ -ὶ -ὸ -ῆ -ῖ -ῦ -ῶ -‐ -‑ -‒ -– -— -― -‖ -‘ -’ -‚ -“ -” -„ -† -‡ -• -… -‰ -′ -″ -⁄ -⁰ -ⁱ -⁴ -⁵ -⁶ -⁷ -⁸ -⁹ -⁺ -⁻ -ⁿ -₀ -₁ -₂ -₃ -₄ -₅ -₆ -₇ -₈ -₉ -₊ -₍ -₎ -ₐ -ₑ -ₒ -ₓ -ₕ -ₖ -ₘ -ₙ -ₚ -ₛ -ₜ -₤ -€ -₱ -₹ -ℓ -№ -ℝ -⅓ -← -↑ -→ -↔ -⇌ -⇒ -∂ -∈ -− -∗ -∘ -√ -∞ -∧ -∨ -∩ -∪ -≈ -≠ -≡ -≤ -≥ -⊂ -⊆ -⊕ -⋅ -─ -│ -■ -● -★ -☆ -☉ -♠ -♣ -♥ -♦ -♭ -♯ -⟨ -⟩ -ⱼ -、 -。 -《 -》 -「 -」 -『 -』 -〜 -い -う -え -お -か -き -く -け -こ -さ -し -す -せ -そ -た -ち -つ -て -と -な -に -の -は -ひ -ま -み -む -め -も -や -ゆ -よ -ら -り -る -れ -ん -ア -ィ -イ -ウ -エ -オ -カ -ガ -キ -ク -グ -コ -サ -シ -ジ -ス -ズ -タ -ダ -ッ -テ -デ -ト -ド -ナ -ニ -ハ -バ -パ -フ -ブ -プ -マ -ミ -ム -ャ -ュ -ラ -リ -ル -レ -ロ -ン -・ -ー -一 -三 -上 -下 -中 -事 -二 -井 -京 -人 -亻 -仁 -佐 -侍 -光 -公 -力 -北 -十 -南 -原 -口 -史 -司 -吉 -同 -和 -囗 -国 -國 -土 -城 -士 -大 -天 -太 -夫 -女 -子 -宀 -安 -宮 -宿 -小 -尚 -山 -島 -川 -州 -平 -年 -心 -愛 -戸 -文 -新 -方 -日 -明 -星 -書 -月 -木 -本 -李 -村 -東 -松 -林 -正 -武 -氏 -水 -氵 -江 -河 -海 -版 -犬 -王 -生 -田 -白 -皇 -省 -真 -石 -社 -神 -竹 -美 -義 -花 -藤 -西 -谷 -車 -辶 -道 -郎 -郡 -部 -野 -金 -長 -門 -陽 -青 -食 -馬 -高 -龍 -龸 -사 -씨 -의 -이 -한 -fi -fl -! 
-( -) -, -- -/ -: -the -of -and -to -in -was -The -is -for -as -on -with -that -##s -his -by -he -at -from -it -her -He -had -an -were -you -be -In -she -are -but -which -It -not -or -have -my -him -one -this -me -has -also -up -their -first -out -who -been -they -She -into -all -would -its -##ing -time -two -##a -##e -said -about -when -over -more -other -can -after -back -them -then -##ed -there -like -so -only -##n -could -##d -##i -##y -what -no -##o -where -This -made -than -if -You -##ly -through -we -before -##r -just -some -##er -years -do -New -##t -down -between -new -now -will -three -most -On -around -year -used -such -being -well -during -They -know -against -under -later -did -part -known -off -while -His -re -... -##l -people -until -way -American -didn -University -your -both -many -get -United -became -head -There -second -As -work -any -But -still -again -born -even -eyes -After -including -de -took -And -long -team -season -family -see -right -same -called -name -because -film -don -10 -found -much -school -##es -going -won -place -away -We -day -left -John -000 -hand -since -World -these -how -make -number -each -life -area -man -four -go -No -here -very -National -##m -played -released -never -began -States -album -home -last -too -held -several -May -own -##on -take -end -School -##h -ll -series -What -want -use -another -city -When -2010 -side -At -may -That -came -face -June -think -game -those -high -March -early -September -##al -2011 -looked -July -state -small -thought -went -January -October -##u -based -August -##us -world -good -April -York -us -12 -2012 -2008 -For -2009 -group -along -few -South -little -##k -following -November -something -2013 -December -set -2007 -old -2006 -2014 -located -##an -music -County -City -former -##in -room -ve -next -All -##man -got -father -house -##g -body -15 -20 -18 -started -If -2015 -town -our -line -War -large -population -named -British -company -member -five -My -single -##en -age -State -moved -February -11 -Her -should -century -government -built -come -best -show -However -within -look -men -door -without -need -wasn -2016 -water -One -system -knew -every -died -League -turned -asked -North -St -wanted -building -received -song -served -though -felt -##ia -station -band -##ers -local -public -himself -different -death -say -##1 -30 -##2 -2005 -16 -night -behind -children -English -members -near -saw -together -son -14 -voice -village -13 -hands -help -##3 -due -French -London -top -told -open -published -third -2017 -play -across -During -put -final -often -include -25 -##le -main -having -2004 -once -ever -let -book -led -gave -late -front -find -club -##4 -German -included -species -College -form -opened -mother -women -enough -West -must -2000 -power -really -17 -making -half -##6 -order -might -##is -given -million -times -days -point -full -service -With -km -major -##7 -original -become -seen -II -north -six -##te -love -##0 -national -International -##5 -24 -So -District -lost -run -couldn -career -always -##9 -2003 -##th -country -##z -House -air -tell -south -worked -woman -player -##A -almost -war -River -##ic -married -continued -Then -James -close -black -short -##8 -##na -using -history -returned -light -car -##ra -sure -William -things -General -##ry -2002 -better -support -100 -among -From -feet -King -anything -21 -19 -established -district -2001 -feel -great -##ton -level -Cup -These -written -games -others -already -title -story -##p -law -thing -US -record -role -however -By -students -England 
-white -control -least -inside -land -##C -22 -give -community -hard -##ie -non -##c -produced -George -round -period -Park -business -various -##ne -does -present -wife -far -taken -per -reached -David -able -version -working -young -live -created -joined -East -living -appeared -case -High -done -23 -important -President -Award -France -position -office -looking -total -general -class -To -production -##S -football -party -brother -keep -mind -free -Street -hair -announced -development -either -nothing -moment -Church -followed -wrote -why -India -San -election -1999 -lead -How -##ch -##rs -words -European -course -considered -America -arms -Army -political -##la -28 -26 -west -east -ground -further -church -less -site -First -Not -Australia -toward -California -##ness -described -works -An -Council -heart -past -military -27 -##or -heard -field -human -soon -founded -1998 -playing -trying -##x -##ist -##ta -television -mouth -although -taking -win -fire -Division -##ity -Party -Royal -program -Some -Don -Association -According -tried -TV -Paul -outside -daughter -Best -While -someone -match -recorded -Canada -closed -region -Air -above -months -elected -##da -##ian -road -##ar -brought -move -1997 -leave -##um -Thomas -1996 -am -low -Robert -formed -person -services -points -Mr -miles -##b -stop -rest -doing -needed -international -release -floor -start -sound -call -killed -real -dark -research -finished -language -Michael -professional -change -sent -50 -upon -29 -track -hit -event -2018 -term -example -Germany -similar -return -##ism -fact -pulled -stood -says -ran -information -yet -result -developed -girl -##re -God -1995 -areas -signed -decided -##ment -Company -seemed -##el -co -turn -race -common -video -Charles -Indian -##ation -blood -art -red -##able -added -rather -1994 -met -director -addition -design -average -minutes -##ies -##ted -available -bed -coming -friend -idea -kind -Union -Road -remained -##ting -everything -##ma -running -care -finally -Chinese -appointed -1992 -Australian -##ley -popular -mean -teams -probably -##land -usually -project -social -Championship -possible -word -Russian -instead -mi -herself -##T -Peter -Hall -Center -seat -style -money -1993 -else -Department -table -Music -current -31 -features -special -events -character -Two -square -sold -debut -##v -process -Although -Since -##ka -40 -Central -currently -education -placed -lot -China -quickly -forward -seven -##ling -Europe -arm -performed -Japanese -1991 -Henry -Now -Dr -##ion -week -Group -myself -big -UK -Washington -ten -deep -1990 -Club -Japan -space -La -directed -smile -episode -hours -whole -##de -##less -Why -wouldn -designed -strong -training -changed -Society -stage -involved -hadn -towards -leading -police -eight -kept -Institute -study -largest -child -eventually -private -modern -Court -throughout -getting -originally -attack -##E -talk -Great -longer -songs -alone -##ine -wide -dead -walked -shot -##ri -Oh -force -##st -Art -today -friends -Island -Richard -1989 -center -construction -believe -size -White -ship -completed -##B -gone -Just -rock -sat -##R -radio -below -entire -families -league -includes -type -lived -official -range -hold -featured -Most -##ter -president -passed -means -##f -forces -lips -Mary -Do -guitar -##ce -food -wall -Of -spent -Its -performance -hear -##P -Western -reported -sister -##et -morning -##M -especially -##ive -Minister -itself -post -bit -groups -1988 -##tion -Black -##ng -Well -raised -sometimes -Canadian -Paris -Spanish -replaced -schools 
-Academy -leaving -central -female -Christian -Jack -whose -college -onto -provided -##D -##ville -players -actually -stopped -##son -Museum -doesn -##ts -books -fight -allowed -##ur -beginning -Records -awarded -parents -coach -##os -Red -saying -##ck -Smith -Yes -Lake -##L -aircraft -1987 -##ble -previous -ft -action -Italian -African -happened -vocals -Act -future -court -##ge -1986 -degree -phone -##ro -Is -countries -winning -breath -Love -river -matter -Lord -Other -list -self -parts -##ate -provide -cut -shows -plan -1st -interest -##ized -Africa -stated -Sir -fell -owned -earlier -ended -competition -attention -1985 -lower -nearly -bad -older -stay -Saint -##se -certain -1984 -fingers -blue -try -fourth -Grand -##as -king -##nt -makes -chest -movement -states -moving -data -introduced -model -date -section -Los -deal -##I -skin -entered -middle -success -Texas -##w -summer -island -##N -Republic -length -husband -1980 -##ey -reason -anyone -forced -via -base -500 -job -covered -Festival -Roman -successful -rights -cover -Man -writing -Ireland -##F -related -goal -takes -buildings -true -weeks -1983 -Because -opening -novel -ISBN -meet -gold -##ous -mid -km² -standing -Football -Chicago -shook -whom -##ki -1982 -Day -feeling -scored -boy -higher -Force -leader -heavy -fall -question -sense -army -Second -energy -meeting -themselves -kill -##am -board -census -##ya -##ns -mine -meant -market -required -battle -campaign -attended -approximately -Kingdom -runs -active -##ha -contract -clear -previously -health -1979 -Arts -complete -Catholic -couple -units -##ll -##ty -Committee -shoulder -sea -systems -listed -##O -caught -tournament -##G -northern -author -Film -Your -##men -holding -offered -personal -1981 -southern -artist -traditional -studio -200 -capital -##ful -regular -ask -giving -organization -month -news -Are -read -managed -helped -studied -student -defeated -natural -industry -Year -noted -decision -Government -quite -##id -smiled -1972 -Maybe -tracks -##ke -Mark -al -media -engine -hour -Their -relationship -plays -property -structure -1976 -ago -Hill -Martin -1978 -ready -Many -Like -Bay -immediately -generally -Italy -Greek -practice -caused -division -significant -Joseph -speed -Let -thinking -completely -1974 -primary -mostly -##field -##K -1975 -##to -Even -writer -##led -dropped -magazine -collection -understand -route -highest -particular -films -lines -network -Science -loss -carried -direction -green -1977 -location -producer -according -Women -Queen -neck -thus -independent -view -1970 -Angeles -Soviet -distance -problem -Board -tour -western -income -appearance -access -Mexico -nodded -street -surface -arrived -believed -Old -1968 -1973 -becoming -whether -1945 -figure -singer -stand -Following -issue -window -wrong -pain -everyone -lives -issues -park -slowly -la -act -##va -bring -Lee -operations -key -comes -fine -cold -famous -Navy -1971 -Me -additional -individual -##ner -Zealand -goals -county -contains -Service -minute -2nd -reach -talking -particularly -##ham -movie -Director -glass -paper -studies -##co -railway -standard -Education -45 -represented -Chief -Louis -launched -Star -terms -60 -1969 -experience -watched -Another -Press -Tom -staff -starting -subject -break -Virginia -nine -eye -##age -evidence -foot -##est -companies -Prince -##V -gun -create -Big -People -guy -Green -simply -numerous -##line -increased -twenty -##ga -##do -1967 -award -officer -stone -Before -material -Northern -grew -male -plant -Life -legs -step -Al -unit -35 -except 
-answer -##U -report -response -Edward -commercial -edition -trade -science -##ca -Irish -Law -shown -rate -failed -##ni -remains -changes -mm -limited -larger -Later -cause -waiting -Time -##wood -cost -Bill -manager -activities -likely -allow -operated -retired -##ping -65 -directly -Who -associated -effect -hell -Florida -straight -hot -Valley -management -girls -expected -eastern -Mike -chance -cast -centre -chair -hurt -problems -##li -walk -programs -Team -characters -Battle -edge -pay -maybe -corner -majority -medical -Joe -Summer -##io -attempt -Pacific -command -Radio -##by -names -municipality -1964 -train -economic -Brown -feature -sex -source -agreed -remember -Three -1966 -1965 -Pennsylvania -victory -senior -annual -III -Southern -results -Sam -serving -religious -Jones -appears -##der -despite -claimed -Both -musical -matches -fast -security -selected -Young -double -complex -hospital -chief -Times -##ve -Championships -filled -Public -Despite -beautiful -Research -plans -Province -##ally -Wales -##ko -artists -metal -nearby -Spain -##il -32 -houses -supported -piece -##no -stared -recording -nature -legal -Russia -##ization -remaining -looks -##sh -bridge -closer -cases -scene -marriage -Little -##é -uses -Earth -specific -Frank -theory -Good -discovered -referred -bass -culture -university -presented -Congress -##go -metres -continue -1960 -isn -Awards -meaning -cell -composed -separate -Series -forms -Blue -cross -##tor -increase -test -computer -slightly -Where -Jewish -Town -tree -status -1944 -variety -responsible -pretty -initially -##way -realized -pass -provides -Captain -Alexander -recent -score -broke -Scott -drive -financial -showed -Line -stories -ordered -soldiers -genus -operation -gaze -sitting -society -Only -hope -actor -follow -Empire -Yeah -technology -happy -focus -policy -spread -situation -##ford -##ba -Mrs -watch -Can -1963 -Commission -touch -earned -troops -Under -1962 -individuals -cannot -19th -##lin -mile -expression -exactly -suddenly -weight -dance -stepped -places -appear -difficult -Railway -anti -numbers -kilometres -star -##ier -department -ice -Britain -removed -Once -##lo -Boston -value -##ant -mission -trees -Order -sports -join -serve -Major -poor -Poland -mainly -Theatre -pushed -Station -##it -Lady -federal -silver -##ler -foreign -##ard -Eastern -##den -box -hall -subsequently -lies -acquired -1942 -ancient -CD -History -Jean -beyond -##ger -El -##les -growing -championship -native -Parliament -Williams -watching -direct -overall -offer -Also -80 -Secretary -spoke -Latin -ability -##ated -safe -presence -##ial -headed -regional -planned -1961 -Johnson -throat -consists -##W -extended -Or -bar -walls -Chris -stations -politician -Olympics -influence -share -fighting -speak -hundred -Carolina -die -stars -##tic -color -Chapter -##ish -fear -sleep -goes -Francisco -oil -Bank -sign -physical -##berg -Dutch -seasons -##rd -Games -Governor -sorry -lack -Centre -memory -baby -smaller -charge -Did -multiple -ships -shirt -Assembly -amount -leaves -3rd -Foundation -conditions -1943 -Rock -Democratic -Daniel -##at -winner -products -##ina -store -latter -Professor -civil -prior -host -1956 -soft -vote -needs -Each -rules -1958 -pressure -letter -normal -proposed -levels -records -1959 -paid -intended -Victoria -purpose -okay -historical -issued -1980s -broadcast -rule -simple -picked -firm -Sea -1941 -Elizabeth -1940 -serious -featuring -highly -graduated -mentioned -choice -1948 -replied -percent -Scotland -##hi -females -constructed -1957 
-settled -Steve -recognized -cities -crew -glanced -kiss -competed -flight -knowledge -editor -More -Conference -##H -fifth -elements -##ee -##tes -function -newspaper -recently -Miss -cultural -brown -twice -Office -1939 -truth -Creek -1946 -households -USA -1950 -quality -##tt -border -seconds -destroyed -pre -wait -ahead -build -image -90 -cars -##mi -33 -promoted -professor -et -bank -medal -text -broken -Middle -revealed -sides -wing -seems -channel -1970s -Ben -loved -effort -officers -Will -##ff -70 -Israel -Jim -upper -fully -label -Jr -assistant -powerful -pair -positive -##ary -gives -1955 -20th -races -remain -kitchen -primarily -##ti -Sydney -easy -Tour -whispered -buried -300 -News -Polish -1952 -Duke -Columbia -produce -accepted -00 -approach -minor -1947 -Special -44 -Asian -basis -visit -Fort -Civil -finish -formerly -beside -leaned -##ite -median -rose -coast -effects -supposed -Cross -##hip -Corps -residents -Jackson -##ir -Bob -basketball -36 -Asia -seem -Bishop -Book -##ber -ring -##ze -owner -BBC -##ja -transferred -acting -De -appearances -walking -Le -press -grabbed -1954 -officially -1953 -##pe -risk -taught -review -##X -lay -##well -council -Avenue -seeing -losing -Ohio -Super -province -ones -travel -##sa -projects -equipment -spot -Berlin -administrative -heat -potential -shut -capacity -elections -growth -fought -Republican -mixed -Andrew -teacher -turning -strength -shoulders -beat -wind -1949 -Health -follows -camp -suggested -perhaps -Alex -mountain -contact -divided -candidate -fellow -34 -Show -necessary -workers -ball -horse -ways -questions -protect -gas -activity -younger -bottom -founder -Scottish -screen -treatment -easily -com -##house -dedicated -Master -warm -Night -Georgia -Long -von -##me -perfect -website -1960s -piano -efforts -##ide -Tony -sort -offers -Development -Simon -executive -##nd -save -Over -Senate -1951 -1990s -draw -master -Police -##ius -renamed -boys -initial -prominent -damage -Co -##ov -##za -online -begin -occurred -captured -youth -Top -account -tells -Justice -conducted -forest -##town -bought -teeth -Jersey -##di -purchased -agreement -Michigan -##ure -campus -prison -becomes -product -secret -guess -Route -huge -types -drums -64 -split -defeat -estate -housing -##ot -brothers -Coast -declared -happen -titled -therefore -sun -commonly -alongside -Stadium -library -Home -article -steps -telling -slow -assigned -refused -laughed -wants -Nick -wearing -Rome -Open -##ah -Hospital -pointed -Taylor -lifted -escape -participated -##j -drama -parish -Santa -##per -organized -mass -pick -Airport -gets -Library -unable -pull -Live -##ging -surrounding -##ries -focused -Adam -facilities -##ning -##ny -38 -##ring -notable -era -connected -gained -operating -laid -Regiment -branch -defined -Christmas -machine -Four -academic -Iran -adopted -concept -Men -compared -search -traffic -Max -Maria -greater -##ding -widely -##burg -serves -1938 -37 -Go -hotel -shared -typically -scale -1936 -leg -suffered -yards -pieces -Ministry -Wilson -episodes -empty -1918 -safety -continues -yellow -historic -settlement -400 -Come -Corporation -enemy -content -picture -evening -territory -method -trial -solo -driver -Here -##ls -entrance -Prize -spring -whatever -##ent -75 -##ji -reading -Arthur -##cy -Our -clothes -Prime -Illinois -Kong -code -##ria -sit -Harry -Federal -chosen -administration -bodies -begins -stomach -Though -seats -Hong -density -Sun -leaders -Field -museum -chart -platform -languages -##ron -birth -holds -Gold -##un -fish -combined 
+##ps
+4th
+1937
+largely
+captain
[vocab.txt hunk continues: one WordPiece token per line — common English words, proper nouns, years and numerals, and "##"-prefixed subword continuations (e.g. "##que", "##ized") — the remaining several thousand entries of this hunk were collapsed by extraction and are not reproduced here]
-Gone -Hearts -chassis -overnight -Trek -wrists -submit -civic -designers -##rity -prominence -decorative -derives -starter -##AF -wisdom -Powers -reluctantly -measurements -doctoral -Noel -Gideon -Baden -Cologne -lawn -Hawaiian -anthology -##rov -Raiders -embassy -Sterling -##pal -Telugu -troubled -##FC -##bian -fountain -observe -ore -##uru -##gence -spelling -Border -grinning -sketch -Benedict -Xbox -dialects -readily -immigrant -Constitutional -aided -nevertheless -SE -tragedy -##ager -##rden -Flash -##MP -Europa -emissions -##ield -panties -Beverly -Homer -curtain -##oto -toilet -Isn -Jerome -Chiefs -Hermann -supernatural -juice -integrity -Scots -auto -Patriots -Strategic -engaging -prosecution -cleaned -Byron -investments -adequate -vacuum -laughs -##inus -##nge -Usually -Roth -Cities -Brand -corpse -##ffy -Gas -rifles -Plains -sponsorship -Levi -tray -owed -della -commanders -##ead -tactical -##rion -García -harbor -discharge -##hausen -gentleman -endless -highways -##itarian -pleaded -##eta -archive -Midnight -exceptions -instances -Gibraltar -cart -##NS -Darren -Bonnie -##yle -##iva -OCLC -bra -Jess -##EA -consulting -Archives -Chance -distances -commissioner -##AR -LL -sailors -##sters -enthusiasm -Lang -##zia -Yugoslav -confirm -possibilities -Suffolk -##eman -banner -1822 -Supporting -fingertips -civilization -##gos -technically -1827 -Hastings -sidewalk -strained -monuments -Floyd -Chennai -Elvis -villagers -Cumberland -strode -albeit -Believe -planets -combining -Mohammad -container -##mouth -##tures -verb -BA -Tank -Midland -screened -Gang -Democracy -Helsinki -screens -thread -charitable -##version -swiftly -ma -rational -combine -##SS -##antly -dragging -Cliff -Tasmania -quest -professionally -##aj -rap -##lion -livestock -##hua -informal -specially -lonely -Matthews -Dictionary -1816 -Observatory -correspondent -constitute -homeless -waving -appreciated -Analysis -Meeting -dagger -##AL -Gandhi -flank -Giant -Choir -##not -glimpse -toe -Writer -teasing -springs -##dt -Glory -healthcare -regulated -complaint -math -Publications -makers -##hips -cement -Need -apologize -disputes -finishes -Partners -boring -ups -gains -1793 -Congressional -clergy -Folk -##made -##nza -Waters -stays -encoded -spider -betrayed -Applied -inception -##urt -##zzo -wards -bells -UCLA -Worth -bombers -Mo -trademark -Piper -##vel -incorporates -1801 -##cial -dim -Twelve -##word -Appeals -tighter -spacecraft -##tine -coordinates -##iac -mistakes -Zach -laptop -Teresa -##llar -##yr -favored -Nora -sophisticated -Irving -hammer -División -corporations -niece -##rley -Patterson -UNESCO -trafficking -Ming -balanced -plaque -Latvia -broader -##owed -Save -confined -##vable -Dalton -tide -##right -##ural -##num -swords -caring -##eg -IX -Acting -paved -##moto -launching -Antoine -substantially -Pride -Philharmonic -grammar -Indoor -Ensemble -enabling -114 -resided -Angelo -publicity -chaired -crawled -Maharashtra -Telegraph -lengthy -preference -differential -anonymous -Honey -##itation -wage -##iki -consecrated -Bryant -regulatory -Carr -##én -functioning -watches -##ú -shifts -diagnosis -Search -app -Peters -##SE -##cat -Andreas -honours -temper -counsel -Urdu -Anniversary -maritime -##uka -harmony -##unk -essence -Lorenzo -choked -Quarter -indie -##oll -loses -##prints -amendment -Adolf -scenario -similarities -##rade -##LC -technological -metric -Russians -thoroughly -##tead -cruiser -1806 -##nier -1823 -Teddy -##psy -au -progressed -exceptional -broadcaster -partnered -fitness -irregular -placement 
-mothers -unofficial -Garion -Johannes -1817 -regain -Solar -publishes -Gates -Broken -thirds -conversations -dive -Raj -contributor -quantities -Worcester -governance -##flow -generating -pretending -Belarus -##voy -radius -skating -Marathon -1819 -affection -undertook -##wright -los -##bro -locate -PS -excluded -recreation -tortured -jewelry -moaned -##logue -##cut -Complete -##rop -117 -##II -plantation -whipped -slower -crater -##drome -Volunteer -attributes -celebrations -regards -Publishers -oath -utilized -Robbie -Giuseppe -fiber -indication -melted -archives -Damien -storey -affecting -identifying -dances -alumni -comparable -upgrade -rented -sprint -##kle -Marty -##lous -treating -railways -Lebanese -erupted -occupy -sympathy -Jude -Darling -Qatar -drainage -McCarthy -heel -Klein -computing -wireless -flip -Du -Bella -##ast -##ssen -narrator -mist -sings -alignment -121 -2020 -securing -##rail -Progress -missionaries -brutal -mercy -##shing -Hip -##ache -##olo -switching -##here -Malay -##ob -constituted -Mohammed -Often -standings -surge -teachings -ink -detached -systematic -Trial -Myanmar -##wo -offs -Reyes -decoration -translations -wherever -reviewer -speculation -Bangkok -terminated -##ester -beard -RCA -Aidan -Associated -Emerson -Charity -1803 -generous -Dudley -ATP -##haven -prizes -toxic -gloves -##iles -##dos -Turning -myth -Parade -##building -Hits -##eva -teamed -Above -Duchess -Holt -##oth -Sub -Ace -atomic -inform -Ship -depend -Jun -##bes -Norwich -globe -Baroque -Christina -Cotton -Tunnel -kidding -Concerto -Brittany -tasted -phases -stems -angles -##TE -##nam -##40 -charted -Alison -intensive -Willis -glory -##lit -Bergen -est -taller -##dicate -labeled -##ido -commentator -Warrior -Viscount -shortened -aisle -Aria -Spike -spectators -goodbye -overlooking -mammals -##lude -wholly -Barrett -##gus -accompany -seventy -employ -##mb -ambitious -beloved -basket -##mma -##lding -halted -descendant -pad -exclaimed -cloak -##pet -Strait -Bang -Aviv -sadness -##ffer -Donovan -1880s -agenda -swinging -##quin -jerk -Boat -##rist -nervously -Silence -Echo -shout -implies -##iser -##cking -Shiva -Weston -damages -##tist -effectiveness -Horace -cycling -Rey -ache -Photography -PDF -Dear -leans -Lea -##vision -booth -attained -disbelief -##eus -##ution -Hop -pension -toys -Eurovision -faithful -##heads -Andre -owe -default -Atlas -Megan -highlights -lovers -Constantine -Sixth -masses -##garh -emerge -Auto -Slovak -##oa -##vert -Superintendent -flicked -inventor -Chambers -Frankie -Romeo -pottery -companions -Rudolf -##liers -diary -Unless -tap -alter -Randall -##ddle -##eal -limitations -##boards -utterly -knelt -guaranteed -Cowboys -Islander -horns -##ike -Wendy -sexually -Smart -breasts -##cian -compromise -Duchy -AT -Galaxy -analog -Style -##aking -weighed -Nigel -optional -Czechoslovakia -practicing -Ham -##0s -feedback -batted -uprising -operative -applicable -criminals -classrooms -Somehow -##ode -##OM -Naomi -Winchester -##pping -Bart -Regina -competitor -Recorded -Yuan -Vera -lust -Confederation -##test -suck -1809 -Lambert -175 -Friend -##ppa -Slowly -##⁺ -Wake -Dec -##aneous -chambers -Color -Gus -##site -Alternative -##world -Exeter -Omaha -celebrities -striker -210 -dwarf -meals -Oriental -Pearson -financing -revenues -underwater -Steele -screw -Feeling -Mt -acids -badge -swore -theaters -Moving -admired -lung -knot -penalties -116 -fork -##cribed -Afghan -outskirts -Cambodia -oval -wool -fossils -Ned -Countess -Darkness -delicious -##nica -Evelyn -Recordings 
-guidelines -##CP -Sandra -meantime -Antarctica -modeling -granddaughter -##rial -Roma -Seventh -Sunshine -Gabe -##nton -Shop -Turks -prolific -soup -parody -##nta -Judith -disciplines -resign -Companies -Libya -Jets -inserted -Mile -retrieve -filmmaker -##rand -realistic -unhappy -##30 -sandstone -##nas -##lent -##ush -##rous -Brent -trash -Rescue -##unted -Autumn -disgust -flexible -infinite -sideways -##oss -##vik -trailing -disturbed -50th -Newark -posthumously -##rol -Schmidt -Josef -##eous -determining -menu -Pole -Anita -Luc -peaks -118 -Yard -warrant -generic -deserted -Walking -stamp -tracked -##berger -paired -surveyed -sued -Rainbow -##isk -Carpenter -submarines -realization -touches -sweeping -Fritz -module -Whether -resembles -##form -##lop -unsure -hunters -Zagreb -unemployment -Senators -Georgetown -##onic -Barker -foul -commercials -Dresden -Words -collision -Carlton -Fashion -doubted -##ril -precision -MIT -Jacobs -mob -Monk -retaining -gotta -##rod -remake -Fast -chips -##pled -sufficiently -##lights -delivering -##enburg -Dancing -Barton -Officers -metals -##lake -religions -##ré -motivated -differs -dorsal -##birds -##rts -Priest -polished -##aling -Saxony -Wyatt -knockout -##hor -Lopez -RNA -##link -metallic -##kas -daylight -Montenegro -##lining -wrapping -resemble -Jam -Viking -uncertainty -angels -enables -##fy -Stuttgart -tricks -tattoo -127 -wicked -asset -breach -##yman -MW -breaths -Jung -im -1798 -noon -vowel -##qua -calmly -seasonal -chat -ingredients -cooled -Randolph -ensuring -##ib -##idal -flashing -1808 -Macedonian -Cool -councils -##lick -advantages -Immediately -Madras -##cked -Pain -fancy -chronic -Malayalam -begged -##nese -Inner -feathers -##vey -Names -dedication -Sing -pan -Fischer -nurses -Sharp -inning -stamps -Meg -##ello -edged -motioned -Jacksonville -##ffle -##dic -##US -divide -garnered -Ranking -chasing -modifications -##oc -clever -midst -flushed -##DP -void -##sby -ambulance -beaches -groan -isolation -strengthen -prevention -##ffs -Scouts -reformed -geographic -squadrons -Fiona -Kai -Consequently -##uss -overtime -##yas -Fr -##BL -Papua -Mixed -glances -Haiti -Sporting -sandy -confronted -René -Tanner -1811 -##IM -advisory -trim -##ibe -González -gambling -Jupiter -##ility -##owski -##nar -122 -apology -teased -Pool -feminine -wicket -eagle -shiny -##lator -blend -peaking -nasty -nodding -fraction -tech -Noble -Kuwait -brushing -Italia -Canberra -duet -Johan -1805 -Written -cameo -Stalin -pig -cord -##zio -Surely -SA -owing -holidays -123 -Ranger -lighthouse -##ige -miners -1804 -##ë -##gren -##ried -crashing -##atory -wartime -highlight -inclined -Torres -Tax -##zel -##oud -Own -##corn -Divine -EMI -Relief -Northwestern -ethics -BMW -click -plasma -Christie -coordinator -Shepherd -washing -cooked -##dio -##eat -Cerambycidae -algebra -Engine -costumes -Vampire -vault -submission -virtue -assumption -##rell -Toledo -##oting -##rva -crept -emphasized -##lton -##ood -Greeks -surgical -crest -Patrol -Beta -Tessa -##GS -pizza -traits -rats -Iris -spray -##GC -Lightning -binary -escapes -##take -Clary -crowds -##zong -hauled -maid -##fen -Manning -##yang -Nielsen -aesthetic -sympathetic -affiliation -soaked -Mozart -personalities -begging -##iga -clip -Raphael -yearly -Lima -abundant -##lm -1794 -strips -Initiative -reporters -##vsky -consolidated -##itated -Civic -rankings -mandate -symbolic -##ively -1807 -rental -duck -nave -complications -##nor -Irene -Nazis -haunted -scholarly -Pratt -Gran -Embassy -Wave -pity -genius -bats -canton 
-Tropical -marker -##cos -escorted -Climate -##posed -appreciation -freezing -puzzle -Internal -pools -Shawn -pathway -Daniels -Fitzgerald -extant -olive -Vanessa -marriages -cocked -##dging -prone -chemicals -doll -drawer -##HF -Stark -Property -##tai -flowed -Sheridan -##uated -Less -Omar -remarks -catalogue -Seymour -wreck -Carrie -##bby -Mercer -displaced -sovereignty -rip -Flynn -Archie -Quarterfinals -Hassan -##ards -vein -Osaka -pouring -wages -Romance -##cript -##phere -550 -##eil -##stown -Documentary -ancestor -CNN -Panthers -publishers -Rise -##mu -biting -Bright -String -succeeding -119 -loaned -Warwick -Sheikh -Von -Afterwards -Jax -Camden -helicopters -Hence -Laurel -##ddy -transaction -Corp -clause -##owing -##kel -Investment -cups -Lucia -Moss -Giles -chef -López -decisive -30th -distress -linguistic -surveys -Ready -maiden -Touch -frontier -incorporate -exotic -mollusk -Leopold -Ride -##wain -##ndo -teammates -tones -drift -ordering -Feb -Penny -Normandy -Present -Flag -pipes -##rro -delight -motto -Tibet -leap -Eliza -Produced -teenagers -sitcom -Try -Hansen -Cody -wandered -terrestrial -frog -scare -resisted -employers -coined -##DS -resistant -Fly -captive -dissolution -judged -associates -defining -##court -Hale -##mbo -raises -clusters -twelfth -##metric -Roads -##itude -satisfy -Android -Reds -Gloucester -Category -Valencia -Daemon -stabbed -Luna -Churches -Canton -##eller -Attack -Kashmir -annexed -grabs -asteroid -Hartford -recommendation -Rodriguez -handing -stressed -frequencies -delegate -Bones -Erie -Weber -Hands -Acts -millimetres -24th -Fat -Howe -casually -##SL -convent -1790 -IF -##sity -1795 -yelling -##ises -drain -addressing -amino -Marcel -Sylvia -Paramount -Gerard -Volleyball -butter -124 -Albion -##GB -triggered -1792 -folding -accepts -##ße -preparations -Wimbledon -dose -##grass -escaping -##tling -import -charging -##dation -280 -Nolan -##fried -Calcutta -##pool -Cove -examining -minded -heartbeat -twisting -domains -bush -Tunisia -Purple -Leone -##code -evacuated -battlefield -tiger -Electrical -##ared -chased -##cre -cultivated -Jet -solved -shrug -ringing -Impact -##iant -kilometre -##log -commemorate -migrated -singular -designing -promptly -Higgins -##own -##aves -freshwater -Marketing -Payne -beg -locker -pray -implied -AAA -corrected -Trans -Europeans -Ashe -acknowledge -Introduction -##writer -##llen -Munster -auxiliary -growl -Hours -Poems -##AT -reduces -Plain -plague -canceled -detention -polite -necklace -Gustav -##gu -##lance -En -Angola -##bb -dwelling -##hea -5000 -Qing -Dodgers -rim -##ored -##haus -spilled -Elisabeth -Viktor -backpack -1802 -amended -##worthy -Phantom -##ctive -keeper -##loom -Vikings -##gua -employs -Tehran -specialty -##bate -Marx -Mirror -Jenna -rides -needle -prayers -clarinet -forewings -##walk -Midlands -convincing -advocacy -Cao -Birds -cycles -Clement -Gil -bubble -Maximum -humanitarian -Tan -cries -##SI -Parsons -Trio -offshore -Innovation -clutched -260 -##mund -##duct -Prairie -relied -Falcon -##ste -Kolkata -Gill -Swift -Negro -Zoo -valleys -##OL -Opening -beams -MPs -outline -Bermuda -Personal -exceed -productive -##MT -republic -forum -##sty -tornado -Known -dipped -Edith -folks -mathematician -watershed -Ricardo -synthetic -##dication -deity -##₄ -gaming -subjected -suspects -Foot -swollen -Motors -##tty -##ý -aloud -ceremonial -es -nuts -intend -Carlisle -tasked -hesitation -sponsors -unified -inmates -##ctions -##stan -tiles -jokes -whereby -outcomes -Lights -scary -Stoke -Portrait -Blind -sergeant 
-violations -cultivation -fuselage -Mister -Alfonso -candy -sticks -teen -agony -Enough -invite -Perkins -Appeal -mapping -undergo -Glacier -Melanie -affects -incomplete -##dd -Colombian -##nate -CBC -purchasing -bypass -Drug -Electronics -Frontier -Coventry -##aan -autonomy -scrambled -Recent -bounced -cow -experiencing -Rouge -cuisine -Elite -disability -Ji -inheritance -wildly -Into -##wig -confrontation -Wheeler -shiver -Performing -aligned -consequently -Alexis -Sin -woodland -executives -Stevenson -Ferrari -inevitable -##cist -##dha -##base -Corner -comeback -León -##eck -##urus -MacDonald -pioneering -breakdown -landscapes -Veterans -Rican -Theological -stirred -participant -Credit -Hyderabad -snails -Claudia -##ocene -compliance -##MI -Flags -Middlesex -storms -winding -asserted -er -##ault -##kal -waking -##rates -abbey -Augusta -tooth -trustees -Commodore -##uded -Cunningham -NC -Witch -marching -Sword -Same -spiral -Harley -##ahan -Zack -Audio -1890s -##fit -Simmons -Kara -Veronica -negotiated -Speaking -FIBA -Conservatory -formations -constituencies -explicit -facial -eleventh -##ilt -villain -##dog -##case -##hol -armored -tin -hairs -##umi -##rai -mattress -Angus -cease -verbal -Recreation -savings -Aurora -peers -Monastery -Airways -drowned -additions -downstream -sticking -Shi -mice -skiing -##CD -Raw -Riverside -warming -hooked -boost -memorable -posed -treatments -320 -##dai -celebrating -blink -helpless -circa -Flowers -PM -uncommon -Oct -Hawks -overwhelmed -Sparhawk -repaired -Mercy -pose -counterpart -compare -survives -##½ -##eum -coordinate -Lil -grandchildren -notorious -Yi -Judaism -Juliet -accusations -1789 -floated -marathon -roar -fortified -reunion -145 -Nov -Paula -##fare -##toria -tearing -Cedar -disappearance -Si -gifted -scar -270 -PBS -Technologies -Marvin -650 -roller -cupped -negotiate -##erman -passport -tram -miracle -styled -##tier -necessity -Des -rehabilitation -Lara -USD -psychic -wipe -##lem -mistaken -##lov -charming -Rider -pageant -dynamics -Cassidy -##icus -defenses -##tadt -##vant -aging -##inal -declare -mistress -supervised -##alis -##rest -Ashton -submerged -sack -Dodge -grocery -ramp -Teacher -lineage -imagery -arrange -inscriptions -Organisation -Siege -combines -pounded -Fleming -legends -columnist -Apostolic -prose -insight -Arabian -expired -##uses -##nos -Alone -elbows -##asis -##adi -##combe -Step -Waterloo -Alternate -interval -Sonny -plains -Goals -incorporating -recruit -adjoining -Cheshire -excluding -marrying -ducked -Cherokee -par -##inate -hiking -Coal -##bow -natives -ribbon -Allies -con -descriptions -positively -##lal -defendant -22nd -Vivian -##beat -Weather -possessions -Date -sweetheart -inability -Salisbury -adviser -ideology -Nordic -##eu -Cubs -IP -Administrative -##nick -facto -liberation -Burnett -Javier -fashioned -Electoral -Turin -theft -unanimous -Per -1799 -Clan -Hawkins -Teachers -##wes -Cameroon -Parkway -##gment -demolition -atoms -nucleus -##thi -recovering -##yte -##vice -lifts -Must -deposit -Hancock -Semi -darkened -Declaration -moan -muscular -Myers -attractions -sauce -simulation -##weed -Alps -barriers -##baum -Barack -galleries -Min -holders -Greenwich -donation -Everybody -Wolfgang -sandwich -Kendra -Collegiate -casino -Slavic -ensuing -Porto -##grapher -Jesuit -suppressed -tires -Ibrahim -protesters -Ibn -Amos -1796 -phenomena -Hayden -Paraguay -Squad -Reilly -complement -aluminum -##eers -doubts -decay -demise -Practice -patience -fireplace -transparent -monarchy -##person -Rodney -mattered 
-rotating -Clifford -disposal -Standards -paced -##llie -arise -tallest -tug -documentation -node -freeway -Nikolai -##cite -clicked -imaging -Lorraine -Tactical -Different -Regular -Holding -165 -Pilot -guarded -##polis -Classics -Mongolia -Brock -monarch -cellular -receptors -Mini -Chandler -financed -financially -Lives -erection -Fuller -unnamed -Kannada -cc -passive -plateau -##arity -freak -##rde -retrieved -transactions -##sus -23rd -swimmer -beef -fulfill -Arlington -offspring -reasoning -Rhys -saves -pseudonym -centimetres -shivered -shuddered -##ME -Feel -##otic -professors -Blackburn -##eng -##life -##haw -interred -lodge -fragile -Della -guardian -##bbled -catalog -clad -observer -tract -declaring -##headed -Lok -dean -Isabelle -1776 -irrigation -spectacular -shuttle -mastering -##aro -Nathaniel -Retired -##lves -Brennan -##kha -dick -##dated -##hler -Rookie -leapt -televised -weekends -Baghdad -Yemen -##fo -factions -ion -Lab -mortality -passionate -Hammer -encompasses -confluence -demonstrations -Ki -derivative -soils -##unch -Ranch -Universities -conventions -outright -aiming -hierarchy -reside -illusion -graves -rituals -126 -Antwerp -Dover -##ema -campuses -Hobart -lifelong -aliens -##vity -Memory -coordination -alphabet -##mina -Titans -pushes -Flanders -##holder -Normal -excellence -capped -profound -Taipei -portrayal -sparked -scratch -se -##eas -##hir -Mackenzie -##cation -Neo -Shin -##lined -magnificent -poster -batsman -##rgent -persuade -##ement -Icelandic -miserable -collegiate -Feature -geography -##mura -Comic -Circus -processor -barracks -Tale -##11 -Bulls -##rap -strengthened -##bell -injection -miniature -broadly -Letter -fare -hostage -traders -##nium -##mere -Fortune -Rivera -Lu -triumph -Browns -Bangalore -cooperative -Basel -announcing -Sawyer -##him -##cco -##kara -darted -##AD -##nova -sucking -##position -perimeter -flung -Holdings -##NP -Basque -sketches -Augustine -Silk -Elijah -analyst -armour -riots -acquiring -ghosts -##ems -132 -Pioneer -Colleges -Simone -Economy -Author -semester -Soldier -il -##unting -##bid -freaking -Vista -tumor -##bat -murderer -##eda -unreleased -##grove -##sser -##té -edit -statute -sovereign -##gawa -Killer -stares -Fury -comply -##lord -##nant -barrels -Andhra -Maple -generator -mascot -unusually -eds -##ante -##runner -rod -##tles -Historically -Jennings -dumped -Established -resemblance -##lium -##cise -##body -##voke -Lydia -##hou -##iring -nonetheless -1797 -corrupt -patrons -physicist -sneak -Livingston -Citizens -Architects -Werner -trends -Melody -eighty -markings -brakes -##titled -oversaw -processed -mock -Midwest -intervals -##EF -stretches -werewolf -##MG -Pack -controller -##dition -Honours -cane -Griffith -vague -repertoire -Courtney -orgasm -Abdullah -dominance -occupies -Ya -introduces -Lester -instinct -collaborative -Indigenous -refusal -##rank -outlet -debts -spear -155 -##keeping -##ulu -Catalan -##osh -tensions -##OT -bred -crude -Dunn -abdomen -accurately -##fu -##lough -accidents -Row -Audrey -rude -Getting -promotes -replies -Paolo -merge -##nock -trans -Evangelical -automated -Canon -##wear -##ggy -##gma -Broncos -foolish -icy -Voices -knives -Aside -dreamed -generals -molecule -AG -rejection -insufficient -##nagar -deposited -sacked -Landing -arches -helpful -devotion -intake -Flower -PGA -dragons -evolutionary -##mail -330 -GM -tissues -##tree -arcade -composite -lid -Across -implications -lacks -theological -assessed -concentrations -Den -##mans -##ulous -Fu -homeland -##stream -Harriet 
-ecclesiastical -troop -ecological -winked -##xed -eighteenth -Casino -specializing -##sworth -unlocked -supreme -devastated -snatched -trauma -GDP -Nord -saddle -Wes -convenient -competes -##nu -##iss -Marian -subway -##rri -successes -umbrella -##far -##ually -Dundee -##cence -spark -##rix -##я -Quality -Geological -cockpit -rpm -Cam -Bucharest -riot -##PM -Leah -##dad -##pose -Ka -m³ -Bundesliga -Wolfe -grim -textile -quartet -expressing -fantastic -destroyers -eternal -picnic -##oro -contractor -1775 -spanning -declining -##cating -Lowe -Sutherland -Emirates -downward -nineteen -violently -scout -viral -melting -enterprises -##cer -Crosby -Jubilee -antenna -urgent -Rory -##uin -##sure -wandering -##gler -##vent -Suzuki -Lifetime -Dirty -occupying -##quent -Disc -Guru -mound -Lennon -Humanities -listeners -Walton -uh -Braves -Bologna -##bis -##gra -Dwight -crawl -flags -memoir -Thorne -Archdiocese -dairy -##uz -##tery -roared -adjust -patches -inn -Knowing -##bbed -##zan -scan -Papa -precipitation -angrily -passages -postal -Phi -embraced -blacks -economist -triangular -Sen -shooter -punished -Millennium -Swimming -confessed -Aston -defeats -Era -cousins -Williamson -##rer -daytime -dumb -##rek -underway -specification -Buchanan -prayed -concealed -activation -##issa -canon -awesome -Starr -plural -summers -##fields -Slam -unnecessary -1791 -resume -trilogy -compression -##rough -selective -dignity -Yan -##xton -immense -##yun -lone -seeded -hiatus -lightweight -summary -Yo -approve -Galway -rejoined -Elise -garbage -burns -speeches -129 -Honduras -##liness -inventory -jersey -FK -assure -slumped -Lionel -Suite -##sbury -Lena -continuation -##AN -brightly -##nti -GT -Knowledge -##park -##lius -lethal -##tribution -##sions -Certificate -Mara -##lby -algorithms -Jade -blows -pirates -fleeing -wheelchair -Stein -sophomore -Alt -Territorial -diploma -snakes -##olic -##tham -Tiffany -Pius -flush -urging -Hanover -Reich -##olate -Unity -Pike -collectively -Theme -ballad -kindergarten -rocked -zoo -##page -whip -Rodríguez -strokes -checks -Becky -Stern -upstream -##uta -Silent -volunteered -Sigma -##ingen -##tract -##ede -Gujarat -screwed -entertaining -##action -##ryn -defenders -innocence -lesbian -que -Richie -nodes -Lie -juvenile -Jakarta -safer -confront -Bert -breakthrough -gospel -Cable -##zie -institutional -Archive -brake -liquor -feeds -##iate -chancellor -Encyclopedia -Animation -scanning -teens -##mother -Core -Rear -Wine -##flower -reactor -Ave -cardinal -sodium -strands -Olivier -crouched -Vaughan -Sammy -Image -scars -Emmanuel -flour -bias -nipple -revelation -##ucci -Denny -##ssy -Form -Runners -admits -Rama -violated -Burmese -feud -underwear -Mohamed -Named -swift -statewide -Door -Recently -comparing -Hundred -##idge -##nity -##rds -Rally -Reginald -Auburn -solving -waitress -Treasurer -##ilization -Halloween -Ministers -Boss -Shut -##listic -Rahman -demonstrating -##pies -Gaza -Yuri -installations -Math -schooling -##bble -Bronx -exiled -gasoline -133 -bundle -humid -FCC -proportional -relate -VFL -##dez -continuity -##cene -syndicated -atmospheric -arrows -Wanderers -reinforcements -Willow -Lexington -Rotten -##yon -discovering -Serena -portable -##lysis -targeting -£1 -Goodman -Steam -sensors -detachment -Malik -##erie -attitudes -Goes -Kendall -Read -Sleep -beans -Nikki -modification -Jeanne -knuckles -Eleven -##iously -Gross -Jaime -dioxide -moisture -Stones -UCI -displacement -Metacritic -Jury -lace -rendering -elephant -Sergei -##quire -GP -Abbott -##type -projection 
-Mouse -Bishops -whispering -Kathleen -Rams -##jar -whites -##oran -assess -dispatched -##hire -kin -##mir -Nursing -advocates -tremendous -sweater -assisting -##bil -Farmer -prominently -reddish -Hague -cyclone -##SD -Sage -Lawson -Sanctuary -discharged -retains -##ube -shotgun -wilderness -Reformed -similarity -Entry -Watts -Bahá -Quest -Looks -visions -Reservoir -Arabs -curls -Blu -dripping -accomplish -Verlag -drill -sensor -Dillon -physicians -smashed -##dir -painters -Renault -straw -fading -Directorate -lounge -commissions -Brain -##graph -neo -##urg -plug -coordinated -##houses -Critical -lamps -illustrator -Returning -erosion -Crow -##ciation -blessing -Thought -Wife -medalist -synthesizer -Pam -Thornton -Esther -HBO -fond -Associates -##raz -pirate -permits -Wide -tire -##PC -Ernie -Nassau -transferring -RFC -##ntly -um -spit -AS -##mps -Mining -polar -villa -anchored -##zzi -embarrassment -relates -##ă -Rupert -counterparts -131 -Baxter -##18 -Igor -recognizes -Clive -##hane -##eries -##ibly -occurrence -##scope -fin -colorful -Rapids -banker -tile -##rative -##dus -delays -destinations -##llis -Pond -Dane -grandparents -rewarded -socially -motorway -##hof -##lying -##human -modeled -Dayton -Forward -conscience -Sharma -whistle -Mayer -Sasha -##pical -circuits -Zhou -##ça -Latvian -finalists -predators -Lafayette -closes -obligations -Resolution -##vier -Trustees -reminiscent -##hos -Highlands -Protected -asylum -evacuation -##acy -Chevrolet -confession -Somalia -emergence -separating -##rica -alright -calcium -Laurent -Welfare -Leonardo -ashes -dental -Deal -minerals -##lump -##mount -accounted -staggered -slogan -photographic -builder -##imes -##raft -tragic -144 -SEC -Hit -tailed -##ples -##rring -##rson -ethical -wrestlers -concludes -lunar -##ept -nitrogen -Aid -cyclist -quarterfinals -##ه -harvest -##hem -Pasha -IL -##mis -continually -##forth -Intel -bucket -##ended -witches -pretended -dresses -viewer -peculiar -lowering -volcano -Marilyn -Qualifier -clung -##sher -Cut -modules -Bowie -##lded -onset -transcription -residences -##pie -##itor -scrapped -##bic -Monaco -Mayo -eternity -Strike -uncovered -skeleton -##wicz -Isles -bug -Promoted -##rush -Mechanical -XII -##ivo -gripping -stubborn -velvet -TD -decommissioned -operas -spatial -unstable -Congressman -wasted -##aga -##ume -advertisements -##nya -obliged -Cannes -Conway -bricks -##gnant -##mity -##uise -jumps -Clear -##cine -##sche -chord -utter -Su -podium -spokesman -Royce -assassin -confirmation -licensing -liberty -##rata -Geographic -individually -detained -##ffe -Saturn -crushing -airplane -bushes -knights -##PD -Lilly -hurts -unexpectedly -Conservatives -pumping -Forty -candle -Pérez -peasants -supplement -Sundays -##ggs -##rries -risen -enthusiastic -corresponds -pending -##IF -Owens -floods -Painter -inflation -presumed -inscribed -Chamberlain -bizarre -1200 -liability -reacted -tub -Legacy -##eds -##pted -shone -##litz -##NC -Tiny -genome -bays -Eduardo -robbery -stall -hatch -Depot -Variety -Flora -reprinted -trembled -outlined -CR -Theresa -spans -##plication -Jensen -##eering -posting -##rky -pays -##ost -Marcos -fortifications -inferior -##ential -Devi -despair -Talbot -##chus -updates -ego -Booth -Darius -tops -##lau -Scene -##DC -Harlem -Trey -Generally -candles -##α -Neville -Admiralty -##hong -iconic -victorious -1600 -Rowan -abundance -miniseries -clutching -sanctioned -##words -obscure -##ision -##rle -##EM -disappearing -Resort -Obviously -##eb -exceeded -1870s -Adults -##cts -Cry -Kerr -ragged 
-selfish -##lson -circled -pillars -galaxy -##asco -##mental -rebuild -caution -Resistance -Start -bind -splitting -Baba -Hogan -ps -partnerships -slam -Peggy -courthouse -##OD -organizational -packages -Angie -##nds -possesses -##rp -Expressway -Gould -Terror -Him -Geoff -nobles -##ope -shark -##nh -identifies -##oor -testified -Playing -##ump -##isa -stool -Idol -##pice -##tana -Byrne -Gerry -grunted -26th -observing -habits -privilege -immortal -wagons -##thy -dot -Bring -##lian -##witz -newest -##uga -constraints -Screen -Issue -##RNA -##vil -reminder -##gles -addiction -piercing -stunning -var -##rita -Signal -accumulated -##wide -float -devastating -viable -cartoons -Uttar -flared -##encies -Theology -patents -##bahn -privileges -##ava -##CO -137 -##oped -##NT -orchestral -medication -225 -erect -Nadia -École -fried -Sales -scripts -##rease -airs -Cage -inadequate -structured -countless -Avengers -Kathy -disguise -mirrors -Investigation -reservation -##nson -Legends -humorous -Mona -decorations -attachment -Via -motivation -Browne -strangers -##ński -Shadows -Twins -##pressed -Alma -Nominated -##ott -Sergio -canopy -152 -Semifinals -devised -##irk -upwards -Traffic -Goddess -Move -beetles -138 -spat -##anne -holdings -##SP -tangled -Whilst -Fowler -anthem -##ING -##ogy -snarled -moonlight -songwriting -tolerance -Worlds -exams -##pia -notices -sensitivity -poetic -Stephens -Boone -insect -reconstructed -Fresh -27th -balloon -##ables -Brendan -mug -##gee -1780 -apex -exports -slides -Lahore -hiring -Shell -electorate -sexuality -poker -nonprofit -##imate -cone -##uce -Okinawa -superintendent -##HC -referenced -turret -Sprint -Citizen -equilibrium -Stafford -curb -Driver -Valerie -##rona -aching -impacts -##bol -observers -Downs -Shri -##uth -airports -##uda -assignments -curtains -solitary -icon -patrols -substances -Jasper -mountainous -Published -ached -##ingly -announce -dove -damaging -##tism -Primera -Dexter -limiting -batch -##uli -undergoing -refugee -Ye -admiral -pavement -##WR -##reed -pipeline -desires -Ramsey -Sheila -thickness -Brotherhood -Tea -instituted -Belt -Break -plots -##ais -masculine -##where -Theo -##aged -##mined -Experience -scratched -Ethiopian -Teaching -##nov -Aiden -Abe -Samoa -conditioning -##mous -Otherwise -fade -Jenks -##encing -Nat -##lain -Anyone -##kis -smirk -Riding -##nny -Bavarian -blessed -potatoes -Hook -##wise -likewise -hardened -Merry -amid -persecution -##sten -Elections -Hoffman -Pitt -##vering -distraction -exploitation -infamous -quote -averaging -healed -Rhythm -Germanic -Mormon -illuminated -guides -##ische -interfere -##ilized -rector -perennial -##ival -Everett -courtesy -##nham -Kirby -Mk -##vic -Medieval -##tale -Luigi -limp -##diction -Alive -greeting -shove -##force -##fly -Jasmine -Bend -Capt -Suzanne -ditch -134 -##nning -Host -fathers -rebuilding -Vocal -wires -##manship -tan -Factor -fixture -##LS -Māori -Plate -pyramid -##umble -slap -Schneider -yell -##ulture -##tional -Goodbye -sore -##pher -depressed -##dox -pitching -Find -Lotus -##wang -strand -Teen -debates -prevalent -##bilities -exposing -hears -billed -##rse -reorganized -compelled -disturbing -displaying -##tock -Clinical -emotionally -##iah -Derbyshire -grouped -##quel -Bahrain -Journalism -IN -persistent -blankets -Crane -camping -Direct -proving -Lola -##dding -Corporate -birthplace -##boats -##ender -Figure -dared -Assam -precursor -##nched -Tribe -Restoration -slate -Meyrick -hunted -stroking -Earlier -Kind -polls -appeals -monetary -##reate -Kira -Langdon 
-explores -GPS -extensions -squares -Results -draped -announcer -merit -##ennial -##tral -##roved -##cion -robots -supervisor -snorted -##group -Cannon -procession -monkey -freeze -sleeves -Nile -verdict -ropes -firearms -extraction -tensed -EC -Saunders -##tches -diamonds -Marriage -##amble -curling -Amazing -##haling -unrelated -##roads -Daughter -cum -discarded -kidney -cliffs -forested -Candy -##lap -authentic -tablet -notation -##nburg -Bulldogs -Callum -Meet -mouths -coated -##xe -Truman -combinations -##mation -Steelers -Fan -Than -paternal -##father -##uti -Rebellion -inviting -Fun -theatres -##ي -##rom -curator -##cision -networking -Oz -drought -##ssel -granting -MBA -Shelby -Elaine -jealousy -Kyoto -shores -signaling -tenants -debated -Intermediate -Wise -##hes -##pu -Havana -duke -vicious -exited -servers -Nonetheless -Reports -explode -##beth -Nationals -offerings -Oval -conferred -eponymous -folklore -##NR -Shire -planting -1783 -Zeus -accelerated -Constable -consuming -troubles -McCartney -texture -bust -Immigration -excavated -hopefully -##cession -##coe -##name -##ully -lining -Einstein -Venezuelan -reissued -minorities -Beatrice -crystals -##nies -circus -lava -Beirut -extinction -##shu -Becker -##uke -issuing -Zurich -extract -##esta -##rred -regulate -progression -hut -alcoholic -plea -AB -Norse -Hubert -Mansfield -ashamed -##put -Bombardment -stripes -electrons -Denise -horrified -Nor -arranger -Hay -Koch -##ddling -##iner -Birthday -Josie -deliberate -explorer -##jiang -##signed -Arrow -wiping -satellites -baritone -mobility -##rals -Dorset -turbine -Coffee -185 -##lder -Cara -Colts -pits -Crossing -coral -##birth -Tai -zombie -smoothly -##hp -mates -##ady -Marguerite -##tary -puzzled -tapes -overly -Sonic -Prayer -Thinking -##uf -IEEE -obligation -##cliffe -Basil -redesignated -##mmy -nostrils -Barney -XIII -##phones -vacated -unused -Berg -##roid -Towards -viola -136 -Event -subdivided -rabbit -recruiting -##nery -Namibia -##16 -##ilation -recruits -Famous -Francesca -##hari -Goa -##lat -Karachi -haul -biblical -##cible -MGM -##rta -horsepower -profitable -Grandma -importantly -Martinez -incoming -##kill -beneficial -nominal -praying -##isch -gable -nail -noises -##ttle -Polytechnic -rub -##cope -Thor -audition -erotic -##ending -##iano -Ultimately -armoured -##mum -presently -pedestrian -##tled -Ipswich -offence -##ffin -##borne -Flemish -##hman -echo -##cting -auditorium -gentlemen -winged -##tched -Nicaragua -Unknown -prosperity -exhaust -pie -Peruvian -compartment -heights -disabilities -##pole -Harding -Humphrey -postponed -moths -Mathematical -Mets -posters -axe -##nett -Nights -Typically -chuckle -councillors -alternating -141 -Norris -##ately -##etus -deficit -dreaming -cooler -oppose -Beethoven -##esis -Marquis -flashlight -headache -investor -responding -appointments -##shore -Elias -ideals -shades -torch -lingering -##real -pier -fertile -Diploma -currents -Snake -##horse -##15 -Briggs -##ota -##hima -##romatic -Coastal -Kuala -ankles -Rae -slice -Hilton -locking -Approximately -Workshop -Niagara -strangely -##scence -functionality -advertisement -Rapid -Anders -ho -Soviets -packing -basal -Sunderland -Permanent -##fting -rack -tying -Lowell -##ncing -Wizard -mighty -tertiary -pencil -dismissal -torso -grasped -##yev -Sand -gossip -##nae -Beer -implementing -##19 -##riya -Fork -Bee -##eria -Win -##cid -sailor -pressures -##oping -speculated -Freddie -originating -##DF -##SR -##outh -28th -melt -Brenda -lump -Burlington -USC -marginal -##bine -Dogs -swamp 
-cu -Ex -uranium -metro -spill -Pietro -seize -Chorus -partition -##dock -##media -engineered -##oria -conclusions -subdivision -##uid -Illustrated -Leading -##hora -Berkshire -definite -##books -##cin -##suke -noun -winced -Doris -dissertation -Wilderness -##quest -braced -arbitrary -kidnapping -Kurdish -##but -clearance -excavations -wanna -Allmusic -insult -presided -yacht -##SM -Honour -Tin -attracting -explosives -Gore -Bride -##ience -Packers -Devils -Observer -##course -Loser -##erry -##hardt -##mble -Cyrillic -undefeated -##stra -subordinate -##ame -Wigan -compulsory -Pauline -Cruise -Opposition -##ods -Period -dispersed -expose -##60 -##has -Certain -Clerk -Wolves -##hibition -apparatus -allegiance -orbital -justified -thanked -##ević -Biblical -Carolyn -Graves -##tton -Hercules -backgrounds -replica -1788 -aquatic -Mega -Stirling -obstacles -filing -Founder -vowels -Deborah -Rotterdam -surpassed -Belarusian -##ologists -Zambia -Ren -Olga -Alpine -bi -councillor -Oaks -Animals -eliminating -digit -Managing -##GE -laundry -##rdo -presses -slamming -Tudor -thief -posterior -##bas -Rodgers -smells -##ining -Hole -SUV -trombone -numbering -representations -Domingo -Paralympics -cartridge -##rash -Combined -shelves -Kraków -revision -##frame -Sánchez -##tracted -##bler -Alain -townships -sic -trousers -Gibbs -anterior -symmetry -vaguely -Castile -IRA -resembling -Penguin -##ulent -infections -##stant -raped -##pressive -worrying -brains -bending -JR -Evidence -Venetian -complexes -Jonah -850 -exported -Ambrose -Gap -philanthropist -##atus -Marxist -weighing -##KO -##nath -Soldiers -chiefs -reject -repeating -shaky -Zürich -preserving -##xin -cigarettes -##break -mortar -##fin -Already -reproduction -socks -Waiting -amazed -##aca -dash -##path -Airborne -##harf -##get -descending -OBE -Sant -Tess -Lucius -enjoys -##ttered -##ivation -##ete -Leinster -Phillies -execute -geological -unfinished -Courts -SP -Beaver -Duck -motions -Platinum -friction -##aud -##bet -Parts -Stade -entirety -sprang -Smithsonian -coffin -prolonged -Borneo -##vise -unanimously -##uchi -Cars -Cassandra -Australians -##CT -##rgen -Louisa -spur -Constance -##lities -Patent -racism -tempo -##ssion -##chard -##nology -##claim -Million -Nichols -##dah -Numerous -ing -Pure -plantations -donor -##EP -##rip -convenience -##plate -dots -indirect -##written -Dong -failures -adapt -wizard -unfortunately -##gion -practitioners -economically -Enrique -unchanged -kingdoms -refined -definitions -lazy -worries -railing -##nay -Kaiser -##lug -cracks -sells -ninety -##WC -Directed -denotes -developmental -papal -unfortunate -disappointing -sixteenth -Jen -##urier -NWA -drifting -Horror -##chemical -behaviors -bury -surfaced -foreigners -slick -AND -##rene -##ditions -##teral -scrap -kicks -comprise -buddy -##anda -Mental -##ype -Dom -wines -Limerick -Luca -Rand -##won -Tomatoes -homage -geometric -##nted -telescope -Shelley -poles -##fan -shareholders -Autonomous -cope -intensified -Genoa -Reformation -grazing -##tern -Zhao -provisional -##bies -Con -##riel -Cynthia -Raleigh -vivid -threaten -Length -subscription -roses -Müller -##isms -robin -##tial -Laos -Stanton -nationalism -##clave -##ND -##17 -##zz -staging -Busch -Cindy -relieve -##spective -packs -neglected -CBE -alpine -Evolution -uneasy -coastline -Destiny -Barber -Julio -##tted -informs -unprecedented -Pavilion -##bei -##ference -betrayal -awaiting -leaked -V8 -puppet -adverse -Bourne -Sunset -collectors -##glass -##sque -copied -Demon -conceded -resembled -Rafe -Levy 
-prosecutor -##ject -flora -manned -deaf -Mosque -reminds -Lizzie -Products -Funny -cassette -congress -##rong -Rover -tossing -prompting -chooses -Satellite -cautiously -Reese -##UT -Huang -Gloucestershire -giggled -Kitty -##å -Pleasant -Aye -##ond -judging -1860s -intentionally -Hurling -aggression -##xy -transfers -employing -##fies -##oda -Archibald -Blessed -Ski -flavor -Rosie -##burgh -sunset -Scholarship -WC -surround -ranged -##jay -Degree -Houses -squeezing -limb -premium -Leningrad -steals -##inated -##ssie -madness -vacancy -hydraulic -Northampton -##prise -Marks -Boxing -##fying -academics -##lich -##TY -CDs -##lma -hardcore -monitors -paperback -cables -Dimitri -upside -advent -Ra -##clusive -Aug -Christchurch -objected -stalked -Simple -colonists -##laid -CT -discusses -fellowship -Carnival -cares -Miracle -pastoral -rooted -shortage -borne -Quentin -meditation -tapping -Novel -##ades -Alicia -Burn -famed -residency -Fernández -Johannesburg -Zhu -offended -Mao -outward -##inas -XV -denial -noticing -##ís -quarry -##hound -##amo -Bernie -Bentley -Joanna -mortgage -##rdi -##sumption -lenses -extracted -depiction -##RE -Networks -Broad -Revenue -flickered -virgin -flanked -##о -Enterprises -probable -Liberals -Falcons -drowning -phrases -loads -assumes -inhaled -awe -logs -slightest -spiders -waterfall -##pate -rocking -shrub -##uil -roofs -##gard -prehistoric -wary -##rak -TO -clips -sustain -treason -microphone -voter -Lamb -psychologist -wrinkled -##ères -mating -Carrier -340 -##lbert -sensing -##rino -destiny -distract -weaker -UC -Nearly -neurons -spends -Apache -##rem -genuinely -wells -##lanted -stereo -##girl -Lois -Leaving -consul -fungi -Pier -Cyril -80s -Jungle -##tani -illustration -Split -##hana -Abigail -##patrick -1787 -diminished -Selected -packaging -##EG -Martínez -communal -Manufacturing -sentiment -143 -unwilling -praising -Citation -pills -##iti -##rax -muffled -neatly -workforce -Yep -leisure -Tu -##nding -Wakefield -ancestral -##uki -destructive -seas -Passion -showcase -##ceptive -heroic -142 -exhaustion -Customs -##aker -Scholar -sliced -##inian -Direction -##OW -Swansea -aluminium -##eep -ceramic -McCoy -Career -Sector -chartered -Damascus -pictured -Interest -stiffened -Plateau -obsolete -##tant -irritated -inappropriate -overs -##nko -bail -Talent -Sur -ours -##nah -barred -legged -sociology -Bud -dictionary -##luk -Cover -obey -##oring -annoying -##dong -apprentice -Cyrus -Role -##GP -##uns -##bag -Greenland -Porsche -Rocket -##32 -organism -##ntary -reliability -##vocation -##й -Found -##hine -motors -promoter -unfair -##oms -##note -distribute -eminent -rails -appealing -chiefly -meaningful -Stephan -##rehension -Consumer -psychiatric -bowler -saints -##iful -##н -1777 -Pol -Dorian -Townsend -hastily -##jima -Quincy -Sol -fascinated -Scarlet -alto -Avon -certainty -##eding -Keys -##chu -Chu -##VE -ions -tributaries -Thanksgiving -##fusion -astronomer -oxide -pavilion -Supply -Casa -Bollywood -sadly -mutations -Keller -##wave -nationals -##rgo -##ym -predict -Catholicism -Vega -##eration -##ums -Mali -tuned -Lankan -Plans -radial -Bosnian -Lexi -##14 -##ü -sacks -unpleasant -Empty -handles -##taking -Bon -switches -intently -tuition -antique -##jk -fraternity -notebook -Desmond -##sei -prostitution -##how -deed -##OP -501 -Somewhere -Rocks -##mons -campaigned -frigate -gases -suppress -##hang -Merlin -Northumberland -dominate -expeditions -thunder -##ups -##rical -Cap -thorough -Ariel -##kind -renewable -constructing -pacing -terrorists -Bowen 
-documentaries -westward -##lass -##nage -Merchant -##ued -Beaumont -Din -##hian -Danube -peasant -Garrison -encourages -gratitude -reminding -stormed -##ouse -pronunciation -##ailed -Weekend -suggestions -##ffing -##DI -Active -Colombo -##logists -Merrill -##cens -Archaeological -Medina -captained -##yk -duel -cracking -Wilkinson -Guam -pickup -renovations -##ël -##izer -delighted -##iri -Weaver -##ctional -tens -##hab -Clint -##usion -##each -petals -Farrell -##sable -caste -##will -Ezra -##qi -##standing -thrilled -ambush -exhaled -##SU -Resource -blur -forearm -specifications -contingent -cafe -##iology -Antony -fundraising -grape -##rgy -turnout -##udi -Clifton -laboratories -Irvine -##opus -##lid -Monthly -Bihar -statutory -Roses -Emil -##rig -lumber -optimal -##DR -pumps -plaster -Mozambique -##aco -nightclub -propelled -##hun -ked -surplus -wax -##urai -pioneered -Sunny -imprint -Forget -Eliot -approximate -patronage -##bek -##ely -##mbe -Partnership -curl -snapping -29th -Patriarch -##jord -seldom -##ature -astronomy -Bremen -XIV -airborne -205 -1778 -recognizing -stranded -arrogant -bombardment -destined -ensured -146 -robust -Davenport -Interactive -Offensive -Fi -prevents -probe -propeller -sorrow -Blade -mounting -automotive -##dged -wallet -201 -lashes -Forrest -##ift -Cell -Younger -shouts -##cki -folds -##chet -Epic -yields -homosexual -tunes -##minate -##text -Manny -chemist -hindwings -##urn -pilgrimage -##sfield -##riff -MLS -##rive -Huntington -translates -Path -slim -##ndra -##oz -climax -commuter -desperation -##reet -denying -##rious -daring -seminary -polo -##clamation -Teatro -Torah -Cats -identities -Poles -photographed -fiery -popularly -##cross -winters -Hesse -##vio -Nurse -Senegal -Salon -prescribed -justify -##gues -##и -##orted -HQ -##hiro -evaluated -momentarily -##unts -Debbie -##licity -##TP -Mighty -Rabbit -##chal -Events -Savoy -##ht -Brandenburg -Bordeaux -##laus -Release -##IE -##kowski -1900s -SK -Strauss -##aly -Sonia -Updated -synagogue -McKay -flattened -370 -clutch -contests -toast -evaluate -pope -heirs -jam -tutor -reverted -##ading -nonsense -hesitate -Lars -Ceylon -Laurie -##guchi -accordingly -customary -148 -Ethics -Multiple -instincts -IGN -##ä -bullshit -##hit -##par -desirable -##ducing -##yam -alias -ashore -licenses -##lification -misery -147 -Cola -assassinated -fiercely -##aft -las -goat -substrate -lords -Cass -Bridges -ICC -lasts -sights -reproductive -##asi -Ivory -Clean -fixing -##lace -seeming -aide -1850s -harassment -##FF -##LE -reasonably -##coat -##cano -NYC -1784 -Fifty -immunity -Canadians -Cheng -comforting -meanwhile -##tera -##blin -breeds -glowed -##vour -Aden -##verted -##aded -##oral -neat -enforced -poisoning -##ews -##hone -enforce -predecessors -survivor -Month -unfamiliar -pierced -waived -dump -responds -Mai -Declan -angular -Doesn -interpretations -##yar -invest -Dhaka -policeman -Congregation -Eighth -painfully -##este -##vior -Württemberg -##cles -blockade -encouragement -##fie -Caucasus -Malone -Universidad -utilize -Nissan -inherent -151 -agreeing -syllable -determines -Protocol -conclude -##gara -40th -Xu -Taiwanese -##ather -boiler -printer -Lacey -titular -Klaus -Fallon -Wembley -fox -Chandra -Governorate -obsessed -##Ps -micro -##25 -Cooke -gymnasium -weaving -Shall -Hussein -glaring -softball -Reader -Dominion -Trouble -varsity -Cooperation -Chaos -Kang -Kramer -Eisenhower -proves -Connie -consortium -governors -Bethany -opener -Normally -Willy -linebacker -Regent -Used -AllMusic -Twilight -##shaw 
-Companion -Tribunal -simpler -##gam -Experimental -Slovenian -cellar -deadline -trout -Hubbard -ads -idol -##hetto -Granada -clues -salmon -1700 -Omega -Caldwell -softened -Bills -Honolulu -##gn -Terrace -suitcase -##IL -frantic -##oons -Abbot -Sitting -Fortress -Riders -sickness -enzymes -trustee -Bern -forged -##13 -##ruff -##rl -##versity -inspector -champagne -##held -##FI -hereditary -Taliban -handball -##wine -Sioux -##dicated -honoured -139 -##tude -Skye -meanings -##rkin -cardiac -analyzed -vegetable -##FS -Royals -dial -freelance -##fest -partisan -petroleum -ridden -Lincolnshire -panting -##comb -presidents -Haley -##chs -contributes -Jew -discoveries -panicked -Woody -eyelids -Fate -Tulsa -mg -whiskey -zombies -Wii -##udge -investigators -##bull -centred -##screen -Bone -Lana -##oise -forts -##ske -Conan -Lyons -##writing -SH -##ride -rhythmic -154 -##llah -pioneers -##bright -captivity -Sanchez -Oman -##mith -Flint -Platform -##ioned -emission -packet -Persia -##formed -takeover -tempted -Vance -Few -Toni -receptions -##ن -exchanges -Camille -whale -Chronicles -##rent -##ushing -##rift -Alto -Genus -##asing -onward -foremost -longing -Rockefeller -containers -##cribe -intercepted -##olt -pleading -Bye -bee -##umbling -153 -undertake -Izzy -cheaper -Ultra -validity -##pse -Sa -hovering -##pert -vintage -engraved -##rise -farmland -##ever -##ifier -Atlantis -propose -Catalonia -plunged -##edly -demonstrates -gig -##cover -156 -Osborne -cowboy -herd -investigator -loops -Burning -rests -Instrumental -embarrassing -focal -install -readings -swirling -Chatham -parameter -##zin -##holders -Mandarin -Moody -converting -Escape -warnings -##chester -incarnation -##ophone -adopting -##lins -Cromwell -##laws -Axis -Verde -Kappa -Schwartz -Serbs -caliber -Wanna -Chung -##ality -nursery -principally -Bulletin -likelihood -logging -##erty -Boyle -supportive -twitched -##usive -builds -Marseille -omitted -motif -Lands -##lusion -##ssed -Barrow -Airfield -Harmony -WWF -endured -merging -convey -branding -examinations -167 -Italians -##dh -dude -1781 -##teau -crawling -thoughtful -clasped -concluding -brewery -Moldova -Wan -Towers -Heidelberg -202 -##ict -Lagos -imposing -##eval -##serve -Bacon -frowning -thirteenth -conception -calculations -##ович -##mile -##ivated -mutation -strap -##lund -demographic -nude -perfection -stocks -##renched -##dit -Alejandro -bites -fragment -##hack -##rchy -GB -Surgery -Berger -punish -boiling -consume -Elle -Sid -Dome -relies -Crescent -treasurer -Bloody -1758 -upheld -Guess -Restaurant -signatures -font -millennium -mural -stakes -Abel -hailed -insists -Alumni -Breton -##jun -digits -##FM -##thal -Talking -motive -reigning -babe -masks -##ø -Shaun -potato -sour -whitish -Somali -##derman -##rab -##wy -chancel -telecommunications -Noise -messenger -tidal -grinding -##ogenic -Rebel -constituent -peripheral -recruitment -##ograph -##tler -pumped -Ravi -poked -##gley -Olive -diabetes -discs -liking -sting -fits -stir -Mari -Sega -creativity -weights -Macau -mandated -Bohemia -disastrous -Katrina -Baku -Rajasthan -waiter -##psis -Siberia -verbs -##truction -patented -1782 -##ndon -Relegated -Hunters -Greenwood -Shock -accusing -skipped -Sessions -markers -subset -monumental -Viola -comparative -Alright -Barbados -setup -Session -standardized -##ík -##sket -appoint -AFB -Nationalist -##WS -Troop -leaped -Treasure -goodness -weary -originates -100th -compassion -expresses -recommend -168 -composing -seventeenth -Tex -Atlético -bald -Finding -Presidency -Sharks 
[Flattened file content: a BERT-style WordPiece vocabulary list, one token per line in the source file (e.g. "favoured", "##lter", "Guerrero", "##ção"); several thousand machine-generated token entries omitted here.]
-brutally -horizontally -longed -##matical -federally -Rolf -Root -exclude -rag -agitation -Lounge -astonished -##wirl -Impossible -transformations -##IVE -##ceded -##slav -downloaded -fucked -Egyptians -Welles -##ffington -U2 -befriended -radios -##jid -archaic -compares -##ccelerator -##imated -##tosis -Hung -Scientists -Thousands -geographically -##LR -Macintosh -fluorescent -##ipur -Wehrmacht -##BR -##firmary -Chao -##ague -Boyer -##grounds -##hism -##mento -##taining -infancy -##cton -510 -Boca -##loy -1644 -ben -dong -stresses -Sweat -expressway -graders -ochreous -nets -Lawn -thirst -Uruguayan -satisfactory -##tracts -baroque -rusty -##ław -Shen -Gdańsk -chickens -##graving -Hodge -Papal -SAT -bearer -##ogo -##rger -merits -Calendar -Highest -Skills -##ortex -Roberta -paradigm -recounts -frigates -swamps -unitary -##oker -balloons -Hawthorne -Muse -spurred -advisors -reclaimed -stimulate -fibre -pat -repeal -##dgson -##iar -##rana -anthropologist -descends -flinch -reared -##chang -##eric -##lithic -commissioning -##cumenical -##lume -##rchen -Wolff -##tsky -Eurasian -Nepali -Nightmare -ZIP -playback -##latz -##vington -Warm -##75 -Martina -Rollins -Saetan -Variations -sorting -##م -530 -Joaquin -Ptolemy -thinner -##iator -##pticism -Cebu -Highlanders -Linden -Vanguard -##SV -##mor -##ulge -ISSN -cartridges -repression -Étienne -311 -Lauderdale -commodities -null -##rb -1720 -gearbox -##reator -Ang -Forgotten -dubious -##rls -##dicative -##phate -Groove -Herrera -##çais -Collections -Maximus -##published -Fell -Qualification -filtering -##tized -Roe -hazards -##37 -##lative -##tröm -Guadalupe -Tajikistan -Preliminary -fronted -glands -##paper -##iche -##iding -Cairns -rallies -Location -seduce -##mple -BYU -##itic -##FT -Carmichael -Prentice -songwriters -forefront -Physicians -##rille -##zee -Preparatory -##cherous -UV -##dized -Navarro -misses -##nney -Inland -resisting -##sect -Hurt -##lino -galaxies -##raze -Institutions -devote -##lamp -##ciating -baron -##bracing -Hess -operatic -##CL -##ος -Chevalier -Guiana -##lattered -Fed -##cuted -##smo -Skull -denies -236 -Waller -##mah -Sakura -mole -nominate -sermons -##bering -widowed -##röm -Cavendish -##struction -Nehru -Revelation -doom -Gala -baking -Nr -Yourself -banning -Individuals -Sykes -orchestrated -630 -Phone -steered -620 -specialising -starvation -##AV -##alet -##upation -seductive -##jects -##zure -Tolkien -Benito -Wizards -Submarine -dictator -Duo -Caden -approx -basins -##nc -shrink -##icles -##sponsible -249 -mit -outpost -##bayashi -##rouse -##tl -Jana -Lombard -RBIs -finalized -humanities -##function -Honorable -tomato -##iot -Pie -tee -##pect -Beaufort -Ferris -bucks -##graduate -##ocytes -Directory -anxiously -##nating -flanks -##Ds -virtues -##believable -Grades -criterion -manufactures -sourced -##balt -##dance -##tano -Ying -##BF -##sett -adequately -blacksmith -totaled -trapping -expanse -Historia -Worker -Sense -ascending -housekeeper -##oos -Crafts -Resurrection -##verty -encryption -##aris -##vat -##pox -##runk -##iability -gazes -spying -##ths -helmets -wired -##zophrenia -Cheung -WR -downloads -stereotypes -239 -Lucknow -bleak -Bragg -hauling -##haft -prohibit -##ermined -##castle -barony -##hta -Typhoon -antibodies -##ascism -Hawthorn -Kurdistan -Minority -Gorge -Herr -appliances -disrupt -Drugs -Lazarus -##ilia -##ryo -##tany -Gotta -Masovian -Roxy -choreographed -##rissa -turbulent -##listed -Anatomy -exiting -##det -##isław -580 -Kaufman -sage -##apa -Symposium -##rolls -Kaye -##ptera -##rocław 
-jerking -##menclature -Guo -M1 -resurrected -trophies -##lard -Gathering -nestled -serpent -Dow -reservoirs -Claremont -arbitration -chronicle -eki -##arded -##zers -##mmoth -Congregational -Astronomical -NE -RA -Robson -Scotch -modelled -slashed -##imus -exceeds -##roper -##utile -Laughing -vascular -superficial -##arians -Barclay -Caucasian -classmate -sibling -Kimberly -Shreveport -##ilde -##liche -Cheney -Deportivo -Veracruz -berries -##lase -Bed -MI -Anatolia -Mindanao -broadband -##olia -##arte -##wab -darts -##immer -##uze -believers -ordinance -violate -##wheel -##ynth -Alongside -Coupe -Hobbs -arrondissement -earl -townland -##dote -##lihood -##sla -Ghosts -midfield -pulmonary -##eno -cues -##gol -##zda -322 -Siena -Sultanate -Bradshaw -Pieter -##thical -Raceway -bared -competence -##ssent -Bet -##urer -##ła -Alistair -Göttingen -appropriately -forge -##osterone -##ugen -DL -345 -convoys -inventions -##resses -##cturnal -Fay -Integration -slash -##roats -Widow -barking -##fant -1A -Hooper -##cona -##runched -unreliable -##emont -##esign -##stabulary -##stop -Journalists -bony -##iba -##trata -##ège -horrific -##bish -Jocelyn -##rmon -##apon -##cier -trainers -##ulatory -1753 -BR -corpus -synthesized -##bidden -##rafford -Elgin -##entry -Doherty -clockwise -##played -spins -##ample -##bley -Cope -constructions -seater -warlord -Voyager -documenting -fairies -##viator -Lviv -jewellery -suites -##gold -Maia -NME -##eavor -##kus -Eugène -furnishings -##risto -MCC -Metropolis -Older -Telangana -##mpus -amplifier -supervising -1710 -buffalo -cushion -terminating -##powering -steak -Quickly -contracting -dem -sarcastically -Elsa -##hein -bastards -narratives -Takes -304 -composure -typing -variance -##ifice -Softball -##rations -McLaughlin -gaped -shrines -##hogany -Glamorgan -##icle -##nai -##ntin -Fleetwood -Woodland -##uxe -fictitious -shrugs -##iper -BWV -conform -##uckled -Launch -##ductory -##mized -Tad -##stituted -##free -Bel -Chávez -messing -quartz -##iculate -##folia -##lynn -ushered -##29 -##ailing -dictated -Pony -##opsis -precinct -802 -Plastic -##ughter -##uno -##porated -Denton -Matters -SPD -hating -##rogen -Essential -Deck -Dortmund -obscured -##maging -Earle -##bred -##ittle -##ropolis -saturated -##fiction -##ression -Pereira -Vinci -mute -warehouses -##ún -biographies -##icking -sealing -##dered -executing -pendant -##wives -murmurs -##oko -substrates -symmetrical -Susie -##mare -Yusuf -analogy -##urage -Lesley -limitation -##rby -##ío -disagreements -##mise -embroidered -nape -unarmed -Sumner -Stores -dwell -Wilcox -creditors -##rivatization -##shes -##amia -directs -recaptured -scouting -McGuire -cradle -##onnell -Sato -insulin -mercenary -tolerant -Macquarie -transitions -cradled -##berto -##ivism -##yotes -FF -Ke -Reach -##dbury -680 -##bill -##oja -##sui -prairie -##ogan -reactive -##icient -##rits -Cyclone -Sirius -Survival -Pak -##coach -##trar -halves -Agatha -Opus -contrasts -##jection -ominous -##iden -Baylor -Woodrow -duct -fortification -intercourse -##rois -Colbert -envy -##isi -Afterward -geared -##flections -accelerate -##lenching -Witness -##rrer -Angelina -Material -assertion -misconduct -Nix -cringed -tingling -##eti -##gned -Everest -disturb -sturdy -##keepers -##vied -Profile -heavenly -##kova -##victed -translating -##sses -316 -Invitational -Mention -martyr -##uristic -Barron -hardness -Nakamura -405 -Genevieve -reflections -##falls -jurist -##LT -Pyramid -##yme -Shoot -heck -linguist -##tower -Ives -superiors -##leo -Achilles -##phological 
-Christophe -Padma -precedence -grassy -Oral -resurrection -##itting -clumsy -##lten -##rue -huts -##stars -Equal -##queduct -Devin -Gaga -diocesan -##plating -##upe -##graphers -Patch -Scream -hail -moaning -tracts -##hdi -Examination -outsider -##ergic -##oter -Archipelago -Havilland -greenish -tilting -Aleksandr -Konstantin -warship -##emann -##gelist -##ought -billionaire -##blivion -321 -Hungarians -transplant -##jured -##fters -Corbin -autism -pitchers -Garner -thence -Scientology -transitioned -integrating -repetitive -##dant -Rene -vomit -##burne -1661 -Researchers -Wallis -insulted -wavy -##wati -Ewing -excitedly -##kor -frescoes -injustice -##achal -##lumber -##úl -novella -##sca -Liv -##enstein -##river -monstrous -topping -downfall -looming -sinks -trillion -##pont -Effect -##phi -##urley -Sites -catchment -##H1 -Hopper -##raiser -1642 -Maccabi -lance -##chia -##sboro -NSA -branching -retorted -tensor -Immaculate -drumming -feeder -##mony -Dyer -homicide -Temeraire -fishes -protruding -skins -orchards -##nso -inlet -ventral -##finder -Asiatic -Sul -1688 -Melinda -assigns -paranormal -gardening -Tau -calming -##inge -##crow -regimental -Nik -fastened -correlated -##gene -##rieve -Sick -##minster -##politan -hardwood -hurled -##ssler -Cinematography -rhyme -Montenegrin -Packard -debating -##itution -Helens -Trick -Museums -defiance -encompassed -##EE -##TU -##nees -##uben -##ünster -##nosis -435 -Hagen -cinemas -Corbett -commended -##fines -##oman -bosses -ripe -scraping -##loc -filly -Saddam -pointless -Faust -Orléans -Syriac -##♭ -longitude -##ropic -Alfa -bliss -gangster -##ckling -SL -blending -##eptide -##nner -bends -escorting -##bloid -##quis -burials -##sle -##è -Ambulance -insults -##gth -Antrim -unfolded -##missible -splendid -Cure -warily -Saigon -Waste -astonishment -boroughs -##VS -##dalgo -##reshing -##usage -rue -marital -versatile -unpaid -allotted -bacterium -##coil -##cue -Dorothea -IDF -##location -##yke -RPG -##tropical -devotees -liter -##pree -Johnstone -astronaut -attends -pollen -periphery -doctrines -meta -showered -##tyn -GO -Huh -laude -244 -Amar -Christensen -Ping -Pontifical -Austen -raiding -realities -##dric -urges -##dek -Cambridgeshire -##otype -Cascade -Greenberg -Pact -##cognition -##aran -##urion -Riot -mimic -Eastwood -##imating -reversal -##blast -##henian -Pitchfork -##sunderstanding -Staten -WCW -lieu -##bard -##sang -experimenting -Aquino -##lums -TNT -Hannibal -catastrophic -##lsive -272 -308 -##otypic -41st -Highways -aggregator -##fluenza -Featured -Reece -dispatch -simulated -##BE -Communion -Vinnie -hardcover -inexpensive -til -##adores -groundwater -kicker -blogs -frenzy -##wala -dealings -erase -Anglia -##umour -Hapoel -Marquette -##raphic -##tives -consult -atrocities -concussion -##érard -Decree -ethanol -##aen -Rooney -##chemist -##hoot -1620 -menacing -Schuster -##bearable -laborers -sultan -Juliana -erased -onstage -##ync -Eastman -##tick -hushed -##yrinth -Lexie -Wharton -Lev -##PL -Testing -Bangladeshi -##bba -##usions -communicated -integers -internship -societal -##odles -Loki -ET -Ghent -broadcasters -Unix -##auer -Kildare -Yamaha -##quencing -##zman -chilled -##rapped -##uant -Duval -sentiments -Oliveira -packets -Horne -##rient -Harlan -Mirage -invariant -##anger -##tensive -flexed -sweetness -##wson -alleviate -insulting -limo -Hahn -##llars -##hesia -##lapping -buys -##oaming -mocked -pursuits -scooted -##conscious -##ilian -Ballad -jackets -##kra -hilly -##cane -Scenic -McGraw -silhouette -whipping -##roduced -##wark 
-##chess -##rump -Lemon -calculus -demonic -##latine -Bharatiya -Govt -Que -Trilogy -Ducks -Suit -stairway -##ceipt -Isa -regulator -Automobile -flatly -##buster -##lank -Spartans -topography -Tavi -usable -Chartered -Fairchild -##sance -##vyn -Digest -nuclei -typhoon -##llon -Alvarez -DJs -Grimm -authoritative -firearm -##chschule -Origins -lair -unmistakable -##xial -##cribing -Mouth -##genesis -##shū -##gaon -##ulter -Jaya -Neck -##UN -##oing -##static -relativity -##mott -##utive -##esan -##uveau -BT -salts -##roa -Dustin -preoccupied -Novgorod -##asus -Magnum -tempting -##histling -##ilated -Musa -##ghty -Ashland -pubs -routines -##etto -Soto -257 -Featuring -Augsburg -##alaya -Bit -loomed -expects -##abby -##ooby -Auschwitz -Pendleton -vodka -##sent -rescuing -systemic -##inet -##leg -Yun -applicant -revered -##nacht -##ndas -Muller -characterization -##patient -##roft -Carole -##asperated -Amiga -disconnected -gel -##cologist -Patriotic -rallied -assign -veterinary -installing -##cedural -258 -Jang -Parisian -incarcerated -stalk -##iment -Jamal -McPherson -Palma -##oken -##viation -512 -Rourke -irrational -##rippled -Devlin -erratic -##NI -##payers -Ni -engages -Portal -aesthetics -##rrogance -Milne -assassins -##rots -335 -385 -Cambodian -Females -fellows -si -##block -##otes -Jayne -Toro -flutter -##eera -Burr -##lanche -relaxation -##fra -Fitzroy -##undy -1751 -261 -comb -conglomerate -ribbons -veto -##Es -casts -##ege -1748 -Ares -spears -spirituality -comet -##nado -##yeh -Veterinary -aquarium -yer -Councils -##oked -##ynamic -Malmö -remorse -auditions -drilled -Hoffmann -Moe -Nagoya -Yacht -##hakti -##race -##rrick -Talmud -coordinating -##EI -##bul -##his -##itors -##ligent -##uerra -Narayan -goaltender -taxa -##asures -Det -##mage -Infinite -Maid -bean -intriguing -##cription -gasps -socket -##mentary -##reus -sewing -transmitting -##different -##furbishment -##traction -Grimsby -sprawling -Shipyard -##destine -##hropic -##icked -trolley -##agi -##lesh -Josiah -invasions -Content -firefighters -intro -Lucifer -subunit -Sahib -Myrtle -inhibitor -maneuvers -##teca -Wrath -slippery -##versing -Shoes -##dial -##illiers -##luded -##mmal -##pack -handkerchief -##edestal -##stones -Fusion -cumulative -##mell -##cacia -##rudge -##utz -foe -storing -swiped -##meister -##orra -batter -strung -##venting -##kker -Doo -Taste -immensely -Fairbanks -Jarrett -Boogie -1746 -mage -Kick -legislators -medial -##ilon -##logies -##ranton -Hybrid -##uters -Tide -deportation -Metz -##secration -##virus -UFO -##fell -##orage -##raction -##rrigan -1747 -fabricated -##BM -##GR -##rter -muttering -theorist -##tamine -BMG -Kincaid -solvent -##azed -Thin -adorable -Wendell -ta -##viour -pulses -##pologies -counters -exposition -sewer -Luciano -Clancy -##angelo -##riars -Showtime -observes -frankly -##oppy -Bergman -lobes -timetable -##bri -##uest -FX -##dust -##genus -Glad -Helmut -Meridian -##besity -##ontaine -Revue -miracles -##titis -PP -bluff -syrup -307 -Messiah -##erne -interfering -picturesque -unconventional -dipping -hurriedly -Kerman -248 -Ethnic -Toward -acidic -Harrisburg -##65 -intimidating -##aal -Jed -Pontiac -munitions -##nchen -growling -mausoleum -##ération -##wami -Cy -aerospace -caucus -Doing -##around -##miring -Cuthbert -##poradic -##rovisation -##wth -evaluating -##scraper -Belinda -owes -##sitic -##thermal -##fast -economists -##lishing -##uerre -##ân -credible -##koto -Fourteen -cones -##ebrates -bookstore -towels -##phony -Appearance -newscasts -##olin -Karin -Bingham -##elves 
-1680 -306 -disks -##lston -##secutor -Levant -##vout -Micro -snuck -##ogel -##racker -Exploration -drastic -##kening -Elsie -endowment -##utnant -Blaze -##rrosion -leaking -45th -##rug -##uernsey -760 -Shapiro -cakes -##ehan -##mei -##ité -##kla -repetition -successively -Friendly -Île -Koreans -Au -Tirana -flourish -Spirits -Yao -reasoned -##leam -Consort -cater -marred -ordeal -supremacy -##ritable -Paisley -euro -healer -portico -wetland -##kman -restart -##habilitation -##zuka -##Script -emptiness -communion -##CF -##inhabited -##wamy -Casablanca -pulsed -##rrible -##safe -395 -Dual -Terrorism -##urge -##found -##gnolia -Courage -patriarch -segregated -intrinsic -##liography -##phe -PD -convection -##icidal -Dharma -Jimmie -texted -constituents -twitch -##calated -##mitage -##ringing -415 -milling -##geons -Armagh -Geometridae -evergreen -needy -reflex -template -##pina -Schubert -##bruck -##icted -##scher -##wildered -1749 -Joanne -clearer -##narl -278 -Print -automation -consciously -flashback -occupations -##ests -Casimir -differentiated -policing -repay -##aks -##gnesium -Evaluation -commotion -##CM -##smopolitan -Clapton -mitochondrial -Kobe -1752 -Ignoring -Vincenzo -Wet -bandage -##rassed -##unate -Maris -##eted -##hetical -figuring -##eit -##nap -leopard -strategically -##reer -Fen -Iain -##ggins -##pipe -Matteo -McIntyre -##chord -##feng -Romani -asshole -flopped -reassure -Founding -Styles -Torino -patrolling -##erging -##ibrating -##ructural -sincerity -##ät -##teacher -Juliette -##cé -##hog -##idated -##span -Winfield -##fender -##nast -##pliant -1690 -Bai -Je -Saharan -expands -Bolshevik -rotate -##root -Britannia -Severn -##cini -##gering -##say -sly -Steps -insertion -rooftop -Piece -cuffs -plausible -##zai -Provost -semantic -##data -##vade -##cimal -IPA -indictment -Libraries -flaming -highlands -liberties -##pio -Elders -aggressively -##pecific -Decision -pigeon -nominally -descriptive -adjustments -equestrian -heaving -##mour -##dives -##fty -##yton -intermittent -##naming -##sets -Calvert -Casper -Tarzan -##kot -Ramírez -##IB -##erus -Gustavo -Roller -vaulted -##solation -##formatics -##tip -Hunger -colloquially -handwriting -hearth -launcher -##idian -##ilities -##lind -##locating -Magdalena -Soo -clubhouse -##kushima -##ruit -Bogotá -Organic -Worship -##Vs -##wold -upbringing -##kick -groundbreaking -##urable -##ván -repulsed -##dira -##ditional -##ici -melancholy -##bodied -##cchi -404 -concurrency -H₂O -bouts -##gami -288 -Leto -troll -##lak -advising -bundled -##nden -lipstick -littered -##leading -##mogeneous -Experiment -Nikola -grove -##ogram -Mace -##jure -cheat -Annabelle -Tori -lurking -Emery -Walden -##riz -paints -Markets -brutality -overrun -##agu -##sat -din -ostensibly -Fielding -flees -##eron -Pound -ornaments -tornadoes -##nikov -##organisation -##reen -##Works -##ldred -##olten -##stillery -soluble -Mata -Grimes -Léon -##NF -coldly -permitting -##inga -##reaked -Agents -hostess -##dl -Dyke -Kota -avail -orderly -##saur -##sities -Arroyo -##ceps -##egro -Hawke -Noctuidae -html -seminar -##ggles -##wasaki -Clube -recited -##sace -Ascension -Fitness -dough -##ixel -Nationale -##solidate -pulpit -vassal -570 -Annapolis -bladder -phylogenetic -##iname -convertible -##ppan -Comet -paler -##definite -Spot -##dices -frequented -Apostles -slalom -##ivision -##mana -##runcated -Trojan -##agger -##iq -##league -Concept -Controller -##barian -##curate -##spersed -##tring -engulfed -inquired -##hmann -286 -##dict -##osy -##raw -MacKenzie -su -##ienced -##iggs 
-##quitaine -bisexual -##noon -runways -subsp -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##+ -##, -##- -##. -##/ -##: -##; -##< -##= -##> -##? -##@ -##[ -##\ -##] -##^ -##_ -##` -##{ -##| -##} -##~ -##¡ -##¢ -##£ -##¥ -##§ -##¨ -##© -##ª -##« -##¬ -##® -##± -##´ -##µ -##¶ -##· -##¹ -##º -##» -##¼ -##¾ -##¿ -##À -##Á -## -##Ä -##Å -##Æ -##Ç -##È -##É -##Í -##Î -##Ñ -##Ó -##Ö -##× -##Ø -##Ú -##Ü -##Þ -##â -##ã -##æ -##ç -##î -##ï -##ð -##ñ -##ô -##õ -##÷ -##û -##þ -##ÿ -##Ā -##ą -##Ć -##Č -##ď -##Đ -##đ -##ē -##ė -##ę -##ě -##ğ -##ġ -##Ħ -##ħ -##ĩ -##Ī -##İ -##ļ -##Ľ -##ľ -##Ł -##ņ -##ň -##ŋ -##Ō -##ŏ -##ő -##Œ -##œ -##ř -##Ś -##ś -##Ş -##Š -##Ţ -##ţ -##ť -##ũ -##ŭ -##ů -##ű -##ų -##ŵ -##ŷ -##ź -##Ż -##ż -##Ž -##ž -##Ə -##ƒ -##ơ -##ư -##ǎ -##ǐ -##ǒ -##ǔ -##ǫ -##Ș -##Ț -##ț -##ɐ -##ɑ -##ɔ -##ɕ -##ə -##ɛ -##ɡ -##ɣ -##ɨ -##ɪ -##ɲ -##ɾ -##ʀ -##ʁ -##ʂ -##ʃ -##ʊ -##ʋ -##ʌ -##ʐ -##ʑ -##ʒ -##ʔ -##ʰ -##ʲ -##ʳ -##ʷ -##ʻ -##ʼ -##ʾ -##ʿ -##ˈ -##ː -##ˡ -##ˢ -##ˣ -##́ -##̃ -##̍ -##̯ -##͡ -##Α -##Β -##Γ -##Δ -##Ε -##Η -##Θ -##Ι -##Κ -##Λ -##Μ -##Ν -##Ο -##Π -##Σ -##Τ -##Φ -##Χ -##Ψ -##Ω -##ά -##έ -##ή -##ί -##β -##γ -##δ -##ε -##ζ -##η -##θ -##ι -##κ -##λ -##μ -##ξ -##ο -##π -##ρ -##σ -##τ -##υ -##φ -##χ -##ψ -##ω -##ό -##ύ -##ώ -##І -##Ј -##А -##Б -##В -##Г -##Д -##Е -##Ж -##З -##И -##К -##Л -##М -##Н -##О -##П -##Р -##С -##Т -##У -##Ф -##Х -##Ц -##Ч -##Ш -##Э -##Ю -##Я -##б -##в -##г -##д -##ж -##з -##к -##л -##м -##п -##с -##т -##у -##ф -##х -##ц -##ч -##ш -##щ -##ъ -##ы -##ь -##э -##ю -##ё -##і -##ї -##ј -##њ -##ћ -##Ա -##Հ -##ա -##ե -##ի -##կ -##մ -##յ -##ն -##ո -##ս -##տ -##ր -##ւ -##ְ -##ִ -##ֵ -##ֶ -##ַ -##ָ -##ֹ -##ּ -##א -##ב -##ג -##ד -##ה -##ו -##ז -##ח -##ט -##י -##כ -##ל -##ם -##מ -##ן -##נ -##ס -##ע -##פ -##צ -##ק -##ר -##ש -##ת -##، -##ء -##آ -##أ -##إ -##ئ -##ا -##ب -##ت -##ث -##ج -##ح -##خ -##ذ -##ز -##س -##ش -##ص -##ض -##ط -##ظ -##ع -##غ -##ف -##ق -##ك -##ل -##و -##ى -##َ -##ِ -##ٹ -##پ -##چ -##ک -##گ -##ہ -##ی -##ے -##ं -##आ -##क -##ग -##च -##ज -##ण -##त -##द -##ध -##न -##प -##ब -##भ -##म -##य -##र -##ल -##व -##श -##ष -##स -##ह -##ा -##ि -##ी -##ु -##े -##ो -##् -##। -##॥ -##আ -##ই -##এ -##ও -##ক -##খ -##গ -##চ -##ছ -##জ -##ট -##ত -##থ -##দ -##ধ -##ন -##প -##ব -##ম -##য -##র -##ল -##শ -##স -##হ -##় -##া -##ি -##ী -##ু -##ে -##ো -##্ -##য় -##க -##த -##ப -##ம -##ய -##ர -##ல -##வ -##ா -##ி -##ு -##் -##ร -##་ -##ག -##ང -##ད -##ན -##བ -##མ -##ར -##ལ -##ས -##ི -##ུ -##ེ -##ོ -##ა -##ე -##ი -##ლ -##ნ -##ო -##რ -##ს -##ᴬ -##ᴵ -##ᵀ -##ᵃ -##ᵇ -##ᵈ -##ᵉ -##ᵍ -##ᵏ -##ᵐ -##ᵒ -##ᵖ -##ᵗ -##ᵘ -##ᵣ -##ᵤ -##ᵥ -##ᶜ -##ᶠ -##ḍ -##Ḥ -##ḥ -##Ḩ -##ḩ -##ḳ -##ṃ -##ṅ -##ṇ -##ṛ -##ṣ -##ṭ -##ạ -##ả -##ấ -##ầ -##ẩ -##ậ -##ắ -##ế -##ề -##ể -##ễ -##ệ -##ị -##ọ -##ố -##ồ -##ổ -##ộ -##ớ -##ờ -##ợ -##ụ -##ủ -##ứ -##ừ -##ử -##ữ -##ự -##ỳ -##ỹ -##ἀ -##ἐ -##ὁ -##ὐ -##ὰ -##ὶ -##ὸ -##ῆ -##ῖ -##ῦ -##ῶ -##‐ -##‑ -##‒ -##– -##— -##― -##‖ -##‘ -##’ -##‚ -##“ -##” -##„ -##† -##‡ -##• -##… -##‰ -##′ -##″ -##⁄ -##⁰ -##ⁱ -##⁴ -##⁵ -##⁶ -##⁷ -##⁸ -##⁹ -##⁻ -##ⁿ -##₅ -##₆ -##₇ -##₈ -##₉ -##₊ -##₍ -##₎ -##ₐ -##ₑ -##ₒ -##ₓ -##ₕ -##ₖ -##ₘ -##ₚ -##ₛ -##ₜ -##₤ -##€ -##₱ -##₹ -##ℓ -##№ -##ℝ -##⅓ -##← -##↑ -##→ -##↔ -##⇌ -##⇒ -##∂ -##∈ -##∗ -##∘ -##√ -##∞ -##∧ -##∨ -##∩ -##∪ -##≈ -##≠ -##≡ -##≤ -##≥ -##⊂ -##⊆ -##⊕ -##⋅ -##─ -##│ -##■ -##● -##★ -##☆ -##☉ -##♠ -##♣ -##♥ -##♦ -##♯ -##⟨ -##⟩ -##ⱼ -##、 -##。 -##《 -##》 -##「 -##」 -##『 -##』 -##〜 -##い -##う -##え -##お -##か -##き -##く -##け -##こ -##さ -##し -##す -##せ -##そ -##た -##ち -##つ -##て -##と -##な -##に -##の -##は -##ひ -##ま -##み -##む -##め -##も -##や -##ゆ -##よ -##ら -##り -##る -##れ -##ん -##ア -##ィ 
-##イ -##ウ -##エ -##オ -##カ -##ガ -##キ -##ク -##グ -##コ -##サ -##シ -##ジ -##ス -##ズ -##タ -##ダ -##ッ -##テ -##デ -##ト -##ド -##ナ -##ニ -##ハ -##バ -##パ -##フ -##ブ -##プ -##マ -##ミ -##ム -##ャ -##ュ -##ラ -##リ -##ル -##レ -##ロ -##ン -##・ -##ー -##一 -##三 -##上 -##下 -##中 -##事 -##二 -##井 -##京 -##人 -##亻 -##仁 -##佐 -##侍 -##光 -##公 -##力 -##北 -##十 -##南 -##原 -##口 -##史 -##司 -##吉 -##同 -##和 -##囗 -##国 -##國 -##土 -##城 -##士 -##大 -##天 -##太 -##夫 -##女 -##子 -##宀 -##安 -##宮 -##宿 -##小 -##尚 -##山 -##島 -##川 -##州 -##平 -##年 -##心 -##愛 -##戸 -##文 -##新 -##方 -##日 -##明 -##星 -##書 -##月 -##木 -##本 -##李 -##村 -##東 -##松 -##林 -##正 -##武 -##氏 -##水 -##氵 -##江 -##河 -##海 -##版 -##犬 -##王 -##生 -##田 -##白 -##皇 -##省 -##真 -##石 -##社 -##神 -##竹 -##美 -##義 -##花 -##藤 -##西 -##谷 -##車 -##辶 -##道 -##郎 -##郡 -##部 -##野 -##金 -##長 -##門 -##陽 -##青 -##食 -##馬 -##高 -##龍 -##龸 -##사 -##씨 -##의 -##이 -##한 -##fi -##fl -##! -##( -##) -##, -##- -##/ -##: diff --git a/TensorFlow/built-in/nlp/Bert-NER_ID0797_for_TensorFlow/output/result_dir/label_test.txt b/TensorFlow/built-in/nlp/Bert-NER_ID0797_for_TensorFlow/output/result_dir/label_test.txt deleted file mode 100644 index 6df8e0ffde20fe0043873f78adc543e0651a4b51..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/nlp/Bert-NER_ID0797_for_TensorFlow/output/result_dir/label_test.txt +++ /dev/null @@ -1,40610 +0,0 @@ -S O O -- O O -J B-PER B-LOC -GE O O -L B-ORG O -W O O -, O O -CH B-ORG B-PER -IN O O -S B-LOC O -DE O O -. O O -Na B-LOC B-PER -La I-PER I-PER -AL B-LOC B-LOC -, O O -United B-LOC B-LOC -Arab I-LOC I-LOC -Emirates I-LOC I-LOC -1996 O O -Japan B-LOC B-LOC -began O O -the O O -defence O O -of O O -their O O -Asian B-MISC B-MISC -Cup I-MISC I-MISC -title O O -with O O -a O O -lucky O O -2 O O -win O O -against O O -Syria B-LOC B-LOC -in O O -a O O -Group O O -C O O -championship O O -match O O -on O O -Friday O O -. O O -But O O -China B-LOC B-LOC -saw O O -their O O -luck O O -desert O O -them O O -in O O -the O O -second O O -match O O -of O O -the O O -group O O -, O O -crashing O O -to O O -a O O -surprise O O -2 O O -defeat O O -to O O -newcomer O O -Uzbekistan B-LOC B-LOC -. O O -China B-LOC B-LOC -controlled O O -most O O -of O O -the O O -match O O -and O O -saw O O -several O O -chances O O -missed O O -until O O -the O O -78 O O -minute O O -when O O -U B-MISC B-MISC -striker O O -Igor B-PER B-PER -S I-PER I-PER -took O O -advantage O O -of O O -a O O -mi O O -defensive O O -header O O -to O O -lo O O -the O O -ball O O -over O O -the O O -advancing O O -Chinese B-MISC B-MISC -keeper O O -and O O -into O O -an O O -empty O O -net O O -. O O -Ole B-PER B-PER -S I-PER I-PER -made O O -sure O O -of O O -the O O -win O O -in O O -injury O O -time O O -, O O -hitting O O -an O O -un O O -left O O -foot O O -shot O O -from O O -just O O -outside O O -the O O -area O O -. O O -The O O -former O O -Soviet B-MISC B-MISC -republic O O -was O O -playing O O -in O O -an O O -Asian B-MISC B-MISC -Cup I-MISC I-MISC -finals O O -tie O O -for O O -the O O -first O O -time O O -. O O -Despite O O -winning O O -the O O -Asian B-MISC B-MISC -Games I-MISC I-MISC -title O O -two O O -years O O -ago O O -, O O -Uzbekistan B-LOC B-LOC -are O O -in O O -the O O -finals O O -as O O -outsider O O -. O O -Two O O -goals O O -from O O -defensive O O -errors O O -in O O -the O O -last O O -six O O -minutes O O -allowed O O -Japan B-LOC B-LOC -to O O -come O O -from O O -behind O O -and O O -collect O O -all O O -three O O -points O O -from O O -their O O -opening O O -meeting O O -against O O -Syria B-LOC B-LOC -. 
O O -Ta B-PER B-PER -Ta I-PER I-PER -scored O O -the O O -winner O O -in O O -the O O -88 O O -minute O O -, O O -rising O O -to O O -head O O -a O O -Hi B-PER B-PER -Yan I-PER I-PER -cross O O -towards O O -the O O -Syrian B-MISC B-MISC -goal O O -which O O -goalkeeper O O -Salem B-PER B-PER -Bit I-PER I-PER -appeared O O -to O O -have O O -covered O O -but O O -then O O -allowed O O -to O O -slip O O -into O O -the O O -net O O -. O O -It O O -was O O -the O O -second O O -costly O O -b O O -by O O -Syria B-LOC B-LOC -in O O -four O O -minutes O O -. O O -De O O -Hassan B-PER B-PER -Abbas I-PER I-PER -rose O O -to O O -intercept O O -a O O -long O O -ball O O -into O O -the O O -area O O -in O O -the O O -84 O O -minute O O -but O O -only O O -managed O O -to O O -diver O O -it O O -into O O -the O O -top O O -corner O O -of O O -Bit B-ORG B-PER -' O O -goal O O -. O O -Na B-PER B-PER -Jo I-PER I-PER -had O O -given O O -Syria B-LOC B-LOC -the O O -lead O O -with O O -a O O -well O O -header O O -in O O -the O O -seventh O O -minute O O -. O O -Japan B-LOC B-LOC -then O O -laid O O -siege O O -to O O -the O O -Syrian B-MISC B-MISC -penalty O O -area O O -for O O -most O O -of O O -the O O -game O O -but O O -rarely O O -breach O O -the O O -Syrian B-MISC B-MISC -defence O O -. O O -Bit B-ORG B-PER -pulled O O -off O O -fine O O -saves O O -whenever O O -they O O -did O O -. O O -Japan B-LOC B-LOC -coach O O -Shu B-PER B-PER -Ka I-PER I-PER -said O O -: O O -' O O -' O O -The O O -Syrian B-MISC B-MISC -own O O -goal O O -proved O O -lucky O O -for O O -us O O -. O O -The O O -Syrian B-MISC B-MISC -scored O O -early O O -and O O -then O O -played O O -defensive O O -and O O -adopted O O -long O O -balls O O -which O O -made O O -it O O -hard O O -for O O -us O O -. O O -' O O -' O O -Japan B-LOC B-LOC -, O O -co O O -of O O -the O O -World B-MISC B-MISC -Cup I-MISC I-MISC -in O O -2002 O O -and O O -ranked O O -20th O O -in O O -the O O -world O O -by O O -FIFA B-ORG B-ORG -, O O -are O O -favourite O O -to O O -regain O O -their O O -title O O -here O O -. O O -Host O O -UAE B-LOC B-LOC -play O O -Kuwait B-LOC B-LOC -and O O -South B-LOC B-LOC -Korea I-LOC I-LOC -take O O -on O O -Indonesia B-LOC B-LOC -on O O -Saturday O O -in O O -Group O O -A O O -matches O O -. O O -All O O -four O O -teams O O -are O O -level O O -with O O -one O O -point O O -each O O -from O O -one O O -game O O -. O O -R B-ORG B-ORG -UN I-ORG I-ORG -- O O -C B-ORG B-PER -BA O O -F O O -IT O B-LOC -A O O -A O O -Y O O -. O O -ROM B-LOC B-LOC -1996 O O -Italy B-LOC B-LOC -recalled O O -Marcel B-PER B-PER -Cut I-PER I-PER -on O O -Friday O O -for O O -their O O -friendly O O -against O O -Scotland B-LOC B-LOC -at O O -Murray B-LOC B-LOC -more O O -than O O -a O O -year O O -after O O -the O O -30 O O -wing O O -announced O O -he O O -was O O -retiring O O -following O O -differences O O -over O O -selection O O -. O O -Cut B-PER B-PER -, O O -who O O -trainer O O -George B-PER B-PER -Co I-PER I-PER -said O O -was O O -certain O O -to O O -play O O -on O O -Saturday O O -week O O -, O O -was O O -named O O -in O O -a O O -21 O O -squad O O -lacking O O -only O O -two O O -of O O -the O O -team O O -beaten O O -54 O O -by O O -England B-LOC B-LOC -at O O -T B-LOC B-LOC -last O O -month O O -. 
O O -Stefano B-PER B-PER -Bo I-PER I-PER -is O O -out O O -through O O -illness O O -and O O -Co B-PER B-PER -said O O -he O O -had O O -dropped O O -back O O -row O O -Co B-PER B-PER -Co I-PER I-PER -, O O -who O O -had O O -been O O -recalled O O -for O O -the O O -England B-LOC B-LOC -game O O -after O O -five O O -years O O -out O O -of O O -the O O -national O O -team O O -. O O -Cut B-PER B-PER -announced O O -his O O -retirement O O -after O O -the O O -1995 O B-MISC -World B-MISC I-MISC -Cup I-MISC I-MISC -, O O -where O O -he O O -took O O -issue O O -with O O -being O O -dropped O O -from O O -the O O -Italy B-LOC B-LOC -side O O -that O O -faced O O -England B-LOC B-LOC -in O O -the O O -pool O O -stages O O -. O O -Co B-PER B-PER -said O O -he O O -had O O -approached O O -the O O -player O O -two O O -months O O -ago O O -about O O -a O O -comeback O O -. O O -" O O -He O O -ended O O -the O O -World B-MISC B-MISC -Cup I-MISC I-MISC -on O O -the O O -wrong O O -note O O -, O O -" O O -Co B-PER B-PER -said O O -. O O -" O O -I O O -thought O O -it O O -would O O -be O O -useful O O -to O O -have O O -him O O -back O O -and O O -he O O -said O O -he O O -would O O -be O O -available O O -. O O -I O O -think O O -now O O -is O O -the O O -right O O -time O O -for O O -him O O -to O O -return O O -. O O -" O O -Squad O O -: O O -Javier B-PER B-PER -Per I-PER I-PER -, O O -Paolo B-PER B-PER -V I-PER I-PER -, O O -Marcel B-PER B-PER -Cut I-PER I-PER -, O O -Ivan B-PER B-PER -Francesca I-PER I-PER -, O O -Lea B-PER B-PER -Man I-PER I-PER -, O O -Diego B-PER B-PER -Dom I-PER I-PER -, O O -Francesco B-PER B-PER -Ma I-PER I-PER -, O O -Alessandro B-PER B-PER -T I-PER I-PER -, O O -Or B-PER B-PER -Ara I-PER I-PER -, O O -Andrea B-PER B-PER -S I-PER I-PER -, O O -Mass B-PER B-PER -G I-PER I-PER -, O O -Carlo B-PER B-PER -Ch I-PER I-PER -, O O -Walter B-PER B-PER -C I-PER I-PER -, O O -Franco B-PER B-PER -Pro I-PER I-PER -C I-PER I-PER -, O O -Carlo B-PER B-PER -Or I-PER I-PER -, O O -Mass B-PER B-PER -Cut I-PER I-PER -, O O -G B-PER B-PER -C I-PER I-PER -, O O -G B-PER B-PER -G I-PER I-PER -, O O -Nicola B-PER B-PER -S O O -- O O -LA O O -GO O O -G O O -J B-PER B-LOC -W O O -O O O -S B-LOC B-LOC -. O O -AL B-LOC B-LOC -, O O -United B-LOC B-LOC -Arab I-LOC I-LOC -Emirates I-LOC I-LOC -1996 O O -Two O O -goals O O -in O O -the O O -last O O -six O O -minutes O O -gave O O -holders O O -Japan B-LOC B-LOC -an O O -un O O -2 O O -Asian B-MISC B-MISC -Cup I-MISC I-MISC -victory O O -over O O -Syria B-LOC B-LOC -on O O -Friday O O -. O O -Ta B-PER B-PER -Ta I-PER I-PER -headed O O -the O O -winner O O -in O O -the O O -88 O O -minute O O -of O O -the O O -group O O -C O O -game O O -after O O -goalkeeper O O -Salem B-PER B-PER -Bit I-PER I-PER -spoiled O O -a O O -mistake O O -display O O -by O O -allowing O O -the O O -ball O O -to O O -slip O O -under O O -his O O -body O O -. O O -It O O -was O O -the O O -second O O -Syrian B-MISC B-MISC -defensive O O -b O O -in O O -four O O -minutes O O -. O O -De O O -Hassan B-PER B-PER -Abbas I-PER I-PER -rose O O -to O O -intercept O O -a O O -long O O -ball O O -into O O -the O O -area O O -in O O -the O O -84 O O -minute O O -but O O -only O O -managed O O -to O O -diver O O -it O O -into O O -the O O -top O O -corner O O -of O O -Bit B-ORG B-PER -' O O -goal O O -. O O -Syria B-LOC B-LOC -had O O -taken O O -the O O -lead O O -from O O -their O O -first O O -serious O O -attack O O -in O O -the O O -seventh O O -minute O O -. 
O O -Na B-PER B-PER -Jo I-PER I-PER -headed O O -a O O -cross O O -from O O -the O O -right O O -by O O -Am B-PER B-PER -A I-PER I-PER -into O O -the O O -top O O -right O O -corner O O -of O O -Ken B-PER B-PER -Shi I-PER I-PER -' O O -goal O O -. O O -Japan B-LOC B-LOC -then O O -laid O O -siege O O -to O O -the O O -Syrian B-MISC B-MISC -penalty O O -area O O -and O O -had O O -a O O -goal O O -di O O -for O O -offs O O -in O O -the O O -16th O O -minute O O -. O O -A O O -minute O O -later O O -, O O -Bit B-ORG B-PER -produced O O -a O O -good O O -double O O -save O O -, O O -first O O -from O O -Ka B-PER B-PER -Mi I-PER I-PER -' O O -header O O -and O O -then O O -blocked O O -a O O -Ta B-PER B-PER -follow O O -shot O O -. O O -Bit B-PER B-PER -saved O O -well O O -again O O -from O O -Mi B-PER B-PER -in O O -the O O -37th O O -minute O O -, O O -par O O -away O O -his O O -header O O -from O O -a O O -corner O O -. O O -Japan B-LOC B-LOC -started O O -the O O -second O O -half O O -brightly O O -but O O -Bit B-ORG B-PER -denied O O -them O O -an O O -equal O O -when O O -he O O -dive O O -to O O -his O O -right O O -to O O -save O O -Na B-PER B-PER -So I-PER I-PER -' O O -low O O -drive O O -in O O -the O O -53 O O -minute O O -. O O -Japan B-LOC B-LOC -: O O -19 O O -- O O -Ken B-PER B-PER -Shi I-PER I-PER -, O O -2 O O -- O O -Hi B-PER B-PER -Yan I-PER I-PER -, O O -3 O O -- O O -Na B-PER B-PER -So I-PER I-PER -, O O -4 O O -- O O -Ma B-PER B-PER -I I-PER I-PER -, O O -5 O O -- O O -Nor B-PER B-PER -O I-PER I-PER -, O O -6 O O -- O O -Mo B-PER B-PER -Ya I-PER I-PER -, O O -8 O O -- O O -Ma B-PER B-PER -Mae I-PER I-PER -( O O -7 O O -- O O -Ya B-PER B-PER -Honda I-PER I-PER -71 O O -) O O -, O O -9 O O -- O O -Ta B-PER B-PER -Ta I-PER I-PER -, O O -10 O O -- O O -Hi B-PER B-PER -Nana I-PER I-PER -, O O -11 O O -- O O -Ka B-PER B-PER -Mi I-PER I-PER -, O O -15 O O -- O O -Hi B-PER B-PER -Mo I-PER I-PER -( O O -14 O O -- O O -Ma B-PER B-PER -Ok I-PER I-PER -75 O O -) O O -. O O -Syria B-LOC B-LOC -: O O -24 O O -- O O -Salem B-PER B-PER -Bit I-PER I-PER -, O O -3 O O -- O O -Bach B-PER B-PER -Sr I-PER I-PER -; O O -4 O O -- O O -Hassan B-PER B-PER -Abbas I-PER I-PER -, O O -5 O O -- O O -Ta B-PER B-PER -J I-PER I-PER -, O O -6 O O -- O O -Am B-PER B-PER -A I-PER I-PER -( O O -9 O O -- O O -Lou B-PER B-PER -Tale I-PER I-PER -69 O O -) O O -, O O -8 O O -- O O -Ni B-PER B-PER -al I-PER I-PER -, O O -10 O O -- O O -Mohammed B-PER B-PER -A I-PER I-PER -, O O -12 O O -- O O -Ali B-PER B-PER -Di I-PER I-PER -, O O -13 O O -- O O -Abdul B-PER B-PER -La I-PER I-PER -He I-PER I-PER -( O O -17 O O -- O O -Am B-PER B-PER -R I-PER I-PER -46 O O -) O O -, O O -14 O O -- O O -K B-PER B-PER -Z I-PER I-PER -; O O -16 O O -- O O -Na B-PER B-PER -Jo I-PER I-PER -. O O -F O O -SK O B-MISC -C I-MISC I-MISC -M O O -R O O -. 
O O -T B-LOC B-LOC -, O O -France B-LOC B-LOC -1996 O O -Results O O -of O O -the O O -World B-MISC B-MISC -Cup I-MISC I-MISC -freestyle O O -skiing O O -m O O -competition O O -on O O -Friday O O -: O O -Men O O -1 O O -Je B-PER B-PER -Ron I-PER I-PER -( O O -Sweden B-LOC B-LOC -) O O -25 O O -points O O -2 O O -Andrei B-PER B-PER -Ivan I-PER I-PER -( O O -Russia B-LOC B-LOC -) O O -24 O O -3 O O -Ryan B-PER B-PER -Johnson I-PER I-PER -( O O -Canada B-LOC B-LOC -) O O -24 O O -4 O O -Jean B-PER B-PER -Brass I-PER I-PER -( O O -Canada B-LOC B-LOC -) O O -24 O O -5 O O -Ko B-PER B-PER -Hole I-PER I-PER -( O O -Norway B-LOC B-LOC -) O O -23 O O -6 O O -Je B-PER B-PER -Col I-PER I-PER -( O O -France B-LOC B-LOC -) O O -23 O O -7 O O -Jim B-PER B-PER -Moran I-PER I-PER -( O O -U B-LOC B-LOC -) O O -23 O O -8 O O -Dominic B-PER B-PER -G I-PER I-PER -( O O -Canada B-LOC B-LOC -AL B-LOC B-LOC -, O O -United B-LOC B-LOC -Arab I-LOC I-LOC -Emirates I-LOC I-LOC -1996 O O -Results O O -of O O -Asian B-MISC B-MISC -Cup I-MISC I-MISC -group O O -C O O -matches O O -played O O -on O O -Friday O O -: O O -Japan B-LOC B-LOC -2 O O -Syria B-LOC B-LOC -1 O O -( O O -halftime O O -0 O O -) O O -Score O O -: O O -Japan B-LOC B-LOC -- O O -Hassan B-PER B-PER -Abbas I-PER I-PER -84 O O -own O O -goal O O -, O O -Ta B-PER B-PER -Ta I-PER I-PER -88 O O -. O O -Syria B-LOC B-LOC -- O O -Na B-PER B-PER -Jo I-PER I-PER -7 O O -Attendance O O -: O O -10 O O -. O O -China B-LOC B-LOC -0 O O -Uzbekistan B-LOC B-LOC -2 O O -( O O -halftime O O -0 O O -) O O -Score O O -: O O -S B-PER B-PER -Igor I-PER I-PER -78 O O -, O O -S B-PER B-PER -Ole B-PER I-PER -90 O O -At O O -: O O -3 O O -Standing O O -( O O -ta O O -under O O -played O O -, O O -won O O -, O O -drawn O O -, O O -lost O O -, O O -goals O O -for O O -, O O -goals O O -against O O -, O O -points O O -) O O -: O O -Uzbekistan B-LOC B-LOC -1 O O -1 O O -0 O O -0 O O -2 O O -0 O O -3 O O -Japan B-LOC B-LOC -1 O O -1 O O -0 O O -0 O O -2 O O -1 O O -3 O O -Syria B-LOC B-LOC -1 O O -0 O O -0 O O -1 O O -1 O O -2 O O -0 O O -China B-LOC B-LOC -1 O O -0 O O -0 O O -1 O O -0 O O -2 O O -0 O O -CR O O -- O O -PA B-LOC B-LOC -V O O -NE B-LOC B-LOC -Z I-LOC I-LOC -ONE O O -SC O O -. 
O O -[ O O -CO O O -14 O O -GM B-MISC B-MISC -] O O -S B-LOC B-LOC -, O O -Pakistan B-LOC B-LOC -1996 O O -Score O O -in O O -the O O -second O O -one O O -cricket O O -international O O -between O O -Pakistan B-LOC B-LOC -and O O -New B-LOC B-LOC -Zealand I-LOC I-LOC -on O O -Friday O O -: O O -Pakistan B-LOC B-LOC -Sa B-PER B-PER -An I-PER I-PER -run O O -out O O -91 O O -( O O -correct O O -from O O -90 O O -) O O -Z B-PER B-PER -El I-PER I-PER -b O O -Cairns B-PER B-PER -86 O O -( O O -correct O O -from O O -87 O O -) O O -I B-PER B-PER -Ahmad I-PER I-PER -c O O -S B-PER B-PER -b O O -Vaughan B-PER B-PER -59 O O -In B-PER B-PER -Ha I-PER I-PER -s O O -G B-PER B-PER -b O O -As B-PER B-PER -2 O O -Was B-PER B-PER -A I-PER I-PER -b O O -Harris B-PER B-PER -4 O O -Shah B-PER B-PER -A I-PER I-PER -b O O -Harris B-PER B-PER -2 O O -Mo B-PER B-PER -Khan I-PER I-PER -c O O -As B-PER B-PER -b O O -Harris B-PER B-PER -1 O O -W B-PER B-PER -You I-PER I-PER -Bowling O O -: O O -Was B-PER B-PER -A I-PER I-PER -8 O O -( O O -9 O O -, O O -1 O O -) O O -, O O -W B-PER B-PER -You I-PER I-PER -6 O O -( O O -2 O O -, O O -1 O O -) O O -, O O -Sa B-PER B-PER -Mu I-PER I-PER -8 O O -, O O -Mu B-PER B-PER -Ahmad I-PER I-PER -10 O O -( O O -1 O O -) O O -, O O -Shah B-PER B-PER -A I-PER I-PER -7 O O -, O O -Sal B-PER B-PER -Malik I-PER I-PER -2 O O -, O O -I B-PER B-PER -Ahmad I-PER I-PER -0 O O -. O O -Re O O -: O O -Pakistan B-LOC B-LOC -won O O -by O O -46 O O -runs O O -. O O -Third O O -one O O -match O O -: O O -December O O -8 O O -, O O -in O O -Karachi B-LOC B-LOC -. O O -S O O -- O O -E B-MISC B-MISC -F B-MISC I-MISC -C I-MISC I-MISC -SEC O O -R O O -R O O -. O O -L B-LOC B-LOC -1996 O O -Re O O -of O O -an O O -English B-MISC B-MISC -F B-MISC I-MISC -Challenge I-MISC I-MISC -Cup I-MISC B-MISC -second O O -round O O -match O O -on O O -Friday O O -: O O -Plymouth B-ORG B-ORG -4 O O -Exeter B-ORG B-ORG -1 O O -S O O -- O O -B B-MISC B-PER -BA O O -L O O -. O O -L B-LOC B-LOC -1996 O O -Dutch B-MISC B-MISC -forward O O -Reggie B-PER B-PER -B I-PER I-PER -had O O -his O O -in O O -suspension O O -lifted O O -by O O -FIFA B-ORG B-ORG -on O O -Friday O O -and O O -was O O -set O O -to O O -make O O -his O O -Sheffield B-ORG B-ORG -Wednesday I-ORG I-ORG -comeback O O -against O O -Liverpool B-ORG B-ORG -on O O -Saturday O O -. O O -B B-PER B-PER -missed O O -his O O -club O O -' O O -last O O -two O O -games O O -after O O -FIFA B-ORG B-ORG -slapped O O -a O O -worldwide O O -ban O O -on O O -him O O -for O O -appearing O O -to O O -sign O O -contracts O O -for O O -both O O -Wednesday B-ORG B-ORG -and O O -U B-ORG B-ORG -while O O -he O O -was O O -playing O O -for O O -Fe B-ORG B-ORG -. O O -FIFA B-ORG B-ORG -' O O -players O O -' O O -status O O -committee O O -, O O -meeting O O -in O O -Barcelona B-LOC B-LOC -, O O -decided O O -that O O -although O O -the O O -U B-MISC B-ORG -document O O -was O O -basically O O -valid O O -, O O -it O O -could O O -not O O -be O O -legally O O -protected O O -. O O -The O O -committee O O -said O O -the O O -Italian B-MISC B-MISC -club O O -had O O -violated O O -regulations O O -by O O -failing O O -to O O -inform O O -Fe B-ORG B-ORG -, O O -with O O -whom O O -the O O -player O O -was O O -under O O -contract O O -. 
O O -B B-ORG B-PER -was O O -fined O O -75 O O -Swiss B-MISC B-MISC -f O O -( O O -$ O O -57 O O -) O O -for O O -failing O O -to O O -inform O O -the O O -En B-ORG B-MISC -club O O -of O O -his O O -previous O O -commitment O O -to O O -U B-ORG B-ORG -. O O -S O O -- O O -L O B-ORG -' O O -B B-PER B-PER -F O O -F O O -PA O O -IN O O -FA O O -F O O -. O O -L B-LOC B-LOC -1996 O O -Leeds B-ORG B-ORG -' O O -England B-LOC B-LOC -under O O -striker O O -Lee B-PER B-PER -Bow I-PER I-PER -was O O -fined O O -4 O O -pounds O O -( O O -$ O O -7 O O -) O O -on O O -Friday O O -for O O -hurling O O -chairs O O -at O O -restaurant O O -staff O O -during O O -a O O -disturbance O O -at O O -a O O -McDonald B-ORG B-ORG -' I-ORG I-ORG -fast O O -restaurant O O -. O O -Bow B-PER B-PER -, O O -19 O O -, O O -who O O -was O O -caught O O -in O O -the O O -act O O -by O O -security O O -cameras O O -, O O -pleaded O O -guilty O O -to O O -a O O -charge O O -of O O -a O O -at O O -a O O -court O O -in O O -London B-LOC B-LOC -. O O -He O O -was O O -fined O O -and O O -ordered O O -to O O -pay O O -a O O -total O O -of O O -175 O O -pounds O O -to O O -two O O -members O O -of O O -staff O O -injured O O -in O O -the O O -f O O -in O O -an O O -east O O -London I-LOC B-LOC -restaurant O O -in O O -October O O -. O O -Leeds B-ORG B-ORG -had O O -already O O -fined O O -Bow B-PER B-PER -4 O O -pounds O O -( O O -$ O O -6 O O -) O O -and O O -warned O O -him O O -a O O -repeat O O -of O O -his O O -criminal O O -behaviour O O -could O O -cost O O -him O O -his O O -place O O -in O O -the O O -side O O -. O O -Bow B-PER B-PER -, O O -who O O -moved O O -to O O -the O O -Yorkshire B-LOC B-LOC -club O O -in O O -August O O -for O O -3 O O -million O O -pounds O O -( O O -$ O O -5 O O -million O O -) O O -, O O -was O O -expected O O -to O O -play O O -against O O -Middlesbrough B-ORG B-ORG -on O O -Saturday O O -. O O -BA O O -- O O -EU B-MISC B-MISC -ST O O -. O O -L B-LOC B-LOC -1996 O O -Standing O O -in O O -the O O -men O O -' O O -Euro B-MISC B-MISC -basketball O O -championship O O -after O O -Thursday O O -' O O -matches O O -( O O -ta O O -under O O -played O O -, O O -won O O -, O O -lost O O -, O O -points O O -) O O -: O O -Group O O -A O O -CS B-ORG B-ORG -Moscow I-ORG I-ORG -( O O -Russia B-LOC B-LOC -9 O O -6 O O -3 O O -15 O O -Stefan B-ORG B-ORG -Milan I-ORG I-ORG -( O O -Italy B-LOC B-LOC -) O O -9 O O -6 O O -3 O O -15 O O -Maccabi B-ORG B-ORG -Tel I-ORG I-ORG -Aviv I-ORG I-ORG -( O O -Israel B-LOC B-LOC -) O O -9 O O -5 O O -4 O O -14 O O -U B-ORG B-ORG -S I-ORG I-ORG -( O O -Turkey B-LOC B-LOC -) O O -9 O O -4 O O -5 O O -13 O O -Lim B-ORG B-ORG -( O O -France B-LOC B-LOC -) O O -9 O O -3 O O -6 O O -12 O O -Pan B-ORG B-ORG -( O O -Greece B-LOC B-LOC -) O O -9 O O -3 O O -6 O O -12 O O -Group O O -B O O -Teams B-ORG B-ORG -Bologna I-ORG I-ORG -( O O -Italy B-LOC B-LOC -) O O -9 O O -7 O O -2 O O -16 O O -Olympia B-ORG B-ORG -( O O -Greece B-LOC B-LOC -) O O -9 O O -5 O O -4 O O -14 O O -C B-ORG B-ORG -Robert B-PER B-PER -Kit I-PER I-PER -L B-LOC B-LOC -1996 O O -Centre O O -Jason B-PER B-PER -Little I-PER I-PER -will O O -miss O O -Australia B-LOC B-LOC -' O O -end O O -fixture O O -against O O -the O O -Bar B-ORG B-ORG -at O O -T B-LOC B-LOC -on O O -Saturday O O -. 
O O -Little B-PER B-PER -has O O -opted O O -not O O -to O O -risk O O -a O O -the O O -knee O O -injury O O -which O O -ruled O O -him O O -out O O -of O O -a O O -large O O -chunk O O -of O O -the O O -tour O O -and O O -is O O -replaced O O -by O O -fellow O O -Queensland B-MISC B-MISC -Daniel B-PER B-PER -Herbert I-PER I-PER -. O O -Owen B-PER B-PER -Fine I-PER I-PER -has O O -recovered O O -from O O -the O O -knocks O O -he O O -took O O -in O O -last O O -weekend O O -' O O -test O O -against O O -Wales B-LOC B-LOC -and O O -retains O O -his O O -place O O -in O O -the O O -back O O -ahead O O -of O O -Daniel B-PER B-PER -Man I-PER I-PER -. O O -The O O -Wall B-ORG B-ORG -have O O -their O O -sights O O -set O O -on O O -a O O -13th O O -successive O O -victory O O -to O O -end O O -their O O -European B-MISC B-MISC -tour O O -with O O -a O O -100 O O -percent O O -record O O -but O O -also O O -want O O -to O O -turn O O -on O O -the O O -style O O -and O O -provide O O -David B-PER B-PER -Camp I-PER I-PER -with O O -a O O -fitting O O -send O O -in O O -his O O -final O O -match O O -in O O -Australian B-MISC B-MISC -colours O O -. O O -The O O -Wall B-ORG B-ORG -currently O O -have O O -no O O -plans O O -to O O -make O O -any O O -special O O -presentation O O -to O O -the O O -34 O O -winger O O -but O O -a O O -full O O -house O O -of O O -75 O O -spectators O O -will O O -still O O -gather O O -in O O -the O O -hope O O -of O O -witness O O -one O O -last O O -moment O O -of O O -magic O O -. O O -Camp B-PER B-PER -will O O -be O O -up O O -against O O -a O O -familiar O O -foe O O -in O O -the O O -shape O O -of O O -Bar B-ORG B-ORG -captain O O -Rob B-PER B-PER -Andrew I-PER I-PER -, O O -the O O -man O O -who O O -kicked O O -Australia B-LOC B-LOC -to O O -defeat O O -with O O -a O O -last O O -drop O O -in O O -the O O -World B-MISC B-MISC -Cup I-MISC I-MISC -quarter O O -in O O -Cape B-LOC B-LOC -Town I-LOC I-LOC -. O O -" O O -Campo B-PER B-PER -has O O -a O O -massive O O -following O O -in O O -this O O -country O O -and O O -has O O -had O O -the O O -public O O -with O O -him O O -ever O O -since O O -he O O -first O O -played O O -here O O -in O O -1984 O O -, O O -" O O -said O O -Andrew B-PER B-PER -, O O -also O O -likely O O -to O O -be O O -making O O -his O O -final O O -T B-ORG B-LOC -appearance O O -. O O -On O O -tour O O -, O O -Australia B-LOC B-LOC -have O O -won O O -all O O -four O O -tests O O -against O O -Italy B-LOC B-LOC -, O O -Scotland B-LOC B-LOC -, O O -Ireland B-LOC B-LOC -and O O -Wales B-LOC B-LOC -, O O -and O O -scored O O -41 O O -points O O -at O O -an O O -average O O -of O O -almost O O -35 O O -points O O -a O O -game O O -. O O -League O O -duties O O -restricted O O -the O O -Bar B-ORG B-ORG -' O O -select O O -options O O -but O O -they O O -still O O -b O O -13 O O -international O O -including O O -England B-LOC B-LOC -full O O -Tim B-PER B-PER -St I-PER I-PER -and O O -recalled O O -wing O O -Tony B-PER B-PER -Underwood I-PER I-PER -, O O -plus O O -All B-ORG B-ORG -Black I-ORG I-ORG -forwards O O -Ian B-PER B-PER -Jones I-PER I-PER -and O O -Nor B-PER B-PER -Hewitt I-PER I-PER -. 
O O -Teams O O -: O O -Bar O B-ORG -- O O -15 O O -- O O -Tim B-PER B-PER -St I-PER I-PER -( O O -England B-LOC B-LOC -) O O -; O O -14 O O -- O O -Nigel B-PER B-PER -Walker I-PER I-PER -( O O -Wales B-LOC B-LOC -) O O -, O O -13 O O -- O O -Allan B-PER B-PER -Bat I-PER I-PER -( O O -Wales B-LOC B-LOC -) O O -, O O -12 O O -- O O -Gregor B-PER B-PER -Townsend I-PER I-PER -( O O -Scotland B-LOC B-LOC -) O O -, O O -11 O O -- O O -Tony B-PER B-PER -Underwood I-PER I-PER -( O O -England B-LOC B-LOC -) O O -; O O -10 O O -- O O -Rob B-PER B-PER -Andrew I-PER I-PER -( O O -England B-LOC B-LOC -) O O -, O O -9 O O -- O O -Rob B-PER B-PER -How I-PER I-PER -( O O -Wales B-LOC B-LOC -) O O -; O O -8 O O -- O O -Scott B-PER B-PER -Quinn I-PER I-PER -( O O -Wales B-LOC B-LOC -) O O -, O O -7 O O -- O O -Neil B-PER B-PER -Back I-PER I-PER -( O O -England B-LOC B-LOC -) O O -, O O -6 O O -- O O -Dale B-PER B-PER -M I-PER I-PER -( O O -Pont B-LOC B-LOC -) O O -, O O -5 O O -- O O -Ian B-PER B-PER -Jones I-PER I-PER -( O O -New B-LOC B-LOC -Zealand I-LOC I-LOC -) O O -, O O -4 O O -- O O -Craig B-PER B-PER -Quinn I-PER I-PER -( O O -Wales B-LOC B-LOC -) O O -, O O -3 O O -- O O -Darren B-PER B-PER -G I-PER I-PER -( O O -Leicester B-LOC B-LOC -) O O -Australia B-LOC B-LOC -- O O -15 O O -- O O -Matthew B-PER B-PER -Burke I-PER I-PER -; O O -14 O O -- O O -Joe B-PER B-PER -R I-PER I-PER -, O O -13 O O -- O O -Daniel B-PER B-PER -Herbert I-PER I-PER -, O O -12 O O -- O O -Tim B-PER B-PER -Ho I-PER I-PER -( O O -captain O O -) O O -, O O -11 O O -- O O -David B-PER B-PER -Camp I-PER I-PER -; O O -10 O O -- O O -Pat B-PER B-PER -Howard I-PER I-PER -, O O -9 O O -- O O -Sam B-PER B-PER -Payne I-PER I-PER -; O O -8 O O -- O O -Michael B-PER B-PER -B I-PER I-PER -, O O -7 O O -- O O -David B-PER B-PER -Wilson I-PER I-PER -, O O -6 O O -- O O -Owen B-PER B-PER -Fine I-PER I-PER -, O O -5 O O -- O O -David B-PER B-PER -G I-PER I-PER -, O O -4 O O -- O O -Tim B-PER B-PER -Gavin I-PER I-PER -, O O -3 O O -- O O -Andrew B-PER B-PER -Blade I-PER I-PER -, O O -2 O O -- O O -Marco B-PER B-PER -Cap I-PER I-PER -, O O -1 O O -- O O -Dan B-PER B-PER -Crowley I-PER I-PER -. O O -GO O O -- O O -Z B-MISC B-MISC -O I-MISC I-MISC -SEC O O -R O O -SC O O -. 
O O -H B-LOC B-LOC -1996 O O -Leading O O -second O O -round O O -scores O O -in O O -the O O -Zimbabwe B-MISC B-MISC -Open I-MISC I-MISC -at O O -the O O -par O O -Chapman B-LOC B-LOC -Golf I-LOC I-LOC -Club I-LOC I-LOC -on O O -Friday O O -( O O -South B-MISC B-MISC -African I-MISC I-MISC -unless O O -stated O O -) O O -: O O -132 O O -Des B-PER B-PER -Te I-PER I-PER -65 O O -67 O O -133 O O -Mark B-PER B-PER -M I-PER I-PER -( O O -Zimbabwe B-LOC B-LOC -) O O -72 O O -61 O O -134 O O -Steve B-PER B-PER -van I-PER I-PER -V I-PER I-PER -65 O O -69 O O -136 O O -Nick B-PER B-PER -Price I-PER I-PER -( O O -Zimbabwe B-LOC B-LOC -) O O -68 O O -68 O O -, O O -Justin B-PER B-PER -Ho I-PER I-PER -71 O O -65 O O -, O O -Andrew B-PER B-PER -Pitt I-PER I-PER -( O O -U B-LOC B-LOC -) O O -69 O O -67 O O -138 O O -Mark B-PER B-PER -C I-PER I-PER -( O O -Zimbabwe B-LOC B-LOC -) O O -69 O O -69 O O -, O O -Mark B-PER B-PER -Mu I-PER I-PER -71 O O -67 O O -139 O O -He B-PER B-PER -S I-PER I-PER -75 O O -64 O O -, O O -Andrew B-PER B-PER -Park I-PER I-PER -72 O O -67 O O -140 O O -Sc B-PER B-PER -van I-PER I-PER -der I-PER I-PER -Me I-PER I-PER -B B-LOC B-LOC -1996 O O -Romania B-LOC B-LOC -trainer O O -Ang B-PER B-PER -I I-PER I-PER -called O O -up O O -three O O -un O O -players O O -on O O -Friday O O -in O O -his O O -squad O O -to O O -face O O -Macedonia B-LOC B-LOC -next O O -week O O -in O O -a O O -World B-MISC B-MISC -Cup I-MISC I-MISC -qualifier O O -. O O -Mid O O -Vale B-PER B-PER -Stefan I-PER I-PER -and O O -striker O O -V B-PER B-PER -Ion I-PER I-PER -of O O -O B-ORG B-ORG -Gala I-ORG I-ORG -and O O -defender O O -Liv B-PER B-PER -C I-PER I-PER -of O O -National B-ORG B-ORG -Bucharest I-ORG I-ORG -are O O -the O O -newcomer O O -for O O -the O O -European B-MISC B-MISC -group O O -eight O O -clash O O -in O O -Macedonia B-LOC B-LOC -on O O -December O O -14 O O -. O O -I B-PER B-PER -said O O -he O O -had O O -picked O O -them O O -because O O -of O O -their O O -good O O -performances O O -in O O -the O O -domestic O O -championship O O -in O O -which O O -National B-ORG B-ORG -Bucharest I-ORG I-ORG -are O O -top O O -and O O -O B-ORG B-ORG -Gala I-ORG I-ORG -third O O -. O O -" O O -I O O -think O O -it O O -' O O -fair O O -to O O -give O O -them O O -a O O -chance O O -, O O -" O O -he O O -told O O -reporters O O -. O O -League O O -title O O -St B-ORG B-ORG -Bucharest I-ORG I-ORG -, O O -who O O -finished O O -bottom O O -of O O -their O O -Champions B-MISC B-MISC -' I-MISC I-MISC -League I-MISC I-MISC -group O O -in O O -the O O -European B-MISC B-MISC -Cup I-MISC I-MISC -, O O -have O O -only O O -two O O -players O O -in O O -the O O -squad O O -. O O -Attack O O -midfielder O O -Adrian B-PER B-PER -Il I-PER I-PER -, O O -who O O -recently O O -moved O O -from O O -St B-ORG B-ORG -to O O -Turkish B-MISC B-MISC -club O O -Gala B-ORG B-ORG -, O O -is O O -ruled O O -out O O -after O O -two O O -yellow O O -offences O O -. O O -Squad O O -: O O -Goal O O -- O O -Bo B-PER B-PER -St I-PER I-PER -, O O -F B-PER B-PER -P I-PER I-PER -. O O -De O O -- O O -Dan B-PER B-PER -Pet I-PER I-PER -, O O -Daniel B-PER B-PER -Pro I-PER I-PER -, O O -Anton B-PER B-PER -Do I-PER I-PER -, O O -Co B-PER B-PER -Pa I-PER I-PER -, O O -Liv B-PER B-PER -C I-PER I-PER -, O O -T B-PER B-PER -Se I-PER I-PER -, O O -I B-PER B-PER -Fi I-PER I-PER -. 
O O -Mid O O -- O O -G B-PER B-PER -Ha I-PER I-PER -, O O -G B-PER B-PER -Pope I-PER I-PER -, O O -Con B-PER B-PER -G I-PER I-PER -, O O -Vale B-PER B-PER -Stefan I-PER I-PER -, O O -Ba B-PER B-PER -Pan I-PER I-PER -, O O -Do B-PER B-PER -Mu I-PER I-PER -, O O -O B-PER B-PER -Sting I-PER I-PER -. O O -Forward O O -- O O -I B-PER B-PER -Vlad I-PER I-PER -, O O -G B-PER B-PER -C I-PER I-PER -, O O -Ion B-PER B-PER -Dan I-PER I-PER -, O O -V B-PER B-PER -Ion I-PER I-PER -. O O -R O B-ORG -S O O -- O O -BR B-MISC B-MISC -CH O O -R O O -. O O -R B-LOC B-LOC -DE I-LOC I-LOC -J I-LOC I-LOC -1996 O O -Results O O -of O O -Brazilian B-MISC B-MISC -soccer O O -championship O O -semifinal O O -, O O -first O O -leg O O -matches O O -on O O -Thursday O O -. O O -Go B-ORG B-ORG -1 O O -G B-ORG B-ORG -3 O O -Port B-ORG B-ORG -1 O O -At B-ORG B-ORG -Mine I-ORG I-ORG -0 O O -CR O O -- O O -LA B-ORG B-PER -E O O -AN O O -MI O O -D O O -. O O -Robert B-PER B-PER -G I-PER I-PER -ME B-LOC B-LOC -1996 O O -Australia B-LOC B-LOC -gave O O -Brian B-PER B-PER -Lara I-PER I-PER -another O O -reason O O -to O O -be O O -miserable O O -when O O -they O O -beat O O -West B-LOC B-LOC -Indies I-LOC I-LOC -by O O -five O O -wickets O O -in O O -the O O -opening O O -World B-MISC B-MISC -Series I-MISC I-MISC -limited O O -overs O O -match O O -on O O -Friday O O -. O O -Lara B-PER B-PER -, O O -discipline O O -for O O -misconduct O O -on O O -Wednesday O O -, O O -was O O -dismissed O O -for O O -five O O -to O O -extend O O -a O O -disappointing O O -run O O -of O O -form O O -on O O -tour O O -. O O -Australia B-LOC B-LOC -, O O -who O O -hold O O -a O O -2 O O -lead O O -in O O -the O O -five O O -test O O -series O O -, O O -overhaul O O -West B-LOC B-LOC -Indies I-LOC I-LOC -' O O -total O O -of O O -172 O O -all O O -out O O -with O O -eight O O -balls O O -to O O -spare O O -to O O -end O O -a O O -run O O -of O O -six O O -successive O O -one O O -defeats O O -. O O -All O O -Greg B-PER B-PER -B I-PER I-PER -steered O O -his O O -side O O -to O O -a O O -comfortable O O -victory O O -with O O -an O O -unbeaten O O -57 O O -in O O -90 O O -balls O O -to O O -the O O -delight O O -of O O -the O O -42 O O -crowd O O -. O O -Man O O -match O O -B B-PER B-PER -came O O -to O O -the O O -wicket O O -with O O -the O O -total O O -on O O -70 O O -for O O -two O O -and O O -hit O O -three O O -four O O -during O O -an O O -un O O -innings O O -lasting O O -129 O O -minutes O O -. O O -His O O -crucial O O -fifth O O -partnership O O -with O O -fellow O O -all O O -Stuart B-PER B-PER -Law I-PER I-PER -, O O -who O O -scored O O -21 O O -, O O -added O O -71 O O -off O O -85 O O -balls O O -. O O -Lara B-PER B-PER -looked O O -out O O -of O O -touch O O -during O O -his O O -brief O O -stay O O -at O O -the O O -c O O -before O O -chip O O -a O O -simple O O -catch O O -to O O -Shane B-PER B-PER -War I-PER I-PER -at O O -mid O O -. O O -West B-LOC B-LOC -Indies I-LOC I-LOC -tour O O -manager O O -Clive B-PER B-PER -Lloyd I-PER I-PER -has O O -a O O -for O O -Lara B-PER B-PER -' O O -behaviour O O -on O O -Tuesday O O -. O O -He O O -( O O -Lara B-PER B-PER -) O O -had O O -told O O -Australia B-LOC B-LOC -coach O O -Geoff B-PER B-PER -Marsh I-PER I-PER -that O O -wicket O O -Ian B-PER B-PER -He I-PER I-PER -was O O -un O O -in O O -the O O -visitors O O -' O O -dressing O O -room O O -. 
O O -The O O -Melbourne B-LOC B-LOC -crowd O O -were O O -clearly O O -angered O O -by O O -the O O -incident O O -, O O -loudly O O -j O O -the O O -West B-LOC B-LOC -Indies I-LOC I-LOC -vice O O -as O O -he O O -walked O O -to O O -the O O -middle O O -. O O -It O O -was O O -left O O -to O O -fellow O O -left O O -Shi B-PER B-PER -Chan I-PER I-PER -to O O -hold O O -the O O -innings O O -together O O -with O O -a O O -g O O -54 O O -despite O O -the O O -hand O O -of O O -an O O -injured O O -groin O O -. O O -Chan B-PER B-PER -was O O -forced O O -to O O -rely O O -on O O -a O O -runner O O -for O O -most O O -of O O -his O O -innings O O -after O O -hurting O O -himself O O -as O O -he O O -s O O -back O O -to O O -his O O -c O O -to O O -avoid O O -being O O -run O O -out O O -. O O -Pakistan B-LOC B-LOC -, O O -who O O -arrive O O -in O O -Australia B-LOC B-LOC -later O O -this O O -month O O -, O O -are O O -the O O -other O O -team O O -competing O O -in O O -the O O -World B-MISC B-MISC -Series I-MISC I-MISC -tournament O O -. O O -CR O O -- O O -AU B-LOC B-LOC -V O O -W O B-LOC -IN O I-LOC -W O B-MISC -SE O I-MISC -SC O O -. O O -ME B-LOC B-LOC -1996 O O -Score O O -in O O -the O O -World B-MISC B-MISC -Series I-MISC I-MISC -limited O O -overs O O -match O O -between O O -Australia B-LOC B-LOC -and O O -West B-LOC B-LOC -Indies I-LOC I-LOC -on O O -Friday O O -: O O -West B-LOC B-LOC -Indies I-LOC I-LOC -S B-PER B-PER -Campbell I-PER I-PER -c O O -He B-PER B-PER -b O O -Gillespie B-PER B-PER -31 O O -R B-PER B-PER -Samuel I-PER I-PER -c O O -M B-PER B-PER -W I-PER I-PER -b O O -Gillespie B-PER B-PER -7 O O -B B-PER B-PER -Lara I-PER I-PER -c O O -War B-PER B-PER -b O O -Moody B-PER B-PER -5 O O -S B-PER B-PER -Chan I-PER I-PER -c O O -He B-PER B-PER -b O O -B B-PER B-PER -54 O O -C B-PER B-PER -Hooper I-PER I-PER -run O O -out O O -7 O O -J B-PER B-PER -Adams I-PER I-PER -lb O O -b O O -Moody B-PER B-PER -5 O O -J B-PER B-PER -Murray I-PER I-PER -c O O -B B-PER B-PER -b O O -War B-PER B-PER -24 O O -N B-PER B-PER -McLean I-PER I-PER -c O O -and O O -b O O -M B-PER B-PER -W I-PER I-PER -7 O O -K B-PER B-PER -Benjamin I-PER I-PER -b O O -War B-PER B-PER -8 O O -C B-PER B-PER -Ambrose I-PER I-PER -run O O -out O O -2 O O -C B-PER B-PER -Walsh I-PER I-PER -not O O -out O O -Bowling O O -: O O -Re B-PER B-PER -10 O O -( O O -n O O -) O O -, O O -Gillespie B-PER B-PER -10 O O -, O O -Moody B-PER B-PER -10 O O -, O O -B B-PER B-PER -6 O O -, O O -War B-PER B-PER -10 O O -( O O -w O O -) O O -, O O -M B-PER B-PER -W I-PER I-PER -3 O O -. O O -Australia B-LOC B-LOC -M B-PER B-PER -Taylor I-PER I-PER -b O O -McLean B-PER B-PER -29 O O -M B-PER B-PER -W I-PER I-PER -c O O -Murray B-PER B-PER -b O O -Benjamin B-PER B-PER -27 O O -R B-PER B-PER -Pont I-PER I-PER -lb O O -McLean B-PER B-PER -5 O O -G B-PER B-PER -B I-PER I-PER -not O O -out O O -57 O O -M B-PER B-PER -Be I-PER I-PER -s O O -Murray B-PER B-PER -b O O -Hooper B-PER B-PER -3 O O -S B-PER B-PER -Law I-PER I-PER -b O O -Hooper B-PER B-PER -21 O O -T B-PER B-PER -Moody I-PER I-PER -not O O -out O O -3 O O -Extra O O -( O O -lb O O -n O O -w O O -) O O -28 O O -Total O O -( O O -for O O -five O O -wickets O O -, O O -48 O O -overs O O -) O O -173 O O -Fall O O -of O O -wickets O O -: O O -1 O O -2 O O -3 O O -4 O O -5 O O -. O O -Did O O -not O O -bat O O -: O O -I B-PER B-PER -He I-PER I-PER -, O O -P B-PER B-PER -Re I-PER I-PER -, O O -S B-PER B-PER -War I-PER I-PER -, O O -J B-PER B-PER -Gillespie I-PER I-PER -. 
O O -Bowling O O -: O O -Ambrose B-PER B-PER -10 O O -( O O -2 O O -1 O O -) O O -, O O -Walsh B-PER B-PER -9 O O -( O O -4 O O -) O O -, O O -Benjamin B-PER B-PER -9 O O -( O O -1 O O -1 O O -) O O -, O O -Hooper B-PER B-PER -10 O O -( O O -1 O O -) O O -, O O -McLean B-PER B-PER -10 O O -( O O -1 O O -) O O -. O O -Re O O -: O O -Australia B-LOC B-LOC -won O O -by O O -five O O -wickets O O -. O O -CR O O -- O O -AU B-LOC B-LOC -B O O -W O B-LOC -IN O I-LOC -B O O -F O O -W O O -. O O -ME B-LOC B-LOC -1996 O O -Australia B-LOC B-LOC -beat O O -West B-LOC B-LOC -Indies I-LOC I-LOC -by O O -five O O -wickets O O -in O O -a O O -World B-MISC B-MISC -Series I-MISC I-MISC -limited O O -overs O O -match O O -at O O -the O O -Melbourne B-LOC B-LOC -Cricket I-LOC I-LOC -Ground I-LOC I-LOC -on O O -Friday O O -. O O -Score O O -: O O -West B-LOC B-LOC -Indies I-LOC I-LOC -172 O O -all O O -out O O -in O O -49 O O -overs O O -( O O -Shi B-PER B-PER -Chan I-PER I-PER -54 O O -) O O -; O O -Australia B-LOC B-LOC -173 O O -in O O -48 O O -overs O O -( O O -Greg B-PER B-PER -B I-PER I-PER -57 O O -not O O -out O O -) O O -. O O -CR O O -- O O -W O B-LOC -IN O I-LOC -172 O O -AL O O -O O O -IN O O -49 O O -O O O -V O O -AU B-LOC B-LOC -. O O -ME B-LOC B-LOC -1996 O O -West B-LOC B-LOC -Indies I-LOC I-LOC -were O O -all O O -out O O -for O O -172 O O -off O O -49 O O -overs O O -in O O -the O O -World B-MISC B-MISC -Series I-MISC I-MISC -limited O O -overs O O -match O O -against O O -Australia B-LOC B-LOC -on O O -Friday O O -. O O -CR O O -- O O -SH B-PER B-MISC -SH O I-MISC -SC O O -. O O -H B-LOC B-LOC -, O O -Australia B-LOC B-LOC -1996 O O -Score O O -on O O -the O O -first O O -day O O -of O O -the O O -four O O -Sheffield B-MISC B-MISC -Shield I-MISC I-MISC -match O O -between O O -Tasmania B-LOC B-LOC -and O O -Victoria B-LOC B-LOC -at O O -Belle B-LOC B-LOC -Oval I-LOC I-LOC -on O O -Friday O O -: O O -Tasmania B-LOC B-LOC -35 O O -for O O -three O O -( O O -David B-PER B-PER -Bo I-PER I-PER -106 O O -not O O -out O O -, O O -Shaun B-PER B-PER -Young I-PER I-PER -86 O O -not O O -out O O -, O O -Michael B-PER B-PER -Di I-PER I-PER -119 O O -) O O -v O O -Victoria B-LOC B-ORG -. O O -CR O O -- O O -LA B-ORG B-PER -S O O -M O O -AU B-MISC O -TO O O -MI O O -. O O -ME B-LOC B-LOC -1996 O O -West B-LOC B-LOC -Indies I-LOC I-LOC -batsman O O -Brian B-PER B-PER -Lara I-PER I-PER -suffered O O -another O O -blow O O -to O O -his O O -Australian B-MISC B-MISC -tour O O -, O O -after O O -already O O -being O O -discipline O O -for O O -misconduct O O -, O O -when O O -he O O -was O O -dismissed O O -cheap O O -in O O -the O O -first O O -limited O O -overs O O -match O O -against O O -Australia B-LOC B-LOC -on O O -Friday O O -. O O -Lara B-PER B-PER -, O O -who O O -earned O O -a O O -stern O O -re O O -from O O -his O O -own O O -tour O O -management O O -after O O -an O O -angry O O -out O O -against O O -Australia B-LOC B-LOC -wicket O O -Ian B-PER B-PER -He I-PER I-PER -, O O -scored O O -five O O -to O O -pro O O -a O O -run O O -of O O -poor O O -form O O -with O O -the O O -bat O O -. O O -The O O -West B-LOC B-LOC -Indies I-LOC I-LOC -vice O O -struggled O O -for O O -timing O O -during O O -his O O -36 O O -stay O O -at O O -the O O -c O O -before O O -chip O O -a O O -ball O O -from O O -medium O O -pace O O -Tom B-PER B-PER -Moody I-PER I-PER -straight O O -to O O -Shane B-PER B-PER -War I-PER I-PER -at O O -mid O O -. 
O O -West B-LOC B-LOC -Indies I-LOC I-LOC -were O O -53 O O -for O O -two O O -in O O -15 O O -overs O O -when O O -rain O O -stopped O O -play O O -at O O -the O O -Melbourne B-LOC B-LOC -Cricket I-LOC I-LOC -Ground I-LOC I-LOC -after O O -captain O O -Courtney B-PER B-PER -Walsh I-PER I-PER -won O O -the O O -toss O O -and O O -elected O O -to O O -bat O O -. O O -Lara B-PER B-PER -' O O -out O O -three O O -days O O -ago O O -has O O -clearly O O -turned O O -some O O -of O O -the O O -Australian B-MISC B-MISC -public O O -against O O -him O O -. O O -As O O -he O O -walked O O -to O O -the O O -wicket O O -he O O -was O O -greeted O O -by O O -loud O O -j O O -from O O -sections O O -of O O -the O O -crowd O O -. O O -On O O -several O O -occasions O O -during O O -his O O -innings O O -, O O -the O O -crowd O O -joined O O -together O O -in O O -a O O -series O O -of O O -o O O -chant O O -against O O -him O O -. O O -Tour B-MISC O -manager O O -Clive B-PER B-PER -Lloyd I-PER I-PER -on O O -Wednesday O O -a O O -for O O -Lara B-PER B-PER -' O O -behaviour O O -in O O -confront O O -Australia B-LOC B-LOC -coach O O -Geoff B-PER B-PER -Marsh I-PER I-PER -in O O -the O O -opposition O O -dressing O O -room O O -to O O -protest O O -against O O -his O O -dismissal O O -in O O -the O O -second O O -test O O -on O O -Tuesday O O -. O O -Lloyd B-PER B-PER -did O O -not O O -say O O -what O O -form O O -the O O -discipline O O -would O O -take O O -. O O -Lara B-PER B-PER -, O O -who O O -holds O O -the O O -record O O -for O O -the O O -highest O O -score O O -in O O -test O O -and O O -first O O -cricket O O -, O O -was O O -unhappy O O -about O O -He B-PER B-PER -' O O -role O O -in O O -the O O -incident O O -and O O -questioned O O -whether O O -the O O -ball O O -had O O -carried O O -to O O -the O O -Australia B-LOC B-LOC -keeper O O -. O O -Australia B-LOC B-LOC -went O O -on O O -to O O -win O O -the O O -match O O -at O O -the O O -Sydney B-LOC B-LOC -Cricket I-LOC I-LOC -Ground I-LOC I-LOC -by O O -124 O O -runs O O -to O O -take O O -a O O -two O O -lead O O -in O O -the O O -five O O -series O O -after O O -Lara B-PER B-PER -failed O O -in O O -both O O -innings O O -. O O -Lara B-PER B-PER -has O O -yet O O -to O O -score O O -a O O -century O O -since O O -West B-LOC B-LOC -Indies I-ORG I-LOC -arrived O O -in O O -Australia B-LOC B-LOC -five O O -weeks O O -ago O O -. O O -Both O O -West B-LOC B-LOC -Indies I-LOC I-LOC -and O O -Australia B-LOC B-LOC -team O O -management O O -have O O -played O O -down O O -the O O -incident O O -, O O -stress O O -that O O -relations O O -between O O -the O O -two O O -sides O O -have O O -not O O -been O O -adverse O O -affected O O -. O O -Pakistan B-LOC B-LOC -, O O -who O O -arrive O O -next O O -week O O -, O O -are O O -the O O -third O O -team O O -in O O -the O O -triangular O O -World B-MISC B-MISC -Series I-MISC I-MISC -tournament O O -. O O -CR O O -- O O -W O B-LOC -IN O I-LOC -TO O O -BA O O -A O O -W O O -THE O O -TO O O -. O O -ME B-LOC B-LOC -1996 O O -West B-LOC B-LOC -Indies I-LOC I-LOC -captain O O -Courtney B-PER B-PER -Walsh I-PER I-PER -elected O O -to O O -bat O O -after O O -winning O O -the O O -toss O O -in O O -the O O -first O O -match O O -in O O -the O O -World B-MISC B-MISC -Series I-MISC I-MISC -limited O O -overs O O -competition O O -against O O -Australia B-LOC B-LOC -at O O -the O O -Melbourne B-LOC B-LOC -Cricket I-LOC O -Ground I-LOC O -on O O -Friday O O -. 
O O -Teams O O -: O O -Australia B-LOC B-LOC -- O O -Mark B-PER B-PER -Taylor I-PER I-PER -( O O -captain O O -) O O -, O O -Mark B-PER B-PER -W I-PER I-PER -, O O -Ricky B-PER B-PER -Pont I-PER I-PER -, O O -Greg B-PER B-PER -B I-PER I-PER -, O O -Michael B-PER B-PER -Be I-PER I-PER -, O O -Stuart B-PER B-PER -Law I-PER I-PER -, O O -Tom B-PER B-PER -Moody I-PER I-PER -, O O -Ian B-PER B-PER -He I-PER I-PER -, O O -Paul B-PER B-PER -Re I-PER I-PER -, O O -Shane B-PER B-PER -War I-PER I-PER -, O O -Jason B-PER B-PER -Gillespie I-PER I-PER -, O O -Glenn B-PER B-PER -McGrath I-PER I-PER -12th O O -man O O -. O O -West B-LOC B-LOC -Indies I-LOC I-LOC -- O O -She B-PER B-PER -Campbell I-PER I-PER -, O O -Robert B-PER B-PER -Samuel I-PER I-PER -, O O -Brian B-PER B-PER -Lara I-PER I-PER -, O O -Shi B-PER B-PER -Chan I-PER I-PER -, O O -Carl B-PER B-PER -Hooper I-PER I-PER -, O O -Jimmy B-PER B-PER -Adams I-PER I-PER -, O O -Junior B-PER B-PER -Murray I-PER I-PER -, O O -Nixon B-PER B-PER -McLean I-PER I-PER -, O O -Kenneth B-PER B-PER -Benjamin I-PER I-PER -, O O -C B-PER B-PER -Ambrose I-PER I-PER -, O O -Courtney B-PER B-PER -Walsh I-PER I-PER -( O O -captain O O -) O O -, O O -Roland B-PER B-PER -Hold I-PER I-PER -12th O O -man O O -. O O -BA O O -- O O -W B-MISC B-MISC -G B-MISC I-MISC -PR I-MISC I-MISC -R O O -. O O -BA B-LOC B-LOC -1996 O O -Results O O -in O O -last O O -of O O -the O O -group O O -matches O O -at O O -the O O -World B-MISC B-MISC -Grand B-MISC I-MISC -Prix I-MISC I-MISC -badminton O O -finals O O -on O O -Friday O O -: O O -Men O O -' O O -singles O O -Group O O -B O O -Chen B-PER B-PER -Gang I-PER I-PER -( O O -China B-LOC B-LOC -) O O -beat O O -Martin B-PER B-PER -Lo I-PER I-PER -Hansen I-PER I-PER -( O O -Denmark B-LOC B-LOC -) O O -15 O O -15 O O -Dong B-PER B-PER -Ji I-PER I-PER -( O O -China B-LOC B-LOC -) O O -beat O O -Thomas B-PER B-PER -St I-PER I-PER -( O O -Denmark B-LOC B-LOC -) O O -15 O O -15 O O -In B-PER B-PER -W I-PER I-PER -( O O -Indonesia B-LOC B-LOC -) O O -beat O O -On B-PER B-PER -E I-PER I-PER -Ho I-PER I-PER -( O O -Malaysia B-LOC B-LOC -) O O -5 O O -15 O O -15 O O -Group O O -C O O -Sun B-PER B-PER -Jun I-PER I-PER -( O O -China B-LOC B-LOC -) O O -beat O O -Rashid B-PER B-PER -Side I-PER I-PER -( O O -Malaysia B-LOC B-LOC -) O O -15 O O -17 O O -Her B-PER B-PER -Susan I-PER I-PER -( O O -Semifinals O O -( O O -on O O -Saturday O O -) O O -: O O -Su B-PER B-PER -Susan I-PER I-PER -( O O -Indonesia B-LOC B-LOC -) O O -v O O -Cam B-PER B-PER -Martin I-PER I-PER -( O O -Denmark B-LOC B-LOC -) O O -; O O -Ye B-PER B-PER -Zhao I-PER I-PER -( O O -China B-LOC B-LOC -) O O -v O O -Gong B-PER B-PER -Z I-PER I-PER -( O O -China B-LOC B-LOC -) O O -. O O -S O O -- O O -AR B-MISC B-MISC -CO O O -W O O -A B-MISC B-MISC -C I-MISC I-MISC -W O I-MISC -' O I-MISC -C I-MISC I-MISC -. O O -CA B-LOC B-LOC -1996 O O -Re O O -of O O -the O O -second O O -leg O O -of O O -the O O -African B-MISC B-MISC -Cup I-MISC I-MISC -Winners I-MISC I-MISC -' I-MISC I-MISC -Cup I-MISC I-MISC -final O O -at O O -the O O -National B-LOC B-LOC -stadium O I-LOC -on O O -Friday O O -: O O -Arab B-ORG B-ORG -Con I-ORG I-ORG -( O O -Egypt B-LOC B-LOC -) O O -4 O O -So B-ORG B-ORG -( O O -Z B-LOC B-LOC -) O O -0 O O -( O O -halftime O O -2 O O -) O O -Score O O -: O O -Al B-PER B-PER -Ash I-PER I-PER -7 O O -, O O -56 O O -penalty O O -, O O -Mohamed B-PER B-PER -O I-PER I-PER -24 O O -, O O -73 O O -Con O O -won O O -4 O O -on O O -aggregate O O -. 
O O -NHL B-ORG B-ORG -I I-MISC O -H O O -- O O -ST O O -A O O -T O O -' O O -GA O O -. O O -NE B-LOC B-LOC -Y I-LOC I-LOC -1996 O O -Standing O O -of O O -National B-MISC B-ORG -Hockey I-MISC I-ORG -League I-MISC B-ORG -teams O O -after O O -games O O -played O O -on O O -Thursday O O -( O O -ta O O -under O O -won O O -, O O -lost O O -, O O -tied O O -, O O -goals O O -for O O -, O O -goals O O -against O O -, O O -points O O -) O O -: O O -EA B-MISC O -CO O O -NO O O -D O O -W O O -L O O -T O O -G O O -GA B-ORG O -PT I-ORG O -H I-ORG B-ORG -12 O O -7 O O -6 O O -77 O O -76 O O -30 O O -B B-ORG B-ORG -13 O O -12 O O -1 O O -77 O O -76 O O -27 O O -B B-ORG B-ORG -10 O O -11 O O -4 O O -74 O O -84 O O -24 O O -M B-ORG B-ORG -10 O O -14 O O -4 O O -96 O O -103 O O -24 O O -P B-ORG B-ORG -9 O O -13 O O -3 O O -81 O O -91 O O -21 O O -O B-ORG B-ORG -[ O O -CO O O -08 O O -GM B-MISC B-MISC -] O O -NE B-LOC B-LOC -Y I-LOC I-LOC -1996 O O -( O O -Co O O -headline O O -from O O -NBA B-ORG B-ORG -to O O -NHL B-ORG B-ORG -and O O -correct O O -team O O -name O O -in O O -second O O -result O O -from O O -La B-ORG B-ORG -C I-ORG I-ORG -to O O -N B-ORG B-ORG -Islanders I-ORG I-ORG -. O O -) O O -Results O O -of O O -National B-MISC B-ORG -Hockey I-MISC I-ORG -League I-MISC B-ORG -games O O -on O O -Thursday O O -( O O -home O O -team O O -in O O -CA O O -) O O -: O O -Hartford B-ORG B-ORG -4 O O -B B-ORG B-ORG -2 O O -FL B-ORG B-ORG -4 O O -N B-ORG B-ORG -Islanders I-ORG I-ORG -2 O O -NE B-ORG B-ORG -J I-ORG I-ORG -2 O O -Calgary B-ORG B-ORG -1 O O -Phoenix B-ORG B-ORG -3 O O -ST B-ORG B-ORG -L I-ORG I-ORG -0 O O -Tampa B-ORG B-ORG -Bay I-ORG I-ORG -2 O O -L B-ORG B-ORG -AN I-ORG I-ORG -1 O O -NFL B-ORG B-ORG -AM I-MISC O -F O O -C B-PER O -EA O B-ORG -TO O O -ST O O -IN O O -P B-MISC O -H O O -. O O -IN B-LOC B-LOC -1996 O O -The O O -injury O O -Indianapolis B-ORG B-ORG -Colts I-ORG I-ORG -lost O O -another O O -quarterback O O -on O O -Thursday O O -but O O -last O O -year O O -' O O -AFC B-MISC O -finalists O O -rallied O O -together O O -to O O -shoot O O -down O O -the O O -Philadelphia B-ORG B-ORG -Eagles I-ORG I-ORG -37 O O -in O O -a O O -show O O -of O O -playoff O O -contender O O -. O O -Marshall B-PER B-PER -F I-PER I-PER -rushed O O -for O O -101 O O -yards O O -and O O -two O O -touchdowns O O -and O O -Jason B-PER B-PER -Bel I-PER I-PER -returned O O -an O O -interception O O -44 O O -yards O O -for O O -a O O -score O O -as O O -the O O -Colts B-ORG B-ORG -improved O O -to O O -8 O O -, O O -the O O -same O O -mark O O -as O O -the O O -Eagles B-ORG B-ORG -, O O -who O O -lost O O -for O O -the O O -fourth O O -time O O -in O O -five O O -games O O -. O O -Paul B-PER B-PER -Justin I-PER I-PER -, O O -starting O O -for O O -the O O -side O O -Jim B-PER B-PER -Ha I-PER I-PER -, O O -was O O -14 O O -for O O -144 O O -yards O O -and O O -a O O -touchdown O O -for O O -the O O -the O O -Colts B-ORG B-ORG -, O O -who O O -played O O -their O O -last O O -home O O -game O O -of O O -the O O -season O O -. O O -Indianapolis B-LOC B-LOC -closes O O -with O O -games O O -at O O -Kansas B-LOC B-LOC -City I-LOC I-LOC -and O O -Cincinnati B-LOC B-LOC -. O O -The O O -Eagles B-ORG B-ORG -were O O -held O O -without O O -a O O -touchdown O O -until O O -the O O -final O O -five O O -seconds O O -. 
O O -Philadelphia B-LOC B-LOC -, O O -which O O -fell O O -from O O -an O O -NFC B-MISC O -East I-MISC O -tie O O -with O O -the O O -Dallas B-ORG B-ORG -Cowboys I-ORG I-ORG -and O O -Washington B-ORG B-ORG -Redskins I-ORG I-ORG -, O O -go O O -on O O -the O O -road O O -against O O -the O O -New B-ORG B-ORG -York I-ORG I-ORG -Jets I-ORG I-ORG -and O O -then O O -entertain O O -Arizona B-LOC B-ORG -. O O -The O O -loss O O -by O O -Philadelphia B-ORG B-ORG -allowed O O -the O O -idle O O -Green B-ORG B-ORG -Bay I-ORG I-ORG -Packers I-ORG I-ORG -( O O -10 O O -) O O -to O O -c O O -the O O -first O O -NFC B-MISC O -playoff O O -berth O O -. O O -The O O -Colts B-ORG B-ORG -won O O -despite O O -the O O -absence O O -of O O -injured O O -starting O O -defensive O O -tackle O O -Tony B-PER B-PER -Sir I-PER I-PER -, O O -corner O O -Ray B-PER B-PER -Buchanan I-PER I-PER -and O O -linebacker O O -Quentin B-PER B-PER -Cory I-PER I-PER -. O O -F B-PER B-PER -carried O O -16 O O -times O O -, O O -including O O -a O O -13 O O -TD O O -run O O -in O O -the O O -first O O -quarter O O -and O O -a O O -seven O O -score O O -early O O -in O O -the O O -final O O -period O O -. O O -Justin B-PER B-PER -made O O -his O O -second O O -straight O O -start O O -for O O -Ha B-PER B-PER -, O O -who O O -has O O -a O O -knee O O -injury O O -. O O -Justin B-PER B-PER -suffered O O -a O O -s O O -right O O -shoulder O O -in O O -the O O -third O O -quarter O O -and O O -did O O -not O O -return O O -. O O -Third O O -Ke B-PER B-PER -Bell I-PER I-PER -, O O -a O O -1988 O O -draft O O -choice O O -of O O -the O O -Miami B-ORG B-ORG -Dolphins I-ORG I-ORG -, O O -made O O -his O O -NFL B-ORG B-ORG -debut O O -and O O -was O O -5 O O -for O O -75 O O -yards O O -, O O -including O O -a O O -20 O O -scoring O O -strike O O -to O O -Marvin B-PER B-PER -Harrison I-PER I-PER -in O O -the O O -third O O -period O O -. O O -A O O -39 O O -interference O O -penalty O O -against O O -Philadelphia B-LOC B-LOC -' O O -Troy B-PER B-PER -Vincent I-PER I-PER -set O O -up O O -F B-PER B-PER -' O O -first O O -score O O -around O O -left O O -end O O -that O O -capped O O -an O O -80 O O -march O O -5 O O -into O O -the O O -game O O -and O O -the O O -r O O -was O O -on O O -. O O -Eagles B-ORG B-ORG -quarterback O O -Ty B-PER B-PER -Det I-PER I-PER -was O O -17 O O -for O O -182 O O -yards O O -before O O -he O O -was O O -bench O O -. O O -Ricky B-PER B-PER -Watt I-PER I-PER -, O O -who O O -leads O O -the O O -NFC B-ORG O -in O O -rushing O O -, O O -left O O -the O O -game O O -after O O -getting O O -knee O O -to O O -the O O -helmet O O -after O O -gaining O O -33 O O -yards O O -on O O -seven O O -carries O O -. O O -NBA B-ORG B-ORG -BA O O -- O O -ST O O -A O O -T O O -' O O -GA O O -. O O -NE B-LOC B-LOC -Y I-LOC I-LOC -1996 O O -Standing O O -of O O -National B-MISC B-ORG -Basketball I-MISC B-ORG -Association I-MISC I-ORG -teams O O -after O O -games O O -played O O -on O O -Thursday O O -( O O -ta O O -under O O -won O O -, O O -lost O O -, O O -percentage O O -, O O -games O O -behind O O -) O O -: O O -EA B-MISC O -CO O O -AT B-ORG B-LOC -D O O -W O O -L O O -PC O O -GB O O -MI B-ORG B-ORG -14 O O -4 O O -. O O -- O O -NE B-ORG B-ORG -Y I-ORG I-ORG -10 O O -6 O O -. O O -3 O O -OR B-ORG B-ORG -8 O O -6 O O -. O O -4 O O -WA B-ORG B-ORG -7 O O -9 O O -. O O -6 O O -P B-ORG B-ORG -7 O O -10 O O -. O O -6 O O -1 O O -B B-ORG B-ORG -4 O O -12 O O -. 
O O -9 O O -NE B-LOC B-LOC -Y I-LOC I-LOC -1996 O O -National B-MISC B-ORG -Football I-MISC I-ORG -League I-MISC I-ORG -standings O O -after O O -Thursday O O -' O O -game O O -( O O -ta O O -under O O -won O O -, O O -lost O O -, O O -tied O O -, O O -points O O -for O O -and O O -points O O -against O O -) O O -: O O -AM B-MISC B-MISC -F I-MISC O -CO O O -EA B-ORG O -D I-MISC O -W O O -L O O -T O O -P O O -PA O O -NE B-ORG B-ORG -E I-ORG I-ORG -9 O O -4 O O -0 O O -35 O O -26 O O -B B-ORG B-ORG -9 O O -4 O O -0 O O -26 O O -215 O O -IN B-ORG B-ORG -8 O O -6 O O -0 O O -26 O O -28 O O -MI B-ORG B-ORG -6 O O -7 O O -0 O O -285 O O -26 O O -NY B-ORG B-ORG -J I-ORG I-ORG -1 O O -12 O O -0 O O -221 O O -36 O O -CE B-ORG O -D I-MISC O -NE B-LOC B-LOC -Y I-LOC I-LOC -1996 O O -Re O O -of O O -National B-MISC B-ORG -Football I-MISC B-LOC -League I-MISC B-LOC -game O O -on O O -Thursday O O -( O O -home O O -team O O -in O O -CA O O -) O O -: O O -IN B-ORG B-ORG -37 O O -Philadelphia B-ORG B-ORG -10 O O -NCAA B-MISC B-ORG -AM I-MISC O -F O B-MISC -ST O I-MISC -' O O -PA O B-PER -F O O -R O O -L B-ORG B-MISC -A O I-MISC -W O O -. O O -H B-LOC B-LOC -1996 O O -Ohio B-ORG B-ORG -State I-ORG I-ORG -left O O -tackle O O -Orlando B-PER B-PER -Pace I-PER I-PER -became O O -the O O -first O O -repeat O O -winner O O -of O O -the O O -Lombard B-MISC B-MISC -Award I-MISC I-MISC -Thursday O O -night O O -when O O -the O O -R B-ORG B-ORG -Club I-ORG I-ORG -of I-ORG O -Houston I-ORG B-LOC -again O O -honoured O O -him O O -as O O -college O O -football O O -' O O -line O O -of O O -the O O -year O O -. O O -Pace B-PER B-PER -, O O -a O O -junior O O -, O O -helped O O -Ohio B-ORG B-ORG -State I-ORG I-ORG -to O O -a O O -10 O O -record O O -and O O -a O O -berth O O -in O O -the O O -Rose B-MISC B-MISC -Bowl I-MISC I-MISC -against O O -Arizona B-ORG B-ORG -State I-ORG I-ORG -. O O -He O O -was O O -the O O -most O O -dominant O O -offensive O O -line O O -in O O -the O O -country O O -and O O -also O O -played O O -defensive O O -line O O -in O O -goal O O -situations O O -. O O -Last O O -year O O -, O O -Pace B-PER B-PER -became O O -the O O -first O O -sophomore O O -to O O -win O O -the O O -award O O -since O O -its O O -inception O O -in O O -1970 O O -. O O -Pace B-PER B-PER -out O O -three O O -senior O O -finalists O O -- O O -Virginia B-ORG B-ORG -Tech I-ORG I-ORG -defensive O O -end O O -Cornell B-PER B-PER -Brown I-PER I-PER -, O O -Arizona B-ORG B-ORG -State I-ORG I-ORG -offensive O O -tackle O O -Juan B-PER B-PER -R I-PER I-PER -and O O -defensive O O -end O O -Jared B-PER B-PER -Tom I-PER I-PER -of O O -Nebraska B-LOC B-ORG -. O O -The O O -Lombard B-MISC B-MISC -Award I-MISC I-MISC -is O O -presented O O -to O O -the O O -college O O -line O O -who O O -, O O -in O O -addition O O -to O O -outstanding O O -effort O O -on O O -the O O -field O O -, O O -best O O -ex O O -the O O -characteristics O O -and O O -discipline O O -of O O -Vince B-PER B-PER -Lombard I-PER I-PER -, O O -legendary O O -coach O O -of O O -the O O -Green B-ORG B-ORG -Bay I-ORG I-ORG -Packers I-ORG I-ORG -. O O -S O O -- O O -D B-MISC B-MISC -F O O -D O O -R O O -/ O O -ST O O -. 
O O -AM B-LOC B-LOC -1996 O O -Re O O -of O O -Dutch B-MISC B-MISC -first O O -division O O -soccer O O -match O O -played O O -on O O -Friday O O -: O O -R B-ORG B-ORG -W I-ORG I-ORG -1 O O -Willem B-ORG B-ORG -II I-ORG I-ORG -T I-ORG I-ORG -2 O O -Standing O O -( O O -ta O O -under O O -played O O -, O O -won O O -, O O -drawn O O -, O O -lost O O -, O O -goals O O -for O O -, O O -goals O O -against O O -, O O -points O O -) O O -: O O -PS B-ORG B-ORG -Ein I-ORG I-ORG -18 O O -13 O O -3 O O -2 O O -52 O O -14 O O -42 O O -Fe B-ORG B-ORG -17 O O -11 O O -3 O O -3 O O -29 O O -20 O O -36 O O -T B-ORG B-ORG -En I-ORG I-ORG -18 O O -10 O O -4 O O -4 O O -28 O O -15 O O -34 O O -G B-ORG B-ORG -Do I-ORG I-ORG -18 O O -9 O O -3 O O -6 O O -29 O O -22 O O -30 O O -V B-ORG B-ORG -A I-ORG I-ORG -18 O O -8 O O -5 O O -5 O O -29 O O -21 O O -29 O O -B B-LOC B-LOC -1996 O O -Results O O -of O O -German B-MISC B-MISC -first O O -division O O -soccer O O -matches O O -played O O -on O O -Friday O O -: O O -Bo B-ORG B-ORG -2 O O -Bay B-ORG B-ORG -Lev I-ORG I-ORG -2 O O -We B-ORG B-ORG -Bremen I-ORG I-ORG -1 O O -1860 B-ORG B-ORG -Munich I-ORG I-ORG -1 O O -Karl B-ORG B-ORG -3 O O -Freiburg B-ORG B-ORG -0 O O -Sc B-ORG B-ORG -2 O O -Hans B-ORG B-ORG -R I-ORG I-ORG -0 O O -Standing O O -( O O -ta O O -under O O -played O O -, O O -won O O -, O O -drawn O O -, O O -lost O O -, O O -goals O O -for O O -goals O O -against O O -points O O -) O O -: O O -Bay B-ORG B-ORG -Lev I-ORG I-ORG -17 O O -10 O O -4 O O -3 O O -38 O O -22 O O -34 O O -Bayern B-ORG B-ORG -Munich I-ORG I-ORG -16 O O -9 O O -6 O O -1 O O -26 O O -14 O O -33 O O -V B-ORG B-ORG -Stuttgart I-ORG I-ORG -16 O O -9 O O -4 O O -3 O O -39 O O -17 O O -31 O O -Bo B-ORG B-ORG -Dortmund I-ORG I-ORG -16 O O -9 O O -4 O O -3 O O -33 O O -17 O O -31 O O -Karl B-ORG B-ORG -17 O O -8 O O -PA B-LOC B-LOC -1996 O O -Su O O -of O O -French B-MISC B-MISC -first O O -division O O -matches O O -on O O -Friday O O -: O O -Len B-ORG B-ORG -0 O O -Nan B-ORG B-ORG -4 O O -( O O -J B-PER B-PER -N I-PER I-PER -7 O O -, O O -Claude B-PER B-PER -Make I-PER I-PER -42 O O -, O O -Jocelyn B-PER B-PER -Go I-PER B-PER -67 O O -, O O -Christophe B-PER B-PER -Pig I-PER I-PER -72 O O -) O O -. O O -Half O O -0 O O -. O O -Attendance O O -: O O -15 O O -. O O -Paris B-ORG B-ORG -St I-ORG I-ORG -Germain I-ORG I-ORG -1 O O -( O O -Bruno B-PER B-PER -N I-PER I-PER -2 O O -) O O -Nancy B-ORG B-ORG -2 O O -( O O -Paul B-PER B-PER -Fischer I-PER I-PER -70 O O -, O O -Phil B-PER B-PER -Gray I-PER I-PER -89 O O -) O O -. O O -1 O O -. O O -30 O O -. O O -S O O -- O O -D B-MISC B-MISC -F O O -D O O -S O O -. O O -AM B-LOC B-LOC -1996 O O -Su O O -of O O -Dutch B-MISC B-MISC -first O O -division O O -soccer O O -match O O -played O O -on O O -Friday O O -: O O -R B-ORG B-ORG -W I-ORG I-ORG -1 O O -( O O -Star B-ORG O -76 O O -) O O -Willem B-ORG B-ORG -II I-ORG I-ORG -T I-ORG I-ORG -2 O O -( O O -Ko B-PER B-PER -45 O O -, O O -Van B-PER B-PER -der I-PER I-PER -V I-PER I-PER -77 O O -) O O -. O O -Half O O -0 O O -. O O -Attendance O O -5 O O -. O O -S O O -- O O -F B-MISC B-MISC -L O O -ST O O -. 
O O -PA B-LOC B-LOC -1996 O O -Standing O O -in O O -the O O -French B-MISC B-MISC -first O O -division O O -after O O -Friday O O -' O O -matches O O -( O O -ta O O -under O O -played O O -, O O -won O O -, O O -drawn O O -, O O -lost O O -, O O -goals O O -for O O -, O O -against O O -, O O -points O O -) O O -: O O -Paris B-ORG B-ORG -Saint I-ORG I-ORG -21 O O -12 O O -6 O O -3 O O -34 O O -15 O O -42 O O -Monaco B-ORG B-ORG -20 O O -12 O O -5 O O -3 O O -36 O O -16 O O -41 O O -Bordeaux B-ORG B-ORG -20 O O -9 O O -7 O O -4 O O -29 O O -21 O O -34 O O -Strasbourg B-ORG B-ORG -20 O O -11 O O -1 O O -8 O O -27 O O -27 O O -34 O O -Ba B-ORG B-ORG -20 O O -9 O O -6 O O -5 O O -27 O O -22 O O -33 O O -Au B-ORG B-ORG -20 O O -8 O O -8 O O -4 O O -26 O O -12 O O -32 O O -Metz B-ORG B-ORG -20 O O -8 O O -7 O O -5 O O -21 O O -16 O O -31 O O -Nan B-ORG B-ORG -21 O O -7 O O -9 O O -5 O O -41 O O -25 O O -30 O O -G B-ORG B-ORG -20 O O -7 O O -7 O O -6 O O -18 O O -18 O O -28 O O -Lille B-ORG B-ORG -20 O O -7 O O -7 O O -PA B-LOC B-LOC -1996 O O -Results O O -of O O -French B-MISC B-MISC -first O O -division O O -matches O O -on O O -Friday O O -: O O -Len B-ORG B-ORG -0 O O -Nan B-ORG B-ORG -4 O O -Paris B-ORG B-ORG -St I-ORG I-ORG -Germain I-ORG I-ORG -1 O O -Nancy B-ORG B-ORG -2 O O -S O O -- O O -GE B-MISC B-MISC -F O O -D O O -S O O -. O O -B B-LOC B-LOC -1996 O O -Su O O -of O O -matches O O -played O O -in O O -the O O -German B-MISC B-MISC -first O O -division O O -on O O -Friday O O -: O O -Bo B-ORG B-ORG -2 O O -( O O -Stick B-PER B-PER -30th O O -pen O O -, O O -W B-PER B-PER -89 O O -) O O -Bay B-ORG B-ORG -Lev I-ORG I-ORG -2 O O -( O O -Ki B-PER B-PER -18th O O -, O O -Ram B-PER B-PER -56 O O -) O O -. O O -Half O O -1 O O -. O O -Attendance O O -: O O -24 O O -We B-ORG B-ORG -Bremen I-ORG I-ORG -1 O O -( O O -Bo B-PER B-PER -85 O O -) O O -1860 B-ORG B-ORG -Munich I-ORG I-ORG -1 O O -( O O -Bo B-PER B-PER -8th O O -) O O -. O O -Half O O -0 O O -. O O -Attendance O O -33 O O -Karl B-ORG B-LOC -3 O O -( O O -Reich B-PER B-PER -29th O O -, O O -Carl B-PER B-PER -44 O O -, O O -Dundee B-PER B-ORG -69 O O -) O O -Freiburg B-ORG B-LOC -0 O O -. O O -Half O O -2 O O -. O O -Attendance O O -33 O O -Sc B-ORG B-ORG -2 O O -( O O -Mu B-PER B-PER -2nd O O -and O O -27th O O -) O O -Hans B-ORG B-ORG -R I-ORG I-ORG -0 O O -. O O -Half O O -2 O O -. O O -Attendance O O -29 O O -T O O -- O O -G B-MISC B-MISC -SL I-MISC I-MISC -C I-MISC I-MISC -Q O O -R O O -. O O -M B-LOC B-LOC -, O O -Germany B-LOC B-LOC -1996 O O -Quarter O O -results O O -at O O -the O O -$ O O -6 O O -million O O -Grand B-MISC B-MISC -Slam I-MISC I-MISC -Cup I-MISC I-MISC -tennis O O -tournament O O -on O O -Friday O O -: O O -Go B-PER B-PER -Ivan I-PER I-PER -( O O -Croatia B-LOC B-LOC -) O O -beat O O -Mark B-PER B-PER -Wood I-PER I-PER -( O O -Australia B-LOC B-LOC -) O O -6 O O -6 O O -Ye B-PER B-PER -Ka I-PER I-PER -( O O -Russia B-LOC B-LOC -) O O -beat O O -Jim B-PER B-PER -Co I-PER I-PER -( O O -U B-LOC B-LOC -) O O -2 O O -6 O O -8 O O -S O O -- O O -W O B-PER -H O O -DE O O -P O B-LOC -OF O O -CO B-LOC B-PER -. 
O O -L B-LOC B-LOC -1996 O O -Portugal B-LOC B-LOC -called O O -up O O -Porto B-ORG B-ORG -central O O -defender O O -Jo B-PER B-PER -Manuel I-PER I-PER -Pi I-PER I-PER -on O O -Friday O O -to O O -face O O -Germany B-LOC B-LOC -in O O -a O O -World B-MISC B-MISC -Cup I-MISC I-MISC -qualifier O O -in O O -place O O -of O O -injured O O -club O O -colleague O O -Jorge B-PER B-PER -Costa I-PER I-PER -, O O -who O O -is O O -still O O -nursing O O -a O O -broken O O -nose O O -after O O -being O O -head O O -by O O -Liberia B-MISC B-MISC -striker O O -Georg B-PER B-PER -We I-PER I-PER -. O O -Costa B-PER B-PER -has O O -not O O -played O O -since O O -being O O -struck O O -by O O -the O O -AC B-ORG B-ORG -Milan I-ORG I-ORG -forward O O -after O O -a O O -bad O O -European B-MISC B-MISC -Champions I-MISC I-MISC -' I-MISC I-MISC -League I-MISC I-MISC -game O O -on O O -November O O -27 O O -. O O -Portugal B-LOC B-LOC -lead O O -European B-MISC B-MISC -qualifying O O -group O O -nine O O -with O O -seven O O -points O O -from O O -four O O -games O O -, O O -one O O -more O O -than O O -Ukraine B-LOC B-LOC -and O O -three O O -more O O -than O O -Germany B-LOC B-LOC -, O O -who O O -have O O -only O O -played O O -twice O O -. O O -The O O -Portuguese B-MISC B-MISC -host O O -Germany B-LOC B-LOC -on O O -December O O -14 O O -. O O -Squad O O -: O O -Goal O O -- O O -V B-PER B-PER -Bai I-PER I-PER -( O O -Barcelona B-ORG B-ORG -, O O -Spain B-LOC B-LOC -) O O -, O O -R B-PER B-PER -Co I-PER I-PER -( O O -B B-ORG B-ORG -) O O -: O O -De O O -- O O -Paul B-PER B-PER -Santos I-PER I-PER -( O O -Porto B-ORG B-ORG -) O O -, O O -Sergio B-PER B-PER -Con I-PER I-PER -( O O -Porto B-ORG B-ORG -) O O -, O O -Jo B-PER B-PER -Manuel I-PER I-PER -Pi I-PER I-PER -( O O -Porto B-ORG B-ORG -) O O -, O O -Ocean B-PER B-PER -Cruz I-PER I-PER -( O O -Sporting B-ORG B-ORG -) O O -, O O -Fernando B-PER B-PER -Co I-PER I-PER -( O O -Barcelona B-ORG B-ORG -) O O -, O O -Held B-PER B-PER -C I-PER I-PER -( O O -Deportivo B-ORG B-ORG -Co I-ORG I-ORG -, O O -Spain B-LOC B-LOC -) O O -, O O -Di B-PER B-PER -Te I-PER I-PER -( O O -Juventus B-ORG B-ORG -, O O -Italy B-LOC B-LOC -) O O -, O O -Carlos B-PER B-PER -Secret I-PER I-PER -( O O -Real B-ORG B-ORG -Madrid I-ORG I-ORG -, O O -Spain B-LOC B-LOC -) O O -: O O -Mid O O -- O O -R B-PER B-PER -Barr I-PER I-PER -( O O -Porto B-ORG B-ORG -) O O -, O O -Jose B-PER B-PER -Barr I-PER I-PER -( O O -Porto B-ORG B-ORG -) O O -, O O -Luis B-PER B-PER -Fi I-PER I-PER -S O O -SH O O -ON O O -R O B-ORG -MA O I-ORG -V O O -BA O B-ORG -. 
O O -MA B-LOC B-LOC -1996 O O -William B-PER B-PER -Hill I-PER I-PER -betting O O -on O O -Saturday O O -' O O -Spanish B-MISC B-MISC -first O O -division O O -match O O -between O O -Real B-ORG B-ORG -Madrid I-ORG I-ORG -and O O -Barcelona B-ORG B-ORG -: O O -To O O -win O O -: O O -6 O O -Real B-ORG B-ORG -Madrid I-ORG I-ORG -; O O -7 O O -Barcelona B-ORG B-ORG -Draw O O -: O O -9 O O -Co O O -score O O -: O O -Real B-ORG B-ORG -Madrid I-ORG I-ORG -to O O -win O O -Barcelona B-ORG B-ORG -to O O -win O O -1 O O -13 O O -1 O O -15 O O -2 O O -9 O O -2 O O -12 O O -2 O O -8 O O -2 O O -10 O O -3 O O -20 O O -3 O O -28 O O -3 O O -16 O O -3 O O -22 O O -3 O O -25 O O -3 O O -25 O O -MA B-LOC B-LOC -1996 O O -Spanish B-MISC B-MISC -police O O -will O O -breath O O -fans O O -at O O -the O O -gates O O -of O O -the O O -Santiago B-LOC B-LOC -Bern I-LOC I-LOC -stadium O I-LOC -and O O -ban O O -drunk O O -supporters O O -from O O -Saturday O O -' O O -big O O -Real B-ORG B-MISC -Madrid I-ORG I-MISC -game O O -, O O -the O O -Madrid B-ORG B-LOC -daily O O -El B-ORG B-ORG -Mu I-ORG I-ORG -said O O -on O O -Friday O O -. O O -Although O O -there O O -are O O -no O O -known O O -precedent O O -in O O -the O O -country O O -, O O -the O O -action O O -is O O -en O O -in O O -Spanish B-MISC B-MISC -legislation O O -governing O O -sports O O -events O O -. O O -T O O -for O O -the O O -game O O -s O O -that O O -supporters O O -will O O -be O O -barred O O -if O O -they O O -are O O -" O O -under O O -the O O -effects O O -of O O -alcohol O O -" O O -. O O -S O O -- O O -SP B-MISC B-MISC -F O O -D O O -ST O O -. O O -MA B-LOC B-LOC -1996 O O -Standing O O -in O O -the O O -Spanish B-MISC B-MISC -first O O -division O O -ahead O O -of O O -this O O -weekend O O -' O O -games O O -. O O -( O O -ta O O -under O O -games O O -played O O -, O O -won O O -, O O -drawn O O -, O O -lost O O -, O O -goals O O -for O O -, O O -against O O -, O O -points O O -) O O -: O O -Real B-ORG B-ORG -Madrid I-ORG I-ORG -15 O O -10 O O -5 O O -0 O O -31 O O -12 O O -35 O O -Barcelona B-ORG B-ORG -15 O O -10 O O -4 O O -1 O O -46 O O -19 O O -34 O O -Deportivo B-ORG B-ORG -Co I-ORG I-ORG -15 O O -9 O O -6 O O -0 O O -23 O O -7 O O -33 O O -Real B-ORG B-ORG -Bet I-ORG I-ORG -15 O O -8 O O -5 O O -2 O O -28 O O -13 O O -29 O O -At B-ORG B-ORG -Madrid I-ORG I-ORG -15 O O -8 O O -3 O O -4 O O -26 O O -17 O O -27 O O -Athletic B-ORG B-ORG -B I-ORG I-ORG -15 O O -7 O O -4 O O -4 O O -28 O O -22 O O -25 O O -Real B-ORG B-ORG -So I-ORG I-ORG -15 O O -7 O O -3 O O -5 O O -20 O O -18 O O -24 O O -Val B-ORG B-ORG -15 O O -7 O O -3 O O -5 O O -19 O O -18 O O -24 O O -Racing B-ORG B-ORG -Santa I-ORG I-ORG -15 O O -5 O O -7 O O -3 O O -15 O O -15 O O -22 O O -Ray B-ORG B-ORG -Valle I-ORG I-ORG -15 O O -5 O O -5 O O -5 O O -21 O O -19 O O -20 O O -Valencia B-ORG B-ORG -15 O O -MA B-LOC B-LOC -1996 O O -Spain B-LOC B-LOC -coach O O -Javier B-PER B-PER -Clement I-PER I-PER -has O O -added O O -un O O -Deportivo B-ORG B-ORG -Co I-ORG I-ORG -midfielder O O -Armand B-PER B-PER -Alvarez I-PER I-PER -to O O -his O O -squad O O -for O O -the O O -World B-MISC B-MISC -Cup I-MISC I-MISC -qualifier O O -against O O -Yugoslavia B-LOC B-LOC -on O O -December O O -14 O O -. O O -" O O -I O O -do O O -n O O -believe O O -it O O -. 
O O -I O O -thought O O -it O O -was O O -a O O -joke O O -, O O -" O O -said O O -Armand B-PER B-PER -who O O -replaces O O -injured O O -At B-ORG B-ORG -Madrid I-ORG I-ORG -play O O -Jose B-PER B-PER -Luis I-PER I-PER -Cam I-PER I-PER -. O O -S O O -- O O -FIFA B-ORG B-ORG -B O O -H B-PER B-PER -ST O O -B O O -W O B-PER -. O O -ROM B-LOC B-LOC -1996 O O -FIFA B-ORG B-ORG -chairman O O -Jo B-PER B-PER -Have I-PER I-PER -said O O -on O O -Friday O O -he O O -would O O -personally O O -present O O -AC B-ORG B-ORG -Milan I-ORG I-ORG -George I-PER B-PER -We I-PER I-PER -with O O -world O O -soccer O O -' O O -fair O O -play O O -award O O -despite O O -the O O -striker O O -' O O -attack O O -on O O -Porto B-ORG B-ORG -captain O O -Jorge B-PER B-PER -Costa I-PER I-PER -. O O -In O O -an O O -interview O O -with O O -the O O -Italian B-MISC B-MISC -newspaper O O -G B-ORG B-ORG -dell I-ORG I-ORG -Sport I-ORG I-ORG -, O O -he O O -was O O -quoted O O -as O O -saying O O -We B-PER B-PER -had O O -been O O -provoked O O -into O O -the O O -assault O O -which O O -left O O -Costa B-PER B-PER -with O O -a O O -broken O O -nose O O -. O O -" O O -FIFA B-ORG B-ORG -has O O -named O O -the O O -Liberia B-MISC B-MISC -for O O -its O O -1996 O O -Fair B-MISC O -Play I-MISC O -award O O -and O O -it O O -is O O -not O O -going O O -to O O -change O O -its O O -decision O O -, O O -" O O -Have B-PER B-PER -said O O -. O O -" O O -A O O -reaction O O -, O O -provoked O O -, O O -cannot O O -erase O O -10 O O -years O O -of O O -loyalty O O -everywhere O O -and O O -in O O -every O O -competition O O -. O O -" O O -I O O -will O O -be O O -happy O O -to O O -give O O -him O O -the O O -award O O -personally O O -on O O -January O O -20 O O -in O O -Lisbon B-LOC B-LOC -and O O -I O O -' O O -confident O O -that O O -Costa B-PER B-PER -himself O O -will O O -be O O -there O O -beside O O -me O O -on O O -that O O -day O O -to O O -shake O O -his O O -hand O O -. O O -" O O -We B-PER B-PER -was O O -suspended O O -for O O -one O O -match O O -by O O -UEFA B-ORG B-ORG -, O O -European B-MISC B-MISC -soccer O O -' O O -governing O O -body O O -, O O -pending O O -a O O -full O O -investigation O O -. O O -The O O -incident O O -took O O -place O O -in O O -the O O -players O O -' O O -tunnel O O -after O O -a O O -European B-MISC B-MISC -Champions I-MISC I-MISC -' I-MISC I-MISC -League I-MISC I-MISC -match O O -on O O -November O O -20 O O -. O O -We B-PER B-PER -has O O -admitted O O -head O O -butt O O -Costa B-PER B-PER -but O O -said O O -he O O -reacted O O -to O O -racist O O -ta O O -. O O -He O O -has O O -offered O O -to O O -a O O -if O O -Costa B-PER B-PER -acknowledge O O -the O O -pro O O -. O O -Costa B-PER B-PER -, O O -who O O -needed O O -surgery O O -on O O -his O O -nose O O -, O O -has O O -not O O -accepted O O -the O O -offer O O -and O O -was O O -reported O O -to O O -be O O -considering O O -su O O -We B-PER B-PER -. O O -We B-PER B-PER -served O O -out O O -his O O -suspension O O -during O O -Milan B-ORG B-ORG -' O O -2 O O -home O O -defeat O O -by O O -Rosen B-ORG B-ORG -of O O -Norway B-LOC B-LOC -on O O -Wednesday O O -. O O -The O O -defeat O O -put O O -the O O -Italians B-MISC B-MISC -out O O -of O O -the O O -Euro B-MISC B-MISC -Cup I-MISC I-MISC -. O O -G B-LOC O -W O O -T O O -MA O B-ORG -UN O I-ORG -FA O O -IN O O -AU B-LOC B-LOC -. 
O O -VI B-LOC B-LOC -1996 O O -Two O O -Manchester B-ORG B-ORG -United I-ORG I-ORG -soccer O O -fans O O -were O O -wounded O O -by O O -unidentified O O -gun O O -on O O -Friday O O -and O O -taken O O -to O O -hospital O O -in O O -the O O -Austrian B-MISC B-MISC -capital O O -, O O -police O O -said O O -. O O -" O O -The O O -four O O -B B-MISC B-MISC -were O O -shot O O -at O O -from O O -a O O -Mercedes B-MISC B-MISC -car O O -at O O -around O O -1 O O -a O O -, O O -" O O -a O O -spoke O O -told O O -Re B-ORG B-ORG -. O O -The O O -two O O -men O O -were O O -hit O O -in O O -the O O -p O O -and O O -leg O O -. O O -Police O O -said O O -their O O -lives O O -were O O -not O O -in O O -danger O O -. O O -The O O -fans O O -, O O -in O O -Austria B-LOC B-LOC -to O O -watch O O -their O O -team O O -play O O -Rapid B-ORG B-ORG -Vienna I-ORG I-ORG -last O O -Wednesday O O -, O O -may O O -have O O -been O O -involved O O -in O O -a O O -pub O O -bra O O -earlier O O -, O O -the O O -spoke O O -said O O -. O O -Manchester B-ORG B-ORG -United I-ORG I-ORG -won O O -2 O O -. O O -S O O -- O O -IT B-MISC B-MISC -F O O -D O O -MA O O -T O O -W O O -. O O -ROM B-LOC B-LOC -1996 O O -Italian B-MISC B-MISC -Serie I-MISC B-MISC -A I-MISC I-MISC -games O O -to O O -be O O -played O O -on O O -Sunday O O -( O O -league O O -positions O O -in O O -parent O O -, O O -all O O -kick O O -off O O -times O O -GM B-MISC B-MISC -) O O -: O O -Bologna B-ORG B-ORG -( O O -4 O O -) O O -v O O -Pi B-ORG B-ORG -( O O -13 O O -) O O -133 O O -Along O O -with O O -leaders O O -Vice B-ORG B-ORG -, O O -fourth O O -Bologna B-ORG B-ORG -represent O O -the O O -biggest O O -surprise O O -of O O -this O O -Italian B-MISC B-MISC -autumn O O -. O O -Led O O -as O O -usual O O -by O O -S B-MISC B-MISC -Ken B-PER B-PER -Anders I-PER I-PER -and O O -Russian B-MISC B-MISC -Igor B-PER B-PER -Ko I-PER I-PER -in O O -attack O O -, O O -Bologna B-ORG B-ORG -can O O -expect O O -a O O -tough O O -home O O -match O O -against O O -a O O -Pi B-ORG B-ORG -side O O -still O O -ex O O -after O O -a O O -3 O O -league O O -win O O -over O O -AC B-ORG B-ORG -Milan I-ORG I-ORG -last O O -Sunday O O -. O O -C B-ORG B-ORG -( O O -16 O O -) O O -v O O -Reg B-ORG B-ORG -( O O -18 O O -) O O -153 O O -C B-ORG B-ORG -start O O -favourite O O -in O O -this O O -relegation O O -scrap O O -following O O -draws O O -with O O -Na B-ORG B-ORG -and O O -Inter B-ORG B-ORG -in O O -last O O -two O O -out O O -but O O -will O O -be O O -without O O -suspended O O -Swiss B-MISC B-MISC -defender O O -Ramon B-PER B-PER -Vega I-PER I-PER -. O O -Bottom O O -team O O -Reg B-ORG B-ORG -are O O -also O O -without O O -a O O -suspended O O -defender O O -, O O -German B-MISC B-MISC -Diet B-PER B-PER -Be I-PER I-PER -. O O -Fi B-ORG B-ORG -( O O -10 O O -) O O -v O O -Peru B-ORG B-ORG -( O O -8 O O -) O O -133 O O -Fi B-ORG B-ORG -will O O -be O O -without O O -three O O -suspended O O -players O O -- O O -defenders O O -Daniel B-PER B-PER -Car I-PER I-PER -and O O -Lorenzo B-PER B-PER -Amor I-PER I-PER -and O O -midfielder O O -Emilia B-PER B-PER -Big I-PER I-PER -- O O -for O O -a O O -difficult O O -home O O -match O O -against O O -unpredictable O O -, O O -attack O O -Peru B-ORG B-ORG -led O O -by O O -in O O -C B-MISC B-MISC -striker O O -Milan B-PER B-PER -Rap I-PER I-PER -and O O -the O O -experienced O O -Faust B-PER B-PER -Pi I-PER I-PER -. 
O O -La B-ORG B-ORG -( O O -12 O O -) O O -v O O -AS B-ORG B-ORG -Roma I-ORG I-ORG -( O O -7 O O -) O O -1930 O O -Poor O O -man O O -' O O -Roman B-MISC B-MISC -derby O O -in O O -what O O -has O O -been O O -a O O -miserable O O -season O O -for O O -both O O -Rome B-LOC B-LOC -teams O O -, O O -already O O -eliminated O O -from O O -the O O -Italian B-MISC B-MISC -and O O -UEFA B-MISC B-MISC -Cups I-MISC I-MISC -. O O -La B-ORG B-ORG -have O O -injury O O -doubts O O -about O O -striker O O -Pier B-PER B-PER -C I-PER I-PER -, O O -Czech B-MISC B-LOC -midfielder O O -Pavel B-PER B-PER -Ned I-PER I-PER -and O O -defender O O -Paolo B-PER B-PER -Negro I-PER I-PER -, O O -while O O -Roma B-ORG B-ORG -present O O -a O O -full O O -strength O O -side O O -led O O -by O O -Argentine B-MISC B-MISC -Abel B-PER B-PER -Ba I-PER I-PER -, O O -Marco B-PER B-PER -Del I-PER I-PER -and O O -Francesco B-PER B-PER -To I-PER I-PER -in O O -attack O O -. O O -AC B-ORG B-ORG -Milan I-ORG I-ORG -( O O -9 O O -) O O -v O O -U B-ORG B-ORG -( O O -11 O O -) O O -133 O O -Can O O -Milan B-ORG B-ORG -sink O O -any O O -further O O -? O O -Following O O -their O O -mid O O -Champions B-MISC B-MISC -' I-MISC I-MISC -League I-MISC I-MISC -elimination O O -by O O -Norwegian B-MISC B-MISC -side O O -Rosen B-ORG B-ORG -, O O -a O O -morale O O -win O O -is O O -badly O O -needed O O -. O O -Liberia B-MISC B-MISC -striker O O -George B-PER B-PER -We I-PER I-PER -makes O O -a O O -welcome O O -return O O -for O O -Milan B-ORG B-ORG -alongside O O -Roberto B-PER B-PER -Ba I-PER I-PER -, O O -with O O -Montenegrin B-MISC B-MISC -De B-PER B-PER -Sa I-PER I-PER -in O O -midfield O O -. O O -Good O O -news O O -for O O -Milan B-ORG B-ORG -is O O -that O O -U B-ORG B-ORG -' O O -German B-MISC B-MISC -striker O O -Oliver B-PER B-PER -B I-PER I-PER -is O O -out O O -through O O -injury O O -. O O -Na B-ORG B-ORG -( O O -5 O O -) O O -v O O -Verona B-ORG B-ORG -( O O -17 O O -) O O -133 O O -In O O -Na B-ORG B-ORG -should O O -prove O O -too O O -strong O O -for O O -second O O -from O O -bottom O O -Verona B-ORG B-ORG -despite O O -the O O -absence O O -of O O -their O O -suspended O O -Argentine B-MISC B-MISC -defender O O -Roberto B-PER B-PER -A I-PER I-PER -. O O -Verona B-ORG B-ORG -' O O -slim O O -chances O O -have O O -been O O -further O O -reduced O O -by O O -a O O -knee O O -injury O O -to O O -their O O -experienced O O -midfielder O O -E B-PER B-PER -Co I-PER I-PER -. O O -Parma B-ORG B-ORG -( O O -14 O O -) O O -v O O -At B-ORG B-ORG -( O O -15 O O -) O O -133 O O -Parma B-ORG B-ORG -may O O -field O O -new O O -signing O O -, O O -C B-MISC B-MISC -midfielder O O -Mario B-PER B-PER -Stan I-PER I-PER -, O O -in O O -an O O -attempt O O -to O O -lift O O -a O O -miserable O O -season O O -which O O -has O O -seen O O -them O O -go O O -without O O -a O O -win O O -since O O -a O O -1 O O -triumph O O -over O O -C B-ORG B-ORG -eight O O -weeks O O -ago O O -. O O -Parma B-ORG B-ORG -' O O -French B-MISC B-MISC -midfielder O O -Daniel B-PER B-PER -Bravo I-PER I-PER -and O O -defender O O -F B-PER B-PER -Can I-PER I-PER -are O O -suspended O O -while O O -Argentine B-MISC B-MISC -N B-PER B-PER -Sen I-PER I-PER -is O O -out O O -through O O -injury O O -. O O -At B-ORG B-ORG -look O O -to O O -Fi B-PER B-PER -In I-PER I-PER -, O O -scorer O O -of O O -eight O O -goals O O -. 
O O -Sam B-ORG B-ORG -( O O -6 O O -) O O -v O O -Juventus B-ORG B-ORG -( O O -3 O O -) O O -133 O O -All O O -Juventus B-ORG B-ORG -field O O -their O O -most O O -recent O O -signing O O -, O O -Portuguese B-MISC B-MISC -defender O O -Di B-PER B-PER -, O O -while O O -Alessandro B-PER B-PER -Del I-PER I-PER -Pier I-PER I-PER -and O O -C B-PER B-MISC -Al B-PER B-PER -Bo I-PER I-PER -lead O O -the O O -attack O O -. O O -The O O -new O O -world O O -club O O -champions O O -may O O -prove O O -too O O -strong O O -for O O -a O O -Sam B-ORG B-ORG -side O O -led O O -by O O -captain O O -Roberto B-PER B-PER -Man I-PER I-PER -but O O -missing O O -injured O O -French B-MISC B-MISC -midfielder O O -Pierre B-PER B-PER -Lai I-PER I-PER -. O O -Vice B-ORG B-ORG -( O O -1 O O -) O O -v O O -Inter B-ORG B-ORG -( O O -2 O O -) O O -133 O O -Not O O -exactly O O -a O O -clash O O -of O O -the O O -t O O -but O O -an O O -in O O -match O O -nonetheless O O -. O O -Full O O -strength O O -Vice B-ORG B-ORG -, O O -led O O -by O O -Uruguayan B-MISC B-MISC -Marcel B-PER B-PER -O I-PER I-PER -, O O -may O O -continue O O -their O O -surprise O O -run O O -at O O -the O O -top O O -against O O -an O O -Inter B-ORG B-ORG -side O O -that O O -has O O -been O O -less O O -than O O -impressive O O -in O O -three O O -successive O O -home O O -draws O O -. O O -Inter B-ORG B-ORG -will O O -be O O -without O O -suspended O O -French B-MISC B-MISC -defender O O -Jo B-PER B-PER -Anglo I-PER I-PER -and O O -injured O O -Chilean B-MISC B-MISC -striker O O -Ivan B-PER B-PER -Z I-PER I-PER -. O O -BA O O -- O O -EU B-MISC B-MISC -R O O -. O O -BR B-LOC B-LOC -1996 O O -Re O O -of O O -a O O -Euro B-MISC B-MISC -basketball O O -match O O -on O O -Thursday O O -: O O -Group O O -B O O -In O O -Cha B-LOC B-LOC -: O O -Cha B-ORG B-ORG -( O O -Belgium B-LOC B-LOC -) O O -75 O O -E B-ORG B-ORG -Madrid I-ORG I-ORG -( O O -Spain B-LOC B-LOC -) O O -82 O O -( O O -34 O O -) O O -Leading O O -scorer O O -: O O -Cha B-ORG B-ORG -- O O -Eric B-PER B-PER -C I-PER I-PER -18 O O -, O O -Ron B-PER B-PER -Ellis I-PER I-PER -18 O O -, O O -Jacques B-PER B-PER -St I-PER I-PER -14 O O -E B-ORG B-ORG -- O O -Harper B-PER B-PER -Williams I-PER I-PER -20 O O -, O O -Chad B-PER B-PER -Thompson I-PER I-PER -17 O O -, O O -Juan B-PER B-PER -Ai I-PER I-PER -14 O O -Group O O -D O O -In O O -Belgrade B-LOC B-LOC -: O O -Part B-ORG B-ORG -Belgrade I-ORG I-ORG -( O O -Yugoslavia B-LOC B-LOC -) O O -78 O O -Kind B-ORG B-ORG -Bologna I-ORG I-ORG -( O O -Italy B-LOC B-LOC -) O O -70 O O -( O O -halftime O O -44 O O -) O O -Leading O O -scorer O O -: O O -Part B-ORG B-ORG -- O O -B B-LOC B-LOC -, O O -India B-LOC B-LOC -1996 O O -World O O -number O O -two O O -Rodney B-PER B-PER -E I-PER I-PER -moved O O -within O O -sight O O -of O O -his O O -fifth O O -title O O -of O O -the O O -year O O -on O O -Friday O O -when O O -he O O -hurried O O -in O O -only O O -40 O O -minutes O O -to O O -the O O -final O O -of O O -the O O -richest O O -squash O O -tournament O O -outside O O -the O O -World B-MISC B-MISC -Open I-MISC I-MISC -, O O -the O O -$ O O -105 O O -Ma B-MISC B-MISC -International I-MISC I-MISC -. O O -The O O -Australian B-MISC B-MISC -brushed O O -aside O O -un O O -English B-MISC B-MISC -Mark B-PER B-PER -Cairns I-PER I-PER -15 O O -15 O O -15 O O -. 
O O -Top O O -E B-PER B-PER -now O O -meets O O -title O O -Peter B-PER B-PER -Nico I-PER I-PER -of O O -Scotland B-LOC B-LOC -who O O -over O O -Simon B-PER B-PER -Park I-PER I-PER -of O O -England B-LOC B-LOC -15 O O -15 O O -15 O O -. O O -Nico B-PER B-PER -was O O -full O O -of O O -praise O O -for O O -his O O -opponent O O -who O O -has O O -battled O O -test O O -cancer O O -to O O -return O O -to O O -the O O -circuit O O -. O O -" O O -He O O -' O O -a O O -remarkably O O -courage O O -player O O -, O O -" O O -said O O -Nico B-PER B-PER -. O O -S O O -- O O -MA B-MISC B-MISC -IN O I-MISC -SE O O -R O O -. O O -B B-LOC B-LOC -, O O -India B-LOC B-LOC -1996 O O -Results O O -of O O -semifinals O O -in O O -the O O -Ma B-MISC B-MISC -International I-MISC I-MISC -squash O O -tournament O O -on O O -Friday O O -: O O -Peter B-PER B-PER -Nico I-PER I-PER -( O O -Scotland B-LOC B-LOC -) O O -beat O O -Simon B-PER B-PER -Park I-PER I-PER -( O O -England B-LOC B-LOC -) O O -15 O O -15 O O -15 O O -Rodney B-PER B-PER -E I-PER I-PER -( O O -Australia B-LOC B-LOC -) O O -beat O O -Mark B-PER B-PER -Cairns I-PER I-PER -( O O -England B-LOC B-LOC -) O O -15 O O -15 O O -15 O O -. O O -Final O O -: O O -Nico B-PER B-PER -v O O -E B-ORG B-PER -, O O -on O O -Saturday O O -. O O -G B-ORG O -K O O -F O O -IN O O -S B-ORG B-MISC -' O O -Z B-ORG B-MISC -PR O O -. O O -D B-LOC B-PER -, O O -South B-LOC B-LOC -Africa I-LOC I-LOC -1996 O O -At O O -least O O -four O O -people O O -have O O -been O O -shot O O -dead O O -in O O -two O O -suspected O O -political O O -attacks O O -in O O -South B-LOC B-LOC -Africa I-LOC I-LOC -' O O -volatile O O -Z B-MISC B-MISC -heart O O -, O O -police O O -said O O -on O O -Friday O O -. O O -A O O -police O O -spokesman O O -said O O -two O O -youths O O -believed O O -to O O -be O O -supporters O O -of O O -President O O -Nelson B-PER B-PER -Man I-PER I-PER -' O O -African B-ORG B-ORG -National I-ORG I-ORG -Congress I-ORG I-ORG -( O O -AN B-ORG B-ORG -) O O -had O O -been O O -killed O O -when O O -unknown O O -gun O O -opened O O -fire O O -at O O -the O O -rural O O -settlement O O -of O O -I B-LOC B-LOC -on O O -K B-LOC B-LOC -province O O -' O O -south O O -coast O O -on O O -Thursday O O -night O O -. O O -The O O -victims O O -were O O -18 O O -and O O -20 O O -, O O -he O O -said O O -, O O -adding O O -one O O -other O O -youth O O -had O O -been O O -wounded O O -in O O -the O O -shooting O O -. O O -In O O -another O O -attack O O -, O O -also O O -on O O -the O O -province O O -' O O -south O O -coast O O -on O O -Thursday O O -night O O -, O O -two O O -men O O -were O O -shot O O -dead O O -near O O -Um B-LOC B-LOC -. O O -" O O -We O O -suspect O O -that O O -these O O -killings O O -are O O -linked O O -to O O -politics O O -, O O -" O O -spokesman O O -Ba B-PER B-PER -Na I-PER I-PER -told O O -Re B-ORG B-ORG -. O O -There O O -had O O -been O O -no O O -arrests O O -. O O -The O O -killings O O -came O O -just O O -hours O O -after O O -violence O O -monitors O O -said O O -they O O -were O O -not O O -optimistic O O -of O O -a O O -peaceful O O -f O O -season O O -in O O -K B-LOC B-LOC -and O O -pointed O O -the O O -south O O -coast O O -region O O -where O O -18 O O -people O O -were O O -massacre O O -last O O -Christmas O O -as O O -one O O -of O O -potential O O -hot O O -spots O O -. 
O O -They O O -said O O -the O O -recent O O -l O O -in O O -political O O -feud O O -could O O -be O O -upset O O -as O O -thousands O O -of O O -migrant O O -workers O O -, O O -some O O -tense O O -with O O -g O O -br O O -in O O -the O O -cities O O -and O O -keen O O -to O O -settle O O -old O O -scores O O -, O O -flock O O -back O O -to O O -their O O -home O O -villages O O -. O O -More O O -than O O -14 O O -people O O -have O O -lost O O -their O O -lives O O -in O O -over O O -a O O -decade O O -of O O -political O O -turf O O -wars O O -between O O -the O O -AN B-ORG B-ORG -and O O -Z B-MISC B-MISC -Chief O O -Man B-PER B-PER -But I-PER I-PER -' O O -In B-ORG B-ORG -Freedom I-ORG I-ORG -Party I-ORG I-ORG -in O O -the O O -province O O -. O O -H B-PER B-PER -PR O O -C B-MISC B-MISC -N O O -AL O B-PER -AS O O -F O O -. O O -K B-PER B-PER -G I-PER I-PER -PR B-LOC B-LOC -1996 O O -Czech B-MISC B-LOC -President O O -V B-PER B-PER -Have I-PER I-PER -on O O -Friday O O -welcomed O O -the O O -appointment O O -of O O -Madeleine B-PER B-PER -Al I-PER I-PER -, O O -who O O -is O O -of O O -Czech B-MISC B-LOC -extraction O O -, O O -as O O -the O O -United B-LOC B-LOC -States I-LOC I-LOC -' O O -first O O -woman O O -Secretary O O -of O O -State O O -. O O -In O O -a O O -statement O O -Have B-PER B-PER -, O O -who O O -is O O -recovering O O -from O O -cancer O O -surgery O O -, O O -said O O -: O O -" O O -Madeleine B-PER B-PER -Al I-PER I-PER -is O O -a O O -distinguished O O -friend O O -, O O -a O O -tested O O -diplomat O O -, O O -and O O -a O O -true O O -American B-MISC B-MISC -of O O -fine O O -origins O O -. O O -" O O -" O O -I O O -look O O -forward O O -to O O -continuing O O -our O O -good O O -relations O O -. O O -with O O -the O O -United B-LOC B-LOC -States I-LOC I-LOC -and O O -with O O -the O O -first O O -woman O O -ever O O -to O O -hold O O -the O O -position O O -of O O -Secretary O O -of O O -State O O -. O O -I O O -wish O O -her O O -well O O -, O O -" O O -Have B-PER B-PER -said O O -in O O -a O O -statement O O -to O O -Re B-ORG B-ORG -. O O -Have B-PER B-PER -, O O -who O O -helped O O -lead O O -the O O -" O O -velvet O O -revolution O O -" O O -that O O -ou O O -the O O -Communist B-MISC B-MISC -regime O O -in O O -Prague B-LOC B-LOC -in O O -1989 O O -, O O -invited O O -Al B-PER B-PER -, O O -then O O -working O O -for O O -a O O -private O O -foreign O O -policy O O -think O O -tank O O -, O O -to O O -advise O O -his O O -new O O -democratic O O -government O O -in O O -1990 O O -. O O -Have B-PER B-PER -had O O -a O O -small O O -ma O O -t O O -removed O O -from O O -his O O -lung O O -on O O -Monday O O -and O O -is O O -recovering O O -in O O -hospital O O -. O O -Al B-PER B-PER -, O O -born O O -Marie B-PER B-PER -Ko I-PER I-PER -to O O -a O O -Czechoslovak B-MISC B-MISC -diplomat O O -in O O -1937 O O -, O O -fled O O -with O O -her O O -family O O -to O O -the O O -United B-LOC B-LOC -States I-LOC I-LOC -after O O -the O O -Communists B-MISC B-MISC -came O O -to O O -power O O -in O O -a O O -coup O O -in O O -1948 O O -. O O -As O O -an O O -academic O O -, O O -Al B-PER B-PER -studied O O -and O O -lectured O O -on O O -Europe B-LOC B-LOC -' O O -20th O O -century O O -problems O O -before O O -becoming O O -U B-LOC B-LOC -ambassador O O -to O O -the O O -United B-ORG B-ORG -Nations I-ORG I-ORG -. 
O O -Czech B-MISC B-LOC -diplomat O O -, O O -seeking O O -to O O -have O O -their O O -country O O -included O O -in O O -the O O -expected O O -expansion O O -of O O -NATO B-ORG B-ORG -, O O -praised O O -the O O -selection O O -of O O -Al B-PER B-PER -, O O -known O O -to O O -be O O -a O O -strong O O -supporter O O -of O O -alliance O O -' O O -integration O O -of O O -former O O -So B-MISC B-MISC -countries O O -. O O -" O O -The O O -nomination O O -. O O -is O O -a O O -clear O O -signal O O -that O O -one O O -key O O -of O O -the O O -lines O O -of O O -foreign O O -policy O O -will O O -be O O -the O O -strengthening O O -of O O -the O O -trans B-MISC B-MISC -cooperation O O -, O O -a O O -creation O O -of O O -strategic O O -partnership O O -between O O -Europe B-LOC B-LOC -and O O -the O O -US B-LOC B-LOC -, O O -" O O -Foreign O O -Minister O O -Josef B-PER B-PER -Z I-PER I-PER -told O O -Re B-ORG B-ORG -. O O -" O O -( O O -Al B-PER B-PER -) O O -is O O -a O O -convinced O O -advocate O O -of O O -NATO B-ORG B-ORG -en O O -and O O -of O O -stab O O -of O O -security O O -structures O O -. O O -" O O -Czech B-MISC B-LOC -ambassador O O -to O O -the O O -United B-ORG B-ORG -Nations I-ORG I-ORG -, O O -Ka B-PER B-PER -Ko I-PER I-PER -, O O -told O O -the O O -daily O O -M B-ORG B-ORG -Front I-ORG I-ORG -D I-ORG I-ORG -that O O -Al B-PER B-PER -" O O -is O O -a O O -little O O -light O O -in O O -our O O -diplomatic O O -heaven O O -, O O -" O O -but O O -warned O O -against O O -expecting O O -her O O -to O O -ex O O -any O O -influence O O -in O O -favour O O -of O O -the O O -Czech B-MISC B-MISC -. O O -RA B-PER B-ORG -ROM B-ORG I-ORG -A O O -H O O -AT O O -4 O O -PM O O -. O O -B B-LOC B-LOC -1996 O O -Radio B-ORG B-ORG -Romania I-ORG I-ORG -news O O -headlines O O -: O O -* O O -The O O -Democratic B-ORG B-MISC -Convention I-ORG I-MISC -signed O O -an O O -agreement O O -on O O -government O O -and O O -parliamentary O O -support O O -with O O -its O O -coalition O O -partners O O -the O O -Social B-ORG B-ORG -Democratic I-ORG I-ORG -Union I-ORG I-ORG -and O O -the O O -Hungarian B-ORG B-ORG -Democratic I-ORG I-ORG -Union I-ORG I-ORG -( O O -U B-ORG B-ORG -) O O -. O O -The O O -ceremony O O -was O O -attended O O -by O O -President O O -Emil B-PER B-PER -Constantine I-PER I-PER -. O O -* O O -The O O -three O O -parties O O -in O O -the O O -government O O -coalition O O -have O O -committed O O -themselves O O -to O O -a O O -real O O -reform O O -of O O -Romania B-LOC B-LOC -' O O -economy O O -, O O -Constantine B-PER B-PER -said O O -after O O -the O O -ceremony O O -. O O -* O O -The O O -U B-ORG B-ORG -wants O O -to O O -contribute O O -to O O -social O O -reform O O -and O O -economic O O -revival O O -in O O -Romania B-LOC B-LOC -, O O -union O O -leader O O -Mark B-PER B-PER -Bel I-PER I-PER -said O O -. O O -* O O -The O O -international O O -airport O O -in O O -Tim B-LOC B-LOC -and O O -the O O -domestic O O -airports O O -in O O -Ara B-LOC B-LOC -, O O -Or B-LOC B-LOC -and O O -Si B-LOC B-LOC -were O O -closed O O -due O O -to O O -fog O O -. O O -- O O -Bucharest B-ORG B-ORG -News I-ORG I-ORG -40 O O -312 O O -C B-MISC B-MISC -VI O O -SE O O -W O O -DE O O -AT O O -PA B-MISC O -CO O O -. 
O O -PR B-LOC B-LOC -1996 O O -Saturday O O -' O O -national O O -congress O O -of O O -the O O -ruling O O -Czech B-ORG B-ORG -Civic I-ORG I-ORG -Democratic I-ORG I-ORG -Party I-ORG I-ORG -( O O -O B-ORG B-ORG -) O O -will O O -discuss O O -making O O -the O O -party O O -more O O -efficient O O -and O O -transparent O O -, O O -Foreign O O -Minister O O -and O O -O B-ORG B-ORG -vice O O -Josef B-PER B-PER -Z I-PER I-PER -, O O -said O O -on O O -Friday O O -. O O -" O O -Modern O O -and O O -more O O -pro O O -of O O -the O O -party O O -' O O -structure O O -, O O -having O O -financing O O -of O O -the O O -party O O -be O O -more O O -transparent O O -. O O -are O O -absolutely O O -fundamental O O -, O O -" O O -Z B-PER B-PER -, O O -who O O -is O O -also O O -vice O O -in O O -the O O -government O O -, O O -told O O -Re B-ORG B-ORG -. O O -He O O -said O O -after O O -June O O -general O O -elections O O -in O O -which O O -the O O -ruling O O -three O O -coalition O O -lost O O -its O O -parliamentary O O -majority O O -, O O -the O O -O B-ORG B-ORG -executive O O -, O O -led O O -by O O -Prime O O -Minister O O -V B-PER B-PER -Klaus I-PER I-PER -, O O -had O O -developed O O -proposals O O -on O O -these O O -subjects O O -to O O -present O O -at O O -the O O -congress O O -on O O -Saturday O O -in O O -the O O -Czech B-MISC B-LOC -second O O -city O O -B B-LOC B-LOC -. O O -" O O -I O O -am O O -convinced O O -, O O -that O O -the O O -congress O O -will O O -tackle O O -these O O -proposals O O -, O O -" O O -he O O -said O O -. O O -The O O -O B-ORG B-ORG -, O O -a O O -party O O -in O O -which O O -Klaus B-PER B-PER -often O O -tries O O -to O O -em O O -the O O -style O O -of O O -former O O -British B-MISC B-MISC -Prime O O -Minister O O -Margaret B-PER B-PER -Thatcher I-PER I-PER -, O O -has O O -been O O -in O O -control O O -of O O -Czech B-MISC B-LOC -politics O O -since O O -winning O O -general O O -elections O O -in O O -1992 O O -. O O -Z B-ORG B-PER -in O O -the O O -summer O O -led O O -calls O O -for O O -the O O -party O O -and O O -its O O -leadership O O -to O O -listen O O -to O O -more O O -diverse O O -opinions O O -, O O -a O O -thin O O -criticism O O -of O O -Klaus B-PER B-PER -who O O -has O O -spear O O -the O O -country O O -' O O -post B-MISC B-MISC -economic O O -reforms O O -. O O -The O O -party O O -, O O -led O O -by O O -the O O -vigorously O O -Klaus B-PER B-PER -, O O -took O O -32 O O -of O O -81 O O -seats O O -after O O -late O O -November O O -runoff O O -elections O O -to O O -the O O -new O O -upper O O -house O O -of O O -Czech B-MISC B-LOC -parliament O O -. O O -But O O -after O O -the O O -first O O -round O O -vote O O -a O O -week O O -before O O -, O O -the O O -O B-ORG B-ORG -had O O -the O O -potential O O -to O O -win O O -as O O -many O O -79 O O -seats O O -. O O -Klaus B-PER B-PER -and O O -his O O -coalition O O -lost O O -its O O -majority O O -in O O -parliament O O -in O O -June O O -lower O O -house O O -elections O O -after O O -the O O -left O O -opposition O O -consolidated O O -, O O -putting O O -the O O -centre O O -Social B-ORG B-MISC -Democrats I-ORG I-MISC -in O O -a O O -strong O O -second O O -position O O -. O O -- O O -Prague B-ORG B-ORG -News I-ORG I-ORG -42 O O -P O B-LOC -GO O O -M O O -F O O -P O O -S O B-MISC -ACC O O -. 
O O -Marc B-PER B-PER -G I-PER I-PER -WA B-LOC B-LOC -1996 O O -Poland B-LOC B-LOC -said O O -on O O -Friday O O -that O O -Swiss B-MISC B-MISC -bank O O -accounts O O -, O O -which O O -in O O -many O O -cases O O -belonged O O -to O O -Polish B-MISC B-MISC -Jews I-MISC B-MISC -who O O -died O O -in O O -the O O -Holocaust B-MISC B-MISC -, O O -were O O -used O O -in O O -debt O O -settlements O O -between O O -the O O -two O O -countries O O -after O O -the O O -World B-MISC B-MISC -War I-MISC I-MISC -Two I-MISC I-MISC -. O O -Foreign O O -Minister O O -Darius B-PER B-PER -Rosa I-PER I-PER -, O O -un O O -first O O -findings O O -of O O -a O O -special O O -government O O -commission O O -, O O -said O O -that O O -in O O -1970s O O -the O O -then O O -communist O O -Poland B-LOC B-LOC -received O O -460 O O -Swiss B-MISC B-MISC -f O O -from O O -the O O -accounts O O -. O O -" O O -In O O -1970s O O -, O O -Poland B-LOC B-LOC -received O O -from O O -un O O -accounts O O -in O O -Switzerland B-LOC B-LOC -a O O -sum O O -of O O -460 O O -f O O -. O O -What O O -was O O -its O O -right O O -( O O -to O O -the O O -money O O -) O O -. O O -do O O -not O O -know O O -, O O -" O O -Rosa B-PER B-PER -told O O -a O O -news O O -conference O O -. O O -Switzerland B-LOC B-LOC -stands O O -accused O O -by O O -Senator O O -Al B-PER B-PER -D I-PER I-PER -, O O -chairman O O -of O O -the O O -powerful O O -U B-ORG B-ORG -Senate I-ORG I-ORG -Banking I-ORG I-ORG -Committee I-ORG I-ORG -, O O -of O O -agreeing O O -to O O -give O O -money O O -to O O -Poland B-LOC B-LOC -from O O -un O O -bank O O -accounts O O -of O O -Polish B-MISC B-MISC -citizens O O -, O O -as O O -part O O -of O O -an O O -accord O O -on O O -com O O -Swiss B-MISC B-MISC -nationals O O -whose O O -assets O O -had O O -been O O -seized O O -in O O -communist O O -Poland B-LOC B-LOC -. O O -Many O O -of O O -these O O -citizens O O -were O O -Jews B-MISC B-MISC -murdered O O -during O O -the O O -war O O -, O O -when O O -Nazi B-MISC B-MISC -German B-MISC B-MISC -invaders O O -killed O O -most O O -of O O -Poland B-LOC B-LOC -' O O -3 O O -million O O -Jews B-MISC B-MISC -. O O -Rosa B-PER B-PER -did O O -not O O -say O O -whether O O -the O O -payment O O -in O O -1970s O O -was O O -part O O -of O O -the O O -1949 O O -agreement O O -between O O -Warsaw B-LOC B-LOC -and O O -Switzerland B-LOC B-LOC -on O O -compensation O O -to O O -Swiss B-MISC B-MISC -citizens O O -whose O O -assets O O -were O O -seized O O -by O O -the O O -Soviet B-MISC B-MISC -communists O O -authorities O O -after O O -World B-MISC B-MISC -War I-MISC I-MISC -Two I-MISC I-MISC -. O O -" O O -I O O -expect O O -that O O -the O O -commission O O -will O O -finish O O -gathering O O -information O O -within O O -two O O -to O O -three O O -weeks O O -and O O -then O O -more O O -details O O -will O O -be O O -provided O O -, O O -" O O -Rosa B-PER B-PER -said O O -. O O -Rosa B-PER B-PER -confirmed O O -that O O -the O O -1949 O O -agreement O O -had O O -provided O O -for O O -granting O O -Switzerland B-LOC B-LOC -about O O -53 O O -million O O -f O O -and O O -most O O -of O O -this O O -sum O O -was O O -re O O -with O O -coal O O -exports O O -. O O -He O O -said O O -, O O -however O O -, O O -that O O -Switzerland B-LOC B-LOC -did O O -get O O -about O O -16 O O -f O O -from O O -the O O -so O O -" O O -dead O O -accounts O O -" O O -as O O -part O O -of O O -the O O -compensation O O -. 
O O -" O O -About O O -16 O O -f O O -were O O -seized O O -from O O -accounts O O -of O O -four O O -or O O -five O O -Polish B-MISC B-MISC -citizens O O -, O O -whose O O -data O O -we O O -do O O -not O O -precisely O O -know O O -. O O -The O O -issue O O -is O O -of O O -moral O O -and O O -legal O O -nature O O -, O O -because O O -its O O -financial O O -significance O O -is O O -small O O -, O O -" O O -Rosa B-PER B-PER -said O O -. O O -Under O O -pressure O O -from O O -international O O -Jewish B-MISC B-MISC -organisations O O -, O O -Swiss B-MISC B-MISC -government O O -has O O -devised O O -a O O -plan O O -to O O -pay O O -out O O -millions O O -of O O -dollars O O -in O O -un O O -bank O O -accounts O O -as O O -a O O -con O O -gesture O O -toward O O -Holocaust B-MISC B-MISC -victims O O -. O O -The O O -conservative O O -Radical B-MISC B-ORG -Democrats I-ORG I-ORG -( O O -F B-ORG B-ORG -) O O -have O O -said O O -they O O -would O O -ask O O -parliament O O -next O O -week O O -to O O -order O O -Swiss B-MISC B-MISC -banks O O -to O O -put O O -some O O -40 O O -million O O -Swiss B-MISC B-MISC -f O O -( O O -$ O O -31 O O -million O O -) O O -in O O -dormant O O -wealth O O -into O O -a O O -fund O O -ear O O -for O O -Jewish B-MISC B-MISC -groups O O -and O O -charitable O O -organisations O O -. O O -But O O -Swiss B-MISC B-MISC -banks O O -and O O -the O O -country O O -' O O -Jewish B-MISC B-MISC -community O O -voiced O O -doubts O O -whether O O -the O O -plan O O -would O O -work O O -. O O -IN O B-MISC -SE O O -NO O O -B O O -97 O O -NE O O -R O O -. O O -Steven B-PER B-PER -Si I-PER I-PER -WA B-LOC B-LOC -1996 O O -Polish B-MISC B-MISC -br O O -Z B-PER B-ORG -' O O -1996 O O -profit O O -s O O -may O O -last O O -into O O -next O O -year O O -due O O -in O O -part O O -to O O -he O O -de O O -charges O O -, O O -but O O -recent O O -high O O -investment O O -should O O -help O O -the O O -firm O O -defend O O -its O O -10 O O -market O O -share O O -, O O -the O O -firm O O -' O O -chief O O -executive O O -said O O -. O O -Company O O -President O O -Jean B-PER B-PER -van I-PER I-PER -Box I-PER I-PER -told O O -Re B-ORG B-ORG -in O O -an O O -interview O O -on O O -Friday O O -that O O -the O O -firm O O -, O O -whose O O -net O O -profit O O -fell O O -77 O O -percent O O -in O O -the O O -first O O -10 O O -months O O -of O O -1996 O O -despite O O -a O O -30 O O -rise O O -in O O -sales O O -, O O -might O O -only O O -post O O -slightly O O -better O O -profits O O -in O O -1997 O O -before O O -having O O -a O O -chance O O -to O O -make O O -a O O -more O O -significant O O -turn O O -. O O -So O O -far O O -this O O -year O O -Z B-ORG B-ORG -, O O -whose O O -full O O -name O O -is O O -Z B-ORG B-ORG -Pi I-ORG I-ORG -w I-ORG I-ORG -Z I-ORG I-ORG -SA I-ORG I-ORG -, O O -has O O -net O O -six O O -million O O -z O O -on O O -sales O O -of O O -224 O O -million O O -z O O -. O O -It O O -has O O -produced O O -1 O O -million O O -he O O -. O O -Van B-PER B-PER -Box I-ORG I-PER -would O O -not O O -say O O -how O O -much O O -higher O O -1997 O O -profits O O -or O O -market O O -share O O -could O O -be O O -but O O -said O O -sales O O -of O O -leading O O -Polish B-MISC B-MISC -br O O -should O O -rise O O -as O O -the O O -country O O -' O O -young O O -urban O O -professionals O O -gradually O O -switch O O -from O O -vodka O O -to O O -beer O O -. 
O O -" O O -The O O -perspective O O -on O O -growth O O -is O O -such O O -that O O -reasonably O O -we O O -can O O -think O O -that O O -somewhere O O -between O O -65 O O -and O O -80 O O -litre O O -per O O -year O O -is O O -certainly O O -reach O O -, O O -" O O -van B-PER O -Box I-PER B-PER -said O O -on O O -Polish B-MISC B-MISC -per O O -beer O O -consumption O O -, O O -currently O O -around O O -40 O O -litre O O -. O O -He O O -said O O -the O O -65 O O -level O O -could O O -be O O -reached O O -in O O -the O O -next O O -ten O O -years O O -and O O -make O O -Poland B-LOC B-LOC -, O O -with O O -its O O -40 O O -population O O -, O O -Europe B-LOC B-LOC -' O O -third O O -largest O O -beer O O -market O O -after O O -Germany B-LOC B-LOC -and O O -Britain B-LOC B-LOC -. O O -Van B-PER B-PER -Box I-PER I-PER -said O O -Poland B-LOC B-LOC -' O O -top O O -five O O -br O O -, O O -which O O -produce O O -about O O -55 O O -percent O O -of O O -the O O -country O O -' O O -beer O O -, O O -could O O -all O O -raise O O -market O O -share O O -as O O -some O O -of O O -the O O -numerous O O -small O O -br O O -fall O O -to O O -competition O O -from O O -the O O -large O O -br O O -with O O -foreign O O -investors O O -. O O -Z B-ORG B-ORG -is O O -31 O O -owned O O -by O O -He B-ORG B-ORG -while O O -Carl B-ORG B-ORG -has O O -the O O -same O O -amount O O -in O O -Ok B-LOC B-ORG -. O O -Earlier O O -this O O -year O O -South B-ORG B-ORG -African I-ORG I-ORG -Brewer I-ORG I-ORG -Ltd I-ORG I-ORG -( O O -SA B-ORG B-ORG -) O O -bought O O -strategic O O -stakes O O -in O O -the O O -un O O -Le B-ORG B-ORG -and O O -Ty B-ORG B-ORG -br O O -, O O -which O O -together O O -hold O O -more O O -than O O -20 O O -percent O O -of O O -the O O -market O O -, O O -and O O -Australia B-LOC B-LOC -' O O -B B-ORG B-ORG -B I-ORG I-ORG -has O O -a O O -controlling O O -stake O O -in O O -Poland B-LOC B-LOC -' O O -large O O -t O O -brewery O O -, O O -El B-ORG B-ORG -Company I-ORG I-ORG -Ltd I-ORG I-ORG -( O O -E B-ORG B-ORG -) O O -. O O -Van B-PER B-PER -Box I-PER I-PER -said O O -the O O -tough O O -competition O O -had O O -prevented O O -Z B-PER B-ORG -from O O -raising O O -prices O O -in O O -line O O -with O O -inflation O O -, O O -which O O -had O O -added O O -to O O -the O O -pressure O O -on O O -the O O -firm O O -' O O -margins O O -. O O -He O O -said O O -advertising O O -costs O O -would O O -also O O -increase O O -in O O -the O O -fight O O -for O O -market O O -share O O -. O O -But O O -he O O -said O O -the O O -company O O -' O O -investment O O -of O O -more O O -than O O -$ O O -100 O O -million O O -already O O -this O O -decade O O -, O O -largely O O -in O O -production O O -, O O -would O O -help O O -position O O -it O O -to O O -compete O O -with O O -such O O -competitors O O -as O O -br O O -from O O -the O O -neighbouring O O -Czech B-LOC B-LOC -Republic I-LOC I-LOC -. O O -Some O O -analysts O O -say O O -cheaper O O -but O O -high O O -Czech B-MISC B-LOC -imports O O -could O O -invade O O -Poland B-LOC B-LOC -once O O -ta O O -for O O -CE B-ORG B-ORG -countries O O -are O O -lifted O O -in O O -1998 O O -, O O -but O O -van B-PER O -Box I-PER B-PER -says O O -such O O -a O O -threat O O -might O O -be O O -exaggerated O O -despite O O -the O O -Czech B-MISC B-LOC -beer O O -market O O -' O O -over O O -. 
O O -" O O -I O O -think O O -Polish B-MISC B-MISC -consumers O O -in O O -general O O -are O O -quite O O -proud O O -of O O -their O O -beers O O -- O O -and O O -I O O -' O O -speaking O O -about O O -all O O -the O O -brands O O -- O O -and O O -as O O -we O O -make O O -good O O -beers O O -. O O -I O O -think O O -that O O -this O O -fi O O -to O O -our O O -beers O O -is O O -a O O -factor O O -which O O -can O O -limit O O -the O O -Czech B-MISC B-LOC -beers O O -, O O -" O O -he O O -said O O -. O O -Van B-PER B-PER -Box I-PER I-PER -said O O -Z B-ORG B-ORG -had O O -its O O -eye O O -on O O -Ok B-ORG B-ORG -, O O -which O O -has O O -said O O -it O O -would O O -start O O -producing O O -Carl B-ORG B-ORG -beer O O -next O O -year O O -, O O -but O O -that O O -Z B-ORG B-ORG -' O O -potential O O -production O O -of O O -He B-ORG B-ORG -was O O -a O O -medium O O -possibility O O -rather O O -than O O -a O O -short O O -one O O -. O O -He O O -said O O -his O O -firm O O -would O O -be O O -better O O -off O O -concentrating O O -on O O -its O O -leading O O -brand O O -, O O -Z B-MISC B-ORG -Full I-MISC B-MISC -Light I-MISC I-MISC -, O O -which O O -accounts O O -for O O -85 O O -percent O O -of O O -sales O O -and O O -is O O -the O O -country O O -' O O -largest O O -brand O O -. O O -" O O -You O O -will O O -not O O -win O O -the O O -war O O -of O O -the O O -Polish B-MISC B-MISC -beer O O -market O O -with O O -imported O O -international O O -brands O O -, O O -" O O -van B-PER O -Box I-PER B-PER -said O O -, O O -adding O O -that O O -He B-ORG B-ORG -would O O -remain O O -an O O -up O O -import O O -in O O -Poland B-LOC B-LOC -. O O -Van B-PER B-PER -Box I-PER I-PER -also O O -said O O -Z B-ORG B-ORG -would O O -be O O -boost O O -by O O -its O O -recent O O -shed O O -of O O -soft O O -drinks O O -which O O -only O O -accounted O O -for O O -about O O -three O O -percent O O -of O O -the O O -firm O O -' O O -overall O O -sales O O -and O O -for O O -which O O -7 O O -million O O -z O O -in O O -provisions O O -had O O -already O O -been O O -made O O -. O O -- O O -Warsaw B-ORG B-ORG -News I-ORG I-ORG -+ O O -22 O O -65 O O -97 O O -H B-ORG B-PER -H O O -T O O -A O O -CO O O -W O O -. O O -PR B-LOC B-LOC -1996 O O -Doctors O O -performed O O -an O O -emergency O O -t O O -to O O -help O O -Czech B-MISC B-LOC -President O O -V B-PER B-PER -Have I-PER I-PER -breathe O O -after O O -cancer O O -surgery O O -on O O -his O O -lungs O O -earlier O O -this O O -week O O -, O O -a O O -spokesman O O -said O O -on O O -Friday O O -. O O -He O O -said O O -that O O -the O O -procedure O O -to O O -insert O O -a O O -device O O -into O O -Have B-PER B-PER -' O O -throat O O -, O O -done O O -after O O -his O O -breathing O O -worse O O -on O O -Thursday O O -, O O -had O O -helped O O -, O O -and O O -the O O -president O O -' O O -condition O O -significantly O O -improved O O -. O O -" O O -A O O -worse O O -in O O -the O O -president O O -' O O -lung O O -functions O O -took O O -place O O -yesterday O O -, O O -" O O -presidential O O -spokesman O O -La B-PER B-PER -Space I-PER I-PER -said O O -in O O -a O O -statement O O -. O O -" O O -A O O -t O O -was O O -performed O O -and O O -supportive O O -breathing O O -was O O -installed O O -through O O -the O O -help O O -of O O -a O O -breathing O O -device O O -, O O -" O O -he O O -said O O -. O O -" O O -After O O -these O O -steps O O -, O O -the O O -president O O -' O O -condition O O -sign O O -improved O O -. 
O O -" O O -Have B-PER B-PER -has O O -been O O -recovering O O -from O O -surgery O O -on O O -Monday O O -which O O -removed O O -a O O -small O O -ma O O -t O O -and O O -half O O -of O O -his O O -right O O -lung O O -. O O -Doctors O O -after O O -the O O -operation O O -said O O -that O O -they O O -had O O -caught O O -the O O -cancer O O -early O O -, O O -and O O -that O O -Have B-PER B-PER -could O O -fully O O -recover O O -from O O -the O O -surgery O O -within O O -six O O -weeks O O -. O O -His O O -spokesman O O -said O O -on O O -Thursday O O -that O O -Have B-PER B-PER -, O O -60 O O -and O O -a O O -heavy O O -smoke O O -, O O -had O O -also O O -developed O O -a O O -slight O O -case O O -of O O -pneumonia O O -in O O -the O O -left O O -lung O O -. O O -UK B-LOC B-MISC -open O O -skies O O -talks O O -end O O -, O O -no O O -date O O -to O O -restart O O -. O O -L B-LOC B-LOC -1996 O O -The O O -UK B-ORG B-LOC -Department I-ORG B-ORG -of I-ORG I-ORG -Transport I-ORG I-ORG -on O O -Friday O O -said O O -that O O -the O O -latest O O -round O O -of O O -" O O -open O O -skies O O -" O O -talks O O -with O O -the O O -U B-LOC B-LOC -had O O -ended O O -with O O -no O O -deal O O -on O O -liberal O O -the O O -trans O O -flight O O -market O O -and O O -no O O -date O O -set O O -for O O -when O O -talks O O -would O O -restart O O -. O O -A O O -spokesman O O -for O O -the O O -D B-ORG B-ORG -told O O -Re B-ORG B-ORG -" O O -We O O -have O O -had O O -talks O O -towards O O -concluding O O -a O O -new O O -air O O -service O O -agreement O O -which O O -would O O -produce O O -liberal O O -. O O -useful O O -progress O O -was O O -made O O -on O O -a O O -number O O -of O O -issues O O -, O O -but O O -not O O -all O O -. O O -No O O -date O O -has O O -been O O -set O O -for O O -further O O -talks O O -. O O -" O O -Tam B-ORG B-ORG -Tim I-ORG I-ORG -at O O -$ O O -15 O O -in O O -London B-LOC B-LOC -. O O -L B-LOC B-LOC -1996 O O -PT B-ORG B-ORG -Tam I-ORG I-ORG -Tim I-ORG I-ORG -closed O O -at O O -$ O O -15 O O -per O O -G B-ORG O -in O O -London B-LOC B-LOC -on O O -Friday O O -. O O -It O O -recorded O O -the O O -day O O -' O O -low O O -of O O -$ O O -15 O O -and O O -the O O -day O O -' O O -high O O -of O O -$ O O -15 O O -. O O -It O O -closed O O -at O O -$ O O -15 O O -on O O -Thursday O O -. O O -One O O -Global B-ORG O -De I-ORG O -Re I-ORG O -represents O O -10 O O -common O O -shares O O -. O O -- O O -Jakarta B-LOC B-LOC -news O O -+ O O -38 O O -Tel B-ORG B-ORG -at O O -$ O O -35 O O -in O O -London B-LOC B-LOC -. O O -L B-LOC B-LOC -1996 O O -PT B-ORG B-ORG -Tel I-ORG I-ORG -Indonesia I-ORG I-ORG -( O O -Tel B-ORG B-ORG -) O O -closed O O -at O O -$ O O -35 O O -in O O -London B-LOC B-LOC -on O O -Friday O O -. O O -It O O -recorded O O -the O O -day O O -' O O -low O O -of O O -$ O O -34 O O -and O O -the O O -day O O -' O O -high O O -of O O -$ O O -35 O O -. O O -Its O O -previous O O -close O O -on O O -Thursday O O -as O O -$ O O -35 O O -. O O -One O O -AD B-ORG O -represents O O -20 O O -ordinary O O -shares O O -- O O -Jakarta B-LOC B-LOC -news O O -+ O O -38 O O -. O O -Woman O O -charged O O -over O O -N B-LOC B-LOC -Ireland I-LOC I-LOC -arms O O -find O O -. 
O O -B B-LOC B-LOC -1996 O O -A O O -woman O O -was O O -charged O O -on O O -Friday O O -with O O -terrorist O O -offences O O -after O O -three O O -Irish B-ORG B-ORG -Republican I-ORG I-ORG -Army I-ORG I-ORG -mortar O O -bombs O O -were O O -found O O -in O O -a O O -Belfast B-LOC B-LOC -house O O -, O O -police O O -said O O -. O O -Police O O -said O O -the O O -bombs O O -were O O -found O O -hidden O O -with O O -in O O -and O O -ammunition O O -that O O -were O O -blocked O O -up O O -behind O O -a O O -kitchen O O -wall O O -. O O -The O O -35 O O -woman O O -was O O -charged O O -with O O -possession O O -of O O -explosives O O -with O O -intent O O -to O O -end O O -life O O -and O O -making O O -a O O -house O O -available O O -for O O -the O O -purpose O O -of O O -terrorism O O -, O O -police O O -said O O -. O O -She O O -will O O -appear O O -in O O -court O O -on O O -Saturday O O -. O O -Her O O -name O O -was O O -not O O -released O O -. O O -Security O O -forces O O -said O O -the O O -bombs O O -may O O -have O O -been O O -intended O O -for O O -use O O -in O O -a O O -pre O O -bombing O O -campaign O O -by O O -the O O -guerrilla O O -group O O -that O O -is O O -battling O O -to O O -ou O O -Britain B-LOC B-LOC -from O O -Northern B-LOC B-LOC -Ireland I-LOC I-LOC -. O O -Britain B-LOC B-LOC -sets O O -conditions O O -to O O -clear O O -American B-MISC B-MISC -alliance O O -. O O -Edna B-PER B-PER -Fe I-PER I-PER -L B-LOC B-LOC -1996 O O -The O O -British B-MISC B-MISC -government O O -warned O O -Friday O O -that O O -it O O -would O O -refer O O -the O O -proposed O O -trans B-MISC B-MISC -alliance O O -between O O -British B-ORG B-ORG -Airways I-ORG I-ORG -P I-ORG I-ORG -and O O -American B-ORG B-ORG -Airlines I-ORG I-ORG -to O O -Britain B-LOC B-LOC -' O O -Mon B-ORG B-ORG -and I-ORG I-ORG -Me I-ORG I-ORG -Commission I-ORG I-ORG -unless O O -the O O -carriers O O -com O O -with O O -a O O -number O O -of O O -conditions O O -. O O -Trade O B-ORG -and I-ORG I-ORG -Industry I-ORG I-ORG -Secretary O O -Ian B-PER B-PER -Lang I-PER I-PER -added O O -that O O -even O O -if O O -the O O -conditions O O -were O O -met O O -by O O -both O O -airlines O O -, O O -final O O -clearance O O -would O O -hi O O -on O O -an O O -open O O -skies O O -deal O O -between O O -Britain B-LOC B-LOC -and O O -the O O -United B-LOC B-LOC -States I-LOC I-LOC -to O O -liberal O O -trans B-MISC B-MISC -air O O -traffic O O -, O O -which O O -would O O -create O O -greater O O -competition O O -on O O -the O O -routes O O -. O O -Lang B-PER B-PER -said O O -he O O -supported O O -conditions O O -proposed O O -by O O -Britain B-LOC B-LOC -' O O -Office B-ORG B-ORG -of I-ORG I-ORG -Fair I-ORG I-ORG -Trading I-ORG I-ORG -, O O -which O O -was O O -asked O O -to O O -examine O O -the O O -case O O -last O O -month O O -. O O -" O O -I O O -agree O O -. O O -that O O -without O O -suitable O O -undertaking O O -the O O -alliance O O -would O O -be O O -likely O O -to O O -lead O O -to O O -a O O -significant O O -loss O O -of O O -actual O O -and O O -potential O O -passengers O O -, O O -on O O -those O O -routes O O -where O O -BA B-ORG B-ORG -and O O -AA B-ORG B-ORG -currently O O -compete O O -and O O -for O O -all O O -passengers O O -on O O -the O O -trans B-MISC B-MISC -market O O -route O O -between O O -the O O -UK B-LOC B-LOC -and O O -U B-LOC B-LOC -, O O -" O O -he O O -said O O -. 
O O -His O O -comments O O -came O O -just O O -minutes O O -after O O -the O O -latest O O -set O O -of O O -open O O -skies O O -talks O O -ended O O -in O O -London B-LOC B-LOC -with O O -no O O -deal O O -signed O O -. O O -Industry O O -sources O O -said O O -there O O -was O O -no O O -new O O -date O O -for O O -fresh O O -talks O O -and O O -blamed O O -the O O -dead O O -on O O -uncertainty O O -over O O -whether O O -the O O -British B-MISC B-MISC -Airways I-MISC I-MISC -deal O O -would O O -be O O -cleared O O -. O O -The O O -conditions O O -for O O -clearance O O -of O O -the O O -alliance O O -were O O -that O O -British B-ORG B-ORG -Airways I-ORG I-ORG -and O O -American B-ORG B-ORG -drop O O -168 O O -slots O O -at O O -London B-LOC B-LOC -Heath I-LOC I-LOC -airport O O -, O O -the O O -busiest O O -in O O -Europe B-LOC B-LOC -. O O -American B-MISC B-ORG -' O O -parent O O -, O O -AM B-ORG B-ORG -Corp I-ORG I-ORG -, O O -said O O -it O O -did O O -not O O -view O O -the O O -terms O O -as O O -a O O -" O O -deal O O -break O O -. O O -" O O -However O O -, O O -it O O -called O O -the O O -conditions O O -" O O -more O O -severe O O -" O O -than O O -those O O -imposed O O -by O O -other O O -regulatory O O -authorities O O -on O O -similar O O -airline O O -alliances O O -. O O -British B-ORG B-ORG -Airways I-ORG I-ORG -' O O -initial O O -response O O -was O O -that O O -" O O -un O O -dive O O -of O O -slots O O -is O O -unprecedented O O -and O O -if O O -done O O -it O O -must O O -be O O -on O O -the O O -basis O O -of O O -fair O O -market O O -value O O -. O O -" O O -It O O -added O O -that O O -it O O -would O O -be O O -" O O -prepared O O -to O O -take O O -reasonable O O -steps O O -to O O -assist O O -the O O -introduction O O -of O O -additional O O -competition O O -. O O -" O O -The O O -government O O -also O O -wants O O -British B-ORG B-ORG -Airways I-ORG I-ORG -to O O -drop O O -a O O -clause O O -in O O -its O O -agreement O O -with O O -USA B-ORG B-ORG -that O O -bars O O -it O O -from O O -competing O O -on O O -trans B-MISC B-MISC -routes O O -, O O -and O O -said O O -both O O -British B-ORG B-ORG -Airways I-ORG I-ORG -and O O -American B-ORG B-ORG -should O O -be O O -prepared O O -to O O -reduce O O -services O O -on O O -the O O -London B-LOC B-LOC -to O O -Dallas B-LOC B-LOC -Worth O I-LOC -route O O -in O O -the O O -event O O -that O O -a O O -new O O -en O O -wishes O O -to O O -enter O O -. O O -It O O -also O O -suggested O O -losing O O -some O O -slots O O -on O O -the O O -London B-LOC B-MISC -route O O -. O O -The O O -Office B-ORG B-ORG -of I-ORG I-ORG -Fair I-ORG I-ORG -Trade I-ORG I-ORG -called O O -for O O -British B-ORG B-ORG -Airways I-ORG I-ORG -/ O O -American B-ORG B-ORG -to O O -allow O O -third O O -access O O -to O O -their O O -joint O O -frequent O O -fly O O -programme O O -where O O -the O O -applicant O O -does O O -not O O -have O O -access O O -to O O -an O O -equivalent O O -programme O O -. O O -Lang B-PER B-PER -said O O -responses O O -should O O -be O O -made O O -to O O -the O O -Office B-ORG B-ORG -of I-ORG I-ORG -Fair I-ORG I-ORG -Trading I-ORG I-ORG -by O O -Jan O O -10 O O -, O O -1997 O O -. O O -Me O O -oil O O -products O O -mostly O O -lower O O -as O O -El B-MISC B-ORG -strike O O -ends O O -. 
O O -L B-LOC B-LOC -1996 O O -Mediterranean B-MISC O -oil O O -products O O -were O O -steady O O -to O O -mostly O O -lower O O -on O O -Friday O O -after O O -El B-ORG B-ORG -re O O -workers O O -voted O O -to O O -end O O -their O O -nine O O -strike O O -. O O -Gas O O -oil O O -erased O O -Thursday O O -' O O -gains O O -, O O -p O O -$ O O -5 O O -a O O -ton O O -in O O -line O O -with O O -the O O -screen O O -. O O -Volume O O -was O O -very O O -thin O O -and O O -market O O -remained O O -long O O -, O O -with O O -premium O O -down O O -$ O O -1 O O -at O O -about O O -high O O -c O O -quotes O O -+ O O -basis O O -Genoa B-LOC B-ORG -. O O -" O O -The O O -sharp O O -moves O O -on O O -the O O -screen O O -make O O -everyone O O -nervous O O -, O O -" O O -a O O -trader O O -said O O -. O O -Trade O O -were O O -discussed O O -in O O -0 O O -, O O -0 O O -and O O -one O O -percent O O -heating O O -oil O O -into O O -Syria B-LOC B-LOC -and O O -Lebanon B-LOC B-LOC -and O O -there O O -were O O -fresh O O -in O O -from O O -France B-LOC B-LOC -and O O -Spain B-LOC B-LOC -for O O -low O O -su O O -diesel O O -. O O -Interest O O -remains O O -focus O O -on O O -a O O -tender O O -by O O -India B-LOC B-LOC -for O O -a O O -second O O -purchase O O -of O O -high O O -speed O O -diesel O O -for O O -January O O -delivery O O -. O O -Fuel O O -oil O O -lost O O -ground O O -sharply O O -with O O -weaker O O -crude O O -, O O -but O O -also O O -suffered O O -from O O -some O O -pricing O O -pressure O O -. O O -High O O -su O O -cracked O O -fuel O O -lost O O -about O O -$ O O -3 O O -to O O -$ O O -109 O O -f O O -Me B-ORG O -with O O -several O O -cargo O O -threatening O O -to O O -over O O -the O O -market O O -. O O -The O O -chance O O -of O O -material O O -heading O O -north O O -, O O -talked O O -earlier O O -this O O -week O O -, O O -may O O -be O O -in O O -j O O -now O O -since O O -American B-MISC B-MISC -fuel O O -oil O O -is O O -expected O O -to O O -head O O -trans O O -following O O -out O O -at O O -two O O -co O O -units O O -in O O -the O O -U B-LOC B-LOC -. O O -Up O O -to O O -165 O O -tonnes O O -of O O -fuel O O -will O O -have O O -to O O -find O O -a O O -new O O -home O O -and O O -with O O -the O O -a O O -from O O -the O O -U B-LOC B-LOC -to O O -Europe B-LOC B-LOC -open O O -Rotterdam B-LOC B-LOC -is O O -a O O -prime O O -candidate O O -. O O -Low O O -su O O -prices O O -were O O -lower O O -with O O -c O O -Me B-MISC O -p O O -in O O -the O O -mid O O -to O O -low O O -$ O O -140 O O -. O O -Gas O O -prices O O -fell O O -after O O -striking O O -El B-ORG B-ORG -re O O -workers O O -voted O O -to O O -go O O -back O O -to O O -work O O -, O O -traders O O -said O O -. O O -But O O -an O O -open O O -a O O -to O O -the O O -U B-LOC B-LOC -and O O -tight O O -Italian B-MISC B-MISC -supplies O O -after O O -El B-PER B-ORG -scooped O O -up O O -Me B-MISC O -material O O -over O O -the O O -last O O -week O O -, O O -continued O O -to O O -under O O -prices O O -into O O -next O O -week O O -. O O -New O O -men O O -scare O O -hits O O -Britain B-LOC B-LOC -. O O -L B-LOC B-LOC -1996 O O -A O O -boy O O -has O O -died O O -from O O -men O O -and O O -a O O -girl O O -from O O -the O O -same O O -school O O -has O O -contracted O O -the O O -disease O O -in O O -the O O -second O O -such O O -scare O O -to O O -hit O O -Britain B-LOC B-LOC -in O O -as O O -many O O -weeks O O -, O O -health O O -authorities O O -said O O -on O O -Friday O O -. 
O O -The O O -16 O O -who O O -attended O O -Sale B-LOC B-ORG -Grammar I-LOC I-ORG -School I-LOC I-ORG -in O O -the O O -northern O O -England B-LOC B-LOC -city O O -of O O -Manchester B-LOC B-LOC -died O O -less O O -than O O -a O O -day O O -after O O -becoming O O -ill O O -. O O -The O O -15 O O -girl O O -is O O -also O O -suffering O O -from O O -the O O -disease O O -and O O -hospital O O -officials O O -described O O -her O O -condition O O -as O O -serious O O -. O O -" O O -At O O -the O O -moment O O -there O O -is O O -no O O -evidence O O -the O O -two O O -cases O O -are O O -linked O O -. O O -However O O -, O O -we O O -are O O -assuming O O -they O O -are O O -as O O -a O O -pre O O -for O O -the O O -time O O -being O O -, O O -" O O -a O O -spoke O O -said O O -. O O -The O O -more O O -than O O -1 O O -students O O -at O O -the O O -school O O -are O O -being O O -given O O -anti O O -as O O -a O O -pre O O -. O O -Wales B-LOC B-LOC -g O O -with O O -its O O -own O O -cluster O O -of O O -men O O -cases O O -on O O -a O O -university O O -campus O O -in O O -Cardiff B-LOC B-LOC -. O O -At O O -least O O -two O O -people O O -have O O -died O O -and O O -hundreds O O -have O O -been O O -v O O -in O O -an O O -effort O O -to O O -contain O O -the O O -virus O O -. O O -In O O -Scotland B-LOC B-LOC -, O O -eight O O -people O O -have O O -died O O -and O O -hundreds O O -more O O -are O O -fighting O O -a O O -widespread O O -food O O -outbreak O O -. O O -A O O -health O O -authority O O -spoke O O -said O O -78 O O -people O O -suspected O O -of O O -having O O -the O O -disease O O -, O O -including O O -64 O O -confirmed O O -cases O O -, O O -were O O -still O O -being O O -treated O O -. O O -Three O O -were O O -listed O O -in O O -poor O O -condition O O -. O O -More O O -than O O -290 O O -people O O -have O O -reported O O -symptoms O O -in O O -Lana B-LOC B-LOC -county O O -, O O -the O O -worst O O -area O O -, O O -since O O -the O O -outbreak O O -first O O -came O O -to O O -light O O -after O O -people O O -ate O O -ta O O -meat O O -pie O O -at O O -a O O -pension O O -' O O -lunch O O -. O O -Major B-PER B-PER -' O O -office O B-MISC -still O O -have O O -majority O O -. O O -L B-LOC B-LOC -1996 O O -British B-MISC B-MISC -Prime O O -Minister O O -John B-PER B-PER -Major I-PER I-PER -' O O -office O O -said O O -on O O -Friday O O -that O O -rebel O O -Conservative B-MISC B-MISC -MP O O -Sir O O -John B-PER B-PER -Go I-PER I-PER -had O O -not O O -" O O -resigned O O -the O O -whip O O -" O O -( O O -quit O O -the O O -parliamentary O O -party O O -) O O -and O O -the O O -government O O -still O O -had O O -a O O -majority O O -in O O -the O O -65 O O -parliament O O -. O O -" O O -He O O -( O O -Go B-PER B-PER -) O O -is O O -the O O -right O O -not O O -to O O -cooperate O O -, O O -but O O -he O O -has O O -not O O -resigned O O -the O O -whip O O -. O O -The O O -government O O -still O O -has O O -a O O -majority O O -, O O -" O O -a O O -spokesman O O -from O O -Major B-PER B-PER -' O O -office O O -in O O -Down B-LOC B-LOC -Street I-LOC I-LOC -said O O -. O O -Go B-PER B-PER -' O O -office O O -said O O -later O O -the O O -MP O O -would O O -not O O -feel O O -himself O O -obliged O O -to O O -vote O O -with O O -the O O -government O O -. 
O O -He O O -said O O -at O O -one O O -point O O -during O O -a O O -press O O -conference O O -: O O -" O O -I O O -have O O -seen O O -my O O -whip O O -( O O -party O O -manager O O -) O O -for O O -next O O -week O O -which O O -, O O -of O O -course O O -, O O -does O O -n O O -mean O O -very O O -much O O -to O O -me O O -now O O -. O O -" O O -Before O O -Go B-PER B-PER -' O O -statement O O -, O O -Major B-PER B-PER -had O O -a O O -one O O -majority O O -in O O -the O O -65 O O -House B-ORG B-ORG -of I-ORG I-ORG -Commons I-ORG I-ORG -lower O O -house O O -of O O -parliament O O -. O O -In O O -his O O -formal O O -statement O O -, O O -Go B-PER B-PER -said O O -: O O -" O O -I O O -am O O -today O O -withdrawing O O -my O O -cooperation O O -from O O -the O O -government O O -and O O -shall O O -not O O -treat O O -the O O -" O O -whip O O -' O O -as O O -either O O -a O O -summon O O -to O O -attend O O -the O O -House B-ORG B-ORG -of I-ORG I-ORG -Commons I-ORG I-ORG -or O O -as O O -placing O O -me O O -under O O -any O O -obligation O O -to O O -vote O O -as O O -advised O O -. O O -" O O -Go B-PER B-PER -resigned O O -over O O -a O O -hospital O O -closure O O -in O O -his O O -constituency O O -. O O -Electronic B-ORG B-ORG -Data I-ORG I-ORG -bags O O -flight O O -data O O -contract O O -. O O -L B-LOC B-LOC -1996 O O -Information O O -technology O O -firm O O -Electronic B-ORG B-ORG -Data I-ORG I-ORG -Systems I-ORG I-ORG -said O O -on O O -Friday O O -it O O -had O O -bag O O -a O O -contract O O -for O O -the O O -first O O -air O O -traffic O O -control O O -project O O -being O O -funded O O -under O O -the O O -Private B-ORG O -Finance I-ORG O -Initiative I-ORG O -. O O -In O O -a O O -statement O O -, O O -E B-ORG B-ORG -said O O -the O O -contract O O -would O O -be O O -in O O -the O O -region O O -of O O -50 O O -million O O -s O O -. O O -The O O -contract O O -involved O O -up O O -the O O -flight O O -data O O -processing O O -system O O -at O O -the O O -Ocean B-LOC B-LOC -Control I-ORG I-LOC -Centre I-LOC I-LOC -in O O -Pre B-LOC B-LOC -in O O -south O O -west O O -Scotland B-LOC B-LOC -for O O -National B-ORG B-ORG -Air I-ORG I-ORG -Traffic I-ORG I-ORG -Services I-ORG I-ORG -Ltd I-ORG I-ORG -( O O -N B-ORG B-ORG -) O O -, O O -subsidiary O O -of O O -the O O -Civil B-ORG B-ORG -Aviation I-ORG I-ORG -Authority I-ORG I-ORG -. O O -The O O -system O O -is O O -responsible O O -for O O -the O O -control O O -of O O -aircraft O O -flying O O -trans O O -routes O O -from O O -Europe B-LOC B-LOC -and O O -North B-LOC B-LOC -America I-LOC I-LOC -. O O -The O O -system O O -, O O -which O O -would O O -use O O -satellite O O -technology O O -, O O -is O O -scheduled O O -to O O -enter O O -service O O -in O O -2000 O O -. O O -- O O -London B-ORG B-ORG -News I-ORG I-ORG -+ O O -77 O O -R B-ORG B-ORG -- O O -Cricket O O -- O O -Play O O -restart O O -in O O -Australia B-LOC B-MISC -Indies I-LOC I-MISC -match O O -. O O -ME B-LOC B-LOC -1996 O O -Play O O -restart O O -in O O -the O O -first O O -World B-MISC B-MISC -Series I-MISC I-MISC -limited O O -overs O O -match O O -between O O -West B-LOC B-LOC -Indies I-LOC I-LOC -and O O -Australia B-LOC B-LOC -after O O -a O O -rain O O -delay O O -of O O -50 O O -minutes O O -on O O -Friday O O -. 
O O -West B-LOC B-LOC -Indies I-LOC I-LOC -resumed O O -their O O -innings O O -on O O -53 O O -for O O -two O O -with O O -opener O O -She B-PER B-PER -Campbell I-PER I-PER -on O O -25 O O -and O O -Shi B-PER B-PER -Chan I-PER I-PER -10 O O -. O O -Rain O O -earlier O O -delayed O O -the O O -start O O -of O O -play O O -by O O -30 O O -minutes O O -. O O -- O O -Sydney B-ORG B-ORG -News I-ORG I-ORG -61 O O -93 O O -Cricket O O -- O O -Pakistan B-LOC B-LOC -beat O O -New B-LOC B-LOC -Zealand I-LOC I-LOC -by O O -46 O O -runs O O -. O O -S B-LOC B-LOC -, O O -Pakistan B-LOC B-LOC -1996 O O -Pakistan B-LOC B-LOC -beat O O -New B-LOC B-LOC -Zealand I-LOC I-LOC -by O O -46 O O -runs O O -on O O -Friday O O -to O O -take O O -an O O -un O O -2 O O -lead O O -in O O -the O O -three O O -one O O -series O O -. O O -Score O O -: O O -Pakistan B-LOC B-LOC -27 O O -, O O -New B-LOC B-LOC -Zealand I-LOC I-LOC -231 O O -Manitoba B-ORG B-ORG -Po I-ORG I-ORG -forward O O -contract O O -PM O O -prices O O -- O O -Dec O O -6 O O -. O O -W B-LOC B-LOC -1996 O O -Manitoba B-ORG B-ORG -Po I-ORG I-ORG -closing O O -forward O O -contract O O -prices O O -in O O -Canadian B-MISC B-MISC -dollars O O -per O O -hundred O O -lbs O O -( O O -C B-MISC O -) O O -for O O -Dec O O -6 O O -including O O -minimum O O -guaranteed O O -price O O -- O O -CO O O -PR O O -C O O -PM O O -C O O -PM O O -C O O -RA O O -D O O -PM O O -C O O -F O O -MI O O -AT O O -123 O O -CS O O -Feb O O -97 O O -79 O O -79 O O -75 O O -77 O O -Mar O O -97 O O -76 O O -76 O O -72 O O -73 O O -Apr O O -97 O O -74 O O -74 O O -( O O -( O O -Winnipeg B-LOC B-LOC -bureau O O -204 O O -) O O -) O O -Canadian B-ORG B-MISC -West I-ORG O -Coast I-ORG O -V I-ORG O -Lo I-ORG O -- O O -CW B-ORG O -. O O -W B-LOC B-LOC -1996 O O -The O O -Canadian B-ORG B-ORG -W I-ORG I-ORG -Board I-ORG I-ORG -reported O O -six O O -ships O O -loading O O -, O O -10 O O -waiting O O -and O O -four O O -due O O -at O O -the O O -Canadian B-LOC B-MISC -West I-LOC O -Coast I-LOC O -, O O -as O O -of O O -Friday O O -. O O -The O O -longest O O -wait O O -to O O -load O O -on O O -the O O -West B-LOC O -Coast I-LOC O -was O O -13 O O -days O O -. O O -Two O O -ship O O -loaded O O -in O O -Thunder B-LOC B-LOC -Bay I-LOC I-LOC -, O O -one O O -waited O O -and O O -seven O O -were O O -due O O -. O O -Two O O -ships O O -loaded O O -on O O -the O O -East B-LOC O -Coast I-LOC O -, O O -three O O -waited O O -to O O -load O O -, O O -six O O -were O O -due O O -. O O -Port B-ORG O -Lo I-ORG O -Waiting O O -Vancouver B-ORG B-LOC -5 O O -7 O O -Prince B-ORG B-LOC -Rupert I-ORG I-LOC -1 O O -3 O O -( O O -( O O -Gilbert B-PER B-PER -Le I-PER I-PER -G I-PER I-PER -204 O O -94 O O -35 O O -) O O -) O O -New B-LOC B-LOC -York I-LOC I-LOC -time O O -fixtures O O -- O O -Dec O O -6 O O -. O O -NE B-LOC B-LOC -Y I-LOC I-LOC -1996 O O -No O O -new O O -fixtures O O -reported O O -from O O -New B-LOC B-LOC -York I-LOC I-LOC -. O O -- O O -New B-ORG B-ORG -York I-ORG I-ORG -Co I-ORG I-ORG -Des I-ORG I-ORG -+ O O -212 O O -85 O O -1640 O O -New B-LOC B-LOC -York I-LOC I-LOC -coal O O -/ O O -ore O O -/ O O -scrap O O -fixtures O O -- O O -Dec O O -6 O O -. O O -NE B-LOC B-LOC -Y I-LOC I-LOC -1996 O O -OR B-ORG O -- O O -Maritime B-ORG B-MISC -Queen I-ORG I-MISC -70 O O -tonnes O O -Dam B-ORG B-LOC -/ O O -Ka B-LOC B-LOC -20 O O -$ O O -5 O O -fi O O -35 O O -/ O O -30 O O -China B-ORG B-ORG -Steel I-ORG I-ORG -. 
O O -- O O -New B-ORG B-ORG -York I-ORG I-ORG -Co I-ORG I-ORG -Des I-ORG O -+ O O -212 O O -85 O O -1640 O O -Clean O O -tanker O O -fixtures O O -and O O -en O O -- O O -232 O O -GM B-ORG B-MISC -. O O -NE B-LOC B-LOC -Y I-LOC I-LOC -1996 O O -F O O -- O O -W B-LOC O -H O O -- O O -Dani B-PER B-MISC -28 O O -16 O O -Car B-ORG B-LOC -/ O O -up O O -W O O -Mo B-ORG B-ORG -. O O -- O O -New B-ORG B-ORG -York I-ORG I-ORG -Co I-ORG I-ORG -Des I-ORG O -, O O -212 O O -Dirty O O -tanker O O -fixtures O O -and O O -en O O -- O O -231 O O -GM B-ORG B-MISC -. O O -NE B-LOC B-LOC -Y I-LOC I-LOC -1996 O O -MI B-LOC O -/ O O -R B-LOC B-LOC -SE I-LOC I-LOC -- O O -Thai B-MISC B-MISC -Resource O I-MISC -264 O O -31 O O -Ra B-LOC B-LOC -Tan I-LOC I-LOC -/ O O -Red B-LOC B-LOC -Sea I-LOC I-LOC -W O O -Mo B-ORG B-ORG -. O O -ME O B-MISC -- O O -Lu B-ORG B-MISC -I I-ORG I-MISC -85 O O -25 O O -Sid B-ORG B-LOC -K I-LOC I-LOC -/ O O -Augusta B-LOC B-LOC -W O O -Ex B-ORG B-ORG -. O O -S B-ORG B-MISC -139 O O -17 O O -Sid B-ORG B-LOC -K I-LOC I-LOC -/ O O -Augusta B-LOC B-LOC -W O O -Ex B-ORG B-ORG -. O O -Me B-ORG B-MISC -77 O O -17 O O -Baja B-LOC B-LOC -/ O O -F B-LOC B-LOC -W O O -Ex B-ORG B-ORG -. O O -- O O -New B-ORG B-ORG -York I-ORG I-ORG -Co I-ORG I-ORG -Des I-ORG I-ORG -+ O O -212 O O -85 O O -1640 O O -NYC B-ORG B-MISC -Jan O O -re O O -has O O -its O O -1st O O -Euro B-MISC B-MISC -floating O O -rate O O -. O O -NE B-LOC B-LOC -Y I-LOC I-LOC -1996 O O -New B-LOC B-LOC -York I-LOC I-LOC -City I-LOC I-LOC -on O O -Friday O O -said O O -that O O -it O O -planned O O -a O O -$ O O -77 O O -million O O -re O O -for O O -January O O -that O O -will O O -include O O -its O O -first O O -floating O O -rate O O -issue O O -of O O -taxa O O -debt O O -for O O -European B-MISC B-MISC -investors O O -. O O -A O O -city O O -official O O -, O O -who O O -declined O O -to O O -be O O -named O O -, O O -explained O O -that O O -Goldman B-ORG B-ORG -, I-ORG I-ORG -Sachs B-ORG I-ORG -, O O -which O O -this O O -summer O O -was O O -demo O O -to O O -the O O -second O O -tier O O -of O O -the O O -s O O -, O O -proposed O O -the O O -floating O O -rate O O -issue O O -and O O -as O O -a O O -result O O -was O O -promoted O O -to O O -book O O -runner O O -for O O -this O O -offering O O -. O O -By O O -selling O O -the O O -floating O O -rate O O -debt O O -, O O -the O O -city O O -hopes O O -to O O -establish O O -a O O -bench O O -, O O -the O O -city O O -official O O -said O O -, O O -adding O O -that O O -it O O -needed O O -a O O -large O O -deal O O -to O O -accomplish O O -this O O -objective O O -. O O -The O O -city O O -in O O -late O O -June O O -sold O O -its O O -first O O -issue O O -of O O -Euro B-MISC B-MISC -, O O -a O O -strategy O O -that O O -it O O -says O O -saved O O -it O O -$ O O -500 O O -in O O -interest O O -costs O O -, O O -and O O -it O O -has O O -been O O -trying O O -to O O -build O O -on O O -this O O -strategy O O -of O O -expanding O O -the O O -pool O O -of O O -potential O O -investors O O -since O O -then O O -. O O -In O O -November O O -, O O -New B-LOC B-LOC -York I-LOC I-LOC -City I-LOC I-LOC -said O O -it O O -became O O -the O O -first O O -U B-LOC B-LOC -municipality O O -to O O -offer O O -bonds O O -for O O -sale O O -in O O -European B-MISC B-MISC -markets O O -by O O -competitive O O -bidding O O -as O O -it O O -listed O O -taxa O O -bonds O O -on O O -the O O -London B-ORG B-ORG -Stock I-ORG I-ORG -Exchange I-ORG I-ORG -. 
O O -The O O -re O O -planned O O -for O O -January O O -also O O -includes O O -a O O -$ O O -47 O O -million O O -tax O O -offering O O -. O O -No O O -specific O O -date O O -in O O -January O O -has O O -been O O -selected O O -for O O -the O O -debt O O -sale O O -, O O -the O O -official O O -added O O -. O O -- O O -Joan B-PER B-PER -G I-PER I-PER -, O O -212 O O -USD B-ORG B-ORG -gross O O -cut O O -hide O O -and O O -off O O -value O O -. O O -DE B-LOC B-LOC -M I-LOC I-LOC -1996 O O -The O O -hide O O -and O O -off O O -value O O -from O O -a O O -typical O O -slaughter O O -steer O O -for O O -Friday O O -was O O -estimated O O -at O O -$ O O -9 O O -per O O -c O O -live O O -, O O -d O O -0 O O -when O O -compared O O -to O O -Thursday O O -' O O -value O O -. O O -- O O -USD B-ORG B-ORG -Wall B-ORG B-LOC -St I-ORG I-LOC -s O O -about O O -Santa B-LOC B-LOC -Fe I-LOC I-LOC -sa O O -. O O -Brendan B-PER B-PER -In I-PER I-PER -NE B-LOC B-LOC -Y I-LOC I-LOC -1996 O O -Homes B-ORG B-ORG -Mining I-ORG I-ORG -Co I-ORG I-ORG -tops O O -Wall B-LOC B-LOC -Street I-LOC I-LOC -' O O -list O O -as O O -the O O -most O O -likely O O -white O O -knight O O -buyer O O -for O O -Santa B-ORG B-ORG -Fe I-ORG I-ORG -Pacific I-ORG I-ORG -Gold I-ORG I-ORG -Corp I-ORG I-ORG -if O O -Santa B-LOC B-ORG -Fe I-LOC I-ORG -rejects O O -un O O -suit O O -New B-ORG B-ORG -Mining I-ORG I-ORG -Corp I-ORG I-ORG -. O O -Santa B-LOC B-ORG -Fe I-LOC I-ORG -is O O -so O O -far O O -mum O O -on O O -the O O -more O O -than O O -$ O O -2 O O -billion O O -stock O O -swap O O -takeover O O -proposal O O -from O O -New B-ORG B-ORG -, O O -announced O O -Thursday O O -. O O -Wall B-LOC B-LOC -Street I-LOC I-LOC -, O O -since O O -the O O -bid O O -, O O -has O O -speculated O O -that O O -any O O -deal O O -between O O -New B-LOC B-ORG -and O O -Santa B-LOC B-ORG -Fe I-LOC I-ORG -would O O -be O O -a O O -" O O -bear O O -hug O O -, O O -" O O -or O O -a O O -reluctantly O O -negotiated O O -agreement O O -where O O -the O O -buyer O O -is O O -not O O -necessarily O O -a O O -friendly O O -suit O O -. O O -New B-ORG B-ORG -said O O -the O O -companies O O -have O O -had O O -previous O O -contact O O -, O O -though O O -declined O O -to O O -detail O O -the O O -encounters O O -. O O -Ana O O -predict O O -Santa B-LOC B-ORG -Fe I-LOC I-ORG -will O O -go O O -to O O -the O O -highest O O -bid O O -, O O -and O O -that O O -if O O -a O O -rival O O -buyer O O -is O O -found O O -, O O -New B-ORG B-ORG -may O O -not O O -be O O -able O O -to O O -match O O -its O O -offer O O -. O O -They O O -said O O -the O O -Santa B-LOC B-ORG -Fe I-LOC I-ORG -deal O O -, O O -which O O -includes O O -desirable O O -Nevada B-LOC B-LOC -mining O O -territory O O -, O O -would O O -only O O -pay O O -for O O -New B-PER B-ORG -longer O O -term O O -. O O -New B-ORG B-ORG -, O O -in O O -fact O O -, O O -will O O -not O O -benefit O O -from O O -the O O -Santa B-LOC B-ORG -Fe I-LOC I-ORG -acquisition O O -on O O -an O O -earnings O O -basis O O -for O O -at O O -least O O -two O O -years O O -, O O -which O O -also O O -limits O O -its O O -capacity O O -to O O -raise O O -its O O -offer O O -. O O -Any O O -deal O O -, O O -friendly O O -or O O -hostile O O -, O O -would O O -almost O O -assured O O -be O O -a O O -stock O O -swap O O -, O O -which O O -is O O -necessary O O -to O O -preserve O O -the O O -tax O O -, O O -pool O O -accounting O O -, O O -they O O -said O O -. 
O O -Ana O O -and O O -a O O -immediately O O -ruled O O -out O O -Barr B-ORG B-ORG -Gold I-ORG I-ORG -Corp I-ORG I-ORG -and O O -B B-ORG B-ORG -Mine I-ORG I-ORG -Ltd I-ORG I-ORG -as O O -Santa B-MISC B-ORG -Fe I-ORG I-ORG -sa O O -because O O -they O O -are O O -locked O O -in O O -negotiations O O -over O O -their O O -splitting O O -Indonesia B-LOC B-LOC -' O O -Bus B-LOC B-ORG -vast O O -gold O O -deposit O O -. O O -Place B-ORG B-ORG -Dome I-ORG I-ORG -Inc I-ORG I-ORG -too O O -was O O -considered O O -un O O -because O O -it O O -is O O -focusing O O -on O O -geographic O O -expansion O O -in O O -areas O O -that O O -do O O -match O O -Santa B-LOC B-ORG -Fe I-LOC I-ORG -' O O -Nevada B-LOC B-LOC -, O O -South B-LOC B-LOC -America I-LOC I-LOC -and O O -Central B-LOC B-LOC -Asia I-LOC I-LOC -presence O O -, O O -they O O -said O O -. O O -A O O -Homes B-ORG B-ORG -spokesman O O -was O O -not O O -immediately O O -available O O -to O O -comment O O -on O O -speculation O O -that O O -it O O -tops O O -the O O -list O O -. O O -Homes B-ORG B-ORG -, O O -based O O -in O O -San B-LOC B-LOC -Francisco I-LOC I-LOC -, O O -operates O O -gold O O -mines O O -in O O -the O O -United B-LOC B-LOC -States I-LOC I-LOC -, O O -Australia B-LOC B-LOC -, O O -Chile B-LOC B-LOC -and O O -Canada B-LOC B-LOC -. O O -E O O -in O O -1995 O O -were O O -$ O O -0 O O -per O O -share O O -, O O -or O O -$ O O -30 O O -million O O -, O O -on O O -revenues O O -of O O -$ O O -74 O O -million O O -. O O -Santa B-ORG B-ORG -Fe I-ORG I-ORG -is O O -headquartered O O -Albuquerque B-LOC B-LOC -, O O -N B-LOC B-LOC -and O O -reported O O -1995 O O -earnings O O -of O O -$ O O -0 O O -per O O -share O O -, O O -or O O -$ O O -40 O O -million O O -, O O -on O O -revenues O O -of O O -$ O O -350 O O -million O O -. O O -Santa B-ORG B-ORG -Fe I-ORG I-ORG -has O O -mining O O -and O O -exploration O O -operations O O -in O O -Nevada B-LOC B-LOC -, O O -California B-LOC B-LOC -, O O -Montana B-LOC B-LOC -, O O -Canada B-LOC B-LOC -, O O -Brazil B-LOC B-LOC -, O O -Australia B-LOC B-LOC -, O O -Chile B-LOC B-LOC -, O O -Ka B-LOC B-LOC -, O O -Mexico B-LOC B-LOC -and O O -Ghana B-LOC B-LOC -. O O -Pain B-ORG B-ORG -analyst O O -Marc B-PER B-PER -Cohen I-PER I-PER -said O O -he O O -lowered O O -his O O -rating O O -on O O -New B-ORG B-ORG -to O O -neutral O O -from O O -attractive O O -today O O -because O O -if O O -New B-ORG B-ORG -merged O O -with O O -Santa B-LOC B-ORG -Fe I-LOC I-ORG -, O O -investors O O -would O O -have O O -to O O -wait O O -until O O -the O O -second O O -half O O -of O O -1998 O O -to O O -realize O O -earnings O O -a O O -. O O -" O O -I O O -think O O -Homes B-PER B-ORG -could O O -come O O -in O O -as O O -a O O -white O O -knight O O -, O O -but O O -how O O -much O O -is O O -someone O O -willing O O -to O O -come O O -in O O -above O O -the O O -New B-LOC B-ORG -number O O -. O O -One O O -would O O -have O O -to O O -out O O -by O O -at O O -least O O -15 O O -percent O O -, O O -but O O -there O O -is O O -going O O -to O O -be O O -a O O -( O O -Santa B-LOC B-ORG -Fe I-LOC I-ORG -) O O -deal O O -with O O -someone O O -, O O -" O O -he O O -said O O -. O O -" O O -Long O O -term O O -, O O -two O O -to O O -three O O -years O O -out O O -, O O -( O O -a O O -New B-ORG B-MISC -Fe O I-MISC -deal O O -) O O -is O O -positive O O -, O O -it O O -does O O -all O O -the O O -right O O -things O O -. 
O O -But O O -in O O -the O O -near O O -it O O -is O O -, O O -at O O -worst O O -, O O -neutral O O -, O O -" O O -the O O -analyst O O -added O O -. O O -New B-ORG B-ORG -proposed O O -to O O -Santa B-LOC B-LOC -Fe I-LOC I-LOC -a O O -stock O O -merger O O -at O O -a O O -ratio O O -of O O -0 O O -New B-ORG B-ORG -shares O O -for O O -each O O -Santa B-LOC B-ORG -Fe I-LOC I-ORG -shares O O -. O O -In O O -Friday O O -New B-ORG B-ORG -York I-ORG I-ORG -Stock I-ORG I-ORG -Exchange I-ORG I-ORG -trade O O -, O O -New B-PER B-ORG -was O O -off O O -1 O O -to O O -46 O O -while O O -Santa B-LOC B-ORG -Fe I-LOC I-ORG -added O O -1 O O -to O O -15 O O -. O O -" O O -New B-ORG B-ORG -said O O -it O O -wants O O -to O O -discuss O O -a O O -friendly O O -deal O O -with O O -Santa B-LOC B-ORG -Fe I-LOC I-ORG -, O O -which O O -is O O -almost O O -always O O -a O O -e O O -for O O -' O O -We O O -have O O -more O O -money O O -in O O -our O O -pocket O O -, O O -' O O -" O O -said O O -an O O -a O O -, O O -referring O O -to O O -a O O -possible O O -sweet O O -bid O O -from O O -New B-PER B-ORG -. O O -Two O O -other O O -a O O -called O O -New B-PER B-ORG -' O O -move O O -a O O -" O O -a O O -32 O O -cent O O -bid O O -" O O -because O O -there O O -is O O -no O O -formal O O -tender O O -offer O O -, O O -only O O -the O O -proposal O O -letter O O -" O O -mail O O -" O O -to O O -Santa B-ORG B-ORG -Fe I-LOC I-ORG -' O O -board O O -. O O -- O O -Wall B-ORG B-ORG -Street I-ORG I-ORG -Des I-ORG I-ORG -, O O -212 O O -. O O -Russ B-PER B-ORG -Be I-PER I-ORG -president O O -to O O -retire O O -in O O -July O O -. O O -O B-LOC B-LOC -, O O -N B-LOC B-LOC -1996 O O -Russ B-ORG B-ORG -Be I-ORG I-ORG -and I-ORG I-ORG -Co I-ORG I-ORG -Inc I-ORG I-ORG -said O O -on O O -Friday O O -that O O -A B-PER B-PER -C I-PER I-PER -Cooke I-PER I-PER -will O O -retire O O -as O O -president O O -and O O -chief O O -operating O O -officer O O -effective O O -July O O -1 O O -, O O -1997 O O -. O O -Cooke B-PER B-PER -will O O -provide O O -consulting O O -services O O -to O O -the O O -company O O -through O O -July O O -1 O O -, O O -1998 O O -, O O -and O O -will O O -continue O O -to O O -serve O O -as O O -a O O -director O O -, O O -the O O -toy O O -and O O -gift O O -maker O O -said O O -. O O -Zimbabwe B-LOC B-LOC -execute O O -convicted O O -murderer O O -. O O -H B-LOC B-LOC -1996 O O -Zimbabwe B-LOC B-LOC -hanged O O -a O O -convicted O O -murderer O O -on O O -Friday O O -, O O -bringing O O -to O O -eight O O -the O O -number O O -of O O -executions O O -carried O O -out O O -in O O -the O O -past O O -year O O -. O O -A O O -statement O O -said O O -Pi B-PER B-PER -Sin I-PER I-PER -N I-PER I-PER -was O O -hanged O O -at O O -dawn O O -. O O -President O O -Robert B-PER B-PER -Mu I-PER I-PER -' O O -government O O -has O O -resisted O O -pressure O O -from O O -local O O -and O O -international O O -human O O -rights O O -groups O O -to O O -a O O -the O O -death O O -sentence O O -. O O -Multi O O -commander O O -going O O -back O O -to O O -east O O -Z B-LOC B-LOC -. 
O O -Jonathan B-PER B-PER -Wright I-PER I-PER -N B-LOC B-LOC -1996 O O -The O O -Canadian B-MISC B-MISC -general O O -in O O -charge O O -of O O -a O O -multinational O O -force O O -for O O -eastern O O -Z B-LOC B-LOC -said O O -on O O -Friday O O -he O O -was O O -going O O -back O O -to O O -Z B-LOC B-LOC -for O O -more O O -information O O -about O O -the O O -p O O -of O O -about O O -165 O O -Rwanda B-MISC B-MISC -refugees O O -ad O O -in O O -the O O -countryside O O -. O O -Lieutenant O O -Maurice B-PER B-PER -Bari I-PER I-PER -told O O -a O O -news O O -conference O O -in O O -Nairobi B-LOC B-LOC -his O O -main O O -concern O O -was O O -for O O -a O O -large O O -group O O -of O O -about O O -150 O O -refugees O O -living O O -off O O -the O O -land O O -in O O -a O O -valley O O -about O O -65 O O -km O O -( O O -40 O O -miles O O -) O O -west O O -of O O -the O O -eastern O O -city O O -of O O -Go B-LOC B-LOC -. O O -If O O -he O O -decided O O -it O O -was O O -necessary O O -and O O -safe O O -for O O -the O O -air O O -, O O -he O O -would O O -not O O -hesitate O O -to O O -order O O -air O O -of O O -food O O -for O O -the O O -refugees O O -, O O -even O O -against O O -the O O -wishes O O -of O O -the O O -government O O -in O O -Ki B-LOC B-LOC -and O O -the O O -Z B-MISC B-MISC -rebels O O -who O O -control O O -much O O -of O O -eastern O O -Z B-LOC B-LOC -, O O -he O O -said O O -. O O -" O O -Tomorrow O O -I O O -' O O -going O O -into O O -Rwanda B-LOC B-LOC -and O O -my O O -intention O O -is O O -to O O -go O O -across O O -into O O -eastern O O -Z B-LOC B-LOC -and O O -try O O -to O O -find O O -out O O -for O O -the O O -second O O -time O O -what O O -the O O -situation O O -is O O -on O O -the O O -ground O O -, O O -" O O -he O O -said O O -. O O -General O O -Bari B-PER B-PER -saw O O -rebel O O -leader O O -Laurent B-PER B-PER -Ka I-PER I-PER -in O O -Go B-LOC B-LOC -last O O -week O O -but O O -the O O -rebels O O -told O O -him O O -the O O -crisis O O -was O O -over O O -because O O -most O O -of O O -the O O -Rwanda B-MISC B-MISC -refugees O O -have O O -already O O -gone O O -home O O -. O O -The O O -rebels O O -do O O -not O O -want O O -the O O -multinational O O -force O O -to O O -deploy O O -on O O -the O O -ground O O -, O O -for O O -fear O O -it O O -might O O -help O O -the O O -Z B-MISC B-MISC -army O O -regain O O -control O O -of O O -the O O -area O O -. O O -Ki B-LOC B-LOC -oppose O O -air O O -, O O -apparently O O -because O O -the O O -food O O -could O O -fall O O -into O O -the O O -hands O O -of O O -the O O -rebels O O -and O O -their O O -local O O -supporters O O -. O O -Canadian B-MISC B-MISC -Defence O O -Minister O O -Doug B-PER B-PER -Young I-PER I-PER -said O O -on O O -Thursday O O -that O O -the O O -multinational O O -force O O -would O O -probably O O -not O O -have O O -to O O -make O O -food O O -air O O -or O O -intervene O O -mi O O -in O O -any O O -major O O -way O O -. O O -" O O -It O O -does O O -n O O -look O O -as O O -though O O -they O O -( O O -air O O -) O O -are O O -going O O -to O O -be O O -required O O -in O O -any O O -significant O O -way O O -because O O -the O O -NGOs O O -( O O -non O O -organisations O O -) O O -are O O -in O O -that O O -area O O -on O O -the O O -border O O -between O O -Z B-LOC B-LOC -and O O -Rwanda B-LOC B-LOC -, O O -" O O -Young B-PER B-PER -told O O -reporters O O -. 
O O -But O O -General O O -Bari B-PER B-PER -said O O -it O O -would O O -be O O -premature O O -to O O -rule O O -out O O -any O O -course O O -of O O -action O O -until O O -he O O -had O O -more O O -information O O -. O O -" O O -We O O -hope O O -that O O -if O O -the O O -front O O -moves O O -forward O O -or O O -stab O O -then O O -we O O -will O O -have O O -access O O -( O O -to O O -the O O -large O O -group O O -of O O -refugees O O -) O O -with O O -reconnaissance O O -or O O -humanitarian O O -agencies O O -. O O -" O O -If O O -they O O -ca O O -n O O -move O O -because O O -they O O -are O O -too O O -weak O O -, O O -then O O -we O O -will O O -probably O O -consider O O -very O O -seriously O O -using O O -air O O -delivery O O -means O O -( O O -air O O -) O O -. O O -' O O -complex O O -, O O -it O O -' O O -dangerous O O -for O O -the O O -air O O -crew O O -that O O -fly O O -in O O -there O O -and O O -it O O -will O O -have O O -to O O -be O O -absolutely O O -necessary O O -. O O -If O O -it O O -is O O -necessary O O -, O O -I O O -w O O -n O O -hesitate O O -to O O -use O O -it O O -, O O -" O O -he O O -said O O -. O O -Ask O O -if O O -he O O -would O O -di O O -the O O -objections O O -of O O -the O O -Z B-MISC B-MISC -government O O -, O O -he O O -said O O -: O O -" O O -It O O -would O O -have O O -to O O -be O O -in O O -the O O -last O O -resort O O -. O O -It O O -would O O -have O O -to O O -mean O O -that O O -tens O O -of O O -thousands O O -of O O -lives O O -are O O -in O O -danger O O -. O O -Do O O -you O O -think O O -that O O -I O O -would O O -have O O -a O O -conscience O O -problem O O -doing O O -it O O -or O O -not O O -at O O -that O O -time O O -? O O -And O O -my O O -mandate O O -is O O -also O O -under O O -Chapter O O -Seven O O -to O O -operate O O -in O O -eastern O O -Z B-LOC B-LOC -. O O -" O O -Under O O -Chapter O O -Seven O O -of O O -the O O -U B-ORG B-ORG -charter O O -, O O -the O O -Security B-ORG B-ORG -Council I-ORG I-ORG -has O O -wide O O -powers O O -to O O -preserve O O -peace O O -and O O -security O O -. O O -" O O -I O O -know O O -their O O -( O O -the O O -Z B-MISC B-MISC -government O O -' O O -) O O -position O O -and O O -I O O -know O O -it O O -' O O -very O O -delicate O O -and O O -we O O -are O O -very O O -sensitive O O -to O O -their O O -position O O -also O O -, O O -" O O -the O O -general O O -added O O -. O O -He O O -denied O O -that O O -his O O -contacts O O -, O O -criticised O O -by O O -Ki B-LOC B-LOC -, O O -with O O -the O O -Z B-MISC B-MISC -rebels O O -amounted O O -to O O -negotiations O O -. O O -" O O -I O O -do O O -n O O -negotiate O O -, O O -" O O -he O O -said O O -. O O -" O O -I O O -coordinate O O -with O O -those O O -who O O -are O O -holding O O -ground O O -and O O -that O O -' O O -a O O -wise O O -thing O O -to O O -do O O -. O O -When O O -we O O -do O O -n O O -know O O -where O O -the O O -front O O -is O O -, O O -we O O -do O O -n O O -know O O -what O O -the O O -risk O O -is O O -. 
O O -" O O -Bari B-PER B-PER -said O O -that O O -apart O O -from O O -the O O -group O O -of O O -150 O O -, O O -U B-LOC B-LOC -and O O -British B-MISC B-MISC -reconnaissance O O -plans O O -had O O -tracked O O -two O O -much O O -smaller O O -groups O O -of O O -refugees O O -- O O -one O O -of O O -up O O -to O O -1 O O -north O O -of O O -the O O -town O O -of O O -Ma B-LOC O -and O O -one O O -of O O -up O O -to O O -8 O O -on O O -the O O -road O O -from O O -B B-LOC B-LOC -west O O -to O O -Kind B-LOC B-LOC -. O O -The O O -Ki B-LOC B-LOC -office O O -of O O -the O O -medical O O -charity O O -Me B-ORG B-ORG -sa I-ORG I-ORG -Frontier I-ORG I-ORG -said O O -on O O -Friday O O -that O O -more O O -than O O -100 O O -refugees O O -were O O -t O O -northwest O O -from O O -the O O -Go B-LOC B-LOC -area O O -and O O -many O O -of O O -them O O -were O O -now O O -in O O -the O O -town O O -of O O -W B-LOC B-LOC -. O O -The O O -general O O -did O O -not O O -mention O O -these O O -refugees O O -, O O -who O O -are O O -on O O -the O O -outer O O -limit O O -of O O -the O O -strip O O -the O O -planes O O -have O O -been O O -checking O O -. O O -Mauritius B-LOC B-LOC -put O O -on O O -cyclone O O -alert O O -. O O -P B-LOC B-LOC -L I-LOC I-LOC -1996 O O -Ma B-MISC B-MISC -authorities O O -put O O -the O O -Indian B-LOC B-LOC -Ocean I-LOC I-LOC -island O O -on O O -cyclone O O -alert O O -on O O -Friday O O -. O O -The O O -weather O O -services O O -office O O -said O O -the O O -centre O O -of O O -the O O -intense O O -tropical O O -cyclone O O -Daniel B-PER B-MISC -was O O -570 O O -km O O -( O O -310 O O -miles O O -) O O -north O O -by O O -northwest O O -of O O -the O O -island O O -on O O -Friday O O -afternoon O O -and O O -was O O -moving O O -south O O -by O O -southwest O O -at O O -eight O O -km O O -an O O -hour O O -( O O -four O O -knots O O -) O O -. O O -Although O O -not O O -threatening O O -Mauritius B-LOC B-LOC -directly O O -, O O -it O O -is O O -coming O O -closer O O -to O O -the O O -island O O -and O O -could O O -change O O -direction O O -, O O -it O O -added O O -. O O -Wind O O -up O O -to O O -75 O O -km O O -an O O -hour O O -( O O -40 O O -knots O O -) O O -could O O -blow O O -over O O -Mauritius B-LOC B-LOC -during O O -the O O -night O O -of O O -Friday O O -to O O -Saturday O O -, O O -it O O -said O O -. O O -The O O -weather O O -in O O -the O O -capital O O -Port B-LOC B-LOC -Louis I-LOC I-LOC -was O O -heavily O O -cloud O O -on O O -Friday O O -afternoon O O -with O O -occasional O O -shower O O -. O O -The O O -northeastern O O -coast O O -of O O -the O O -nearby O O -island O O -of O O -Madagascar B-LOC B-LOC -has O O -also O O -gone O O -on O O -alert O O -. O O -U B-ORG B-ORG -evacuate O O -staff O O -from O O -Central B-LOC B-LOC -African I-LOC I-LOC -Republic I-LOC I-LOC -. O O -AB B-LOC B-LOC -1996 O O -The O O -United B-ORG B-ORG -Nations I-ORG I-ORG -evacuated O O -its O O -staff O O -in O O -the O O -Central B-LOC B-LOC -African I-LOC I-LOC -Republic I-LOC I-LOC -on O O -Friday O O -because O O -of O O -mounting O O -violence O O -in O O -a O O -two O O -army O O -m O O -in O O -the O O -capital O O -, O O -a O O -U B-ORG B-ORG -official O O -said O O -. 
O O -The O O -official O O -from O O -the O O -U B-ORG B-ORG -refugee O O -agency O O -UN B-ORG B-ORG -said O O -a O O -chartered O O -plane O O -had O O -picked O O -up O O -the O O -staff O O -from O O -Bang B-LOC B-LOC -and O O -was O O -heading O O -for O O -A B-LOC B-LOC -, O O -Ivory B-LOC B-LOC -Coast I-LOC I-LOC -. O O -Senegal B-LOC B-LOC -proposes O O -foreign O O -minister O O -for O O -U B-ORG B-ORG -post O O -. O O -D B-LOC B-LOC -1996 O O -Senegal B-LOC B-LOC -' O O -President O O -Abd B-PER B-PER -Di I-PER I-PER -said O O -on O O -Friday O O -he O O -was O O -proposing O O -his O O -foreign O O -minister O O -Mo B-PER B-PER -Ni I-PER I-PER -for O O -the O O -post O O -of O O -United B-ORG B-ORG -Nations I-ORG I-ORG -secretary O O -. O O -Di B-PER B-PER -announced O O -his O O -intention O O -to O O -reporters O O -when O O -he O O -returned O O -from O O -the O O -Franco B-MISC B-MISC -summit O O -in O O -Burkina B-LOC B-LOC -Faso I-LOC I-LOC -where O O -an O O -African B-MISC B-MISC -successor O O -to O O -Secretary O O -Bo B-PER B-PER -Bo I-PER I-PER -was O O -discussed O O -. O O -The O O -United B-LOC B-LOC -States I-LOC I-LOC -has O O -veto O O -a O O -second O O -term O O -for O O -the O O -Egyptian B-MISC B-MISC -but O O -left O O -the O O -door O O -open O O -for O O -another O O -African B-MISC B-MISC -candidate O O -. O O -" O O -If O O -Africa B-LOC B-LOC -does O O -not O O -wish O O -to O O -lose O O -its O O -turn O O -we O O -have O O -to O O -act O O -fast O O -, O O -" O O -Di B-PER B-PER -said O O -. O O -" O O -Some O O -of O O -my O O -brother O O -heads O O -of O O -state O O -asked O O -me O O -if O O -I O O -would O O -n O O -nominate O O -Mo B-PER B-PER -Ni I-PER I-PER -. O O -I O O -see O O -in O O -him O O -the O O -profile O O -of O O -a O O -secret O O -of O O -the O O -United B-ORG B-ORG -Nations I-ORG I-ORG -and O O -I O O -have O O -given O O -my O O -endorsement O O -. O O -" O O -Ex O O -, O O -son O O -killed O O -in O O -Central B-LOC B-LOC -Africa I-LOC I-LOC -unrest O O -. O O -Raphael B-PER B-PER -Ko I-PER I-PER -BA B-LOC B-LOC -1996 O O -A O O -former O O -cabinet O O -minister O O -in O O -Central B-LOC B-LOC -African I-LOC I-LOC -Republic I-LOC I-LOC -and O O -his O O -son O O -were O O -abducted O O -from O O -their O O -home O O -and O O -murdered O O -in O O -growing O O -ethnic O O -violence O O -in O O -the O O -capital O O -Bang B-LOC B-LOC -, O O -a O O -government O O -minister O O -said O O -on O O -Friday O O -. O O -With O O -violence O O -spiral O O -out O O -of O O -control O O -, O O -France B-LOC B-LOC -voiced O O -backing O O -for O O -the O O -elected O O -Bang B-LOC B-LOC -government O O -but O O -said O O -its O O -troops O O -based O O -in O O -the O O -former O O -colony O O -under O O -defence O O -pact O O -would O O -not O O -help O O -it O O -combat O O -army O O -m O O -. O O -" O O -France B-LOC B-LOC -cannot O O -be O O -involved O O -in O O -the O O -domestic O O -political O O -debate O O -, O O -" O O -President O O -Jacques B-PER B-PER -Chi I-PER I-PER -told O O -a O O -news O O -conference O O -at O O -the O O -end O O -of O O -a O O -Franco B-MISC B-MISC -summit O O -in O O -Burkina B-LOC B-LOC -Faso I-LOC I-LOC -. O O -" O O -French B-MISC B-MISC -troops O O -may O O -only O O -take O O -part O O -in O O -maintaining O O -order O O -to O O -avoid O O -major O O -abuses O O -and O O -protect O O -foreign O O -communities O O -, O O -" O O -he O O -said O O -. 
O O -Public B-ORG O -Service I-ORG O -Minister O O -David B-PER B-PER -Do I-PER I-PER -, O O -who O O -is O O -the O O -head O O -of O O -the O O -national O O -Red B-ORG B-ORG -Cross I-ORG I-ORG -, O O -told O O -Re B-ORG B-ORG -he O O -had O O -seen O O -the O O -bodies O O -of O O -former O O -interior O O -minister O O -Christophe B-PER B-PER -G I-PER I-PER -and O O -his O O -son O O -, O O -who O O -was O O -not O O -named O O -. O O -Witness O O -said O O -they O O -had O O -been O O -seized O O -by O O -troops O O -loyal O O -to O O -President O O -Ang B-PER B-PER -Pat I-PER I-PER -at O O -dawn O O -on O O -Thursday O O -when O O -they O O -clashed O O -with O O -soldiers O O -staging O O -a O O -m O O -since O O -November O O -16 O O -. O O -G B-PER B-PER -is O O -from O O -the O O -Ya B-LOC B-MISC -tribe O O -to O O -which O O -most O O -of O O -the O O -rebel O O -soldiers O O -belong O O -. O O -The O O -uprising O O -began O O -over O O -pay O O -demands O O -but O O -has O O -turned O O -into O O -a O O -campaign O O -to O O -top O O -Pat B-PER B-PER -, O O -spark O O -ethnic O O -violence O O -and O O -dividing O O -the O O -capital O O -. O O -The O O -former O O -minister O O -and O O -his O O -son O O -had O O -been O O -taken O O -from O O -their O O -home O O -close O O -to O O -the O O -presidential O O -palace O O -, O O -which O O -is O O -guarded O O -by O O -loyal O O -soldiers O O -backed O O -by O O -French B-MISC B-MISC -troops O O -based O O -in O O -Bang B-LOC B-LOC -. O O -The O O -bodies O O -were O O -found O O -on O O -Thursday O O -in O O -an O O -open O O -field O O -about O O -two O O -km O O -( O O -one O O -mile O O -) O O -further O O -away O O -, O O -said O O -Do B-PER B-PER -and O O -other O O -witnesses O O -. O O -The O O -men O O -were O O -seized O O -as O O -loyal O O -forces O O -and O O -French B-MISC B-MISC -troops O O -fought O O -gun O O -with O O -m O O -who O O -fired O O -rockets O O -into O O -the O O -city O O -centre O O -. O O -A O O -French B-MISC B-MISC -hotel O O -was O O -slightly O O -damaged O O -. O O -Ya B-MISC B-MISC -are O O -ho O O -in O O -stronghold O O -districts O O -of O O -Pat B-LOC B-PER -' O O -Bay B-LOC B-MISC -people O O -while O O -other O O -tribes O O -have O O -fled O O -areas O O -in O O -rebel O O -hands O O -. O O -Road O O -have O O -been O O -erected O O -in O O -city O O -districts O O -while O O -central O O -Bang B-LOC B-LOC -, O O -which O O -is O O -patrol O O -by O O -French B-MISC B-MISC -troops O O -with O O -tanks O O -, O O -is O O -deserted O O -. O O -Shop O O -and O O -businesses O O -have O O -remained O O -shut O O -this O O -week O O -. O O -The O O -Franco B-MISC B-MISC -summit O O -decided O O -to O O -send O O -a O O -mission O O -Bang B-LOC B-LOC -to O O -seek O O -ways O O -of O O -containing O O -the O O -m O O -and O O -a O O -threat O O -of O O -civil O O -war O O -. O O -Chi B-PER B-PER -said O O -Burkina B-LOC B-LOC -Faso I-LOC I-LOC -President O O -B B-PER B-PER -Co I-PER I-PER -would O O -visit O O -Bang B-LOC B-LOC -" O O -in O O -the O O -coming O O -hours O O -" O O -with O O -the O O -heads O O -of O O -state O O -of O O -Gabon B-LOC B-LOC -, O O -Mali B-LOC B-LOC -and O O -Chad B-LOC B-LOC -to O O -try O O -and O O -establish O O -dialogue O O -between O O -authorities O O -and O O -rebels O O -. O O -The O O -m O O -forced O O -Pat B-PER B-PER -to O O -miss O O -the O O -summit O O -. 
O O -His O O -spokesman O O -had O O -predicted O O -the O O -meeting O O -to O O -send O O -an O O -assessment O O -mission O O -. O O -Pat B-PER B-PER -, O O -who O O -won O O -Central B-LOC B-LOC -Africa I-LOC I-LOC -' O O -first O O -multi O O -elections O O -, O O -refuses O O -to O O -resign O O -. O O -Church B-MISC O -me O O -attempts O O -hit O O -dead O O -over O O -rebel O O -demands O O -for O O -his O O -departure O O -. O O -Soldiers O O -staged O O -m O O -in O O -April O O -and O O -May O O -, O O -with O O -French B-MISC B-MISC -troops O O -stepping O O -in O O -with O O -tanks O O -and O O -helicopters O O -to O O -que O O -the O O -more O O -serious O O -second O O -uprising O O -. O O -Pat B-PER B-PER -offered O O -concessions O O -and O O -am O O -to O O -rebels O O -before O O -the O O -May O O -rebellion O O -ended O O -after O O -rebels O O -lo O O -the O O -city O O -centre O O -. O O -Rebels O O -a O O -Pat B-PER B-PER -of O O -tribal O O -and O O -of O O -arm O O -his O O -civilian O O -supporters O O -and O O -hired O O -guns O O -from O O -Sudan B-LOC B-LOC -and O O -Chad B-LOC B-LOC -. O O -Mu O O -have O O -vowed O O -to O O -di O O -all O O -civilians O O -and O O -to O O -chase O O -out O O -the O O -foreign O O -forces O O -kn O O -as O O -Co B-LOC B-MISC -. O O -Hospital O O -sources O O -and O O -witnesses O O -said O O -about O O -10 O O -people O O -were O O -known O O -to O O -have O O -been O O -killed O O -in O O -the O O -more O O -than O O -two O O -weeks O O -of O O -fighting O O -, O O -including O O -two O O -rebels O O -killed O O -in O O -Thursday O O -' O O -clashes O O -. O O -An O O -und O O -number O O -of O O -people O O -are O O -reported O O -to O O -have O O -been O O -abducted O O -and O O -killed O O -outside O O -the O O -town O O -by O O -tribal O O -v O O -groups O O -. O O -In O O -Thursday O O -' O O -fighting O O -, O O -French B-MISC B-MISC -troops O O -fired O O -back O O -as O O -m O O -trying O O -to O O -break O O -out O O -of O O -their O O -stronghold O O -rain O O -mortar O O -shells O O -on O O -the O O -city O O -centre O O -. O O -Five O O -die O O -as O O -SA B-MISC B-MISC -crop O O -plane O O -hits O O -pickup O O -. O O -J B-LOC B-LOC -1996 O O -Five O O -people O O -were O O -killed O O -when O O -a O O -crop O O -plane O O -preparing O O -for O O -takeoff O O -crashed O O -into O O -a O O -light O O -delivery O O -vehicle O O -in O O -South B-LOC B-LOC -Africa I-LOC I-LOC -' O O -North B-LOC B-LOC -West I-LOC I-LOC -region O O -, O O -state O O -radio O O -reported O O -on O O -Friday O O -. O O -The O O -freak O O -accident O O -occurred O O -in O O -Ma B-LOC B-LOC -on O O -Thursday O O -. O O -The O O -pilot O O -survived O O -the O O -crash O O -, O O -but O O -the O O -driver O O -and O O -passengers O O -of O O -the O O -van O O -were O O -killed O O -. O O -W O O -- O O -Con O O -at O O -C B-LOC B-LOC -airports O O -- O O -Dec O O -6 O O -. O O -M B-LOC B-LOC -1996 O O -No O O -weather O O -closure O O -of O O -C B-LOC B-LOC -airports O O -are O O -expected O O -on O O -December O O -7 O O -and O O -8 O O -, O O -the O O -Russian B-ORG B-ORG -Weather I-ORG I-ORG -Service I-ORG I-ORG -said O O -on O O -Friday O O -. O O -- O O -Moscow B-ORG B-ORG -News I-ORG I-ORG -+ O O -94 O O -85 O O -Skin O O -attack O O -Bratislava B-LOC B-LOC -Rabbi O O -- O O -police O O -. 
O O -BR B-LOC B-LOC -1996 O O -Four O O -skin O O -attacked O O -and O O -insulted O O -the O O -rabbi O O -of O O -Bratislava B-LOC B-LOC -, O O -Bar B-PER B-PER -Meyer I-PER I-PER -, O O -in O O -the O O -city O O -centre O O -on O O -Friday O O -, O O -but O O -he O O -escaped O O -un O O -, O O -a O O -police O O -spokesman O O -told O O -Re B-ORG B-ORG -. O O -" O O -A O O -group O O -of O O -four O O -skin O O -attacked O O -the O O -rabbi O O -, O O -one O O -kicked O O -him O O -in O O -the O O -hand O O -but O O -caused O O -no O O -injury O O -, O O -" O O -the O O -spokesman O O -said O O -. O O -" O O -All O O -four O O -attackers O O -were O O -app O O -and O O -two O O -have O O -been O O -detained O O -, O O -" O O -the O O -spokesman O O -added O O -He O O -was O O -unable O O -to O O -give O O -more O O -details O O -. O O -" O O -The O O -further O O -procedure O O -is O O -now O O -in O O -the O O -hands O O -of O O -the O O -local O O -police O O -investigator O O -, O O -" O O -the O O -spokesman O O -said O O -. O O -It O O -was O O -the O O -second O O -attack O O -by O O -skin O O -in O O -two O O -years O O -on O O -Meyer B-PER B-PER -, O O -an O O -American B-MISC B-MISC -. O O -Meyer B-PER B-PER -was O O -not O O -available O O -for O O -comment O O -. O O -Albanian B-MISC B-MISC -jailed O O -for O O -threat O O -of O O -bomb O O -suicide O O -. O O -T B-LOC B-LOC -1996 O O -An O O -Albanian B-MISC B-MISC -court O O -on O O -Friday O O -sentenced O O -a O O -man O O -who O O -threatened O O -to O O -blow O O -himself O O -up O O -outside O O -President O O -Sal B-PER B-PER -Be I-PER I-PER -' O O -office O O -to O O -13 O O -years O O -in O O -jail O O -for O O -guerrilla O O -action O O -and O O -illegal O O -possession O O -of O O -arms O O -. O O -B B-PER B-PER -last O O -April O O -said O O -he O O -would O O -blow O O -himself O O -up O O -outside O O -the O O -presidential O O -palace O O -unless O O -he O O -was O O -allowed O O -to O O -speak O O -to O O -Be B-PER B-PER -, O O -who O O -was O O -at O O -the O O -time O O -meeting O O -Italian B-MISC B-MISC -President O O -Oscar B-PER B-PER -Luigi I-PER I-PER -Sc I-PER I-PER -. O O -B B-PER B-PER -was O O -over O O -by O O -riot O O -police O O -less O O -than O O -one O O -hour O O -after O O -he O O -began O O -his O O -action O O -. O O -" O O -Eva O O -all O O -the O O -conditions O O -of O O -the O O -case O O -the O O -court O O -thinks O O -the O O -sentence O O -should O O -be O O -lower O O -than O O -the O O -minimum O O -( O O -15 O O -years O O -) O O -, O O -" O O -Tirana B-LOC B-LOC -judge O O -Q B-PER B-PER -G I-PER I-PER -added O O -. O O -The O O -defendant O O -denied O O -the O O -charges O O -, O O -saying O O -his O O -action O O -was O O -intended O O -to O O -urge O O -the O O -authorities O O -to O O -give O O -him O O -a O O -$ O O -20 O O -loan O O -. O O -Medical O O -experts O O -had O O -concluded O O -B B-PER B-PER -was O O -mentally O O -unstable O O -but O O -fully O O -responsible O O -for O O -the O O -act O O -he O O -had O O -committed O O -, O O -G B-PER B-PER -said O O -. O O -Polish B-MISC B-MISC -ex O O -president O O -to O O -visit O O -Pope B-PER O -. 
O O -WA B-LOC B-LOC -1996 O O -Poland B-LOC B-LOC -' O O -ex O O -President O O -Al B-PER B-PER -K I-PER I-PER -is O O -likely O O -to O O -visit O O -Polish B-MISC O -Pope O O -John B-PER B-PER -Paul I-PER I-PER -in O O -early O O -1997 O O -despite O O -uneasy O O -relations O O -between O O -the O O -Vatican B-LOC B-LOC -and O O -Warsaw B-LOC B-LOC -, O O -the O O -foreign O O -minister O O -said O O -on O O -Friday O O -. O O -" O O -President O O -K B-PER B-PER -plans O O -to O O -visit O O -Italy B-LOC B-LOC -on O O -a O O -invitation O O -from O O -President O O -Oscar B-PER B-PER -Sc I-PER I-PER -. O O -A O O -meeting O O -with O O -the O O -Pope B-MISC O -is O O -also O O -planned O O -, O O -" O O -Darius B-PER B-PER -Rosa I-PER I-PER -told O O -a O O -news O O -conference O O -. O O -Rosa B-PER B-PER -said O O -that O O -the O O -atmosphere O O -of O O -the O O -meeting O O -, O O -if O O -it O O -takes O O -place O O -, O O -would O O -largely O O -depend O O -on O O -the O O -progress O O -in O O -talks O O -on O O -rat O O -of O O -a O O -treaty O O -between O O -Warsaw B-LOC B-LOC -and O O -the O O -Vatican B-LOC B-LOC -. O O -@ O O -The O O -rat O O -of O O -the O O -treaty O O -, O O -which O O -was O O -signed O O -in O O -1993 O O -by O O -the O O -then O O -right O O -government O O -, O O -is O O -being O O -delayed O O -by O O -an O O -ex O O -party O O -, O O -which O O -won O O -parliamentary O O -elections O O -in O O -the O O -same O O -year O O -and O O -now O O -dominate O O -parliament O O -. O O -The O O -party O O -, O O -the O O -Democratic B-ORG B-ORG -Left I-ORG I-ORG -Alliance I-ORG I-ORG -, O O -says O O -the O O -agreement O O -would O O -give O O -the O O -Catholic B-ORG B-ORG -Church I-ORG I-ORG -too O O -much O O -influence O O -over O O -life O O -in O O -Poland B-LOC B-LOC -and O O -could O O -in O O -on O O -rights O O -of O O -other O O -religious O O -groups O O -and O O -non O O -. O O -The O O -relations O O -with O O -the O O -Vatican B-LOC B-LOC -have O O -also O O -been O O -sour O O -by O O -a O O -recent O O -relaxation O O -of O O -Poland B-LOC B-LOC -' O O -anti O O -rules O O -, O O -which O O -K B-PER B-PER -signed O O -into O O -law O O -last O O -month O O -. O O -Russia B-LOC B-LOC -warns O O -Nor B-PER B-ORG -, O O -not O O -expected O O -to O O -liquid O O -it O O -. O O -Lynn B-PER B-PER -Browning I-PER I-PER -M B-LOC B-LOC -1996 O O -Russian B-MISC B-MISC -Finance O O -Minister O O -Alexander B-PER B-PER -Liv I-PER I-PER -warned O O -financially O O -Nor B-ORG B-ORG -Nick I-ORG I-ORG -on O O -Friday O O -that O O -it O O -must O O -pay O O -over O O -taxes O O -, O O -but O O -analysts O O -said O O -the O O -firm O O -would O O -not O O -be O O -liquid O O -or O O -that O O -its O O -would O O -assets O O -would O O -be O O -frozen O O -. O O -" O O -Nor B-ORG B-ORG -really O O -is O O -a O O -big O O -debt O O -, O O -both O O -to O O -the O O -federal O O -and O O -regional O O -budget O O -, O O -" O O -said O O -Konstantin B-PER B-PER -Ch I-PER I-PER -, O O -e O O -analyst O O -at O O -Moscow B-LOC B-LOC -broker O O -R B-ORG B-ORG -Plus I-ORG I-ORG -and O O -a O O -Nor B-ORG B-ORG -watch O O -. O O -" O O -Liv B-PER B-PER -' O O -words O O -are O O -an O O -attempt O O -to O O -put O O -pressure O O -on O O -the O O -company O O -. 
O O -" O O -The O O -official O O -It B-ORG B-ORG -news O O -agency O O -quoted O O -Liv B-PER B-PER -as O O -telling O O -parliamentary O O -deputies O O -that O O -RA B-ORG B-ORG -Nor I-ORG I-ORG -Nike I-ORG I-ORG -0 O O -had O O -to O O -pay O O -its O O -tax O O -a O O -and O O -that O O -bankruptcy O O -procedures O O -applied O O -to O O -the O O -metals O O -group O O -. O O -" O O -If O O -it O O -was O O -an O O -un O O -statement O O -and O O -a O O -bolt O O -out O O -of O O -the O O -blue O O -, O O -then O O -it O O -obviously O O -means O O -something O O -, O O -" O O -said O O -Christopher B-PER B-PER -Gran I-PER I-PER -, O O -chief O O -economist O O -at O O -United B-ORG B-ORG -City I-ORG I-ORG -Bank I-ORG I-ORG -in O O -Moscow B-LOC B-LOC -. O O -" O O -But O O -if O O -it O O -was O O -a O O -response O O -to O O -a O O -deputy O O -' O O -question O O -that O O -was O O -essentially O O -loaded O O -, O O -then O O -it O O -was O O -the O O -only O O -answer O O -he O O -could O O -have O O -given O O -. O O -" O O -Russian B-MISC B-MISC -tax O O -and O O -cabinet O O -authorities O O -, O O -under O O -pressure O O -from O O -the O O -International B-ORG B-ORG -Mon I-ORG I-ORG -Fund I-ORG I-ORG -to O O -boost O O -tax O O -revenues O O -as O O -a O O -condition O O -for O O -receiving O O -payments O O -of O O -a O O -$ O O -10 O O -billion O O -, O O -three O O -loan O O -to O O -Moscow B-LOC B-LOC -, O O -have O O -been O O -striking O O -fear O O -into O O -the O O -hearts O O -of O O -some O O -of O O -Russia B-LOC B-LOC -' O O -most O O -prominent O O -industrial O O -firms O O -by O O -saying O O -they O O -must O O -pay O O -up O O -or O O -face O O -liquid O O -. O O -" O O -They O O -could O O -freeze O O -metal O O -, O O -but O O -it O O -' O O -not O O -a O O -long O O -solution O O -to O O -the O O -problem O O -and O O -would O O -n O O -put O O -money O O -in O O -the O O -budget O O -, O O -" O O -Ch B-PER B-PER -said O O -. O O -" O O -I O O -do O O -n O O -think O O -they O O -would O O -do O O -that O O -. O O -" O O -En O O -social O O -infrastructure O O -in O O -the O O -icy O O -Far B-LOC O -North I-LOC O -where O O -Nor B-ORG B-ORG -is O O -based O O -depend O O -on O O -the O O -company O O -, O O -and O O -Moscow B-LOC B-LOC -has O O -said O O -it O O -has O O -no O O -finances O O -to O O -re O O -hundreds O O -of O O -thousands O O -of O O -people O O -- O O -an O O -expenditure O O -which O O -could O O -far O O -outs O O -Nor B-ORG B-ORG -' O O -debts O O -. O O -Nor B-ORG B-ORG -officials O O -declined O O -to O O -comment O O -. O O -Ana O O -said O O -the O O -government O O -, O O -while O O -anxious O O -about O O -Nor B-ORG B-ORG -' O O -debts O O -, O O -is O O -highly O O -unlikely O O -to O O -bring O O -the O O -nickel O O -, O O -copper O O -, O O -co O O -, O O -platinum O O -and O O -platinum O O -group O O -metals O O -producer O O -to O O -its O O -knees O O -or O O -take O O -measures O O -that O O -could O O -significantly O O -affect O O -output O O -. O O -But O O -it O O -also O O -wants O O -Nor B-ORG B-ORG -, O O -the O O -world O O -' O O -second O O -nickel O O -producer O O -, O O -to O O -clean O O -up O O -its O O -act O O -. O O -" O O -The O O -procedure O O -of O O -bankruptcy O O -will O O -be O O -applied O O -, O O -" O O -Ta B-ORG B-ORG -quoted O O -Liv B-PER B-PER -as O O -telling O O -Du B-ORG B-ORG -deputies O O -about O O -Nor B-ORG B-ORG -. 
O O -It O O -indirectly O O -quoted O O -him O O -as O O -saying O O -Nor B-PER B-ORG -should O O -first O O -pay O O -salary O O -a O O -, O O -which O O -in O O -the O O -past O O -have O O -led O O -to O O -worker O O -strikes O O -. O O -" O O -It O O -is O O -unlikely O O -that O O -Nor B-ORG B-ORG -will O O -pay O O -these O O -debts O O -in O O -the O O -near O O -- O O -the O O -company O O -will O O -remain O O -a O O -debt O O -in O O -the O O -near O O -future O O -, O O -" O O -Ch B-PER B-PER -said O O -. O O -He O O -estimated O O -the O O -company O O -' O O -regional O O -debts O O -at O O -least O O -one O O -trillion O O -r O O -and O O -said O O -30 O O -percent O O -of O O -the O O -giant O O -K B-ORG B-LOC -regional O O -budget O O -was O O -fuel O O -by O O -Nor B-ORG B-ORG -money O O -. O O -Nor B-ORG B-ORG -' O O -new O O -majority O O -shareholder O O -, O O -Russian B-MISC B-MISC -com O O -bank O O -Un B-ORG B-ORG -, O O -has O O -said O O -it O O -is O O -re O O -metal O O -exports O O -through O O -Inter B-ORG B-ORG -in O O -order O O -to O O -boost O O -revenues O O -. O O -But O O -the O O -changes O O -have O O -yet O O -to O O -improve O O -significantly O O -Nor B-PER B-ORG -' O O -situation O O -. O O -" O O -Un B-ORG B-ORG -has O O -inherited O O -a O O -mountain O O -and O O -whether O O -or O O -not O O -they O O -climb O O -out O O -and O O -over O O -it O O -remains O O -to O O -be O O -seen O O -, O O -" O O -said O O -one O O -metals O O -source O O -. O O -Nor B-ORG B-ORG -said O O -in O O -September O O -that O O -it O O -total O O -debts O O -, O O -including O O -unpaid O O -salaries O O -to O O -workers O O -, O O -were O O -13 O O -trillion O O -r O O -. O O -The O O -company O O -said O O -last O O -month O O -that O O -it O O -had O O -worked O O -out O O -a O O -tax O O -payment O O -schedule O O -with O O -authorities O O -, O O -after O O -regional O O -tax O O -officials O O -threatened O O -to O O -seize O O -some O O -nickel O O -and O O -copper O O -assets O O -. O O -- O O -Moscow B-ORG B-ORG -News I-ORG I-ORG -, O O -+ O O -94 O O -85 O O -Estonian B-MISC B-MISC -Tallinn B-ORG B-ORG -Pan I-ORG I-ORG -11 O O -net O O -46 O O -m O O -k O O -. O O -T B-LOC B-LOC -1996 O O -Tallinn B-ORG B-ORG -Pan I-ORG I-ORG -, O O -one O O -of O O -the O O -largest O O -banks O O -in O O -Estonia B-LOC B-LOC -, O O -made O O -a O O -11 O O -1996 O O -net O O -profit O O -of O O -46 O O -million O O -k O O -, O O -the O O -bank O O -said O O -on O O -Friday O O -. O O -It O O -said O O -in O O -a O O -statement O O -that O O -it O O -made O O -profits O O -of O O -4 O O -million O O -k O O -in O O -November O O -. O O -The O O -bank O O -made O O -a O O -profit O O -of O O -20 O O -million O O -k O O -in O O -the O O -first O O -half O O -of O O -the O O -year O O -. O O -Tallinn B-ORG B-ORG -Pan I-ORG I-ORG -said O O -its O O -assets O O -rose O O -17 O O -million O O -k O O -to O O -1 O O -billion O O -k O O -. O O -De O O -deposits O O -rose O O -to O O -85 O O -million O O -k O O -from O O -83 O O -million O O -k O O -and O O -time O O -deposits O O -increased O O -to O O -295 O O -million O O -k O O -from O O -285 O O -million O O -k O O -. O O -- O O -Riga B-ORG B-ORG -News I-ORG I-ORG -, O O -+ O O -72 O O -52 O O -Russia B-LOC B-LOC -ready O O -for O O -construct O O -work O O -with O O -Al B-ORG B-PER -. 
O O -M B-LOC B-LOC -1996 O O -Russia B-LOC B-LOC -said O O -on O O -Friday O O -it O O -expected O O -a O O -construct O O -relationship O O -with O O -Madeleine B-PER B-PER -Al I-PER I-PER -, O O -nominated O O -by O O -U B-LOC B-LOC -President O O -Bill B-PER B-PER -Clinton I-PER I-PER -to O O -be O O -Secretary O O -of O O -State O O -. O O -Inter B-ORG B-ORG -news O O -agency O O -quoted O O -First O O -Deputy O O -Foreign O O -Minister O O -Igor B-PER B-PER -Ivan I-PER I-PER -as O O -saying O O -Moscow B-LOC B-LOC -was O O -ready O O -for O O -" O O -most O O -active O O -and O O -construct O O -" O O -work O O -with O O -Al B-ORG B-PER -. O O -But O O -he O O -noted O O -that O O -policy O O -would O O -be O O -shaped O O -by O O -Clinton B-PER B-PER -and O O -President O O -Boris B-PER B-PER -Ye I-PER I-PER -. O O -Clinton B-PER B-PER -and O O -Ye B-PER B-PER -are O O -due O O -to O O -meet O O -next O O -March O O -for O O -their O O -first O O -summit O O -since O O -both O O -were O O -re O O -. O O -" O O -Our O O -countries O O -' O O -leaders O O -have O O -agreed O O -to O O -meet O O -in O O -March O O -, O O -1997 O O -. O O -The O O -Russian B-MISC B-MISC -foreign O O -ministry O O -believes O O -the O O -new O O -directions O O -in O O -the O O -development O O -of O O -Russian B-MISC B-MISC -relations O O -will O O -be O O -worked O O -out O O -there O O -, O O -" O O -Ivan B-PER B-PER -told O O -Inter B-ORG B-ORG -. O O -Inter B-ORG B-ORG -, O O -out O O -Al B-PER B-PER -' O O -biography O O -, O O -pointed O O -out O O -that O O -she O O -had O O -defended O O -Washington B-LOC B-LOC -' O O -interests O O -fiercely O O -as O O -U B-LOC B-LOC -ambassador O O -to O O -the O O -United B-ORG B-ORG -Nations I-ORG I-ORG -and O O -that O O -this O O -had O O -included O O -actively O O -supporting O O -NATO B-ORG B-ORG -' O O -plans O O -to O O -expand O O -eastward O O -. O O -Russia B-LOC B-LOC -oppose O O -NATO B-ORG B-ORG -' O O -plans O O -to O O -take O O -in O O -countries O O -of O O -eastern O O -and O O -central O O -Europe B-LOC B-LOC -which O O -used O O -to O O -be O O -part O O -of O O -the O O -Soviet B-MISC B-MISC -Warsaw B-ORG B-MISC -Pact I-ORG I-MISC -, O O -saying O O -such O O -moves O O -would O O -threaten O O -its O O -security O O -. O O -Ye B-PER B-PER -plans O O -return O O -to O O -K B-LOC B-LOC -for O O -Dec O O -25 O O -- O O -speaker O O -. O O -M B-LOC B-LOC -1996 O O -Russian B-MISC B-MISC -President O O -Boris B-PER B-PER -Ye I-PER I-PER -, O O -who O O -had O O -heart O O -bypass O O -surgery O O -a O O -month O O -ago O O -, O O -plans O O -to O O -return O O -to O O -work O O -on O O -December O O -25 O O -, O O -the O O -head O O -of O O -the O O -upper O O -chamber O O -of O O -parliament O O -told O O -Inter B-ORG B-ORG -news O O -agency O O -on O O -Friday O O -. O O -" O O -Today O O -he O O -is O O -a O O -mobile O O -, O O -energetic O O -man O O -with O O -lots O O -of O O -colour O O -in O O -his O O -cheeks O O -, O O -" O O -said O O -Ye B-PER B-PER -St I-PER I-PER -who O O -met O O -Ye B-PER B-PER -, O O -65 O O -, O O -on O O -Friday O O -at O O -a O O -country O O -residence O O -. O O -" O O -He O O -told O O -me O O -that O O -he O O -had O O -lost O O -20 O O -kg O O -( O O -44 O O -lbs O O -) O O -which O O -is O O -natural O O -after O O -such O O -an O O -operation O O -. 
O O -" O O -December O O -25 O O -, O O -a O O -normal O O -working O O -day O O -in O O -Russia B-LOC B-LOC -, O O -is O O -the O O -fifth O O -anniversary O O -of O O -Ye B-PER B-PER -' O O -arrival O O -in O O -the O O -K B-LOC B-LOC -. O O -He O O -took O O -over O O -there O O -, O O -and O O -took O O -control O O -of O O -the O O -red O O -button O O -controlling O O -nuclear O O -arms O O -, O O -in O O -December O O -1991 O O -when O O -Mikhail B-PER B-PER -Go I-PER I-PER -resigned O O -, O O -marking O O -the O O -end O O -of O O -the O O -Soviet B-LOC B-LOC -Union I-LOC I-LOC -. O O -Ye B-PER B-PER -has O O -been O O -shown O O -a O O -few O O -times O O -on O O -television O O -since O O -his O O -q O O -bypass O O -on O O -November O O -5 O O -but O O -has O O -yet O O -to O O -deliver O O -any O O -major O O -television O O -or O O -radio O O -address O O -to O O -the O O -nation O O -. O O -Sur O O -Ren B-PER B-PER -A I-PER I-PER -who O O -led O O -the O O -operation O O -, O O -told O O -It B-ORG B-ORG -news O O -agency O O -Ye B-PER B-PER -was O O -working O O -up O O -to O O -four O O -hours O O -a O O -day O O -at O O -his O O -residence O O -. O O -Bomb O O -explode O O -outside O O -home O O -of O O -expelled O O -Slovak B-MISC B-MISC -MP O O -. O O -BR B-LOC B-LOC -1996 O O -A O O -bomb O O -exploded O O -on O O -Friday O O -outside O O -the O O -home O O -of O O -a O O -Slovak B-MISC B-MISC -politician O O -expelled O O -from O O -parliament O O -after O O -he O O -quit O O -the O O -ruling O O -party O O -, O O -complaining O O -of O O -a O O -lack O O -of O O -democracy O O -in O O -the O O -country O O -. O O -The O O -official O O -T B-ORG B-ORG -news O O -agency O O -said O O -the O O -explosion O O -blew O O -out O O -all O O -ground O O -floor O O -windows O O -of O O -Fr B-PER B-PER -G I-PER I-PER -' O O -family O O -home O O -in O O -Gala B-LOC B-LOC -, O O -western O O -Slovakia B-LOC B-LOC -, O O -and O O -damaged O O -the O O -main O O -entrance O O -, O O -but O O -no O O -was O O -injured O O -. O O -G B-PER B-PER -, O O -formerly O O -a O O -member O O -of O O -Prime O O -Minister O O -Vladimir B-PER B-PER -Me I-PER I-PER -' O O -ruling O O -Movement B-ORG B-ORG -for I-ORG I-ORG -a I-ORG I-ORG -Democratic I-ORG I-ORG -Slovakia I-ORG I-ORG -, O O -was O O -stripped O O -of O O -his O O -parliamentary O O -mandate O O -on O O -Wednesday O O -after O O -leaving O O -the O O -party O O -last O O -month O O -in O O -protest O O -over O O -what O O -he O O -said O O -was O O -a O O -lack O O -of O O -democracy O O -in O O -the O O -country O O -. O O -He O O -said O O -he O O -had O O -been O O -receiving O O -anonymous O O -death O O -threats O O -since O O -making O O -the O O -move O O -. O O -" O O -This O O -was O O -an O O -act O O -of O O -terrorism O O -and O O -now O O -I O O -fear O O -not O O -only O O -for O O -my O O -own O O -life O O -, O O -but O O -also O O -of O O -that O O -of O O -my O O -wife O O -and O O -children O O -, O O -" O O -he O O -told O O -T B-ORG B-ORG -. O O -G B-PER B-PER -' O O -family O O -was O O -sleeping O O -in O O -a O O -bedroom O O -at O O -the O O -back O O -of O O -the O O -house O O -and O O -were O O -un O O -by O O -the O O -blast O O -. O O -It O O -was O O -not O O -immediately O O -clear O O -who O O -was O O -behind O O -the O O -blast O O -. O O -Bomb O O -explode O O -at O O -mosque O O -in O O -central O O -Bulgaria B-LOC B-LOC -. 
O O -S B-LOC B-LOC -1996 O O -A O O -bomb O O -exploded O O -on O O -Friday O O -at O O -a O O -mosque O O -in O O -the O O -central O O -Bulgarian B-MISC B-MISC -town O O -of O O -Kazan B-LOC B-LOC -, O O -causing O O -damage O O -but O O -no O O -injuries O O -, O O -state O O -radio O O -said O O -. O O -V O O -crime O O -has O O -so O O -since O O -the O O -collapse O O -of O O -communism O O -in O O -1989 O O -as O O -Bulgaria B-LOC B-LOC -moves O O -to O O -a O O -market O O -economy O O -. O O -Bomb O O -are O O -often O O -carried O O -out O O -by O O -criminals O O -to O O -settle O O -scores O O -but O O -the O O -motive O O -in O O -this O O -case O O -was O O -not O O -immediately O O -clear O O -. O O -Some O O -residents O O -of O O -the O O -Kazan B-LOC B-LOC -area O O -are O O -Mo B-MISC B-MISC -who O O -converted O O -to O O -Islam B-MISC B-MISC -during O O -Ottoman B-MISC O -Turkish B-MISC B-MISC -rule O O -. O O -The O O -majority O O -in O O -Bulgaria B-LOC B-LOC -are O O -Christians O B-MISC -. O O -The O O -radio O O -quoted O O -police O O -as O O -saying O O -the O O -blast O O -broke O O -windows O O -and O O -shattered O O -the O O -door O O -of O O -the O O -mosque O O -. O O -Hungary B-LOC B-LOC -o O O -/ O O -n O O -rates O O -end O O -up O O -before O O -Dec O O -10 O O -tax O O -payment O O -. O O -B B-LOC B-LOC -1996 O O -Hungarian B-MISC B-MISC -overnight O O -interest O O -rates O O -closed O O -higher O O -on O O -Friday O O -as O O -market O O -liquid O O -tightened O O -before O O -the O O -December O O -10 O O -social O O -security O O -contribution O O -payment O O -deadline O O -, O O -dealers O O -said O O -. O O -" O O -The O O -banks O O -are O O -already O O -preparing O O -for O O -the O O -December O O -10 O O -tax O O -payment O O -, O O -" O O -said O O -Budapest B-ORG B-LOC -Bank I-ORG O -' O O -Sand B-PER B-PER -To I-PER I-PER -. O O -" O O -They O O -expect O O -a O O -larger O O -payment O O -. O O -" O O -The O O -overnight O O -market O O -opened O O -at O O -22 O O -/ O O -22 O O -percent O O -, O O -then O O -substantial O O -money O O -was O O -taken O O -up O O -at O O -22 O O -percent O O -. O O -But O O -later O O -, O O -rates O O -dropped O O -and O O -closed O O -at O O -22 O O -/ O O -22 O O -as O O -a O O -large O O -bank O O -finished O O -borrow O O -money O O -. O O -On O O -Thursday O O -, O O -overnight O O -rates O O -moved O O -between O O -21 O O -and O O -22 O O -. O O -Deal O O -said O O -liquid O O -could O O -tighten O O -further O O -early O O -next O O -week O O -as O O -the O O -social O O -security O O -contribution O O -payments O O -date O O -approaches O O -. O O -- O O -Sand B-PER B-PER -Pet I-PER I-PER -, O O -Budapest B-LOC B-LOC -news O O -( O O -36 O O -1 O O -) O O -32 O O -404 O O -Mexico B-LOC B-LOC -stocks O O -off O O -low O O -but O O -still O O -hit O O -by O O -Greens B-ORG B-PER -. O O -ME B-ORG B-LOC -C I-ORG I-LOC -Mexican B-MISC B-MISC -stocks O O -closed O O -sharply O O -lower O O -Friday O O -, O O -but O O -had O O -made O O -a O O -tentative O O -recovery O O -as O O -initial O O -panic O O -and O O -vol O O -a O O -. O O -" O O -It O O -was O O -Greens B-PER B-PER -at O O -first O O -. 
O O -Then O O -once O O -we O O -saw O O -the O O -Dow B-MISC B-MISC -( O O -Jones B-MISC B-MISC -industrial O O -average O O -) O O -was O O -not O O -about O O -to O O -crash O O -, O O -some O O -buyers O O -stepped O O -in O O -, O O -" O O -said O O -a O O -trader O O -, O O -referring O O -to O O -Federal B-ORG B-ORG -Reserve I-ORG I-ORG -Chairman O O -Alan B-PER B-PER -Greens I-PER I-PER -, O O -whose O O -comments O O -that O O -assets O O -were O O -" O O -irrational O O -ex O O -" O O -upset O O -financial O O -markets O O -worldwide O O -. O O -The O O -blue O O -IP B-MISC B-MISC -index O O -ended O O -down O O -1 O O -points O O -, O O -or O O -43 O O -percent O O -, O O -at O O -3 O O -. O O -Volume O O -was O O -regular O O -at O O -74 O O -million O O -shares O O -traded O O -. O O -Mexican B-MISC B-MISC -stocks O O -were O O -also O O -hurt O O -by O O -U B-LOC B-LOC -long O O -bond O O -rates O O -which O O -had O O -begun O O -to O O -rise O O -before O O -Greens B-PER B-PER -' O O -comments O O -and O O -were O O -in O O -by O O -employment O O -data O O -released O O -before O O -trade O O -began O O -in O O -Mexico B-LOC B-LOC -. O O -Yi O O -on O O -U B-LOC B-LOC -30 O O -Treasury B-ORG B-ORG -bonds O O -were O O -6 O O -percent O O -when O O -stock O O -trading O O -closed O O -in O O -Mexico B-LOC B-LOC -, O O -unchanged O O -from O O -Thursday O O -. O O -On O O -the O O -broad O O -market O O -, O O -107 O O -stocks O O -changed O O -hands O O -, O O -of O O -which O O -loser O O -well O O -outnumbered O O -winners O O -by O O -75 O O -to O O -13 O O -. O O -Trade O O -noted O O -the O O -lack O O -of O O -blue O O -chips O O -or O O -stocks O O -traded O O -at O O -significant O O -volume O O -among O O -the O O -gain O O -. O O -Si B-ORG B-ORG -, O O -the O O -steel O O -arm O O -of O O -the O O -debt O O -Side B-ORG B-ORG -group O O -headed O O -the O O -loser O O -, O O -off O O -7 O O -cent O O -( O O -1 O O -cent O O -) O O -at O O -1 O O -p O O -( O O -18 O O -cents O O -) O O -. O O -Side B-ORG B-ORG -fell O O -4 O O -cent O O -( O O -1 O O -cent O O -) O O -to O O -95 O O -cent O O -( O O -12 O O -cents O O -) O O -. O O -Trade O O -also O O -remarked O O -that O O -Mexican B-MISC B-MISC -AD B-MISC B-MISC -suffered O O -in O O -New B-LOC B-LOC -York I-LOC I-LOC -. O O -Heavyweight O O -Tel B-ORG B-ORG -and O O -Tel B-ORG B-ORG -ended O O -off O O -25 O O -cents O O -and O O -75 O O -cents O O -, O O -respectively O O -, O O -at O O -$ O O -31 O O -and O O -$ O O -25 O O -. O O -" O O -Falling O O -share O O -prices O O -in O O -New B-LOC B-LOC -York I-LOC I-LOC -do O O -n O O -hurt O O -Mexico B-LOC B-LOC -as O O -long O O -as O O -it O O -happens O O -gradually O O -, O O -as O O -earlier O O -this O O -week O O -. O O -It O O -' O O -a O O -sudden O O -p O O -that O O -takes O O -its O O -toll O O -, O O -" O O -said O O -Carlos B-PER B-PER -Ponce I-PER I-PER -, O O -research O O -director O O -at O O -Santa B-LOC B-LOC -. O O -Trade O O -and O O -analysts O O -differed O O -as O O -to O O -how O O -firm O O -the O O -relative O O -recovery O O -on O O -Friday O O -was O O -. O O -" O O -Some O O -buyers O O -stepped O O -in O O -, O O -but O O -the O O -market O O -was O O -not O O -very O O -convinced O O -. O O -Volume O O -was O O -lack O O -, O O -" O O -said O O -one O O -trader O O -. O O -" O O -The O O -market O O -' O O -very O O -healthy O O -, O O -we O O -' O O -buying O O -, O O -" O O -said O O -another O O -trader O O -. 
O O -Ponce B-PER B-PER -said O O -shares O O -were O O -certainly O O -attractive O O -priced O O -in O O -Mexico B-LOC B-LOC -, O O -but O O -would O O -not O O -appreciate O O -until O O -foreign O O -buyers O O -stepped O O -in O O -, O O -which O O -they O O -had O O -yet O O -to O O -do O O -. O O -' O O -Plastic O O -surgery O O -gets O O -boost O O -in O O -Brazil B-LOC B-LOC -. O O -Simon B-PER B-PER -de I-PER I-PER -Lo I-PER I-PER -R B-LOC B-LOC -DE I-LOC I-LOC -J I-LOC I-LOC -1996 O O -Plastic O O -surgery O O -is O O -boom O O -, O O -especially O O -among O O -men O O -, O O -as O O -Brazilian B-MISC B-MISC -spend O O -much O O -of O O -their O O -new O O -wealth O O -on O O -the O O -latest O O -beauty O O -treatments O O -, O O -said O O -the O O -organise O O -of O O -a O O -four O O -international O O -plastic O O -surgery O O -conference O O -that O O -opened O O -on O O -Friday O O -. O O -The O O -number O O -of O O -plastic O O -surge O O -in O O -Brazil B-LOC B-LOC -has O O -jumped O O -30 O O -percent O O -to O O -an O O -estimated O O -150 O O -this O O -year O O -since O O -an O O -anti O O -plan O O -was O O -introduced O O -in O O -July O O -1994 O O -, O O -Far B-PER B-PER -Ha I-PER I-PER -, O O -the O O -president O O -of O O -the O O -Brazilian B-ORG B-ORG -Plastic I-ORG I-ORG -Surgery I-ORG I-ORG -Society I-ORG I-ORG -( O O -S B-ORG B-ORG -) O O -, O O -said O O -. O O -The O O -number O O -of O O -operations O O -on O O -men O O -increased O O -even O O -more O O -- O O -by O O -80 O O -percent O O -, O O -from O O -8 O O -in O O -1994 O O -to O O -15 O O -in O O -1995 O O -, O O -he O O -said O O -. O O -" O O -Brazil B-LOC B-LOC -ranks O O -right O O -at O O -the O O -top O O -for O O -plastic O O -surgery O O -with O O -respect O O -to O O -the O O -number O O -of O O -surgeon O O -, O O -the O O -number O O -of O O -patients O O -, O O -number O O -of O O -operations O O -, O O -number O O -of O O -conferences O O -. O O -Our O O -statistics O O -are O O -the O O -highest O O -for O O -everything O O -, O O -" O O -Ha B-PER B-PER -said O O -. O O -" O O -We O O -believe O O -the O O -increase O O -in O O -plastic O O -surge O O -for O O -men O O -results O O -from O O -the O O -difficulties O O -in O O -the O O -job O O -market O O -. O O -People O O -need O O -to O O -have O O -a O O -more O O -youthful O O -look O O -to O O -compete O O -in O O -the O O -job O O -market O O -, O O -given O O -the O O -profound O O -changes O O -in O O -Latin B-LOC B-LOC -America I-LOC I-LOC -' O O -economy O O -. O O -" O O -A O O -controlled O O -exchange O O -rate O O -, O O -trade O O -liberal O O -and O O -tight O O -monetary O O -policies O O -have O O -also O O -dramatically O O -curb O O -inflation O O -, O O -making O O -more O O -money O O -available O O -for O O -co O O -surgery O O -. O O -Brazil B-LOC B-LOC -has O O -been O O -at O O -the O O -forefront O O -in O O -plastic O O -surgery O O -for O O -decades O O -and O O -is O O -home O O -to O O -one O O -of O O -the O O -most O O -famous O O -surgeon O O -, O O -I B-PER B-PER -Pit I-PER I-PER -. O O -There O O -are O O -6 O O -plastic O O -surgeon O O -there O O -, O O -of O O -which O O -4 O O -have O O -qualified O O -to O O -be O O -members O O -of O O -the O O -S B-ORG B-ORG -. 
O O -Every O O -year O O -, O O -500 O O -new O O -plastic O O -surgeon O O -graduate O O -in O O -Brazil B-LOC B-LOC -and O O -medical O O -students O O -from O O -all O O -over O O -the O O -world O O -come O O -to O O -study O O -there O O -. O O -Ha B-PER B-PER -attributes O O -Brazil B-LOC B-LOC -' O O -fascination O O -with O O -plastic O O -surgery O O -not O O -to O O -excessive O O -van O O -but O O -to O O -the O O -country O O -' O O -mix O O -and O O -match O O -of O O -different O O -races O O -, O O -which O O -can O O -create O O -physical O O -dish O O -. O O -" O O -What O O -happens O O -is O O -the O O -nose O O -sometimes O O -does O O -n O O -match O O -the O O -mouth O O -or O O -the O O -butt O O -do O O -n O O -match O O -with O O -the O O -legs O O -, O O -" O O -he O O -said O O -. O O -Brazil B-LOC B-LOC -' O O -most O O -sought O O -beauty O O -treatment O O -is O O -lip O O -in O O -which O O -fat O O -is O O -sucked O O -away O O -from O O -areas O O -of O O -the O O -body O O -, O O -with O O -about O O -30 O O -operations O O -a O O -year O O -at O O -a O O -cost O O -of O O -$ O O -3 O O -to O O -$ O O -4 O O -each O O -. O O -St O O -t O O -and O O -breast O O -operations O O -are O O -also O O -popular O O -since O O -the O O -tropical O O -climate O O -calls O O -for O O -flesh O O -fashion O O -, O O -but O O -unlike O O -women O O -elsewhere O O -Brazilian B-MISC B-MISC -tend O O -to O O -have O O -breast O O -reduction O O -and O O -butt O O -imp O O -. O O -" O O -The O O -women O O -who O O -want O O -to O O -reduce O O -their O O -breasts O O -here O O -would O O -probably O O -want O O -to O O -increase O O -them O O -in O O -the O O -United B-LOC B-LOC -States I-LOC I-LOC -, O O -" O O -S B-ORG B-ORG -Vice O O -Oswald B-PER B-PER -Sal I-PER I-PER -said O O -. O O -" O O -Beauty O O -ideals O O -and O O -cultures O O -are O O -different O O -in O O -every O O -country O O -. O O -" O O -Plastic O O -surgery O O -scare O O -like O O -the O O -case O O -in O O -which O O -Brazilian B-MISC B-MISC -model O O -Claudia B-PER B-PER -Liz I-PER I-PER -fell O O -into O O -a O O -coma O O -after O O -being O O -an O O -for O O -a O O -lip O O -in O O -October O O -are O O -not O O -much O O -of O O -a O O -de O O -. O O -Sal B-ORG B-PER -said O O -operations O O -fell O O -30 O O -percent O O -immediately O O -after O O -that O O -case O O -but O O -the O O -rate O O -was O O -back O O -to O O -normal O O -now O O -. O O -Daily O O -Argentine B-MISC B-MISC -grain O O -fixing O O -- O O -Cam B-ORG O -A I-ORG O -. 
[Flattened data span, reconstructed format: CoNLL-2003-style named-entity evaluation output. In the source file each non-empty line holds three whitespace-separated columns -- token, gold BIO tag, predicted BIO tag -- with blank lines between sentences, e.g.:

Buenos B-LOC B-LOC
Aires I-LOC I-LOC
Que B-ORG B-LOC
Rosario I-ORG B-LOC
Ba I-ORG B-LOC
Blanc I-ORG I-LOC

The span covers Reuters newswire of early December 1996: Mexican port and weather bulletins; Brazilian students caught cheating with "pager" watches; Chile-Mexico talks to broaden their free-trade agreement; Bishop Carlos Belo's departure for the Nobel Peace Prize ceremony and Jakarta's reaction; Burmese student protests in Rangoon; the withdrawn WTO speaking invitation to ILO head Michel Hansenne; Indian rubber supply and demand estimates; yen/dollar commentary from Tokyo officials; Lebanon's death sentence in absentia for SLA commander Antoine Lahad; the execution of John Mills Jr. in Florida; US cattle, grain-barge and natural-gas market reports; and four African nominees for the post of UN secretary-general. Many tokens were truncated to a first subword during extraction (e.g. "Ra" for Rangoon, "ye" for yen); the per-token tag pairs themselves are intact.]
O O -The O O -Security B-ORG B-ORG -Council I-ORG I-ORG -has O O -to O O -vote O O -on O O -a O O -new O O -secretary O O -and O O -then O O -seek O O -the O O -endorsement O O -of O O -the O O -185 O O -General B-ORG B-ORG -Assembly I-ORG I-ORG -before O O -December O O -31 O O -when O O -Bo B-PER B-PER -' O O -term O O -ex O O -. O O -Spain B-LOC B-LOC -' O O -police O O -seize O O -petrol O O -bombs O O -, O O -arrest O O -five O O -. O O -MA B-LOC B-LOC -1996 O O -Spanish B-MISC B-MISC -police O O -said O O -on O O -Friday O O -they O O -had O O -arrested O O -five O O -people O O -and O O -seized O O -more O O -than O O -90 O O -petrol O O -bombs O O -during O O -disturbance O O -after O O -a O O -protest O O -in O O -the O O -Basque B-MISC B-MISC -country O O -against O O -Spain B-LOC B-LOC -' O O -constitution O O -. O O -Hood O O -protesters O O -threw O O -burning O O -bottles O O -and O O -other O O -objects O O -at O O -police O O -in O O -Pam B-LOC B-LOC -after O O -the O O -protest O O -organised O O -by O O -Herr B-PER B-ORG -Bat I-PER I-ORG -, O O -the O O -political O O -wing O O -of O O -Basque B-MISC B-MISC -se O O -group O O -ET B-ORG B-ORG -. O O -Police O O -also O O -confiscated O O -eight O O -kg O O -( O O -18 O O -lb O O -) O O -of O O -screw O O -, O O -b O O -and O O -spray O O -paint O O -cans O O -. O O -The O O -protest O O -, O O -which O O -attracted O O -several O O -thousand O O -supporters O O -, O O -coincided O O -with O O -the O O -18th O O -anniversary O O -of O O -Spain B-LOC B-LOC -' O O -constitution O O -. O O -Mussolini B-PER B-PER -' O O -granddaughter O O -re O O -far O O -party O O -. O O -ROM B-LOC B-LOC -1996 O O -Al B-PER B-PER -Mussolini I-PER I-PER -, O O -the O O -granddaughter O O -of O O -Italy B-LOC B-LOC -' O O -F B-MISC O -dictator O O -Benito B-PER B-PER -Mussolini I-PER I-PER -, O O -said O O -on O O -Friday O O -she O O -had O O -rejoined O O -the O O -far O O -National B-ORG B-ORG -Alliance I-ORG I-ORG -( O O -AN B-ORG B-ORG -) O O -party O O -she O O -quit O O -over O O -policy O O -differences O O -last O O -month O O -. O O -" O O -I O O -' O O -gone O O -back O O -, O O -" O O -she O O -told O O -a O O -radio O O -show O O -shortly O O -after O O -AN B-ORG B-ORG -leader O O -G B-PER B-PER -Fin I-PER I-PER -, O O -who O O -was O O -being O O -interviewed O O -on O O -the O O -programme O O -, O O -said O O -the O O -row O O -had O O -been O O -resolved O O -. O O -" O O -He O O -did O O -n O O -want O O -to O O -lose O O -me O O -and O O -I O O -did O O -n O O -want O O -to O O -lose O O -him O O -. O O -" O O -Fin B-PER B-PER -told O O -state O O -radio O O -RA B-ORG B-LOC -he O O -met O O -Mussolini B-PER B-PER -thanks O O -to O O -the O O -good O O -offices O O -of O O -Giuseppe B-PER B-PER -Ta I-PER I-PER -, O O -AN B-ORG B-ORG -' O O -leader O O -in O O -the O O -Chamber B-ORG B-ORG -of I-ORG I-ORG -Deputies I-ORG I-ORG -( O O -lower O O -house O O -) O O -, O O -and O O -had O O -overcome O O -their O O -differences O O -. O O -Mussolini B-PER B-PER -, O O -33 O O -, O O -resigned O O -from O O -the O O -parliamentary O O -party O O -group O O -for O O -what O O -she O O -said O O -were O O -strictly O O -political O O -reasons O O -. O O -The O O -fiery O O -politician O O -, O O -who O O -is O O -also O O -a O O -niece O O -of O O -screen O O -star O O -Sophia B-PER B-PER -Lo I-PER I-PER -, O O -had O O -accused O O -AN B-ORG B-ORG -leaders O O -of O O -s O O -internal O O -party O O -debate O O -. 
O O -Mussolini B-PER B-PER -, O O -who O O -sits O O -in O O -the O O -Chamber B-ORG B-ORG -, O O -told O O -La B-ORG B-ORG -St I-ORG I-ORG -newspaper O O -last O O -month O O -after O O -quit O O -AN B-ORG B-ORG -' O O -parliamentary O O -party O O -that O O -she O O -was O O -considering O O -joining O O -the O O -neo B-MISC O -Social B-ORG B-ORG -Movement I-ORG I-ORG -( O O -MS B-ORG B-ORG -) O O -formed O O -by O O -some O O -of O O -the O O -Du B-PER O -' O O -World B-MISC B-MISC -War I-MISC I-MISC -Two I-MISC I-MISC -followers O O -. O O -German B-MISC B-MISC -Santa B-PER B-PER -in O O -bank O O -nearly O O -gets O O -arrested O O -. O O -H B-LOC B-LOC -, O O -Germany B-LOC B-LOC -1996 O O -A O O -Santa B-MISC B-PER -Claus I-MISC I-PER -distributing O O -presents O O -to O O -workers O O -in O O -a O O -German B-MISC B-MISC -bank O O -on O O -Friday O O -nearly O O -ended O O -up O O -behind O O -bars O O -when O O -a O O -passing O O -police O O -patrol O O -thought O O -he O O -was O O -a O O -r O O -in O O -disguise O O -. O O -The O O -man O O -, O O -doing O O -his O O -rounds O O -in O O -the O O -northern O O -city O O -of O O -Hanover B-LOC B-LOC -on O O -the O O -day O O -when O O -German B-MISC B-MISC -children O O -traditionally O O -receive O O -small O O -presents O O -from O O -Saint B-PER B-PER -Nicholas I-PER I-PER -, O O -convinced O O -police O O -eventually O O -that O O -he O O -was O O -genuine O O -. O O -Italy B-LOC B-LOC -commission O O -concludes O O -1997 O O -budget O O -examination O O -. O O -ROM B-LOC B-LOC -1996 O O -The O O -Italian B-MISC B-MISC -upper O O -house O O -Senate B-ORG B-ORG -budget O O -commission O O -has O O -concluded O O -its O O -examination O O -of O O -Italy B-LOC B-LOC -' O O -1997 O O -budget O O -, O O -and O O -it O O -will O O -approve O O -the O O -measure O O -officially O O -by O O -Saturday O O -. O O -From O O -Tuesday O O -, O O -the O O -full O O -assembly O O -of O O -the O O -Senate B-ORG B-ORG -will O O -start O O -its O O -examination O O -of O O -the O O -financial O O -package O O -. O O -- O O -Milan B-LOC B-LOC -news O O -+ O O -66 O O -EU B-LOC B-ORG -, O O -Poland B-LOC B-LOC -agree O O -on O O -oil O O -import O O -ta O O -. O O -BR B-LOC B-LOC -1996 O O -The O O -European B-ORG B-ORG -Union I-ORG I-ORG -and O O -Poland B-LOC B-LOC -have O O -resolved O O -disagreements O O -over O O -a O O -new O O -Polish B-MISC B-MISC -oil O O -import O O -regime O O -, O O -the O O -European B-ORG B-ORG -Commission I-ORG I-ORG -said O O -on O O -Friday O O -. O O -The O O -EU B-ORG B-ORG -had O O -objected O O -to O O -increases O O -in O O -Polish B-MISC B-MISC -ta O O -on O O -imports O O -of O O -gasoline O O -and O O -gas O O -products O O -introduced O O -on O O -January O O -1 O O -, O O -1996 O O -, O O -saying O O -they O O -con O O -levels O O -en O O -in O O -the O O -so O O -Europe B-LOC B-LOC -Agreement I-MISC O -between O O -the O O -EU B-ORG B-ORG -and O O -Poland B-LOC B-LOC -. O O -The O O -increases O O -were O O -aimed O O -at O O -protecting O O -the O O -Polish B-MISC B-MISC -market O O -while O O -helping O O -to O O -modern O O -the O O -local O O -oil O O -industry O O -. 
O O -" O O -The O O -EU B-ORG B-ORG -and O O -Poland B-LOC B-LOC -have O O -now O O -reached O O -a O O -final O O -settlement O O -regarding O O -issues O O -related O O -to O O -the O O -Polish B-MISC B-MISC -import O O -regime O O -in O O -the O O -oils O O -sector O O -, O O -" O O -the O O -Commission B-ORG B-ORG -said O O -in O O -a O O -statement O O -. O O -Under O O -the O O -agreement O O -, O O -Poland B-LOC B-LOC -will O O -a O O -all O O -oil O O -import O O -ta O O -by O O -2001 O O -, O O -remove O O -all O O -oil O O -price O O -controls O O -and O O -end O O -quantitative O O -restrictions O O -on O O -imports O O -by O O -January O O -1 O O -, O O -1997 O O -. O O -The O O -agreement O O -includes O O -the O O -early O O -p O O -and O O -modern O O -of O O -Polish B-MISC B-MISC -oil O O -re O O -, O O -which O O -will O O -be O O -obliged O O -to O O -offer O O -equal O O -treatment O O -to O O -all O O -buyers O O -. O O -The O O -EU B-ORG B-ORG -and O O -Poland B-LOC B-LOC -will O O -monitor O O -the O O -settlement O O -at O O -six O O -meetings O O -. O O -Hindu B-MISC B-MISC -party O O -forces O O -India B-LOC B-LOC -parliament O O -to O O -ad O O -. O O -NE B-LOC B-LOC -DE I-LOC I-LOC -1996 O O -Hindu B-MISC B-MISC -nationalists O O -forced O O -ad O O -of O O -India B-LOC B-LOC -' O O -lower O O -house O O -of O O -parliament O O -on O O -Friday O O -, O O -in O O -protest O O -against O O -a O O -proposal O O -to O O -observe O O -a O O -minute O O -' O O -silence O O -over O O -the O O -destruction O O -of O O -a O O -mosque O O -by O O -a O O -Hindu B-MISC B-MISC -mob O O -in O O -1992 O O -. O O -Members O O -of O O -the O O -Hindu B-MISC B-MISC -nationalist O O -Bharatiya B-ORG B-ORG -Janata I-ORG I-ORG -Party I-ORG I-ORG -( O O -B B-ORG B-ORG -) O O -shouted O O -pro B-MISC B-MISC -slogan O O -in O O -the O O -house O O -after O O -a O O -communist O O -deputy O O -made O O -the O O -proposal O O -in O O -re O O -of O O -the O O -Ba B-LOC O -mosque O O -, O O -which O O -was O O -r O O -on O O -December O O -6 O O -, O O -1992 O O -. O O -The O O -house O O -was O O -first O O -ad O O -for O O -two O O -hours O O -. O O -When O O -it O O -re O O -, O O -B B-ORG B-ORG -deputies O O -resumed O O -the O O -slogan O O -, O O -and O O -deputy O O -speaker O O -Sur B-PER B-PER -B I-PER I-PER -suspended O O -work O O -until O O -Monday O O -. O O -The O O -destruction O O -of O O -the O O -16th O O -mosque O O -in O O -the O O -northern O O -Indian B-MISC B-MISC -town O O -of O O -A B-LOC B-LOC -triggered O O -nationwide O O -Hindu B-MISC B-MISC -violence O O -in O O -which O O -more O O -than O O -3 O O -people O O -were O O -killed O O -. O O -Indian B-MISC B-MISC -officials O O -blame O O -revenge O O -Mo B-MISC B-MISC -underworld O O -gangs O O -in O O -Bombay B-LOC B-LOC -for O O -a O O -string O O -of O O -bombings O O -in O O -the O O -city O O -three O O -months O O -later O O -that O O -killed O O -260 O O -people O O -. O O -The O O -B B-ORG B-ORG -backs O O -a O O -hard O O -Hindu B-MISC B-MISC -campaign O O -to O O -build O O -a O O -temple O O -at O O -the O O -site O O -of O O -the O O -mosque O O -, O O -which O O -Hindus B-MISC B-MISC -believe O O -was O O -the O O -birthplace O O -of O O -the O O -Lord O O -Rama B-PER B-PER -. O O -The O O -campaign O O -cat O O -B B-ORG B-ORG -from O O -the O O -political O O -fringe O O -to O O -become O O -India B-LOC B-LOC -' O O -main O O -opposition O O -party O O -in O O -1991 O O -. 
O O -Indian B-MISC B-MISC -Sept O O -crude O O -oil O O -output O O -falls O O -to O O -2 O O -m O O -T O O -. O O -NE B-LOC B-LOC -DE I-LOC I-LOC -1996 O O -India B-LOC B-LOC -' O O -crude O O -petroleum O O -output O O -fell O O -to O O -2 O O -million O O -tonnes O O -in O O -September O O -from O O -2 O O -million O O -in O O -the O O -same O O -month O O -in O O -1995 O O -, O O -the O O -government O O -said O O -on O O -Friday O O -. O O -ST O O -O I-ORG O -Sept O O -Sept O O -Apr O O -Apr O O -1996 O O -1995 O O -1996 O O -1995 O O -C O O -petroleum O O -2 O O -2 O O -15 O O -17 O O -Petroleum O O -products O O -4 O O -5 O O -30 O O -29 O O -Note O O -- O O -Figure O O -are O O -in O O -thousands O O -of O O -tonnes O O -and O O -preliminary O O -. O O -L B-MISC B-LOC -CH O O -MA O O -GO O O -ON O O -W O O -W O O -W O O -. O O -BR B-LOC B-LOC -1996 O O -Luxembourg B-LOC B-LOC -' O O -traditional O O -Christmas O O -market O O -, O O -which O O -starts O O -on O O -Saturday O O -and O O -runs O O -to O O -December O O -24 O O -, O O -has O O -taken O O -to O O -the O O -world O O -wide O O -web O O -as O O -a O O -way O O -of O O -public O O -its O O -activities O O -. O O -The O O -web O O -site O O -( O O -http O O -) O O -gives O O -details O O -of O O -the O O -market O O -' O O -concert O O -programme O O -as O O -well O O -as O O -its O O -various O O -retailers O O -. O O -- O O -Brussels B-ORG B-ORG -News I-ORG I-ORG -+ O O -2 O O -287 O O -68 O O -, O O -F O O -+ O O -2 O O -230 O O -77 O O -London B-LOC B-LOC -coal O O -/ O O -ore O O -fixtures O O -. O O -L B-LOC B-LOC -1996 O O -CO B-LOC O -- O O -La B-LOC B-MISC -Peak I-LOC I-MISC -- O O -120 O O -tones O O -coal O O -Hay B-LOC B-LOC -Point I-LOC I-LOC -or O O -Newcastle B-LOC B-LOC -/ O O -Ka B-LOC B-LOC -20 O O -$ O O -5 O O -and O O -$ O O -5 O O -fi O O -respectively O O -40 O O -/ O O -28 O O -s O O -China B-ORG B-ORG -Steel I-ORG I-ORG -. O O -Royal B-ORG B-MISC -C I-ORG I-MISC -- O O -77 O O -tonnes O O -coal O O -Mara B-LOC B-LOC -/ O O -F B-LOC B-LOC -19 O O -$ O O -9 O O -fi O O -20 O O -s O O -/ O O -25 O O -s O O -Co B-ORG B-ORG -and O I-ORG -C B-ORG I-ORG -. O O -OR B-ORG O -- O O -I B-ORG O -T I-ORG O -- O O -70 O O -tonnes O O -Dam B-ORG B-LOC -/ O O -Ka B-LOC B-LOC -20 O O -$ O O -5 O O -fi O O -35 O O -s O O -/ O O -30 O O -s O O -China B-ORG B-ORG -Steel I-ORG I-ORG -. O O -UK B-LOC B-LOC -book O O -length O O -Conservative B-MISC B-MISC -victory O O -odds O O -. O O -L B-LOC B-LOC -1996 O O -UK B-LOC B-LOC -book O O -William B-PER B-PER -Hill I-PER I-PER -said O O -on O O -Friday O O -they O O -have O O -length O O -the O O -odds O O -of O O -a O O -Conservative B-MISC B-MISC -victory O O -in O O -the O O -next O O -general O O -election O O -from O O -9 O O -to O O -5 O O -. O O -William B-PER B-PER -Hill I-PER I-PER -said O O -the O O -odds O O -were O O -the O O -longest O O -they O O -had O O -been O O -for O O -six O O -months O O -. O O -The O O -Labour B-ORG B-ORG -opposition O O -are O O -now O O -1 O O -favourite O O -, O O -it O O -said O O -. O O -The O O -election O O -must O O -be O O -held O O -by O O -May O O -. O O -- O O -London B-ORG B-ORG -News I-ORG I-ORG -+ O O -171 O O -54 O O -Italy B-LOC B-LOC -tops O O -week O O -of O O -me O O -bond O O -returns O O -- O O -Sal B-PER B-ORG -. 
O O -L B-LOC B-LOC -1996 O O -High O O -Italy B-LOC B-LOC -topped O O -the O O -league O O -in O O -a O O -week O O -of O O -me O O -returns O O -on O O -government O O -bonds O O -, O O -Sal B-ORG B-ORG -Brothers I-ORG I-ORG -said O O -on O O -Friday O O -. O O -In O O -local O O -currency O O -terms O O -, O O -Italian B-MISC B-MISC -BT B-ORG O -offered O O -returns O O -of O O -0 O O -percent O O -in O O -the O O -week O O -ended O O -on O O -Thursday O O -, O O -with O O -fellow O O -high O O -Sweden B-LOC B-LOC -close O O -behind O O -on O O -0 O O -percent O O -. O O -The O O -weekly O O -government O O -bond O O -index O O -rose O O -0 O O -percent O O -in O O -local O O -currency O O -terms O O -. O O -France B-LOC B-LOC -managed O O -third O O -place O O -with O O -0 O O -percent O O -in O O -the O O -16 O O -world O O -government O O -bond O O -index O O -. O O -Canada B-LOC B-LOC -' O O -were O O -the O O -worst O O -performing O O -bonds O O -. O O -They O O -lost O O -2 O O -percent O O -, O O -depressed O O -by O O -a O O -wave O O -of O O -new O O -Canadian B-MISC B-MISC -supply O O -. O O -Return O O -on O O -T B-ORG B-MISC -were O O -also O O -in O O -negative O O -territory O O -at O O -minus O O -0 O O -percent O O -, O O -the O O -poor O O -result O O -after O O -Canada B-LOC B-LOC -and O O -British B-MISC B-MISC -g O O -which O O -lost O O -0 O O -percent O O -. O O -Australia B-LOC B-LOC -was O O -the O O -only O O -dollar O O -country O O -in O O -the O O -table O O -to O O -e O O -out O O -a O O -positive O O -return O O -, O O -albeit O O -a O O -p O O -0 O O -percent O O -. O O -German B-MISC B-MISC -B I-MISC O -were O O -not O O -much O O -better O O -, O O -offering O O -returns O O -of O O -0 O O -percent O O -, O O -while O O -Japanese B-MISC B-MISC -government O O -bonds O O -managed O O -a O O -0 O O -percent O O -gain O O -. O O -Spanish B-MISC B-MISC -bonds O O -, O O -which O O -had O O -been O O -top O O -performers O O -in O O -Sal B-ORG B-ORG -Brothers I-ORG I-ORG -' O O -league O O -table O O -for O O -November O O -as O O -a O O -whole O O -, O O -turned O O -in O O -a O O -more O O -subdued O O -weekly O O -performance O O -with O O -a O O -return O O -of O O -only O O -0 O O -percent O O -. O O -In O O -U B-LOC B-LOC -dollar O O -terms O O -, O O -Japan B-LOC B-LOC -was O O -the O O -only O O -country O O -to O O -give O O -positive O O -returns O O -at O O -1 O O -percent O O -. O O -France B-LOC B-LOC -lost O O -0 O O -percent O O -, O O -followed O O -by O O -Italy B-LOC B-LOC -on O O -minus O O -0 O O -percent O O -. O O -The O O -biggest O O -loser O O -in O O -dollar O O -terms O O -were O O -British B-MISC B-MISC -g O O -, O O -which O O -shed O O -3 O O -percent O O -, O O -Canada B-LOC B-LOC -with O O -minus O O -3 O O -percent O O -and O O -Australia B-LOC B-LOC -at O O -minus O O -1 O O -percent O O -. O O -Sal B-ORG B-ORG -' O O -bond O O -index O O -is O O -calculated O O -using O O -all O O -government O O -bonds O O -with O O -over O O -one O O -year O O -to O O -maturity O O -, O O -weighted O O -for O O -market O O -capital O O -. O O -Only O O -bonds O O -freely O O -available O O -to O O -institutional O O -investors O O -and O O -with O O -a O O -certain O O -minimum O O -amount O O -outstanding O O -are O O -included O O -. O O -Return O O -take O O -account O O -of O O -price O O -moves O O -and O O -a O O -interest O O -. 
O O -- O O -Stephen B-PER B-PER -Ni I-PER I-PER -, O O -International B-ORG B-ORG -Bond I-ORG I-ORG -+ O O -171 O O -63 O O -O B-ORG B-ORG -basket O O -price O O -$ O O -24 O O -on O O -Thursday O O -. O O -L B-LOC B-LOC -1996 O O -The O O -price O O -of O O -the O O -O B-ORG B-ORG -basket O O -of O O -seven O O -crude O O -stood O O -at O O -$ O O -24 O O -a O O -barrel O O -on O O -Thursday O O -, O O -against O O -$ O O -23 O O -on O O -Wednesday O O -, O O -the O O -O B-ORG B-ORG -news O O -agency O O -said O O -, O O -q O O -the O O -O B-ORG B-ORG -secret O O -. O O -The O O -basket O O -comprises O O -Algeria B-LOC B-LOC -' O O -Saharan B-MISC B-MISC -B I-MISC I-MISC -, O O -Indonesia B-LOC B-LOC -' O O -Minas B-LOC B-MISC -, O O -Nigeria B-LOC B-LOC -' O O -Bonn B-ORG B-MISC -Light I-ORG I-MISC -, O O -Saudi B-LOC B-LOC -Arabia I-LOC I-LOC -' O O -Arabian B-ORG B-MISC -Light I-ORG I-MISC -, O O -Dubai B-LOC B-MISC -of O O -the O O -UAE B-LOC B-LOC -, O O -Venezuela B-LOC B-LOC -' O O -T B-PER B-MISC -Juan I-PER I-MISC -and O O -Mexico B-LOC B-LOC -' O O -Is B-LOC B-MISC -. O O -- O O -London B-ORG B-ORG -News I-ORG I-ORG -+ O O -171 O O -54 O O -76 O O -Relations O O -between O O -Clarke B-PER B-PER -, O O -Major O B-PER -good O O -- O O -spokesman O O -. O O -L B-LOC B-LOC -1996 O O -Relations O O -between O O -Chancellor O O -of O O -the O O -Ex B-ORG O -Kenneth B-PER B-PER -Clarke I-PER I-PER -and O O -Prime O O -Minister O O -John B-PER B-PER -Major I-PER I-PER -are O O -good O O -despite O O -media O O -reports O O -of O O -a O O -rift O O -over O O -European B-MISC B-MISC -policy O O -, O O -a O O -spokesman O O -for O O -Major B-PER B-PER -' O O -office O O -said O O -on O O -Friday O O -. O O -Ask O O -about O O -the O O -reports O O -, O O -the O O -spokesman O O -said O O -: O O -" O O -Relations O O -are O O -good O O -. O O -" O O -Ask O O -about O O -Major B-PER B-PER -' O O -mood O O -after O O -a O O -day O O -of O O -media O O -speculation O O -about O O -his O O -political O O -fortunes O O -, O O -the O O -spokesman O O -said O O -: O O -" O O -He O O -is O O -re O O -. O O -He O O -is O O -getting O O -on O O -with O O -the O O -job O O -. O O -" O O -The O O -spokesman O O -said O O -he O O -was O O -not O O -aware O O -of O O -any O O -meetings O O -overnight O O -between O O -Clarke B-PER B-PER -and O O -Major B-PER B-PER -, O O -nor O O -of O O -any O O -talks O O -between O O -the O O -prime O O -minister O O -and O O -parliamentary O O -business O O -managers O O -. O O -Both O O -Major B-PER B-PER -and O O -Clarke B-PER B-PER -were O O -in O O -their O O -constituencies O O -on O O -Friday O O -. O O -Two O O -dead O O -after O O -executive O O -jet O O -crashes O O -in O O -Newfoundland B-LOC B-LOC -. O O -ST B-LOC B-LOC -, O O -Newfoundland B-LOC B-LOC -1996 O O -Two O O -people O O -were O O -killed O O -when O O -an O O -executive O O -jet O O -en O O -route O O -to O O -Ireland B-LOC B-LOC -from O O -Michigan B-LOC B-LOC -crashed O O -on O O -approach O O -to O O -an O O -airport O O -in O O -Stephen B-LOC B-LOC -, O O -Newfoundland B-LOC B-LOC -, O O -on O O -Friday O O -, O O -authorities O O -said O O -. O O -The O O -pilot O O -and O O -co O O -, O O -the O O -only O O -two O O -aboard O O -, O O -were O O -killed O O -in O O -the O O -crash O O -of O O -the O O -Lea B-MISC B-MISC -36 I-MISC I-MISC -, O O -airport O O -manager O O -David B-PER B-PER -Snow I-PER I-PER -said O O -in O O -a O O -telephone O O -interview O O -. 
O O -Snow O B-PER -said O O -the O O -plane O O -last O O -reported O O -to O O -air O O -traffic O O -control O O -at O O -about O O -3 O O -A B-MISC O -local O O -time O O -/ O O -1 O O -A B-MISC O -E O O -( O O -06 O O -GM B-MISC B-MISC -) O O -when O O -it O O -began O O -its O O -final O O -approach O O -about O O -10 O O -miles O O -( O O -16 O O -km O O -) O O -from O O -the O O -airport O O -in O O -this O O -east O O -coast O O -Canadian B-MISC B-MISC -province O O -. O O -That O O -was O O -the O O -last O O -communication O O -the O O -aircraft O O -made O O -with O O -the O O -airport O O -, O O -he O O -added O O -. O O -" O O -We O O -considered O O -it O O -as O O -being O O -missing O O -until O O -about O O -06 O O -( O O -4 O O -A B-MISC O -E O O -) O O -( O O -09 O O -GM B-MISC B-MISC -) O O -. O O -That O O -' O O -when O O -the O O -wreckage O O -was O O -discovered O O -, O O -" O O -Snow B-PER B-PER -said O O -. O O -He O O -said O O -the O O -cargo O O -flight O O -originated O O -in O O -Grand B-LOC B-LOC -Rapids I-LOC I-LOC -, O O -Michigan B-LOC B-LOC -, O O -and O O -was O O -due O O -to O O -stop O O -at O O -Stephen B-LOC B-LOC -for O O -re O O -before O O -going O O -to O O -Shannon B-LOC B-LOC -, O O -Ireland B-LOC B-LOC -. O O -The O O -cause O O -of O O -the O O -crash O O -was O O -not O O -yet O O -known O O -. O O -In O O -were O O -due O O -to O O -fly O O -to O O -Stephen B-LOC B-LOC -later O O -on O O -Friday O O -. O O -P B-ORG B-ORG -says O O -Ara B-PER B-PER -, O O -Net B-PER B-PER -could O O -meet O O -Saturday O O -. O O -J B-LOC B-LOC -1996 O O -P B-ORG B-ORG -ne O O -said O O -on O O -Friday O O -Palestinian B-MISC B-MISC -President O O -Ya B-PER B-PER -Ara I-PER I-PER -, O O -Israeli B-MISC B-MISC -Prime O O -Minister O O -Benjamin B-PER B-PER -Net I-PER I-PER -and O O -Egyptian B-MISC B-MISC -President O O -Ho B-PER B-PER -Mu I-PER I-PER -might O O -all O O -meet O O -on O O -Saturday O O -to O O -try O O -to O O -c O O -a O O -deal O O -on O O -Israel B-LOC B-LOC -' O O -hand O O -of O O -He B-LOC B-LOC -to O O -the O O -P B-ORG B-ORG -. O O -" O O -It O O -is O O -very O O -possible O O -that O O -Ara B-PER B-PER -and O O -Net B-PER B-PER -will O O -meet O O -in O O -Cairo B-LOC B-LOC -on O O -Saturday O O -. O O -There O O -is O O -work O O -on O O -arranging O O -such O O -a O O -meeting O O -hosted O O -by O O -President O O -Mu B-PER B-PER -, O O -" O O -one O O -P B-ORG B-ORG -official O O -, O O -who O O -requested O O -an O O -, O O -told O O -Re B-ORG B-ORG -. O O -Israeli B-MISC B-MISC -officials O O -said O O -no O O -meeting O O -had O O -yet O O -been O O -set O O -. O O -Ara B-PER B-PER -' O O -adviser O O -Na B-PER B-PER -Abu I-PER I-PER -Rd I-PER I-PER -said O O -: O O -" O O -President O O -Ara B-PER B-PER -is O O -ready O O -to O O -meet O O -Prime O O -Minister O O -Net B-PER B-PER -but O O -no O O -time O O -or O O -date O O -has O O -been O O -set O O -for O O -such O O -a O O -meeting O O -yet O O -. O O -" O O -President O O -Ara B-PER B-PER -' O O -position O O -is O O -clear O O -that O O -such O O -a O O -meeting O O -should O O -come O O -after O O -successful O O -negotiations O O -so O O -that O O -the O O -meeting O O -would O O -have O O -positive O O -results O O -. O O -Especially O O -since O O -the O O -He B-LOC B-LOC -issue O O -has O O -not O O -been O O -agreed O O -yet O O -and O O -the O O -crucial O O -disputed O O -issues O O -have O O -not O O -been O O -resolved O O -. 
O O -" O O -But O O -Rd B-PER B-PER -said O O -Ara B-PER B-PER -would O O -go O O -to O O -Cairo B-LOC B-LOC -on O O -Saturday O O -for O O -talks O O -with O O -Mu B-PER B-PER -. O O -Both O O -Ara B-PER B-PER -and O O -Net B-PER B-PER -have O O -expressed O O -willingness O O -to O O -meet O O -. O O -They O O -last O O -met O O -in O O -Washington B-LOC B-LOC -after O O -clashes O O -in O O -September O O -that O O -killed O O -60 O O -Palestinians B-MISC B-MISC -and O O -15 O O -Israeli B-MISC B-MISC -. O O -The O O -violence O O -was O O -spurred O O -by O O -Israel B-LOC B-LOC -' O O -opening O O -an O O -entrance O O -to O O -a O O -tunnel O O -near O O -Mo B-MISC B-MISC -sites O O -in O O -Jerusalem B-LOC B-LOC -. O O -The O O -Palestine B-ORG B-ORG -Liberation I-ORG I-ORG -Organisation I-ORG I-ORG -( O O -P B-ORG B-ORG -) O O -ne O O -said O O -the O O -last O O -two O O -weeks O O -of O O -talks O O -with O O -Israel B-LOC B-LOC -on O O -implementing O O -the O O -long O O -hand O O -of O O -most O O -of O O -He B-LOC B-LOC -to O O -P B-ORG B-ORG -rule O O -had O O -been O O -" O O -meaning O O -" O O -, O O -ne O O -an O O -Ara B-PER O -meeting O O -. O O -Mu B-PER B-PER -' O O -adviser O O -O B-PER B-PER -el I-PER I-PER -said O O -on O O -Thursday O O -there O O -were O O -efforts O O -to O O -arrange O O -a O O -meeting O O -between O O -the O O -Israeli B-MISC B-MISC -and O O -Palestinian B-MISC B-MISC -leaders O O -. O O -Palestinian B-ORG B-ORG -Authority I-ORG I-ORG -Secretary O O -General O O -Ahmed B-PER B-PER -Abd I-PER I-PER -said O O -on O O -Thursday O O -he O O -understood O O -it O O -could O O -be O O -held O O -in O O -Cairo B-LOC B-LOC -either O O -on O O -Friday O O -or O O -Sunday O O -. O O -Abd B-PER B-PER -had O O -said O O -on O O -Thursday O O -he O O -did O O -not O O -think O O -Saturday O O -would O O -be O O -the O O -date O O -because O O -it O O -is O O -the O O -Jewish B-MISC B-MISC -sa O O -. O O -But O O -the O O -Jewish B-MISC B-MISC -sa O O -ends O O -at O O -sun O O -, O O -so O O -a O O -night O O -meeting O O -would O O -not O O -interfere O O -with O O -the O O -religious O O -o O O -. O O -Turkey B-LOC B-LOC -hind O O -by O O -own O O -land O O -on O O -Syrian B-MISC B-MISC -border O O -. O O -AN B-LOC B-LOC -1996 O O -Turkey B-LOC B-LOC -' O O -efforts O O -to O O -prevent O O -Kurdish B-MISC B-MISC -rebels O O -and O O -smug O O -in O O -from O O -Syria B-LOC B-LOC -are O O -being O O -badly O O -hind O O -because O O -the O O -military O O -does O O -not O O -have O O -a O O -map O O -of O O -its O O -own O O -mine O O -on O O -the O O -border O O -, O O -a O O -commission O O -of O O -parliament O O -said O O -. O O -" O O -It O O -is O O -not O O -known O O -exactly O O -where O O -the O O -mines O O -have O O -been O O -so O O -because O O -a O O -mine O O -chart O O -cannot O O -be O O -found O O -, O O -" O O -the O O -commission O O -said O O -in O O -a O O -report O O -on O O -border O O -protection O O -. O O -The O O -report O O -, O O -to O O -be O O -debated O O -in O O -parliament O O -in O O -coming O O -weeks O O -, O O -was O O -seen O O -by O O -Re B-ORG B-ORG -on O O -Friday O O -. O O -" O O -Official O O -say O O -the O O -mine O O -present O O -an O O -obstacle O O -to O O -the O O -security O O -forces O O -, O O -" O O -it O O -said O O -. 
O O -It O O -said O O -Kurdistan B-ORG B-ORG -Workers I-ORG I-ORG -Party I-ORG I-ORG -( O O -P B-ORG B-ORG -) O O -guerrilla O O -sometimes O O -know O O -the O O -layout O O -of O O -mined O O -areas O O -along O O -the O O -border O O -better O O -than O O -the O O -security O O -forces O O -. O O -" O O -Terror O O -and O O -smug O O -have O O -dug O O -up O O -the O O -mines O O -, O O -def O O -them O O -and O O -opened O O -up O O -wide O O -paths O O -in O O -some O O -areas O O -. O O -They O O -can O O -come O O -in O O -and O O -out O O -easily O O -as O O -the O O -mine O O -are O O -not O O -an O O -obstacle O O -, O O -" O O -it O O -said O O -. O O -An O O -armed O O -forces I-ORG O -spokesman O O -was O O -not O O -available O O -for O O -comment O O -. O O -Turkey B-LOC B-LOC -says O O -Syria B-LOC B-LOC -sponsors O O -the O O -P B-ORG B-ORG -, O O -fighting O O -for O O -Kurdish B-MISC B-MISC -self O O -in O O -southeast O O -Turkey B-LOC B-LOC -. O O -Damascus B-LOC B-LOC -denies O O -aid O O -the O O -rebels O O -. O O -The O O -P B-ORG B-ORG -also O O -crosses O O -into O O -Turkey B-LOC B-LOC -from O O -bases O O -in O O -the O O -mountains O O -of O O -northern O O -Iraq B-LOC B-LOC -. O O -More O O -than O O -21 O O -people O O -have O O -died O O -in O O -the O O -12 O O -conflict O O -. O O -Three O O -dead O O -in O O -Ku B-MISC B-MISC -militia O O -blood O O -feud O O -in O O -Turkey B-LOC B-LOC -. O O -D B-LOC B-LOC -, O O -Turkey B-LOC B-LOC -1996 O O -Three O O -people O O -were O O -killed O O -on O O -Friday O O -in O O -a O O -gun O O -battle O O -between O O -rival O O -groups O O -of O O -anti O O -militia O O -on O O -the O O -streets O O -of O O -this O O -southeastern O O -Turkish B-MISC B-MISC -city O O -, O O -police O O -said O O -. O O -Four O O -others O O -were O O -wounded O O -in O O -the O O -clash O O -, O O -caused O O -by O O -a O O -blood O O -feud O O -between O O -two O O -families O O -, O O -the O O -Ke B-PER B-PER -and O O -Kara B-PER B-PER -, O O -serving O O -as O O -state O O -village O O -guards O O -against O O -Kurdish B-MISC B-MISC -rebels O O -. O O -Police O O -said O O -the O O -guards O O -fired O O -automatic O O -weapons O O -at O O -each O O -other O O -. O O -One O O -of O O -the O O -dead O O -was O O -a O O -civilian O O -pass O O -. O O -The O O -role O O -of O O -the O O -70 O O -mainly O O -Kurdish B-MISC B-MISC -village O O -guards O O -who O O -fight O O -Kurdistan B-ORG B-ORG -Workers I-ORG I-ORG -Party I-ORG I-ORG -( O O -P B-ORG B-ORG -) O O -guerrilla O O -in O O -the O O -southeast O O -has O O -been O O -questioned O O -recently O O -after O O -media O O -allegations O O -that O O -many O O -of O O -them O O -are O O -involved O O -in O O -common O O -crime O O -. O O -The O O -head O O -of O O -the O O -region O O -' O O -main O O -pro O O -militia O O -is O O -at O O -the O O -centre O O -of O O -a O O -security O O -scandal O O -that O O -has O O -shaken O O -the O O -government O O -. O O -More O O -than O O -21 O O -people O O -have O O -been O O -killed O O -in O O -the O O -12 O O -conflict O O -between O O -Turkish B-MISC B-MISC -security O O -forces O O -and O O -the O O -P B-ORG B-ORG -, O O -fighting O O -for O O -Kurdish B-MISC B-MISC -autonomy O O -or O O -independence O O -. O O -Texas B-LOC B-LOC -/ O O -w O O -Ok B-LOC B-LOC -fed O O -cattle O O -round O O -- O O -USD B-ORG B-ORG -. 
O O -AM B-LOC B-LOC -1996 O O -Trade O O -very O O -slow O O -in O O -the O O -Pan B-LOC B-LOC -area O O -Thursday O O -. O O -S O O -steer O O -and O O -he O O -not O O -well O O -tested O O -. O O -Fe O O -reporting O O -light O O -inquiry O O -from O O -buyers O O -. O O -- O O -USD B-ORG B-ORG -Thursday O O -200 O O -Week O O -A O O -Holiday O O -Year O O -A O O -10 O O -W O O -to O O -Date O O -69 O O -Week O O -A O O -58 O O -Year O O -A O O -30 O O -Sales O O -reported O O -on O O -200 O O -head O O -steer O O -; O O -69 O O -head O O -confirmed O O -for O O -week O O -to O O -date O O -which O O -includes O O -14 O O -formulated O O -and O O -3 O O -contracted O O -cattle O O -to O O -be O O -shipped O O -this O O -week O O -. O O -S B-ORG O -St I-ORG O -: O O -Pen B-MISC O -Select O O -and O O -Choice O O -2 O O -, O O -115 O O -lbs O O -67 O O -. O O -Pen O O -Select O O -, O O -few O O -choice O O -2 O O -115 O O -lbs O O -66 O O -. O O -Kansas B-LOC B-LOC -feed O O -cattle O O -round O O -- O O -USD B-ORG B-ORG -. O O -D B-LOC B-LOC -C I-LOC I-LOC -1996 O O -Trade O O -slow O O -. O O -Not O O -enough O O -slaughter O O -steer O O -or O O -he O O -sales O O -confirmed O O -for O O -an O O -adequate O O -market O O -test O O -. O O -- O O -USD B-ORG B-ORG -Thursday O O -600 O O -week O O -ago O O -holiday O O -year O O -ago O O -14 O O -week O O -to O O -date O O -89 O O -week O O -ago O O -71 O O -year O O -ago O O -47 O O -Inquiry O O -good O O -, O O -demand O O -light O O -. O O -Sales O O -confirmed O O -on O O -500 O O -slaughter O O -steer O O -and O O -100 O O -slaughter O O -he O O -Thursday O O -. O O -For O O -the O O -week O O -to O O -date O O -89 O O -head O O -confirmed O O -including O O -30 O O -head O O -of O O -contracted O O -or O O -formulated O O -cattle O O -. O O -St B-ORG O -: O O -Select O O -and O O -Choice O O -2 O O -, O O -1200 O O -lbs O O -67 O O -. O O -He O O -: O O -Select O O -and O O -Choice O O -2 O O -, O O -115 O O -lbs O O -67 O O -. O O -Del B-ORG B-ORG -Hanover I-ORG I-ORG -weekly O O -municipal O O -bond O O -yields O O -. O O -Del B-ORG B-ORG -Hanover I-ORG I-ORG -weekly O O -m O O -bond O O -yields O O -calculated O O -Dec O O -5 O O -A B-ORG O -A I-ORG O -A I-ORG O -Ba I-ORG O -1997 O O -3 O O -3 O O -3 O O -3 O O -4 O O -4 O O -4 O O -4 O O -2001 O O -4 O O -4 O O -4 O O -4 O O -4 O O -4 O O -5 O O -5 O O -2006 O O -4 O O -4 O O -4 O O -4 O O -5 O O -5 O O -5 O O -5 O O -2011 O O -5 O O -5 O O -5 O O -5 O O -5 O O -5 O O -5 O O -5 O O -2016 O O -5 O O -5 O O -L B-LOC B-ORG -AN I-LOC I-ORG -1996 O O -U B-LOC B-LOC -energy O O -future O O -added O O -to O O -floor O O -session O O -gains O O -in O O -light O O -NY B-MISC B-MISC -ACC I-MISC I-MISC -trade O O -Thursday O O -, O O -as O O -forecast O O -for O O -colder O O -temperatures O O -in O O -di O O -Northeastern B-LOC O -markets O O -raised O O -supply O O -concerns O O -. O O -" O O -The O O -cold O O -weather O O -forecast O O -are O O -helping O O -right O O -now O O -, O O -" O O -a O O -trader O O -said O O -. O O -Earlier O O -, O O -NY B-ORG B-ORG -crude O O -ended O O -daytime O O -trade O O -78 O O -cents O O -higher O O -at O O -$ O O -25 O O -a O O -barrel O O -, O O -following O O -breakthrough O O -of O O -key O O -technical O O -levels O O -and O O -reports O O -of O O -tighter O O -supplies O O -. 
O O -Front O O -heating O O -oil O O -firm O O -0 O O -cents O O -a O O -gal O O -to O O -75 O O -cents O O -as O O -roughly O O -100 O O -lots O O -changed O O -hands O O -within O O -the O O -first O O -few O O -hours O O -of O O -ACC O B-MISC -. O O -About O O -112 O O -lots O O -were O O -exchanged O O -overall O O -, O O -traders O O -said O O -. O O -NY B-ORG B-ORG -gasoline O O -for O O -January O O -delivery O O -climbed O O -0 O O -cents O O -a O O -gal O O -to O O -69 O O -cents O O -as O O -a O O -light O O -33 O O -lots O O -traded O O -in O O -the O O -nearby O O -month O O -and O O -35 O O -moved O O -overall O O -. O O -January O O -crude O O -was O O -barely O O -changed O O -from O O -its O O -settlement O O -, O O -ed O O -up O O -one O O -cent O O -to O O -$ O O -25 O O -a O O -barrel O O -. O O -About O O -350 O O -lots O O -were O O -traded O O -for O O -January O O -and O O -87 O O -in O O -all O O -months O O -. O O -- O O -David B-PER B-PER -B I-PER I-PER -, O O -Los B-LOC B-LOC -Angeles I-LOC I-LOC -bureau O O -+ O O -213 O O -380 O O -2014 O O -U B-LOC B-LOC -blast O O -release O O -of O O -convicted O O -bomber O O -. O O -WA B-LOC B-LOC -1996 O O -The O O -United B-LOC B-LOC -States I-LOC I-LOC -Thursday O O -blasted O O -the O O -release O O -from O O -a O O -Greek B-MISC B-MISC -prison O O -of O O -a O O -Palestinian B-MISC B-MISC -guerrilla O O -convicted O O -of O O -bombing O O -an O O -airline O O -and O O -killing O O -a O O -teenager O O -in O O -1982 O O -, O O -saying O O -the O O -move O O -" O O -does O O -not O O -make O O -sense O O -. O O -" O O -" O O -All O O -of O O -us O O -who O O -have O O -been O O -victim O O -by O O -terrorists O O -. O O -need O O -to O O -stand O O -together O O -against O O -terrorists O O -. O O -We O O -ca O O -n O O -let O O -terrorists O O -out O O -of O O -jail O O -when O O -they O O -are O O -a O O -danger O O -to O O -civilians O O -all O O -around O O -the O O -world O O -, O O -" O O -State B-ORG B-ORG -Department I-ORG I-ORG -spokesman O O -Nicholas B-PER B-PER -Burns I-PER I-PER -said O O -. O O -' O O -Mohammed B-PER B-PER -Rashid I-PER I-PER -" O O -is O O -a O O -terrorist O O -who O O -deserves O O -to O O -be O O -behind O O -bars O O -. O O -It O O -is O O -in O O -to O O -us O O -why O O -he O O -would O O -have O O -been O O -allowed O O -to O O -leave O O -Greece B-LOC B-LOC -before O O -serving O O -his O O -just O O -sentence O O -. O O -This O O -is O O -an O O -in O O -move O O -. O O -It O O -does O O -not O O -make O O -sense O O -, O O -" O O -Burns B-PER B-PER -told O O -a O O -news O O -brief O O -. O O -He O O -spoke O O -after O O -Rashid B-PER B-PER -left O O -Greece B-LOC B-LOC -Thursday O O -on O O -being O O -freed O O -from O O -prison O O -early O O -for O O -good O O -behaviour O O -after O O -serving O O -8 O O -years O O -. O O -The O O -Clinton B-PER B-PER -administration O O -' O O -strong O O -views O O -on O O -this O O -subject O O -have O O -been O O -conveyed O O -to O O -the O O -Greek B-MISC B-MISC -government O O -, O O -Burns B-PER B-PER -said O O -. 
O O -Ma B-PER B-PER -Rashid I-PER I-PER -was O O -w O O -from O O -Ko B-LOC B-LOC -maximum O O -security O O -prison O O -just O O -outside O O -Athens B-LOC B-LOC -to O O -the O O -airport O O -where O O -he O O -boarded O O -a O O -regular O O -Olympic B-ORG B-ORG -Airways I-ORG I-ORG -flight O O -to O O -Cairo B-LOC B-LOC -where O O -he O O -would O O -transit O O -to O O -Tu B-LOC B-LOC -and O O -the O O -former O O -Palestine B-ORG B-ORG -Liberation I-ORG I-ORG -Organisation I-ORG I-ORG -headquarters O O -. O O -Rashid B-PER B-PER -, O O -46 O O -, O O -was O O -sentenced O O -to O O -18 O O -years O O -in O O -prison O O -by O O -a O O -Greek B-MISC B-MISC -court O O -in O O -1992 O O -after O O -being O O -convicted O O -of O O -pre O O -murder O O -in O O -the O O -mid O O -bombing O O -of O O -a O O -Pan B-MISC B-MISC -American I-MISC I-MISC -airline O O -in O O -1982 O O -. O O -His O O -sentence O O -had O O -been O O -reduced O O -to O O -15 O O -years O O -in O O -1993 O O -. O O -A O O -parole O O -court O O -ruled O O -recently O O -that O O -Rashid B-PER B-PER -could O O -be O O -freed O O -after O O -serving O O -8 O O -years O O -, O O -with O O -time O O -in O O -pre O O -detention O O -counted O O -towards O O -his O O -term O O -, O O -but O O -said O O -he O O -must O O -be O O -expelled O O -immediately O O -from O O -Greece B-LOC B-LOC -. O O -The O O -United B-LOC B-LOC -States I-LOC I-LOC -a O O -Rashid B-PER B-PER -of O O -belonging O O -to O O -the O O -May O O -15 O O -Palestinian B-MISC B-MISC -guerrilla O O -group O O -and O O -being O O -an O O -accomplished O O -student O O -of O O -master O O -Palestinian B-MISC B-MISC -bomb O O -Abu B-PER B-PER -Ibrahim I-PER I-PER -. O O -Three O O -FBI B-ORG B-ORG -agents O O -who O O -testified O O -against O O -Rashid B-PER B-PER -during O O -the O O -trial O O -, O O -held O O -at O O -Ko B-LOC B-LOC -prison O O -, O O -said O O -they O O -had O O -ample O O -evidence O O -against O O -Rashid B-PER B-PER -for O O -a O O -bomb O O -planted O O -on O O -a O O -Pan B-MISC B-MISC -American I-MISC I-MISC -plane O O -in O O -Brazil B-LOC B-LOC -in O O -1982 O O -and O O -a O O -mid O O -bomb O O -blast O O -on O O -a O O -T B-ORG B-ORG -airline O O -approaching O O -Athens B-LOC B-LOC -in O O -1986 O O -which O O -killed O O -four O O -U B-LOC B-LOC -citizens O O -. O O -School O O -football O O -player O O -banned O O -for O O -slash O O -opponents O O -. O O -AL B-LOC B-LOC -, O O -N B-LOC B-LOC -1996 O O -A O O -New B-LOC B-LOC -Mexico I-LOC I-LOC -high O O -school O O -football O O -player O O -who O O -used O O -razor O O -helmet O O -b O O -to O O -slash O O -opponents O O -and O O -a O O -referee O O -was O O -expelled O O -from O O -high O O -school O O -banned O O -Thursday O O -from O O -competition O O -for O O -one O O -year O O -. O O -Mike B-PER B-PER -C I-PER I-PER -, O O -17 O O -, O O -was O O -expelled O O -from O O -St B-LOC B-ORG -Pius I-LOC I-ORG -X I-LOC I-ORG -High I-LOC I-ORG -School I-LOC I-ORG -in O O -Albuquerque B-LOC B-LOC -after O O -an O O -October O O -game O O -in O O -which O O -he O O -used O O -the O O -sharp O O -chin O O -strap O O -b O O -to O O -in O O -two O O -opposing O O -players O O -and O O -the O O -referee O O -. O O -One O O -of O O -the O O -players O O -need O O -10 O O -s O O -to O O -a O O -cut O O -on O O -his O O -forearm O O -. 
O O -Official O O -said O O -the O O -New B-ORG B-ORG -Mexico I-ORG I-ORG -Activities I-ORG I-ORG -Association I-ORG I-ORG -decided O O -to O O -bar O O -C B-PER B-PER -from O O -any O O -inter O O -competition O O -until O O -next O O -October O O -, O O -regardless O O -of O O -the O O -school O O -he O O -attends O O -. O O -C B-PER B-PER -' O O -father O O -, O O -Stephen B-PER B-PER -C I-PER I-PER -, O O -had O O -admitted O O -filing O O -the O O -metal O O -b O O -to O O -a O O -fine O O -edge O O -, O O -saying O O -he O O -did O O -it O O -to O O -get O O -even O O -with O O -the O O -referee O O -and O O -with O O -players O O -who O O -had O O -rough O O -up O O -his O O -son O O -in O O -a O O -previous O O -game O O -. O O -Cy B-ORG O -sq O O -overs O O -copyright O O -talks O O -. O O -Eli B-PER B-PER -Ka I-PER I-PER -GE B-LOC B-LOC -1996 O O -In O O -a O O -g O O -Geneva B-LOC B-LOC -conference O O -centre O O -built O O -before O O -the O O -dawn O O -of O O -the O O -Internet B-MISC B-MISC -, O O -groups O O -of O O -s O O -officials O O -made O O -a O O -first O O -stab O O -on O O -Friday O O -at O O -re O O -copyright O O -laws O O -for O O -the O O -digital O O -age O O -. O O -But O O -critics O O -at O O -the O O -first O O -government O O -meeting O O -to O O -re O O -copyright O O -laws O O -in O O -25 O O -years O O -said O O -the O O -officials O O -and O O -legislators O O -might O O -as O O -well O O -be O O -trying O O -to O O -police O O -the O O -et O O -. O O -After O O -four O O -days O O -of O O -diplomatic O O -w O O -over O O -procedures O O -, O O -some O O -600 O O -delegates O O -from O O -nations O O -small O O -and O O -large O O -got O O -down O O -to O O -the O O -ni O O -of O O -setting O O -the O O -digital O O -agenda O O -for O O -the O O -first O O -time O O -. O O -Cy B-ORG O -sq O O -overs O O -the O O -debate O O -on O O -a O O -stack O O -of O O -proposals O O -covering O O -literary O O -and O O -artistic O O -works O O -, O O -the O O -rights O O -of O O -performers O O -and O O -producers O O -of O O -music O O -and O O -producers O O -of O O -databases O O -. O O -" O O -If O O -it O O -goes O O -on O O -like O O -this O O -, O O -we O O -w O O -n O O -have O O -enough O O -time O O -to O O -finish O O -all O O -the O O -discussions O O -, O O -" O O -a O O -frustrated O O -Western B-MISC O -delegate O O -said O O -. O O -" O O -They O O -announced O O -they O O -will O O -start O O -evening O O -sessions O O -next O O -week O O -. O O -" O O -At O O -by O O -copyright O O -industries O O -to O O -ensure O O -they O O -get O O -a O O -cut O O -from O O -online O O -works O O -led O O -to O O -a O O -storm O O -of O O -protests O O -by O O -Internet B-ORG B-MISC -companies O O -and O O -critics O O -who O O -say O O -the O O -pact O O -would O O -curb O O -public O O -access O O -to O O -online O O -information O O -from O O -soccer O O -results O O -to O O -stock O O -prices O O -. O O -" O O -It O O -' O O -not O O -illegal O O -to O O -make O O -photo O O -of O O -newspaper O O -articles O O -. O O -It O O -' O O -fair O O -use O O -. O O -We O O -can O O -read O O -sports O O -statistics O O -or O O -stock O O -prices O O -. 
O O -But O O -with O O -the O O -treaty O O -, O O -this O O -kind O O -of O O -fact O O -will O O -be O O -owned O O -and O O -subject O O -to O O -licensing O O -, O O -" O O -said O O -James B-PER B-PER -Love I-PER I-PER -, O O -a O O -consumer O O -lobby O O -heading O O -the O O -Washington B-MISC B-MISC -Consumer B-ORG B-PER -Project I-ORG I-PER -on I-ORG O -Technology I-ORG O -. O O -" O O -None O O -of O O -the O O -treaties O O -are O O -ready O O -to O O -move O O -. O O -These O O -people O O -do O O -n O O -understand O O -what O O -they O O -' O O -doing O O -. O O -" O O -At O O -stake O O -are O O -billion O O -of O O -dollars O O -and O O -the O O -future O O -of O O -the O O -electronic O O -information O O -industry O O -- O O -the O O -coming O O -medium O O -for O O -the O O -distribution O O -of O O -music O O -, O O -films O O -, O O -literature O O -, O O -software O O -and O O -commerce O O -. O O -Support O O -of O O -the O O -three O O -pact O O -say O O -they O O -are O O -only O O -an O O -extension O O -of O O -existing O O -intellectual O O -property O O -rights O O -, O O -covered O O -by O O -the O O -century O O -Bern B-MISC B-MISC -Convention I-MISC I-MISC -. O O -But O O -an O O -array O O -of O O -opponents O O -from O O -the O O -network O O -industry O O -to O O -consumer O O -, O O -scientific O O -and O O -academic O O -groups O O -say O O -the O O -pact O O -will O O -give O O -sweeping O O -powers O O -to O O -entertainment O O -and O O -copyright O O -industries O O -. O O -A O O -quick O O -survey O O -at O O -the O O -conference O O -centre O O -found O O -few O O -officials O O -who O O -had O O -actually O O -surf O O -the O O -Internet B-MISC B-MISC -. O O -Mongolia B-LOC B-LOC -' O O -state O O -copyright O O -official O O -, O O -Gun B-PER B-PER -J I-PER I-PER -, O O -said O O -a O O -that O O -he O O -had O O -just O O -arrived O O -from O O -U B-LOC B-LOC -Bat I-LOC I-LOC -and O O -was O O -not O O -aware O O -of O O -the O O -details O O -of O O -the O O -digital O O -agenda O O -. O O -" O O -We O O -do O O -n O O -have O O -money O O -for O O -Internet B-ORG B-MISC -in O O -Mongolia B-LOC B-LOC -, O O -" O O -he O O -added O O -. O O -Alexander B-PER B-PER -Ba I-PER I-PER -, O O -deputy O O -legal O O -chief O O -at O O -Russia B-LOC B-LOC -' O O -foreign O O -ministry O O -, O O -said O O -Moscow B-LOC B-LOC -had O O -yet O O -to O O -formula O O -a O O -policy O O -on O O -copyright O O -in O O -c O O -. O O -He O O -too O O -had O O -never O O -brows O O -the O O -Net B-MISC O -. O O -" O O -I O O -' O O -never O O -tried O O -it O O -and O O -why O O -should O O -I O O -? O O -There O O -are O O -lots O O -of O O -other O O -things O O -in O O -this O O -life O O -I O O -have O O -n O O -tried O O -either O O -, O O -" O O -he O O -said O O -. O O -A O O -visit O O -to O O -the O O -computer O O -centre O O -offering O O -Internet B-MISC B-MISC -services O O -found O O -a O O -lone O O -European B-MISC B-MISC -official O O -clicking O O -away O O -on O O -his O O -mouse O O -. 
O O -" O O -Internet B-MISC B-MISC -is O O -a O O -potential O O -cash O O -cow O O -for O O -copyright O O -industries O O -and O O -we O O -need O O -road O O -on O O -the O O -information O O -super O O -, O O -" O O -said O O -Marc B-PER B-PER -Pearl I-PER I-PER -, O O -vice O O -of O O -the O O -Information B-ORG B-ORG -Technology I-ORG I-ORG -Association I-ORG I-ORG -of I-ORG I-ORG -America I-ORG I-ORG -, O O -a O O -trade O O -association O O -of O O -U B-LOC B-LOC -network O O -companies O O -opposing O O -the O O -treaties O O -. O O -" O O -But O O -there O O -are O O -a O O -lot O O -of O O -dinosaurs O O -here O O -. O O -People O O -here O O -do O O -n O O -understand O O -Internet B-MISC B-MISC -technology O O -. O O -Because O O -they O O -do O O -n O O -understand O O -technology O O -, O O -they O O -fear O O -the O O -unknown O O -. O O -" O O -Before O O -the O O -Internet B-MISC B-MISC -, O O -those O O -whose O O -business O O -was O O -to O O -protect O O -copyright O O -knew O O -where O O -they O O -stood O O -. O O -Their O O -enemies O O -were O O -tan O O -if O O -el O O -, O O -such O O -as O O -the O O -people O O -who O O -pirate O O -music O O -cassette O O -. O O -But O O -the O O -Internet B-ORG B-MISC -, O O -a O O -global O O -computer O O -network O O -where O O -anything O O -from O O -music O O -to O O -software O O -can O O -be O O -du O O -and O O -distributed O O -at O O -the O O -click O O -of O O -a O O -computer O O -mouse O O -, O O -has O O -ripped O O -up O O -the O O -rule O O -. O O -Network O B-MISC -operators O O -said O O -the O O -draft O O -laws O O -would O O -hold O O -them O O -responsible O O -for O O -copyright O O -infringement O O -in O O -the O O -system O O -and O O -expose O O -them O O -to O O -multi O O -l O O -. O O -" O O -There O O -are O O -500 O O -million O O -messages O O -transmitted O O -through O O -the O O -Internet B-MISC B-MISC -everyday O O -, O O -" O O -said O O -Tim B-PER B-PER -Casey I-PER I-PER -of O O -the O O -U B-MISC B-MISC -MC B-ORG B-ORG -Communications I-ORG I-ORG -Corporation I-ORG I-ORG -. O O -" O O -How O O -can O O -we O O -control O O -them O O -all O O -? O O -" O O -Italy B-LOC B-LOC -evacuate O O -17 O O -nuns O O -and O O -priests O O -from O O -Z B-LOC B-LOC -. O O -ROM B-LOC B-LOC -1996 O O -Italy B-LOC B-LOC -said O O -on O O -Friday O O -it O O -had O O -evacuated O O -17 O O -Roman B-MISC B-MISC -Catholic I-MISC I-MISC -nuns O O -and O O -priests O O -from O O -Z B-LOC B-LOC -where O O -they O O -had O O -been O O -at O O -risk O O -from O O -fighting O O -between O O -government O O -troops O O -and O O -ethnic O O -Tu B-MISC B-MISC -rebels O O -. O O -The O O -Foreign B-ORG B-ORG -Ministry I-ORG I-ORG -said O O -the O O -10 O O -Europeans B-MISC B-MISC -and O O -seven O O -Africans B-MISC B-MISC -took O O -a O O -special O O -flight O O -from O O -the O O -G B-LOC B-LOC -national O O -park O O -in O O -northern O O -Z B-LOC B-LOC -to O O -the O O -Uganda B-MISC B-MISC -capital O O -Ka B-LOC B-LOC -where O O -they O O -were O O -being O O -looked O O -after O O -at O O -the O O -Italian B-MISC B-MISC -embassy O O -. O O -The O O -group O O -had O O -travelled O O -from O O -their O O -mission O O -on O O -the O O -edge O O -of O O -the O O -park O O -to O O -a O O -landing O O -strip O O -to O O -make O O -the O O -re O O -, O O -a O O -ministry O O -official O O -said O O -. 
O O -The O O -ministry O O -said O O -the O O -group O O -consisted O O -of O O -13 O O -nuns O O -, O O -seven O O -Italians B-MISC B-MISC -and O O -six O O -Z B-MISC B-MISC -, O O -and O O -four O O -priests O O -, O O -two O O -from O O -Belgium B-LOC B-LOC -, O O -one O O -from O O -Spain B-LOC B-LOC -and O O -one O O -from O O -Zambia B-LOC B-LOC -. O O -Third O O -Paris B-LOC B-LOC -blast O O -victim O O -was O O -Moroccan B-MISC B-MISC -student O O -. O O -PA B-LOC B-LOC -1996 O O -Moroccan B-MISC B-MISC -Mohamed B-PER B-PER -Ben I-PER I-PER -, O O -the O O -third O O -person O O -to O O -die O O -after O O -a O O -bombing O O -on O O -a O O -Paris B-LOC B-LOC -train O O -, O O -was O O -a O O -25 O O -student O O -about O O -to O O -submit O O -a O O -mathematics O O -doctorate O O -, O O -the O O -Moroccan B-MISC B-MISC -embassy O O -said O O -on O O -Friday O O -. O O -Ben B-PER B-PER -died O O -of O O -his O O -injuries O O -on O O -Thursday O O -night O O -, O O -two O O -days O O -after O O -the O O -blast O O -. O O -A O O -newly O O -Canadian B-MISC B-MISC -woman O O -and O O -a O O -man O O -from O O -New B-LOC B-LOC -Caledonia I-LOC I-LOC -died O O -instantly O O -in O O -the O O -bomb O O -that O O -injured O O -90 O O -others O O -in O O -the O O -rush O O -train O O -. O O -An O O -embassy O O -spokesman O O -said O O -Ben B-PER B-PER -, O O -the O O -son O O -of O O -a O O -Moroccan B-MISC B-MISC -army O O -colonel O O -, O O -had O O -been O O -due O O -to O O -take O O -his O O -doctorate O O -in O O -March O O -and O O -hoped O O -to O O -become O O -a O O -teacher O O -. O O -In O O -have O O -said O O -the O O -explosion O O -bore O O -the O O -hall O O -of O O -Algerian B-MISC B-MISC -Mo I-MISC B-MISC -fundamental O O -who O O -staged O O -a O O -series O O -of O O -bombings O O -last O O -year O O -which O O -killed O O -eight O O -people O O -and O O -injured O O -more O O -than O O -160 O O -. O O -Italian B-MISC B-MISC -President O O -urges O O -se O O -to O O -turn O O -back O O -. O O -MA B-LOC O -, O O -Italy B-LOC B-LOC -1996 O O -Italian B-MISC B-MISC -President O O -Oscar B-PER B-PER -Luigi I-PER I-PER -Sc I-PER I-PER -visited O O -the O O -symbolic O O -heart O O -of O O -the O O -se O O -Northern B-ORG B-ORG -League I-ORG I-ORG -on O O -Friday O O -and O O -appealed O O -to O O -its O O -supporters O O -to O O -drop O O -their O O -campaign O O -for O O -a O O -break O O -state O O -. O O -Ad O O -a O O -convention O O -on O O -Italian B-MISC B-MISC -unity O O -in O O -Man B-LOC O -, O O -where O O -the O O -party O O -has O O -set O O -up O O -its O O -own O O -" O O -parliament O O -of O O -the O O -north O O -" O O -, O O -Sc B-PER B-PER -made O O -a O O -direct O O -appeal O O -to O O -what O O -he O O -called O O -" O O -my O O -friends O O -from O O -the O O -League B-ORG B-ORG -" O O -to O O -work O O -instead O O -for O O -federal O O -reform O O -. O O -" O O -It O O -is O O -an O O -invitation O O -, O O -a O O -commitment O O -, O O -a O O -promise O O -. O O -Let O B-LOC -' O O -march O O -together O O -, O O -" O O -Sc B-PER B-PER -, O O -a O O -northern O O -himself O O -, O O -said O O -. O O -" O O -Help O O -Italy B-LOC B-LOC -to O O -teach O O -, O O -to O O -propose O O -a O O -capacity O O -for O O -strong O O -local O O -autonomy O O -, O O -for O O -the O O -federal O O -which O O -can O O -give O O -new O O -v O O -to O O -our O O -blood O O -. 
O O -But O O -turn O O -back O O -from O O -the O O -line O O -you O O -are O O -taking O O -now O O -, O O -" O O -he O O -said O O -. O O -Sc B-PER B-PER -was O O -in O O -Man B-LOC B-LOC -to O O -attend O O -a O O -ceremony O O -commemorating O O -the O O -executions O O -there O O -by O O -Austrian B-MISC B-MISC -rulers O O -in O O -1852 O O -and O O -1853 O O -of O O -a O O -group O O -of O O -Italians B-MISC B-MISC -who O O -had O O -campaigned O O -for O O -national O O -unity O O -. O O -He O O -was O O -j O O -and O O -whistle O O -at O O -by O O -a O O -small O O -group O O -of O O -League B-ORG B-LOC -supporters O O -when O O -he O O -arrived O O -for O O -a O O -visit O O -marked O O -by O O -heavy O O -security O O -. O O -Witness O O -said O O -the O O -protesters O O -were O O -outnumbered O O -by O O -other O O -Italians B-MISC B-MISC -who O O -waved O O -t O O -flags O O -in O O -the O O -national O O -red O O -, O O -white O O -and O O -green O O -or O O -shouted O O -" O O -Viva B-MISC O -Italia I-MISC B-LOC -" O O -. O O -The O O -League B-ORG B-ORG -won O O -more O O -than O O -eight O O -percent O O -of O O -votes O O -at O O -the O O -last O O -general O O -election O O -in O O -April O O -on O O -a O O -federal O O -platform O O -but O O -its O O -leader O O -Um B-PER B-PER -Boss I-PER I-PER -later O O -switched O O -to O O -a O O -se O O -agenda O O -. O O -A O O -three O O -" O O -independence O O -" O O -march O O -along O O -the O O -Po B-LOC B-LOC -River I-LOC O -in O O -September O O -, O O -culminating O O -in O O -a O O -declaration O O -in O O -Venice B-LOC B-LOC -of O O -a O O -self O O -" O O -Republic B-ORG B-LOC -of I-ORG I-LOC -Pa I-LOC I-LOC -" O O -, O O -flopped O O -badly O O -. O O -Denmark B-LOC B-LOC -' O O -Radio B-MISC O -H I-MISC O -result O O -seen O O -flat O O -. O O -CO B-LOC B-LOC -1996 O O -A O O -Re B-ORG B-ORG -consensus O O -survey O O -sees O O -medical O O -equipment O O -group O O -Radio B-ORG B-ORG -reporting O O -largely O O -unchanged O O -earnings O O -when O O -it O O -publishes O O -first O O -half O O -1999 O O -results O O -next O O -Wednesday O B-ORG -. O O -An O O -average O O -of O O -four O O -analysts O O -' O O -forecast O O -predicted O O -pre O O -profit O O -of O O -147 O O -million O O -crown O O -compared O O -to O O -144 O O -million O O -in O O -the O O -first O O -six O O -months O O -of O O -1995 O O -. O O -They O O -said O O -that O O -the O O -group O O -' O O -failure O O -to O O -introduce O O -new O O -products O O -was O O -behind O O -the O O -share O O -' O O -weak O O -performance O O -in O O -1996 O O -, O O -during O O -which O O -it O O -has O O -lost O O -seven O O -percent O O -so O O -far O O -. O O -- O O -So B-PER B-PER -Lin I-PER I-PER -Jakob I-PER I-PER -, O O -Copenhagen B-LOC B-LOC -news O O -+ O O -33 O O -Mo B-MISC B-MISC -fundamental O O -kill O O -19 O O -Algerian B-MISC B-MISC -- O O -agency O O -. O O -PA B-LOC B-LOC -1996 O O -Mo B-MISC B-MISC -fundamental O O -killed O O -19 O O -civilians O O -overnight O O -in O O -B B-LOC B-LOC -province O O -south O O -of O O -Algiers B-LOC B-LOC -, O O -Algerian B-MISC B-MISC -security O O -forces O O -said O O -on O O -Friday O O -. O O -In O O -a O O -statement O O -carried O O -on O O -the O O -official O O -Algerian B-MISC B-MISC -news O O -agency O O -AP B-ORG B-ORG -, O O -the O O -security O O -forces O O -said O O -the O O -19 O O -had O O -been O O -killed O O -by O O -" O O -a O O -group O O -of O O -terrorists O O -" O O -. 
O O -Belgian B-MISC B-MISC -police O O -smash O O -major O O -drugs O O -rings O O -, O O -30 O O -arrested O O -. O O -BR B-LOC B-LOC -1996 O O -Police O O -smashed O O -two O O -drugs O O -smuggling O O -rings O O -and O O -arrested O O -30 O O -people O O -after O O -a O O -taxi O O -in O O -Spain B-LOC B-LOC -alerted O O -them O O -to O O -a O O -suitcase O O -of O O -heroin O O -left O O -in O O -his O O -cab O O -, O O -Belgian B-MISC B-MISC -police O O -said O O -on O O -Friday O O -. O O -Police O O -seized O O -dozens O O -of O O -k O O -of O O -heroin O O -with O O -a O O -street O O -value O O -of O O -hundreds O O -of O O -millions O O -of O O -Belgian B-MISC B-MISC -f O O -, O O -a O O -public O O -prosecutor O O -' O O -office O O -spokesman O O -in O O -the O O -port O O -city O O -of O O -Antwerp B-LOC B-ORG -said O O -. O O -He O O -said O O -a O O -24 O O -Belgian B-MISC B-MISC -woman O O -left O O -a O O -suitcase O O -containing O O -13 O O -kg O O -( O O -29 O O -lb O O -) O O -of O O -heroin O O -in O O -a O O -taxi O O -in O O -Barcelona B-LOC B-LOC -. O O -The O O -taxi O O -alerted O O -police O O -who O O -arrested O O -a O O -33 O O -Turkish B-MISC B-MISC -man O O -when O O -he O O -came O O -to O O -pick O O -up O O -the O O -suitcase O O -at O O -a O O -lost O O -luggage O O -office O O -. O O -The O O -woman O O -was O O -later O O -arrested O O -in O O -Belgium B-LOC B-LOC -. O O -She O O -and O O -the O O -Turkish B-MISC B-MISC -man O O -smug O O -heroin O O -from O O -Turkey B-LOC B-LOC -to O O -Antwerp B-LOC B-ORG -from O O -where O O -it O O -was O O -taken O O -to O O -Spain B-LOC B-LOC -, O O -France B-LOC B-LOC -and O O -Germany B-LOC B-LOC -by O O -others O O -, O O -the O O -spokesman O O -said O O -. O O -He O O -said O O -14 O O -people O O -were O O -arrested O O -in O O -Belgium B-LOC B-LOC -and O O -16 O O -others O O -in O O -other O O -European B-MISC B-MISC -nations O O -after O O -an O O -investigation O O -lasting O O -nearly O O -a O O -year O O -. O O -( O O -$ O O -1 O O -Belgian B-MISC B-MISC -Fr I-ORG O -) O O -Port O O -conditions O O -update O O -- O O -Lloyd B-ORG B-ORG -Shipping I-ORG I-ORG -. O O -G B-LOC B-LOC -, O O -Dec O O -5 O O -- O O -Greek B-MISC B-MISC -port O O -workers O O -called O O -off O O -a O O -strike O O -which O O -had O O -kept O O -the O O -country O O -' O O -ports O O -closed O O -, O O -giving O O -the O O -government O O -until O O -Feb O O -1 O O -to O O -introduce O O -a O O -promised O O -bonus O O -scheme O O -. O O -German B-MISC B-MISC -Jan O O -coffee O O -imports O O -detailed O O -. O O -H B-LOC B-LOC -1996 O O -German B-MISC B-MISC -net O O -green O O -coffee O O -imports O O -from O O -outside O O -the O O -EU B-LOC B-ORG -total O O -7 O O -million O O -bags O O -in O O -January O O -compared O O -with O O -7 O O -million O O -in O O -the O O -year O O -period O O -, O O -the O O -D B-ORG B-ORG -coffee O O -association O O -said O O -. O O -I O O -of O O -1 O O -million O O -bags O O -in O O -August O O -were O O -down O O -from O O -1 O O -million O O -in O O -August O O -1995 O O -but O O -up O O -from O O -99 O O -bags O O -in O O -July O O -1996 O O -. 
O O -Colombia B-LOC B-LOC -shipped O O -198 O O -bags O O -in O O -August O O -after O O -164 O O -in O O -July O O -, O O -El B-LOC B-LOC -Salvador I-LOC I-LOC -160 O O -( O O -129 O O -) O O -, O O -Indonesia B-LOC B-LOC -72 O O -( O O -78 O O -) O O -, O O -Ethiopia B-LOC B-LOC -69 O O -( O O -60 O O -) O O -and O O -Kenya B-LOC B-LOC -63 O O -( O O -60 O O -) O O -. O O -Brazil B-LOC B-LOC -was O O -in O O -seventh O O -position O O -with O O -54 O O -bags O O -( O O -29 O O -) O O -. O O -- O O -Hamburg B-LOC B-LOC -news O O -+ O O -Munich B-ORG B-ORG -Re I-ORG I-ORG -says O O -to O O -split O O -stock O O -. O O -M B-LOC B-LOC -, O O -Germany B-LOC B-LOC -1996 O O -Mu B-ORG B-MISC -Rue I-ORG B-ORG -AG I-ORG I-ORG -, O O -the O O -world O O -' O O -largest O O -reins O O -, O O -said O O -on O O -Friday O O -it O O -expected O O -to O O -switch O O -its O O -shares O O -to O O -a O O -lower O O -par O O -value O O -by O O -September O O -1997 O O -at O O -the O O -earliest O O -. O O -The O O -group O O -, O O -known O O -as O O -Munich B-ORG B-ORG -Re I-ORG I-ORG -, O O -plans O O -to O O -seek O O -approval O O -for O O -the O O -move O O -at O O -its O O -shareholders O O -' O O -meeting O O -today O O -. O O -The O O -company O O -said O O -the O O -switch O O -would O O -probably O O -become O O -effective O O -in O O -September O O -. O O -The O O -planned O O -10 O O -stock O O -split O O -would O O -reduce O O -the O O -par O O -value O O -of O O -Munich B-ORG B-ORG -Re I-ORG I-ORG -' O O -shares O O -to O O -five O O -marks O O -from O O -50 O O -, O O -causing O O -their O O -price O O -to O O -drop O O -to O O -around O O -one O O -tenth O O -of O O -the O O -present O O -value O O -. O O -Munich B-ORG B-ORG -Re I-ORG I-ORG -' O O -registered O O -shares O O -, O O -part O O -of O O -the O O -blue O O -D B-MISC B-MISC -index O O -, O O -were O O -trading O O -at O O -3 O O -marks O O -on O O -Friday O O -. O O -- O O -Frankfurt B-ORG B-ORG -News I-ORG I-ORG -, O O -+ O O -69 O O -75 O O -EU B-ORG B-ORG -experts O O -post O O -talks O O -on O O -rice O O -area O O -aid O O -. O O -BR B-LOC B-LOC -1996 O O -European B-ORG B-ORG -Union I-ORG I-ORG -rice O O -experts O O -on O O -Thursday O O -postponed O O -discussion O O -on O O -area O O -aid O O -payments O O -to O O -rice O O -producers O O -because O O -the O O -documents O O -were O O -not O O -available O O -in O O -all O O -the O O -EU B-ORG B-ORG -languages O O -, O O -an O O -EU B-ORG B-ORG -off O O -said O O -on O O -Friday O O -. O O -" O O -The O O -discussion O O -in O O -the O O -experts O O -group O O -had O O -to O O -be O O -postponed O O -because O O -the O O -documents O O -needed O O -to O O -be O O -translated O O -into O O -the O O -official O O -languages O O -and O O -the O O -item O O -will O O -be O O -on O O -next O O -week O O -' O O -agenda O O -, O O -" O O -the O O -off O O -said O O -. O O -European B-MISC B-MISC -rice O O -producers O O -are O O -due O O -to O O -get O O -com O O -area O O -aid O O -payments O O -similar O O -to O O -those O O -paid O O -to O O -cereal O O -producers O O -because O O -of O O -cuts O O -in O O -intervention O O -prices O O -. O O -- O O -Brussels B-ORG B-ORG -News I-ORG I-ORG -32 O O -2 O O -287 O O -680 O O -Frankfurt B-LOC B-LOC -dollar O O -fix O O -1 O O -marks O O -. 
O O -F B-LOC B-LOC -1996 O O -The O O -dollar O O -was O O -fixed O O -at O O -1 O O -marks O O -in O O -Frankfurt B-LOC B-LOC -on O O -Friday O O -, O O -after O O -1 O O -marks O O -on O O -Thursday O O -. O O -There O O -was O O -no O O -B B-ORG B-ORG -intervention O O -. O O -John B-PER B-ORG -Lewis I-PER I-ORG -UK B-LOC I-ORG -store O O -sales O O -up O O -4 O O -% O O -in O O -week O O -. O O -L B-LOC B-LOC -1996 O O -The O O -John B-ORG B-PER -Lewis I-ORG I-PER -Partnership I-ORG O -said O O -its O O -UK B-LOC B-LOC -department O O -store O O -sales O O -rose O O -4 O O -percent O O -in O O -the O O -week O O -to O O -November O O -30 O O -compared O O -with O O -the O O -same O O -week O O -a O O -year O O -earlier O O -. O O -In O O -the O O -18 O O -weeks O O -to O O -November O O -30 O O -, O O -sales O O -were O O -up O O -13 O O -percent O O -year O O -. O O -Total O O -sales O O -, O O -including O O -the O O -Wait B-ORG B-ORG -supermarket O O -chain O O -, O O -rose O O -5 O O -percent O O -in O O -the O O -week O O -and O O -were O O -up O O -11 O O -percent O O -in O O -the O O -18 O O -period O O -. O O -- O O -Rosemary B-PER B-PER -Bennett I-PER I-PER -, O O -London B-ORG B-ORG -News I-ORG I-ORG -44 O O -171 O O -54 O O -27 O O -Tim B-PER B-ORG -at O O -15 O O -in O O -London B-LOC B-LOC -at O O -09 O O -GM B-MISC B-MISC -. O O -L B-LOC B-LOC -1996 O O -PT B-ORG B-ORG -Tam I-ORG I-ORG -Tim I-ORG I-ORG -was O O -traded O O -at O O -$ O O -15 O O -per O O -G B-MISC O -in O O -London B-LOC B-LOC -on O O -Friday O O -at O O -around O O -09 O O -GM B-MISC B-MISC -. O O -It O O -recorded O O -a O O -low O O -of O O -$ O O -15 O O -and O O -a O O -high O O -of O O -$ O O -15 O O -. O O -Its O O -previous O O -close O O -on O O -Thursday O O -was O O -$ O O -15 O O -. O O -One O O -Global B-ORG O -De I-ORG O -Re I-ORG O -represents O O -10 O O -common O O -shares O O -. O O -- O O -Jakarta B-LOC B-LOC -news O O -+ O O -38 O O -British B-MISC B-MISC -" O O -Euro B-MISC B-MISC -" O O -says O O -Clarke B-PER B-PER -should O O -resign O O -. O O -L B-LOC B-LOC -1996 O O -A O O -" O O -Euro B-MISC B-MISC -" O O -member O O -of O O -the O O -ruling O O -Conservative B-ORG B-MISC -party O O -said O O -on O O -Thursday O O -British B-MISC B-MISC -finance O O -minister O O -Kenneth B-PER B-PER -Clarke I-PER I-PER -had O O -to O O -resign O O -to O O -prevent O O -the O O -party O O -di O O -over O O -the O O -issue O O -of O O -a O O -single O O -European B-MISC B-MISC -currency O O -. O O -Member O O -of O O -Parliament O O -Tony B-PER B-PER -Mar I-PER I-PER -said O O -the O O -resignation O O -of O O -the O O -chancellor O O -of O O -the O O -ex O O -was O O -the O O -only O O -way O O -to O O -make O O -the O O -Conservatives B-MISC O -elect O O -in O O -a O O -general O O -election O O -which O O -must O O -take O O -place O O -by O O -May O O -next O O -year O O -. O O -" O O -We O O -have O O -a O O -divided O O -and O O -split O O -Cabinet B-ORG B-ORG -. O O -This O O -cannot O O -endure O O -, O O -" O O -Mar B-PER B-PER -told O O -BBC B-ORG B-ORG -television O O -' O O -News B-ORG B-MISC -programme O O -on O O -Thursday O O -. O O -" O O -It O O -is O O -not O O -sustainable O O -. O O -Kenneth B-PER B-PER -Clarke I-PER I-PER -has O O -to O O -go O O -. O O -If O O -he O O -does O O -n O O -resign O O -, O O -the O O -prime O O -minister O O -has O O -got O O -to O O -fire O O -him O O -. 
O O -" O O -Mar B-PER B-PER -' O O -comment O O -come O O -on O O -the O O -heels O O -of O O -speculation O O -that O O -Clarke B-PER B-PER -had O O -threatened O O -to O O -resign O O -if O O -the O O -government O O -changed O O -its O O -" O O -wait O O -and O O -see O O -" O O -policy O O -on O O -a O O -single O O -currency O O -and O O -declared O O -it O O -would O O -not O O -sign O O -up O O -for O O -the O O -currency O O -in O O -the O O -next O O -Parliament B-ORG O -. O O -Clarke B-PER B-PER -denied O O -on O O -Thursday O O -he O O -had O O -threatened O O -to O O -resign O O -and O O -said O O -his O O -position O O -on O O -the O O -single O O -currency O O -was O O -in O O -tune O O -with O O -that O O -of O O -Prime O O -Minister O O -John B-PER B-PER -Major I-PER I-PER -. O O -Major B-PER B-PER -told O O -parliament O O -on O O -Thursday O O -he O O -would O O -keep O O -his O O -options O O -open O O -on O O -single O O -membership O O -. O O -His O O -statement O O -was O O -interpreted O O -as O O -a O O -significant O O -victory O O -for O O -Clarke B-PER B-PER -and O O -fellow O O -pro B-MISC B-MISC -Michael B-PER B-PER -He I-PER I-PER -, O O -deputy O O -prime O O -minister O O -. O O -Pro B-MISC B-MISC -Conservative B-MISC B-MISC -MP O O -Edwin B-PER B-PER -Currie I-PER I-PER -told O O -the O O -BBC B-ORG B-ORG -that O O -if O O -Clarke B-PER B-PER -resigned O O -, O O -other O O -ministers O O -would O O -go O O -with O O -him O O -. O O -Court B-ORG O -e O O -head O O -of O O -Australian B-MISC B-MISC -child O O -inquiry O O -. O O -CA B-LOC B-LOC -1996 O O -The O O -Australian B-MISC B-MISC -opposition O O -on O O -Friday O O -demanded O O -a O O -high O O -investigation O O -into O O -p O O -in O O -the O O -Australian B-MISC B-MISC -diplomatic O O -service O O -after O O -the O O -federal O O -court O O -forced O O -the O O -head O O -of O O -the O O -existing O O -inquiry O O -to O O -stand O O -aside O O -. O O -The O O -court O O -said O O -inquiry O O -head O O -Chris B-PER B-PER -Hunt I-PER I-PER -might O O -be O O -bias O O -, O O -since O O -he O O -privately O O -told O O -a O O -newspaper O O -he O O -had O O -turned O O -up O O -no O O -major O O -evidence O O -of O O -p O O -activity O O -, O O -even O O -though O O -he O O -still O O -had O O -months O O -' O O -of O O -investigation O O -before O O -him O O -. O O -" O O -Today O O -we O O -are O O -left O O -with O O -a O O -ruin O O -wreck O O -beyond O O -salvage O O -and O O -a O O -continuing O O -p O O -of O O -doubt O O -and O O -suspicion O O -hanging O O -over O O -our O O -diplomatic O O -service O O -, O O -" O O -opposition O O -foreign O O -affairs O O -spokesman O O -Laurie B-PER B-PER -B I-PER I-PER -said O O -. O O -But O O -the O O -government O O -responded O O -by O O -pressing O O -ahead O O -with O O -the O O -original O O -inquiry O O -, O O -established O O -in O O -May O O -, O O -appoint O O -a O O -new O O -head O O -to O O -lead O O -it O O -. O O -Critics O O -say O O -that O O -if O O -there O O -were O O -many O O -p O O -in O O -senior O O -posts O O -in O O -the O O -Foreign B-ORG B-ORG -Affairs I-ORG I-ORG -Department I-ORG I-ORG -then O O -a O O -secret O O -inquiry O O -would O O -be O O -open O O -to O O -internal O O -influence O O -and O O -would O O -become O O -a O O -public O O -service O O -white O O -. O O -Accordingly O O -, O O -they O O -demand O O -an O O -open O O -investigation O O -. 
O O -A O O -spokesman O O -for O O -Foreign B-ORG B-ORG -Affairs O I-ORG -Minister O O -Alexander B-PER B-PER -Down I-PER I-PER -said O O -the O O -appointment O O -of O O -a O O -new O O -inquiry O O -head O O -, O O -administrative O O -law O O -expert O O -Pamela B-PER B-PER -O I-PER I-PER -, O O -showed O O -the O O -government O O -' O O -commitment O O -to O O -pursue O O -the O O -matter O O -. O O -A O O -report O O -is O O -due O O -in O O -May O O -next O O -year O O -. O O -One O O -Australian B-MISC B-MISC -diplomat O O -has O O -been O O -prosecuted O O -this O O -year O O -for O O -having O O -sex O O -with O O -a O O -Cambodian B-MISC B-MISC -boy O O -under O O -16 O O -but O O -was O O -acquitted O O -. O O -Police O O -have O O -investigated O O -others O O -. O O -A O O -newspaper O O -reported O O -allegations O O -in O O -April O O -that O O -diplomat O O -had O O -directed O O -Australian B-MISC B-MISC -government O O -aid O O -to O O -certain O O -foreign O O -orphanage O O -to O O -secure O O -sex O O -with O O -children O O -. O O -Australian B-MISC B-MISC -hit O O -killed O O -wrong O O -victim O O -. O O -S B-LOC B-LOC -1996 O O -An O O -Australian B-MISC B-MISC -hit O O -who O O -went O O -to O O -the O O -wrong O O -house O O -and O O -killed O O -the O O -wrong O O -man O O -was O O -sentenced O O -to O O -20 O O -years O O -jail O O -on O O -Friday O O -. O O -Paul B-PER B-PER -C I-PER I-PER -, O O -33 O O -, O O -and O O -an O O -a O O -were O O -contracted O O -to O O -shoot O O -a O O -man O O -, O O -identified O O -only O O -as O O -Tony B-PER B-PER -, O O -in O O -the O O -leg O O -to O O -punish O O -him O O -for O O -his O O -misconduct O O -with O O -a O O -female O O -friend O O -of O O -the O O -contractor O O -, O O -the O O -New B-ORG B-ORG -South I-ORG I-ORG -Wales I-ORG I-ORG -Supreme I-ORG I-ORG -Court I-ORG I-ORG -was O O -told O O -. O O -But O O -in O O -February O O -1993 O O -Les B-PER B-PER -Bet I-PER I-PER -, O O -was O O -shot O O -and O O -killed O O -after O O -answering O O -a O O -knock O O -at O O -the O O -door O O -of O O -his O O -Sydney B-LOC B-LOC -home O O -. O O -" O O -The O O -in O O -from O O -all O O -the O O -material O O -is O O -that O O -the O O -ma O O -had O O -come O O -to O O -the O O -wrong O O -house O O -, O O -" O O -Judge O O -Michael B-PER B-PER -Grove I-PER I-PER -said O O -. O O -In O O -sent O O -C B-PER B-PER -, O O -who O O -pleaded O O -guilty O O -, O O -Grove B-PER B-PER -took O O -into O O -account O O -his O O -" O O -mildly O O -re O O -" O O -intellectual O O -state O O -, O O -which O O -placed O O -him O O -in O O -the O O -lowest O O -two O O -percent O O -of O O -the O O -population O O -. O O -Grove B-PER B-PER -said O O -Bet B-PER B-PER -was O O -" O O -not O O -only O O -the O O -victim O O -of O O -a O O -ho O O -crime O O -, O O -but O O -his O O -death O O -was O O -brought O O -about O O -in O O -circumstances O O -of O O -an O O -equally O O -g O O -error O O -on O O -the O O -part O O -of O O -the O O -prisoner O O -and O O -his O O -a O O -" O O -. O O -The O O -unnamed O O -a O O -was O O -earlier O O -sentenced O O -to O O -20 O O -years O O -in O O -prison O O -. O O -NZ B-LOC B-LOC -' O O -Bo B-PER B-PER -says O O -Nat B-PER B-PER -to O O -meet O O -NZ B-ORG B-LOC -First I-ORG O -on O O -Sunday O O -. 
O O -W B-LOC B-LOC -1996 O O -New B-LOC B-LOC -Zealand I-LOC I-LOC -Prime O O -Minister O O -Jim B-PER B-PER -Bo I-PER I-PER -, O O -emerging O O -from O O -coalition O O -talks O O -with O O -the O O -nationalist O O -New B-ORG B-ORG -Zealand I-ORG I-ORG -First I-ORG I-ORG -party O O -on O O -Friday O O -afternoon O O -, O O -said O O -National B-ORG B-ORG -and O O -NZ B-ORG B-ORG -First I-ORG I-ORG -would O O -meet O O -again O O -on O O -Sunday O O -. O O -Bo B-PER B-PER -said O O -he O O -expected O O -a O O -government O O -to O O -be O O -formed O O -by O O -Thursday O O -. O O -NZ B-LOC B-LOC -' O O -Peters B-PER B-PER -says O O -Nat B-PER B-PER -, O O -Lab B-ORG B-PER -talks O O -at O O -similar O O -stage O O -. O O -W B-LOC B-LOC -1996 O O -New B-ORG B-ORG -Zealand I-ORG I-ORG -First I-ORG I-ORG -leader O O -Winston B-PER B-PER -Peters I-PER I-PER -on O O -Friday O O -said O O -coalition O O -talks O O -with O O -the O O -National B-ORG B-ORG -and O O -Labour B-ORG B-ORG -parties O O -were O O -at O O -a O O -similar O O -level O O -of O O -completion O O -. O O -Peters B-PER B-PER -left O O -a O O -meeting O O -between O O -NZ B-ORG B-ORG -First I-ORG I-ORG -and O O -National B-ORG B-ORG -ne O O -to O O -spend O O -20 O O -minutes O O -speaking O O -to O O -Labour B-ORG B-ORG -leader O O -Helen B-PER B-PER -Clark I-PER I-PER -. O O -He O O -told O O -Re B-ORG B-ORG -he O O -had O O -needed O O -to O O -speak O O -to O O -her O O -before O O -she O O -left O O -Wellington B-LOC B-LOC -later O O -on O O -Friday O O -. O O -Peters B-PER B-PER -said O O -the O O -talks O O -with O O -Labour B-ORG B-ORG -and O O -National B-ORG B-ORG -had O O -reached O O -" O O -about O O -the O O -same O O -level O O -of O O -completion O O -, O O -and O O -that O O -' O O -good O O -" O O -. O O -R B-ORG B-ORG -- O O -Australian B-MISC B-MISC -MP O O -John B-PER B-PER -Lang I-PER I-PER -formally O O -resign O O -. O O -CA B-LOC B-LOC -1996 O O -Australian B-MISC B-MISC -parliament O O -John B-PER B-PER -Lang I-PER I-PER -has O O -formally O O -resigned O O -from O O -his O O -lower O O -house O O -seat O O -, O O -the O O -office O O -of O O -House B-ORG B-ORG -of I-ORG I-ORG -Representatives I-ORG I-ORG -speaker O O -Bob B-PER B-PER -Hal I-PER I-PER -said O O -on O O -Friday O O -. O O -" O O -Hal B-PER B-PER -announced O O -that O O -he O O -had O O -received O O -today O O -from O O -Mr O O -John B-PER B-PER -Vance I-PER I-PER -Lang I-PER I-PER -, O O -a O O -letter O O -resign O O -his O O -place O O -as O O -member O O -of O O -the O O -House B-ORG B-ORG -of I-ORG I-ORG -Representatives I-ORG I-ORG -for O O -the O O -electoral O O -division O O -of O O -Fraser B-LOC B-PER -in O O -the O O -Australian B-LOC B-MISC -Capital I-LOC I-MISC -Territory I-LOC I-MISC -, O O -" O O -his O O -office O O -said O O -in O O -a O O -statement O O -. O O -Hal B-PER B-PER -was O O -considering O O -possible O O -dates O O -for O O -the O O -by O O -, O O -his O O -office O O -said O O -. O O -Lang B-PER B-PER -, O O -57 O O -, O O -announced O O -in O O -November O O -that O O -he O O -intended O O -to O O -resign O O -from O O -parliament O O -to O O -take O O -up O O -a O O -position O O -as O O -Australia B-LOC B-LOC -' O O -senior O O -representative O O -at O O -the O O -United B-ORG B-ORG -Nations I-ORG I-ORG -headquarters O O -in O O -New B-LOC B-LOC -York I-LOC I-LOC -. 
O O -He O O -played O O -an O O -active O O -role O O -at O O -the O O -U B-ORG B-ORG -social O O -development O O -conference O O -in O O -Copenhagen B-LOC B-LOC -last O O -year O O -and O O -has O O -co O O -articles O O -with O O -U B-ORG B-ORG -development O O -programme O O -officer O O -In B-PER B-PER -Ka I-PER I-PER -. O O -Lang B-PER B-PER -, O O -a O O -persistent O O -campaign O O -for O O -intervention O O -economic O O -policy O O -, O O -has O O -been O O -Labor B-ORG O -member O O -for O O -Fraser B-LOC B-PER -since O O -1984 O O -. O O -He O O -was O O -senior O O -private O O -secretary O O -to O O -the O O -employment O O -and O O -industrial O O -relations O O -minister O O -from O O -1983 O O -to O O -1984 O O -and O O -was O O -economic O O -advisor O O -to O O -then O O -treasurer O O -Paul B-PER B-PER -Ke I-PER I-PER -in O O -1983 O O -. O O -His O O -previous O O -posts O O -include O O -assistant O O -director O O -of O O -the O O -national O O -planning O O -office O O -of O O -Papua B-LOC B-LOC -New I-LOC I-LOC -Guinea I-LOC I-LOC -from O O -1969 O O -to O O -1973 O O -. O O -- O O -Canberra B-ORG B-LOC -Bureau I-ORG O -61 O O -27 O O -Burmese B-MISC B-MISC -students O O -march O O -out O O -of O O -campus O O -again O O -. O O -RA B-LOC B-LOC -1996 O O -A O O -group O O -of O O -Burmese B-MISC B-MISC -students O O -on O O -Friday O O -marched O O -out O O -of O O -the O O -Yang B-ORG B-ORG -Institute I-ORG I-ORG -of I-ORG I-ORG -Technology I-ORG I-ORG -( O O -Y B-ORG B-ORG -) O O -in O O -the O O -northern O O -outskirts O O -of O O -Ra B-LOC B-LOC -and O O -moved O O -toward O O -the O O -University B-ORG B-ORG -of I-ORG I-ORG -Yang I-ORG I-ORG -about O O -six O O -km O O -( O O -four O O -miles O O -) O O -away O O -, O O -witnesses O O -said O O -. O O -The O O -witnesses O O -could O O -not O O -give O O -exact O O -numbers O O -of O O -those O O -taking O O -part O O -in O O -the O O -march O O -or O O -any O O -other O O -details O O -immediately O O -. O O -On O O -Monday O O -and O O -Tuesday O O -, O O -students O O -from O O -the O O -Y B-ORG B-ORG -and O O -the O O -university O O -launched O O -street O O -protests O O -against O O -what O O -they O O -called O O -unfair O O -handling O O -by O O -police O O -of O O -a O O -bra O O -between O O -some O O -of O O -their O O -colleagues O O -and O O -restaurant O O -owners O O -in O O -October O O -. O O -The O O -protests O O -culminated O O -at O O -dawn O O -on O O -Tuesday O O -with O O -several O O -hundred O O -of O O -the O O -student O O -protesters O O -being O O -detained O O -briefly O O -by O O -police O O -near O O -the O O -central O O -S B-LOC B-LOC -Da I-LOC I-LOC -p O O -in O O -Ra B-LOC B-LOC -. O O -They O O -were O O -later O O -released O O -. O O -On O O -Friday O O -, O O -some O O -students O O -told O O -Re B-ORG B-ORG -that O O -they O O -were O O -still O O -di O O -with O O -the O O -ruling O O -State B-ORG B-ORG -Law I-ORG I-ORG -and I-ORG I-ORG -Order I-ORG I-ORG -Restoration I-ORG I-ORG -Council I-ORG I-ORG -' O O -( O O -SL B-ORG B-ORG -) O O -handling O O -of O O -their O O -demands O O -. O O -They O O -said O O -they O O -wanted O O -to O O -organise O O -independent O O -unions O O -on O O -university O O -campuses O O -and O O -demanded O O -that O O -details O O -of O O -the O O -punishment O O -of O O -policemen O O -who O O -allegedly O O -man O O -some O O -students O O -at O O -the O O -October O O -bra O O -be O O -published O O -in O O -newspapers O O -. 
O O -Thai B-MISC B-MISC -rice O O -vessels O O -loading O O -and O O -movements O O -at O O -Dec O O -06 O O -. O O -BA B-LOC B-LOC -1996 O O -The O O -Thai B-MISC B-MISC -Commerce I-ORG B-ORG -Ministry I-ORG I-ORG -detailed O O -rice O O -loading O O -at O O -Thai B-MISC B-MISC -ports O O -as O O -follows O O -( O O -in O O -tonnes O O -) O O -: O O -V O O -Date O O -of O O -A O O -Q O O -Des O O -Iran B-LOC B-MISC -Sa O I-MISC -19 O O -9 O O -Iran B-LOC B-LOC -Princess O B-MISC -of O I-MISC -Lo B-LOC I-MISC -19 O O -10 O O -Philippines B-LOC B-LOC -Del O B-MISC -20 O O -5 O O -Indonesia B-LOC B-LOC -Sea B-ORG B-MISC -ace O O -20 O O -5 O O -Japan B-LOC B-LOC -Lucky B-ORG B-MISC -Em I-ORG I-MISC -20 O O -5 O O -Japan B-LOC B-LOC -Al B-LOC B-MISC -Day O I-MISC -21 O O -6 O O -Africa B-LOC B-LOC -Sang B-ORG B-MISC -Glory I-ORG I-MISC -22 O O -SH B-LOC B-LOC -1996 O O -A O O -five O O -girl O O -in O O -the O O -east O O -China B-LOC B-LOC -city O O -of O O -T B-LOC B-LOC -choked O O -and O O -almost O O -died O O -from O O -cigarette O O -smoke O O -at O O -her O O -grandfather O O -' O O -birthday O O -with O O -relatives O O -smoking O O -for O O -hours O O -in O O -a O O -small O O -room O O -, O O -the O O -Wen B-ORG B-ORG -Hui I-ORG I-ORG -Ba I-ORG I-ORG -newspaper O O -said O O -on O O -Friday O O -. O O -The O O -newspaper O O -said O O -the O O -girl O O -was O O -rushed O O -to O O -hospital O O -and O O -found O O -to O O -be O O -having O O -extreme O O -difficulty O O -breathing O O -. O O -It O O -said O O -eight O O -of O O -the O O -people O O -at O O -the O O -party O O -, O O -including O O -the O O -girl O O -' O O -father O O -, O O -immediately O O -announced O O -they O O -would O O -give O O -up O O -smoking O O -. O O -South B-MISC B-MISC -Korean I-MISC I-MISC -won O O -closes O O -down O O -on O O -import O O -settlements O O -. O O -SE B-LOC B-LOC -1996 O O -The O O -won O O -slid O O -against O O -the O O -U B-LOC B-LOC -unit O O -on O O -Friday O O -as O O -players O O -prepared O O -for O O -Monday O O -' O O -import O O -settlement O O -needs O O -, O O -traders O O -said O O -. O O -The O O -won O O -ended O O -at O O -83 O O -, O O -slightly O O -down O O -from O O -an O O -opening O O -of O O -83 O O -. O O -It O O -ranged O O -between O O -83 O O -and O O -83 O O -. O O -" O O -A O O -sale O O -of O O -about O O -$ O O -60 O O -million O O -by O O -H B-ORG B-ORG -Heavy I-ORG I-ORG -pushed O O -the O O -dollar O O -down O O -earlier O O -in O O -the O O -day O O -, O O -but O O -Monday O O -' O O -import O O -needs O O -helped O O -it O O -recover O O -, O O -" O O -said O O -a O O -Ko B-ORG B-ORG -Bank I-ORG I-ORG -dealer O O -. O O -Deal O O -said O O -the O O -dollar O O -/ O O -ye O O -' O O -movement O O -on O O -the O O -world O O -market O O -would O O -continue O O -to O O -set O O -the O O -trend O O -for O O -the O O -dollar O O -/ O O -won O O -next O O -week O O -. O O -Foreign O O -planes O O -to O O -land O O -in O O -China B-LOC B-LOC -' O O -popular O O -G B-LOC B-LOC -. O O -B B-LOC B-LOC -1996 O O -China B-LOC B-LOC -' O O -tourist O O -spot O O -of O O -G B-LOC B-LOC -in O O -the O O -southern O O -region O O -of O O -G B-LOC B-LOC -will O O -open O O -its O O -airport O O -to O O -foreign O O -aircraft O O -, O O -the O O -Xi B-ORG B-ORG -news O O -agency O O -said O O -on O O -Friday O O -. 
O O -An O O -assessment O O -group O O -made O O -up O O -of O O -the O O -State B-ORG B-ORG -Council I-ORG I-ORG -' O O -Port B-ORG O -Office I-ORG O -, O O -the O O -Civil B-ORG B-ORG -Aviation I-ORG I-ORG -Administration I-ORG I-ORG -of I-ORG I-ORG -China I-ORG I-ORG -, O O -the O O -General B-ORG B-ORG -Administration I-ORG I-ORG -of I-ORG I-ORG -Customs I-ORG I-ORG -and O O -other O O -authorities O O -had O O -granted O O -the O O -airport O O -permission O O -to O O -handle O O -foreign O O -aircraft O O -, O O -Xi B-PER B-ORG -said O O -. O O -" O O -The O O -move O O -is O O -expected O O -to O O -give O O -a O O -shot O O -in O O -the O O -arm O O -to O O -the O O -economic O O -expansion O O -of O O -G B-LOC B-LOC -and O O -southwest O O -China B-LOC B-LOC -as O O -a O O -whole O O -, O O -" O O -the O O -agency O O -said O O -but O O -gave O O -no O O -further O O -details O O -. O O -G B-LOC B-LOC -is O O -well O O -known O O -for O O -its O O -mountain O O -and O O -river O O -scenery O O -and O O -is O O -one O O -of O O -China B-LOC B-LOC -' O O -most O O -popular O O -tourist O O -destinations O O -. O O -EPA B-ORG B-ORG -says O O -economic O O -assessment O O -unchanged O O -by O O -GDP O O -data O O -. O O -TO B-LOC B-LOC -1996 O O -Japan B-LOC B-LOC -' O O -Economic B-ORG B-ORG -Planning I-ORG I-ORG -Agency I-ORG I-ORG -has O O -not O O -changed O O -its O O -view O O -that O O -the O O -economy O O -is O O -gradually O O -recovering O O -, O O -despite O O -relatively O O -weak O O -gross O O -domestic O O -product O O -figures O O -released O O -on O O -Tuesday O O -, O O -EPA B-ORG B-ORG -Vice O O -Minister O O -Shi B-PER B-PER -N I-PER I-PER -told O O -reporters O O -on O O -Friday O O -. O O -He O O -said O O -the O O -GDP O O -growth O O -was O O -weak O O -but O O -that O O -this O O -reflected O O -the O O -economy O O -between O O -July O O -and O O -September O O -and O O -did O O -not O O -take O O -into O O -account O O -more O O -recent O O -data O O -. O O -When O O -asked O O -about O O -the O O -outlook O O -for O O -the O O -fiscal O O -year O O -beginning O O -in O O -April O O -, O O -N B-ORG B-PER -said O O -the O O -economy O O -may O O -slow O O -down O O -in O O -the O O -early O O -part O O -of O O -the O O -fiscal O O -year O O -due O O -to O O -a O O -planned O O -consumption O O -tax O O -hike O O -, O O -but O O -that O O -would O O -be O O -only O O -temporary O O -. O O -The O O -consumption O O -tax O O -will O O -be O O -raised O O -to O O -five O O -percent O O -from O O -three O O -percent O O -from O O -April O O -1 O O -. O O -Sang B-ORG B-ORG -- O O -96 O O -parent O O -forecast O O -. O O -TO B-LOC B-LOC -1996 O O -Year O O -to O O -March O O -31 O O -, O O -1997 O O -( O O -in O O -billion O O -of O O -ye O O -unless O O -specified O O -) O O -LA O O -ACT O O -( O O -Pa O O -) O O -F O O -Y O O -Sales O O -128 O O -117 O O -Current O O -12 O O -9 O O -Net O O -6 O O -5 O O -EP O O -143 O O -ye O O -127 O O -ye O O -Or B-MISC O -di O O -30 O O -ye O O -30 O O -ye O O -NO O O -- O O -Sang B-ORG B-ORG -Co I-ORG I-ORG -Ltd I-ORG I-ORG -is O O -a O O -trader O O -specialising O O -in O O -interiors O O -. O O -B B-ORG B-ORG -, O O -Barr B-PER B-ORG -said O O -to O O -continue O O -Bus B-LOC B-LOC -talks O O -. 
O O -K B-LOC B-LOC -Ara I-ORG I-LOC -J B-LOC B-LOC -1996 O O -Canada B-LOC B-LOC -' O O -B B-ORG B-ORG -Mine I-ORG I-ORG -Ltd I-ORG I-ORG -and O O -Barr B-ORG B-ORG -Gold I-ORG I-ORG -Corp I-ORG I-ORG -are O O -to O O -continue O O -negotiations O O -to O O -hammer O O -out O O -a O O -partnership O O -agreement O O -to O O -develop O O -the O O -spectacular O O -Bus B-LOC B-LOC -gold O O -find O O -in O O -Indonesia B-LOC B-LOC -, O O -sources O O -close O O -to O O -the O O -talks O O -said O O -on O O -Friday O O -. O O -" O O -The O O -negotiations O O -will O O -be O O -held O O -both O O -in O O -Toronto B-LOC B-LOC -and O O -in O O -Jakarta B-LOC B-LOC -, O O -" O O -one O O -source O O -, O O -speaking O O -on O O -condition O O -of O O -an O O -, O O -told O O -Re B-ORG B-ORG -. O O -Another O O -source O O -said O O -most O O -of O O -the O O -key O O -ne O O -from O O -both O O -B B-ORG B-ORG -and O O -Barr B-ORG B-ORG -had O O -returned O O -to O O -Toronto B-LOC B-LOC -, O O -but O O -declined O O -to O O -say O O -if O O -there O O -had O O -been O O -any O O -progress O O -in O O -their O O -negotiations O O -. O O -Both O O -sources O O -said O O -B B-ORG B-ORG -and O O -Barr B-PER B-ORG -did O O -not O O -hold O O -talks O O -on O O -Thursday O O -with O O -Mines B-ORG B-ORG -and I-ORG I-ORG -Energy I-ORG I-ORG -Ministry I-ORG I-ORG -Secretary O O -Um B-PER B-PER -Said I-PER I-PER -, O O -who O O -is O O -coordinating O O -the O O -negotiations O O -over O O -the O O -Bus B-MISC B-LOC -find O O -in O O -East B-LOC B-LOC -Kali I-LOC I-LOC -. O O -The O O -first O O -source O O -also O O -said O O -B B-ORG B-ORG -had O O -until O O -December O O -21 O O -to O O -submit O O -to O O -the O O -Indonesian B-ORG B-ORG -Mines I-ORG I-ORG -and I-ORG I-ORG -Energy I-ORG I-ORG -Ministry I-ORG I-ORG -a O O -f O O -study O O -on O O -the O O -central O O -region O O -of O O -the O O -Bus B-LOC B-LOC -property O O -, O O -estimated O O -to O O -contain O O -2 O O -million O O -ounce O O -of O O -gold O O -. O O -The O O -richest O O -parts O O -of O O -the O O -property O O -to O O -the O O -north O O -and O O -south O O -of O O -the O O -central O O -region O O -have O O -been O O -estimated O O -by O O -B B-ORG B-ORG -to O O -contain O O -57 O O -million O O -ounce O O -of O O -gold O O -. O O -" O O -B B-ORG B-ORG -is O O -expected O O -to O O -complete O O -the O O -f O O -report O O -by O O -December O O -16 O O -and O O -submit O O -it O O -to O O -the O O -government O O -before O O -the O O -December O O -21 O O -deadline O O -, O O -" O O -the O O -source O O -said O O -. O O -He O O -said O O -B B-ORG B-ORG -would O O -then O O -formally O O -seek O O -the O O -permission O O -of O O -the O O -Indonesian B-MISC B-MISC -government O O -to O O -begin O O -construction O O -to O O -develop O O -Bus B-LOC B-LOC -' O O -central O O -region O O -, O O -which O O -might O O -take O O -up O O -to O O -two O O -years O O -. O O -The O O -source O O -declined O O -to O O -say O O -if O O -there O O -had O O -been O O -any O O -progress O O -in O O -the O O -talks O O -between O O -B B-ORG B-ORG -and O O -Barr B-ORG B-ORG -. O O -" O O -This O O -is O O -a O O -huge O O -project O O -. O O -we O O -are O O -not O O -selling O O -furniture O O -, O O -and O O -B B-ORG B-ORG -has O O -13 O O -shareholders O O -to O O -answer O O -to O O -, O O -" O O -the O O -source O O -said O O -. 
O O -" O O -While O O -there O O -has O O -been O O -some O O -agreement O O -in O O -principle O O -on O O -some O O -issues O O -, O O -there O O -are O O -still O O -others O O -such O O -as O O -procedures O O -and O O -mechanisms O O -that O O -needed O O -to O O -be O O -sorted O O -out O O -, O O -" O O -he O O -added O O -. O O -The O O -source O O -said O O -no O O -new O O -deadline O O -had O O -been O O -set O O -by O O -the O O -Mines B-ORG B-ORG -and I-ORG I-ORG -Energy I-ORG I-ORG -Ministry I-ORG I-ORG -for O O -B B-ORG B-ORG -and O O -Barr B-ORG B-ORG -to O O -strike O O -a O O -deal O O -. O O -The O O -Ministry B-ORG O -had O O -given O O -the O O -companies O O -until O O -December O O -4 O O -to O O -complete O O -a O O -partnership O O -deal O O -, O O -and O O -advised O O -B B-ORG B-ORG -to O O -take O O -a O O -25 O O -percent O O -stake O O -and O O -Barr B-ORG B-ORG -75 O O -percent O O -to O O -develop O O -the O O -property O O -. O O -" O O -As O O -far O O -as O O -I O O -am O O -aware O O -, O O -there O O -' O O -been O O -no O O -new O O -deadline O O -, O O -" O O -the O O -source O O -said O O -. O O -The O O -Ministry B-ORG O -' O O -Um B-PER B-PER -said O O -on O O -Thursday O O -that O O -both O O -B B-ORG B-ORG -and O O -Barr B-ORG B-ORG -had O O -responded O O -positively O O -to O O -a O O -government O O -letter O O -recommend O O -a O O -25 O O -split O O -in O O -the O O -Bus B-LOC B-ORG -gold O O -property O O -. O O -The O O -government O O -also O O -wants O O -10 O O -percent O O -of O O -the O O -property O O -. O O -Um B-PER B-PER -said O O -the O O -government O O -had O O -yet O O -to O O -receive O O -a O O -formal O O -reply O O -from O O -the O O -companies O O -. O O -He O O -had O O -said O O -earlier O O -that O O -if O O -the O O -two O O -companies O O -failed O O -to O O -reach O O -a O O -partnership O O -agreement O O -, O O -the O O -government O O -would O O -explore O O -other O O -ways O O -to O O -ex O O -development O O -of O O -the O O -Bus B-LOC B-ORG -find O O -. O O -B B-ORG B-ORG -has O O -a O O -partnership O O -deal O O -with O O -PT B-ORG B-PER -Pan I-ORG I-PER -Du I-ORG I-PER -of O O -the O O -Pan B-ORG B-ORG -Group I-ORG I-ORG -run O O -by O O -President O O -Su B-PER B-PER -' O O -eldest O O -son O O -, O O -Si B-PER B-PER -Ha I-PER I-PER -, O O -under O O -which O O -Pan B-ORG B-PER -would O O -receive O O -$ O O -40 O O -million O O -over O O -40 O O -months O O -plus O O -a O O -10 O O -percent O O -stake O O -Bus B-ORG B-ORG -' O O -richest O O -parts O O -. O O -Barr B-ORG B-ORG -has O O -teamed O O -up O O -with O O -a O O -construction O O -company O O -in O O -the O O -C B-ORG B-ORG -Group I-ORG I-ORG -of O O -Su B-LOC B-PER -' O O -eldest O O -daughter O O -, O O -Sit B-PER B-PER -Hard I-PER I-PER -R I-PER I-PER -, O O -in O O -what O O -Barr B-PER B-ORG -had O O -said O O -was O O -a O O -partnership O O -" O O -to O O -prepare O O -us O O -for O O -a O O -potential O O -mining O O -development O O -project O O -" O O -. O O -Honda B-ORG B-MISC -R I-ORG I-MISC -exceeds O O -sales O O -target O O -. O O -TO B-LOC B-LOC -1996 O O -Honda B-ORG B-ORG -Motor I-ORG I-ORG -Co I-ORG I-ORG -Ltd I-ORG I-ORG -said O O -on O O -Friday O O -that O O -it O O -had O O -received O O -15 O O -domestic O O -orders O O -for O O -its O O -S B-MISC B-MISC -recreational O O -vehicle O O -in O O -the O O -first O O -two O O -weeks O O -after O O -its O O -launch O O -. 
O O -Honda B-ORG B-ORG -launched O O -the O O -S B-MISC B-MISC -light O O -mini O O -, O O -featuring O O -cubic O O -body O O -styling O O -, O O -on O O -November O O -22 O O -with O O -a O O -monthly O O -sales O O -target O O -of O O -5 O O -units O O -. O O -A O O -version O O -with O O -lower O O -road O O -clearance O O -and O O -front O O -and O O -rear O O -s O O -accounted O O -for O O -two O O -of O O -the O O -sales O O -. O O -F O O -- O O -Singapore B-LOC B-LOC -sees O O -prestige O O -in O O -hosting O O -W B-ORG B-ORG -. O O -Ram B-PER B-PER -Hussain I-PER I-PER -S B-LOC B-LOC -1996 O O -Singapore B-LOC B-LOC -' O O -winning O O -campaign O O -to O O -host O O -the O O -World B-ORG B-ORG -Trade I-ORG I-ORG -Organisation I-ORG I-ORG -( O O -W B-ORG B-ORG -) O O -' O O -first O O -ministerial O O -meeting O O -reflected O O -its O O -ambition O O -to O O -play O O -a O O -key O O -role O O -in O O -shaping O O -global O O -free O O -trade O O -, O O -the O O -life O O -of O O -its O O -economy O O -, O O -analysts O O -said O O -. O O -" O O -As O O -one O O -of O O -the O O -world O O -' O O -most O O -external O O -oriented O O -economies O O -, O O -Singapore B-LOC B-LOC -has O O -a O O -di O O -large O O -stake O O -in O O -the O O -W B-ORG B-ORG -, O O -" O O -said O O -Desmond B-PER B-PER -Su I-PER I-PER -, O O -economist O O -at O O -research O O -house O O -I B-ORG B-ORG -. O O -" O O -Singapore B-LOC B-LOC -stands O O -to O O -benefit O O -more O O -than O O -most O O -from O O -continued O O -global O O -trade O O -liberal O O -as O O -trade O O -is O O -the O O -engine O O -of O O -its O O -growth O O -, O O -accounting O O -for O O -nearly O O -three O O -times O O -its O O -gross O O -domestic O O -product O O -. O O -" O O -The O O -city O O -met O O -U B-LOC B-LOC -opposition O O -two O O -years O O -ago O O -in O O -its O O -bid O O -to O O -host O O -the O O -meeting O O -, O O -expected O O -to O O -gather O O -4 O O -officials O O -from O O -160 O O -countries O O -from O O -December O O -9 O O -to O O -13 O O -. O O -In O O -a O O -stand O O -some O O -analysts O O -linked O O -to O O -controversy O O -over O O -Singapore B-LOC B-LOC -' O O -can O O -of O O -an O O -American B-MISC B-MISC -teenager O O -for O O -van O O -, O O -then O B-MISC -Trade O O -Representative O O -Mickey B-PER B-PER -Ka I-PER I-PER -had O O -said O O -the O O -meeting O O -ought O O -to O O -be O O -held O O -where O O -the O O -W B-ORG B-ORG -was O O -going O O -to O O -be O O -headquartered O O -. O O -That O O -would O O -have O O -meant O O -Geneva B-LOC B-LOC -. O O -But O O -Singapore B-LOC B-LOC -had O O -the O O -support O O -of O O -other O O -W B-ORG B-ORG -members O O -. O O -Derek B-PER B-PER -da I-PER I-PER -C I-PER I-PER -, O O -senior O O -fellow O O -at O O -the O O -Institute B-ORG B-ORG -of I-ORG I-ORG -Policy I-ORG I-ORG -Studies I-ORG I-ORG -( O O -IS B-ORG B-ORG -) O O -, O O -said O O -Singapore B-LOC B-LOC -' O O -hosting O O -of O O -the O O -conference O O -" O O -carries O O -a O O -great O O -deal O O -of O O -symbol O O -for O O -the O O -city O O -, O O -under O O -its O O -commitment O O -to O O -free O O -trade O O -and O O -its O O -trading O O -links O O -across O O -the O O -globe O O -. 
O O -" O O -There O O -is O O -the O O -international O O -prestige O O -Singapore B-LOC B-LOC -would O O -enjoy O O -, O O -but O O -" O O -more O O -importantly O O -there O O -is O O -a O O -genuine O O -national O O -interest O O -in O O -foster O O -better O O -global O O -free O O -trade O O -and O O -an O O -open O O -market O O -" O O -, O O -said O O -Tan B-PER B-PER -Kong I-PER I-PER -Ya I-PER I-PER -, O O -head O O -of O O -Business B-ORG O -Policy I-ORG O -at O O -the O O -National B-ORG B-ORG -University I-ORG I-ORG -of I-ORG I-ORG -Singapore I-ORG I-ORG -. O O -At O O -the O O -ministerial O O -meeting O O -, O O -trade O O -ministers O O -will O O -review O O -the O O -work O O -of O O -the O O -W B-ORG B-ORG -and O O -the O O -implementation O O -of O O -the O O -Uruguay B-MISC B-LOC -Round I-MISC O -free O O -trade O O -commitments O O -under O O -its O O -predecessor O O -the O O -General B-MISC B-MISC -Agreement I-MISC I-MISC -on I-MISC I-MISC -Ta I-ORG I-MISC -and I-ORG I-MISC -Trade I-ORG I-MISC -( O O -GA B-ORG B-MISC -) O O -. O O -In O O -June O O -, O O -the O O -W B-ORG B-ORG -hailed O O -Singapore B-LOC B-LOC -for O O -its O O -open O O -market O O -policies O O -but O O -the O O -European B-ORG B-ORG -Union I-ORG I-ORG -and O O -other O O -trading O O -powers O O -called O O -on O O -Singapore B-LOC B-LOC -to O O -speed O O -up O O -the O O -opening O O -of O O -its O O -services O O -sector O O -. O O -Su B-PER B-PER -said O O -the O O -struggle O O -that O O -Singapore B-LOC B-LOC -had O O -to O O -wage O O -in O O -v O O -to O O -host O O -the O O -meeting O O -would O O -be O O -repeated O O -during O O -the O O -talks O O -. O O -" O O -There O O -is O O -tension O O -at O O -every O O -step O O -of O O -the O O -way O O -, O O -" O O -since O O -a O O -battle O O -line O O -between O O -the O O -West B-MISC O -and O O -developing O O -countries O O -has O O -been O O -drawn O O -over O O -the O O -issue O O -of O O -linking O O -trade O O -liberal O O -with O O -labour O O -rights O O -, O O -he O O -said O O -. O O -Su B-PER B-PER -said O O -hosting O O -the O O -meeting O O -carried O O -prestige O O -for O O -Singapore B-LOC B-LOC -, O O -" O O -however O O -, O O -this O O -is O O -quite O O -in O O -as O O -the O O -prestige O O -factor O O -may O O -not O O -necessarily O O -lead O O -to O O -any O O -additional O O -investment O O -and O O -trade O O -flows O O -to O O -this O O -region O O -. O O -" O O -From O O -a O O -commercial O O -point O O -of O O -view O O -, O O -the O O -meeting O O -would O O -be O O -good O O -for O O -Singapore B-LOC B-LOC -' O O -tourism O O -industry O O -, O O -Tan B-PER B-PER -said O O -. O O -A O O -large O O -part O O -of O O -Singapore B-LOC B-LOC -' O O -workforce O O -would O O -be O O -mob O O -to O O -ensure O O -the O O -meeting O O -would O O -run O O -without O O -a O O -g O O -but O O -the O O -average O O -Singapore B-MISC B-MISC -" O O -would O O -probably O O -not O O -be O O -too O O -concerned O O -about O O -some O O -of O O -the O O -issues O O -, O O -" O O -Tan B-PER B-PER -said O O -. O O -" O O -But O O -the O O -more O O -educated O O -public O O -will O O -realise O O -that O O -these O O -kind O O -of O O -things O O -are O O -important O O -for O O -Singapore B-LOC B-LOC -as O O -a O O -small O O -economy O O -. 
O O -" O O -Su B-PER B-PER -said O O -any O O -political O O -gains O O -the O O -Singapore B-LOC B-LOC -government O O -would O O -get O O -from O O -the O O -W B-ORG B-ORG -meeting O O -- O O -ahead O O -of O O -a O O -general O O -election O O -due O O -by O O -April O O -1997 O O -- O O -would O O -depend O O -on O O -how O O -successful O O -it O O -was O O -in O O -pushing O O -its O O -economic O O -agenda O O -. O O -" O O -If O O -there O O -are O O -any O O -movements O O -toward O O -free O O -trade O O -, O O -then O O -Singapore B-LOC B-LOC -' O O -economy O O -and O O -the O O -electorate O O -would O O -gain O O -, O O -" O O -he O O -said O O -. O O -" O O -But O O -I O O -do O O -n O O -think O O -it O O -would O O -be O O -wise O O -to O O -play O O -up O O -the O O -political O O -aspect O O -of O O -this O O -. O O -I O O -think O O -political O O -issues O O -will O O -take O O -secondary O O -importance O O -to O O -all O O -these O O -economic O O -issues O O -that O O -will O O -be O O -displayed O O -. O O -" O O -Japan B-LOC B-LOC -N B-ORG B-ORG -says O O -hopes O O -to O O -start O O -in O O -business O O -soon O O -. O O -TO B-LOC B-LOC -1996 O O -Nippon B-ORG B-ORG -Telegraph I-ORG I-ORG -and I-ORG I-ORG -Telephone I-ORG I-ORG -Corp I-ORG I-ORG -( O O -N B-ORG B-ORG -) O O -said O O -on O O -Friday O O -that O O -it O O -hopes O O -to O O -move O O -into O O -the O O -international O O -telecommunications O O -business O O -as O O -soon O O -as O O -possible O O -following O O -the O O -government O O -' O O -decision O O -to O O -split O O -N B-ORG B-ORG -into O O -three O O -firms O O -under O O -a O O -holding O O -company O O -. O O -" O O -We O O -hope O O -to O O -start O O -international O O -telephone O O -business O O -as O O -soon O O -as O O -possible O O -, O O -" O O -a O O -company O O -official O O -told O O -Re B-ORG B-ORG -. O O -The O O -official O O -said O O -the O O -latest O O -government O O -decision O O -to O O -split O O -the O O -company O O -under O O -a O O -holding O O -company O O -would O O -allow O O -flexibility O O -in O O -N B-ORG B-ORG -' O O -international O O -phone O O -business O O -. O O -Earlier O O -, O O -Post B-ORG O -and I-ORG O -Telecommunications I-ORG O -Minister O O -His B-PER B-PER -Ho I-PER I-PER -told O O -a O O -news O O -conference O O -the O O -government O O -plans O O -to O O -split O O -N B-ORG B-ORG -into O O -three O O -firms O O -under O O -a O O -holding O O -company O O -, O O -but O O -did O O -not O O -specify O O -when O O -the O O -restructuring O O -would O O -likely O O -take O O -effect O O -. O O -One O O -of O O -the O O -three O O -new O O -companies O O -will O O -be O O -a O O -long O O -operator O O -and O O -the O O -other O O -two O O -will O O -be O O -local O O -operators O O -, O O -Ho B-ORG B-PER -said O O -. O O -One O O -of O O -the O O -local O O -firms O O -will O O -operate O O -in O O -west O O -Japan B-LOC B-LOC -and O O -the O O -other O O -in O O -east O O -Japan B-LOC B-LOC -, O O -he O O -added O O -. O O -The O O -long O O -operator O O -will O O -offer O O -international O O -services O O -, O O -Ho B-ORG B-PER -said O O -. O O -The O O -N B-ORG B-ORG -official O O -said O O -the O O -timing O O -of O O -the O O -planned O O -split O O -was O O -uncertain O O -because O O -more O O -discussions O O -by O O -government O O -officials O O -were O O -required O O -. O O -Ah B-ORG B-ORG -launches O O -Asian B-MISC B-MISC -food O O -discount O O -stores O O -. 
O O -Z B-LOC B-LOC -, O O -Netherlands B-LOC B-LOC -1996 O O -Dutch B-MISC B-MISC -supermarket O O -group O O -Ah B-ORG B-ORG -N I-ORG I-ORG -said O O -on O O -Friday O O -it O O -had O O -launched O O -a O O -second O O -food O O -store O O -format O O -for O O -Asian B-MISC B-MISC -consumers O O -today O O -, O O -opening O O -16 O O -B B-MISC B-MISC -food O O -discount O O -stores O O -in O O -Malaysia B-LOC B-LOC -. O O -The O O -B B-ORG B-MISC -stores O O -are O O -located O O -in O O -Mal B-ORG B-LOC -' O O -capital O O -Kuala B-LOC B-LOC -Lumpur O I-LOC -and O O -in O O -the O O -country O O -' O O -second O O -city O O -Jo B-LOC B-LOC -Ba I-LOC I-LOC -. O O -The O O -discount O O -price O O -format O O -store O O -B B-ORG B-MISC -is O O -to O O -complement O O -Ah B-ORG B-ORG -' O O -full O O -service O O -supermarket O O -TO B-ORG B-MISC -, O O -recently O O -launched O O -in O O -Asia B-LOC B-LOC -. O O -" O O -In O O -the O O -coming O O -five O O -to O O -ten O O -years O O -, O O -Ah B-ORG B-ORG -plans O O -to O O -open O O -many O O -more O O -stores O O -of O O -both O O -formats O O -, O O -making O O -TO B-MISC B-MISC -and O O -B B-MISC B-MISC -household O O -names O O -in O O -the O O -region O O -, O O -" O O -Ah B-ORG B-ORG -said O O -in O O -a O O -statement O O -. O O -As O O -well O O -as O O -its O O -activities O O -in O O -Asia B-LOC B-LOC -, O O -Dutch B-MISC B-MISC -retail O O -group O O -Ah B-ORG B-ORG -has O O -a O O -strong O O -presence O O -in O O -Europe B-LOC B-LOC -, O O -in O O -the O O -U B-LOC B-LOC -and O O -the O O -company O O -recently O O -announced O O -a O O -joint O O -venture O O -agreement O O -in O O -Brazil B-LOC B-LOC -. O O -Ah B-ORG B-ORG -has O O -annual O O -sales O O -of O O -approximately O O -US B-MISC B-MISC -24 O O -billion O O -, O O -and O O -employs O O -180 O O -people O O -worldwide O O -. O O -- O O -Amsterdam B-LOC B-LOC -news O O -+ O O -20 O O -50 O O -5000 O O -, O O -F O O -+ O O -20 O O -50 O O -50 O O -AL B-ORG O -SK O O -' O O -W O B-MISC -C I-MISC I-MISC -S O O -G O O -W O O -PR O O -. O O -VA B-LOC B-LOC -, O O -Colorado B-LOC B-LOC -1996 O O -Profile O O -of O O -the O O -winner O O -of O O -Saturday O O -' O O -women O O -' O O -World B-MISC B-MISC -Cup I-MISC I-MISC -super O O -G O O -race O O -: O O -Name O O -: O O -S B-PER B-PER -Glad I-PER I-PER -Age O O -: O O -25 O O -Nation O O -: O O -Russia B-LOC B-LOC -Previous O O -World B-MISC B-MISC -Cup I-MISC I-MISC -victories O O -: O O -None O O -Other O O -F O O -: O O -Glad B-PER B-PER -won O O -a O O -silver O O -medal O O -in O O -super O O -G I-MISC O -at O O -the O O -1994 O O -Lille B-MISC B-LOC -Winter I-MISC B-MISC -Olympics I-MISC I-MISC -and O O -a O O -bronze O O -medal O O -in O O -downhill O O -at O O -the O O -1991 O O -World B-MISC B-MISC -Championships I-MISC I-MISC -. O O -AL B-MISC O -SK O O -' O O -W B-MISC B-MISC -C I-MISC I-MISC -S O O -G O O -R O O -. 
O O -VA B-LOC B-LOC -, O O -Colorado B-LOC B-LOC -1996 O O -Provisional O O -results O O -from O O -Saturday O O -' O O -women O O -' O O -World B-MISC B-MISC -Cup I-MISC I-MISC -super O O -G O O -race O O -: O O -1 O O -S B-PER B-PER -Glad I-PER I-PER -( O O -Russia B-LOC B-LOC -) O O -one O O -minute O O -17 O O -seconds O O -2 O O -Per B-PER B-PER -W I-PER I-PER -( O O -Sweden B-LOC B-LOC -) O O -1 O O -3 O O -Carole B-PER B-PER -Mont I-PER I-PER -( O O -France B-LOC B-LOC -) O O -1 O O -4 O O -Hi B-PER B-PER -G I-PER I-PER -( O O -Germany B-LOC B-LOC -) O O -1 O O -5 O O -Is B-PER B-PER -Ko I-PER I-PER -( O O -Italy B-LOC B-LOC -) O O -1 O O -6 O O -War B-PER B-PER -Z I-PER I-PER -( O O -Russia B-LOC B-LOC -) O O -1 O O -7 O O -Mad B-PER B-PER -B I-PER I-PER -AL B-ORG O -SK O B-MISC -W O O -W O B-MISC -C I-MISC I-MISC -S O O -G O O -. O O -VA B-LOC B-LOC -, O O -Colorado B-LOC B-LOC -1996 O O -S B-PER B-PER -Glad I-PER I-PER -of O O -Russia B-LOC B-LOC -won O O -the O O -women O O -' O O -World B-MISC B-MISC -Cup I-MISC I-MISC -Super B-MISC O -G I-MISC O -race O O -on O O -Saturday O O -. O O -Per B-PER B-PER -W I-PER I-PER -of O O -Sweden B-LOC B-LOC -finished O O -second O O -and O O -Carole B-PER B-PER -Mont I-PER I-PER -of O O -France B-LOC B-LOC -came O O -in O O -third O O -, O O -according O O -to O O -provisional O O -results O O -. O O -GO O O -- O O -T O O -R O O -OF O O -J B-MISC B-MISC -C I-MISC I-MISC -WA O O -O O O -. O O -T B-LOC B-LOC -SP I-LOC I-LOC -, O O -Florida B-LOC B-LOC -1996 O O -Heavy O O -rains O O -on O O -Saturday O O -washed O O -out O O -the O O -third O O -round O O -of O O -the O O -$ O O -1 O O -million O O -J B-MISC B-MISC -Classic I-MISC I-MISC -at O O -the O O -Inn B-LOC B-LOC -Hilton I-LOC I-LOC -Resort I-LOC I-LOC -. O O -Official O O -said O O -the O O -tournament O O -would O O -be O O -reduced O O -to O O -54 O O -holes O O -for O O -the O O -first O O -time O O -in O O -its O O -37 O O -history O O -. O O -The O O -final O O -round O O -of O O -the O O -special O O -event O O -, O O -which O O -pairs O O -players O O -from O O -the O O -PGA B-MISC B-ORG -and O O -LP B-MISC B-ORG -Tours I-MISC O -, O O -will O O -be O O -played O O -in O O -the O O -alternate O O -shot O O -format O O -on O O -Sunday O O -. O O -The O O -duo O O -of O O -Pat B-PER B-PER -Hu I-PER I-PER -and O O -Scott B-PER B-PER -M I-PER I-PER -were O O -tied O O -for O O -the O O -lead O O -with O O -the O O -team O O -of O O -Donna B-PER B-PER -Andrews I-PER I-PER -and O O -Mike B-PER B-PER -Hu I-PER I-PER -at O O -13 O O -129 O O -through O O -36 O O -holes O O -. O O -The O O -tandem O O -of O O -reigning O O -U B-MISC B-LOC -Amateur I-MISC O -champions O O -Ke B-PER B-PER -Ku I-PER I-PER -and O O -Tiger B-PER B-PER -Woods I-PER I-PER -were O O -another O O -shot O O -back O O -at O O -12 O O -130 O O -. O O -De O O -champions O O -Beth B-PER B-PER -Daniel I-PER I-PER -and O O -Davis B-PER B-PER -Love I-PER I-PER -will O O -start O O -the O O -final O O -round O O -six O O -shots O O -off O O -the O O -pace O O -. O O -AL B-ORG O -SK O O -' O O -D O O -W O O -PR O O -. 
O O -VA B-LOC B-LOC -, O O -Colorado B-LOC B-LOC -1996 O O -Profile O O -of O O -the O O -winner O O -of O O -Saturday O O -' O O -women O O -' O O -World B-MISC B-MISC -Cup I-MISC I-MISC -downhill O O -race O O -: O O -Name O O -: O O -Ren B-PER B-PER -Go I-PER I-PER -Age O O -: O O -20 O O -Nation O O -: O O -Austria B-LOC B-LOC -Previous O O -victories O O -( O O -two O O -) O O -: O O -slalom O O -, O O -Lille B-LOC B-LOC -Norway I-LOC B-LOC -, O O -1993 O O -; O O -super O O -G O O -, O O -F B-LOC B-LOC -, O O -Austria B-LOC B-LOC -, O O -1995 O O -. O O -Other O O -facts O O -: O O -As O O -a O O -qualifier O O -for O O -the O O -1993 O B-MISC -World B-MISC I-MISC -Cup I-MISC I-MISC -finals O O -through O O -Europa B-MISC B-MISC -Cup I-MISC I-MISC -results O O -, O O -16 O O -Go B-PER B-PER -won O O -the O O -slalom O O -to O O -become O O -history O O -' O O -youngest O O -World B-MISC B-MISC -Cup I-MISC I-MISC -v O O -. O O -AL B-MISC O -SK O O -' O O -W B-MISC B-MISC -C I-MISC I-MISC -ST O O -. O O -VA B-LOC B-LOC -, O O -Colorado B-LOC B-LOC -1996 O O -Women O O -' O O -World B-MISC B-MISC -Cup I-MISC I-MISC -standings O O -after O O -Saturday O O -' O O -downhill O O -race O O -: O O -Down O O -Standing O O -1 O O -Kat B-PER B-PER -Se I-PER I-PER -( O O -Germany B-LOC B-LOC -) O O -180 O O -points O O -2 O O -Ren B-PER B-PER -Go I-PER I-PER -( O O -Austria B-LOC B-LOC -) O O -132 O O -3 O O -Carole B-PER B-PER -Mont I-PER I-PER -( O O -France B-LOC B-LOC -) O O -86 O O -4 O O -Per B-PER B-PER -W I-PER I-PER -( O O -Sweden B-LOC B-LOC -) O O -75 O O -5 O O -Heidi B-PER B-PER -Z I-PER I-PER -( O O -Switzerland B-LOC B-LOC -) O O -69 O O -6 O O -Regina B-PER B-PER -Ha I-PER I-PER -( O O -Germany B-LOC B-LOC -) O O -66 O O -7 O O -Alexandra B-PER B-PER -Mei I-PER I-PER -( O O -Austria B-LOC B-LOC -) O O -65 O O -8 O O -Is B-PER B-PER -Ko I-PER I-PER -( O O -Italy B-LOC B-LOC -) O O -60 O O -9 O O -In B-PER B-PER -Helen I-PER I-PER -Mark I-PER O -( O O -Norway B-LOC B-LOC -VA B-LOC B-LOC -, O O -Colorado B-LOC B-LOC -1996 O O -Provisional O O -results O O -from O O -Saturday O O -' O O -women O O -' O O -World B-MISC B-MISC -Cup I-MISC I-MISC -downhill O O -race O O -: O O -1 O O -Ren B-PER B-PER -Go I-PER I-PER -( O O -Austria B-LOC B-LOC -) O O -one O O -minute O O -47 O O -seconds O O -2 O O -Kat B-PER B-PER -Se I-PER I-PER -( O O -Germany B-LOC B-LOC -) O O -1 O O -3 O O -Is B-PER B-PER -Ko I-PER I-PER -( O O -Italy B-LOC B-LOC -) O O -1 O O -4 O O -Alexandra B-PER B-PER -Mei I-PER I-PER -( O O -Austria B-LOC B-LOC -) O O -1 O O -5 O O -Megan B-PER B-PER -G I-PER I-PER -( O O -U B-LOC B-LOC -) O O -1 O O -6 O O -Miriam B-PER B-PER -V I-PER I-PER -( O O -Germany B-LOC B-LOC -) O O -1 O O -7 O O -Stefan B-PER B-PER -Schuster I-PER I-PER -( O O -Austria B-LOC B-LOC -) O O -1 O O -NO B-MISC O -SK O B-MISC -C I-MISC I-MISC -B O O -R O O -. 
O O -O B-LOC B-LOC -, O O -Sweden B-LOC B-LOC -1996 O O -Results O O -of O O -Saturday O O -' O O -World B-MISC B-MISC -Cup I-MISC I-MISC -bi O O -races O O -: O O -Men O O -' O O -10 O O -km O O -1 O O -V B-PER B-PER -Sa I-PER I-PER -( O O -Belarus B-LOC B-LOC -) O O -26 O O -minutes O O -17 O O -seconds O O -( O O -no O O -penalty O O -rounds O O -) O O -2 O O -Fr B-PER B-PER -Andre I-PER I-PER -( O O -Norway B-LOC B-LOC -) O O -26 O O -( O O -2 O O -) O O -3 O O -Ole B-PER B-PER -Ein I-PER I-PER -B I-PER I-PER -( O O -Norway B-LOC B-LOC -) O O -26 O O -( O O -2 O O -) O O -4 O O -Sven B-PER B-PER -Fischer I-PER I-PER -( O O -Germany B-LOC B-LOC -) O O -26 O O -( O O -1 O O -) O O -5 O O -R B-PER B-PER -Gross I-PER I-PER -( O O -Germany B-LOC B-LOC -) O O -26 O O -( O O -1 O O -) O O -World B-MISC B-MISC -Cup I-MISC I-MISC -standings O O -1 O O -Women O O -' O O -7 O O -km O O -1 O O -Olga B-PER B-PER -Mel I-PER I-PER -( O O -Russia B-LOC B-LOC -) O O -23 O O -( O O -0 O O -) O O -2 O O -S B-PER B-PER -Para I-PER I-PER -( O O -Bel B-LOC B-LOC -) O O -23 O O -( O O -0 O O -) O O -3 O O -Gunn B-PER B-PER -Mar I-PER I-PER -Andreas I-PER I-PER -( O O -Norway B-LOC B-LOC -) O O -24 O O -( O O -0 O O -) O O -4 O O -Simone B-PER B-PER -G I-PER I-PER -( O O -Germany B-LOC B-LOC -) O O -24 O O -( O O -1 O O -) O O -5 O O -Petra B-PER B-PER -Be I-PER I-PER -( O O -Germany B-LOC B-LOC -) O O -24 O O -( O O -2 O O -) O O -World B-MISC B-MISC -Cup I-MISC I-MISC -standings O O -1 O O -Be B-PER B-PER -89 O O -2 O O -Para B-PER B-PER -79 O O -3 O O -G B-PER B-PER -VA B-LOC B-LOC -, O O -Colorado B-LOC B-LOC -1996 O O -Ren B-PER B-PER -Go I-PER I-PER -of O O -Austria B-LOC B-LOC -won O O -the O O -women O O -' O O -World B-MISC B-MISC -Cup I-MISC I-MISC -downhill O O -race O O -on O O -Saturday O O -, O O -according O O -to O O -provisional O O -results O O -. O O -Kat B-PER B-PER -Se I-PER I-PER -of O O -Germany B-LOC B-LOC -finished O O -second O O -and O O -Is B-PER B-PER -Ko I-PER I-PER -of O O -Italy B-LOC B-LOC -took O O -third O O -. O O -B O B-MISC -P O O -USA B-LOC B-ORG -III O I-ORG -TO O O -S O O -W O O -. O O -I B-LOC B-LOC -, O O -Austria B-LOC B-LOC -1996 O O -Brian B-PER B-PER -Shi I-PER I-PER -pilot O O -USA B-LOC B-ORG -III I-MISC I-ORG -to O O -a O O -surprise O O -victory O O -in O O -a O O -World B-MISC B-MISC -Cup I-MISC I-MISC -two O O -b O O -race O O -on O O -Saturday O O -. O O -L O O -fifth O O -after O O -the O O -first O O -run O O -, O O -Shi B-PER B-PER -and O O -break O O -Randy B-PER B-PER -Jones I-PER I-PER -delivered O O -a O O -near O O -second O O -trip O O -down O O -the O O -1976 B-MISC O -Olympic B-MISC B-MISC -course O O -for O O -an O O -aggregate O O -time O O -of O O -one O O -minute O O -45 O O -seconds O O -. O O -First O O -run O O -leaders O O -G B-PER B-PER -Hu I-PER I-PER -and O O -break O O -Antonio B-PER B-PER -Ta I-PER I-PER -in O O -the O O -Italy B-LOC B-LOC -I O O -s O O -finished O O -second O O -two O O -of O O -a O O -second O O -behind O O -the O O -Americans B-MISC B-MISC -. O O -Canada B-LOC B-ORG -I O I-ORG -, O O -represented O O -by O O -Pierre B-PER B-PER -Lu I-PER I-PER -and O O -break O O -Dave B-PER B-PER -Mac I-PER I-PER -, O O -completed O O -the O O -third O O -World B-MISC B-MISC -cup O I-MISC -event O O -of O O -the O O -winter O O -a O O -further O O -one O O -of O O -a O O -second O O -behind O O -the O O -Italians B-MISC B-MISC -. 
O O -The O O -Canadians B-MISC B-MISC -, O O -winners O O -of O O -the O O -opening O O -two O O -events O O -in O O -Alt B-LOC B-LOC -, O O -Germany B-LOC B-LOC -, O O -and O O -La B-LOC B-LOC -P I-LOC I-LOC -, O O -France B-LOC B-LOC -, O O -increased O O -their O O -lead O O -in O O -the O O -World B-MISC B-MISC -Cup I-MISC I-MISC -standings O O -. O O -They O O -have O O -104 O O -points O O -, O O -15 O O -ahead O O -of O O -USA B-LOC B-ORG -I I-MISC I-ORG -' O O -Jim B-PER B-PER -Herb I-PER I-PER -and O O -break O O -Garrett B-PER B-PER -Hi I-PER I-PER -who O O -managed O O -only O O -10th O O -place O O -on O O -Saturday O O -. O O -SK O B-MISC -MA O O -PR O O -F O O -SK O O -DE O O -. O O -T B-LOC B-LOC -, O O -France B-LOC B-LOC -1996 O O -China B-LOC B-LOC -made O O -a O O -promising O O -debut O O -on O O -the O O -freestyle O O -skiing O O -world O O -cup O O -circuit O O -in O O -an O O -aerial O O -event O O -in O O -the O O -French B-MISC B-MISC -resort O O -of O O -T B-LOC B-LOC -on O O -Saturday O O -. O O -While O O -the O O -Chinese B-MISC B-MISC -failed O O -to O O -gain O O -a O O -place O O -in O O -the O O -men O O -' O O -final O O -, O O -they O O -had O O -two O O -in O O -the O O -top O O -10 O O -of O O -the O O -women O O -' O O -competition O O -, O O -C B-PER B-PER -Dan I-PER I-PER -finishing O O -a O O -respectable O O -seventh O O -and O O -Xu B-PER B-PER -Nan I-PER I-PER -ninth O O -. O O -But O O -overall O O -, O O -it O O -was O O -France B-LOC B-LOC -and O O -Canada B-LOC B-LOC -who O O -dominated O O -the O O -day O O -. O O -Alexis B-PER B-PER -Blanc I-PER I-PER -and O O -Se B-PER B-PER -F I-PER I-PER -gave O O -France B-LOC B-LOC -a O O -one O O -finish O O -in O O -the O O -first O O -aerial O O -competition O O -of O O -the O O -season O O -. O O -Blanc B-PER B-PER -collected O O -his O O -seventh O O -career O O -World B-MISC B-MISC -Cup I-MISC I-MISC -win O O -with O O -a O O -two O O -jump O O -combined O O -score O O -of O O -238 O O -points O O -, O O -easily O O -beating O O -F B-PER B-PER -, O O -the O O -overall O O -World B-MISC B-MISC -Cup I-MISC I-MISC -aerial O O -champion O O -, O O -who O O -was O O -a O O -distant O O -second O O -with O O -223 O O -. O O -Canada B-LOC B-LOC -' O O -Jeff B-PER B-PER -Bean I-PER I-PER -, O O -who O O -had O O -never O O -finished O O -higher O O -than O O -ninth O O -in O O -a O O -World B-MISC B-MISC -Cup I-MISC I-MISC -event O O -, O O -made O O -his O O -first O O -trip O O -to O O -the O O -podium O O -taking O O -third O O -place O O -with O O -a O O -mark O O -of O O -209 O O -. O O -Veronica B-PER B-PER -B I-PER I-PER -of O O -Canada B-LOC B-LOC -, O O -who O O -picked O O -up O O -her O O -first O O -career O O -victory O O -at O O -T B-LOC B-LOC -last O O -year O O -, O O -made O O -it O O -two O O -wins O O -in O O -a O O -row O O -at O O -the O O -French B-MISC B-MISC -resort O O -taking O O -first O O -in O O -the O O -women O O -' O O -competition O O -with O O -a O O -score O O -of O O -170 O O -. O O -Swiss B-MISC B-MISC -skier O O -occupied O O -the O O -other O O -two O O -places O O -on O O -the O O -podium O O -, O O -Karin B-PER B-PER -Ku I-PER I-PER -taking O O -second O O -with O O -160 O O -narrowly O O -ahead O O -of O O -Evelyn B-PER B-PER -Le I-PER I-PER -with O O -160 O O -. O O -B O B-MISC -C I-MISC I-MISC -T O O -R O O -. 
O O -I B-LOC B-LOC -, O O -Austria B-LOC B-LOC -1996 O O -Results O O -of O O -a O O -World B-MISC B-MISC -Cup I-MISC I-MISC -two O O -b O O -event O O -on O O -Saturday O O -: O O -1 O O -United B-LOC B-ORG -States I-LOC I-ORG -III O I-ORG -( O O -Brian B-PER B-PER -Shi I-PER I-PER -, O O -Randy B-PER B-PER -Jones I-PER I-PER -) O O -one O O -minute O O -45 O O -seconds O O -( O O -52 O O -/ O O -53 O O -) O O -2 O O -Italy B-LOC B-ORG -I O I-ORG -( O O -G B-PER B-PER -Hu I-PER I-PER -, O O -Antonio B-PER B-PER -Ta I-PER I-PER -) O O -1 O O -( O O -52 O O -/ O O -53 O O -) O O -3 O O -Canada B-LOC B-ORG -I O I-ORG -( O O -Pierre B-PER B-PER -Lu I-PER I-PER -, O O -Dave B-PER B-PER -Mac I-PER I-PER -) O O -1 O O -( O O -52 O O -/ O O -53 O O -) O O -4 O O -German B-MISC B-ORG -I O I-ORG -( O O -Sep B-PER B-PER -Do I-PER I-PER -, O O -Thomas B-PER B-PER -United B-LOC B-ORG -States I-LOC I-ORG -I O I-ORG -( O O -Jim B-PER B-PER -Herb I-PER I-PER -, O O -Garrett B-PER B-PER -Hi I-PER B-PER -) O O -1 O O -( O O -53 O O -/ O O -53 O O -) O O -and O O -Austria B-LOC B-ORG -III O I-ORG -( O O -Han B-PER B-PER -Con I-PER I-PER -, O O -Georg B-PER B-PER -Ku I-PER I-PER -) O O -1 O O -( O O -53 O O -53 O O -CR O O -- O O -W O B-PER -MA O O -SE O O -R O O -TO O O -K B-LOC B-LOC -. O O -K B-LOC B-LOC -, O O -India B-LOC B-LOC -1996 O O -South B-LOC B-LOC -Africa I-LOC I-LOC -' O O -trip O O -to O O -Ka B-LOC B-LOC -for O O -the O O -third O O -test O O -against O O -India B-LOC B-LOC -has O O -given O O -former O O -England B-LOC B-LOC -test O O -cricketer O O -Bob B-PER B-PER -W I-PER I-PER -the O O -chance O O -of O O -a O O -sentiment O O -return O O -to O O -his O O -birthplace O O -. O O -W B-PER B-PER -was O O -born O O -in O O -the O O -northern O O -city O O -of O O -Ka B-LOC B-LOC -when O O -his O O -father O O -worked O O -there O O -for O O -an O O -insurance O O -com O O -and O O -was O O -himself O O -an O O -active O O -cricketer O O -. O O -" O O -It O O -' O O -been O O -a O O -sentiment O O -journey O O -. O O -A O O -visit O O -to O O -India B-LOC B-LOC -is O O -always O O -an O O -intriguing O O -experience O O -, O O -" O O -W B-PER B-PER -, O O -now O O -the O O -South B-MISC B-MISC -African I-MISC I-MISC -coach O O -, O O -said O O -on O O -Saturday O O -. O O -W B-PER B-PER -, O O -48 O O -, O O -played O O -19 O O -tests O O -for O O -England B-LOC B-LOC -between O O -1975 O O -and O O -1981 O O -. O O -His O O -first O O -cricket O O -so O O -to O O -India B-LOC B-LOC -was O O -as O O -a O O -member O O -of O O -Tony B-PER B-PER -G I-PER I-PER -' O O -England B-LOC B-LOC -side O O -in O O -1976 O O -. O O -His O O -father O O -Clarence B-PER B-PER -W I-PER I-PER -represented O O -United B-ORG B-LOC -Province I-ORG I-LOC -, O O -now O O -renamed O O -Uttar B-LOC B-LOC -Pradesh I-LOC I-LOC -, O O -in O O -India B-LOC B-LOC -' O O -Ra B-MISC B-MISC -Trophy I-MISC I-MISC -national O O -championship O O -and O O -captained O O -the O O -state O O -during O O -1949 O O -. O O -Now O O -aged O O -86 O O -, O O -W B-PER B-PER -senior O O -lives O O -with O O -his O O -son O O -in O O -Cape B-LOC B-LOC -Town I-LOC I-LOC -. O O -W B-PER B-PER -' O O -memories O O -of O O -Ka B-LOC B-LOC -are O O -few O O -and O O -blurred O O -. O O -" O O -I O O -do O O -n O O -remember O O -much O O -of O O -the O O -place O O -, O O -" O O -he O O -said O O -. 
O O -" O O -I O O -came O O -here O O -on O O -zero O O -and O O -left O O -at O O -three O O -( O O -aged O O -three O O -) O O -when O O -my O O -father O O -was O O -transferred O O -to O O -Calcutta B-LOC B-LOC -where O O -I O O -spent O O -another O O -four O O -and O O -half O O -years O O -. O O -" O O -But O O -I O O -do O O -remember O O -we O O -had O O -a O O -co O O -snake O O -in O O -the O O -basement O O -of O O -our O O -house O O -. O O -Also O O -that O O -my O O -father O O -bought O O -a O O -bicycle O O -and O O -when O O -we O O -rode O O -over O O -a O O -hose O O -pipe O O -it O O -broke O O -into O O -two O O -. O O -" O O -W B-PER B-PER -said O O -the O O -hospital O O -where O O -he O O -was O O -born O O -is O O -close O O -to O O -the O O -stadium O O -where O O -the O O -India B-LOC B-MISC -Africa I-MISC I-MISC -test O O -will O O -be O O -played O O -. O O -F O O -SK O B-MISC -C I-MISC I-MISC -A I-MISC O -R O O -. O O -T B-LOC B-LOC -, O O -France B-LOC B-LOC -1996 O O -Results O O -of O O -the O O -World B-MISC B-MISC -Cup I-MISC I-MISC -freestyle O O -skiing O O -aerial O O -competition O O -on O O -Saturday O O -: O O -Men O O -: O O -1 O O -Alexis B-PER B-PER -Blanc I-PER I-PER -( O O -France B-LOC B-LOC -) O O -238 O O -points O O -2 O O -Se B-PER B-PER -F I-PER I-PER -( O O -France B-LOC B-LOC -) O O -223 O O -3 O O -Jeff B-PER B-PER -Bean I-PER I-PER -( O O -Canada B-LOC B-LOC -) O O -209 O O -4 O O -Eric B-PER B-PER -Berg I-PER I-PER -( O O -U B-LOC B-LOC -) O O -207 O O -5 O O -Christian B-PER B-PER -R I-PER I-PER -( O O -Austria B-LOC B-LOC -) O O -204 O O -6 O O -Alexandre B-PER B-PER -Mikhail I-PER I-PER -( O O -Russia B-LOC B-LOC -) O O -202 O O -7 O O -Al B-PER B-PER -Vale I-PER I-PER -( O O -Czech B-LOC B-LOC -Republic I-LOC I-LOC -) O O -194 O O -8 O O -Andy B-PER B-PER -Cap I-PER I-PER -( O O -Canada B-LOC B-LOC -) O O -193 O O -9 O O -K B-LOC B-LOC -, O O -Finland B-LOC B-LOC -1996 O O -Leading O O -results O O -in O O -a O O -World B-MISC B-MISC -Cup I-MISC I-MISC -high O O -hill O O -( O O -120 O O -) O O -ski O O -jumping O O -event O O -on O O -Saturday O O -: O O -1 O O -Ta B-PER B-PER -Ok I-PER I-PER -( O O -Japan B-LOC B-LOC -) O O -303 O O -points O O -( O O -first O O -jump O O -145 O O -/ O O -second O O -jump O O -158 O O -) O O -2 O O -Ka B-PER B-PER -Fun I-PER I-PER -( O O -Japan B-LOC B-LOC -) O O -295 O O -( O O -151 O O -/ O O -143 O O -) O O -3 O O -Andreas B-PER B-PER -Goldberg I-PER I-PER -( O O -Austria B-LOC B-LOC -) O O -27 O O -( O O -144 O O -/ O O -130 O O -) O O -4 O O -Diet B-PER B-PER -Thom I-PER I-PER -( O O -Germany B-LOC B-LOC -) O O -26 O O -( O O -141 O O -/ O O -124 O O -) O O -5 O O -Ari B-PER B-PER -Nik I-PER I-PER -T B-LOC B-LOC -DE I-LOC I-LOC -, O O -Bali B-LOC B-LOC -1996 O O -Results O O -of O O -semifinals O O -at O O -the O O -World B-MISC B-MISC -Grand B-MISC I-MISC -Prix I-MISC I-MISC -finals O O -on O O -Saturday O O -: O O -Men O O -' O O -singles O O -Fun B-PER B-PER -Per I-PER I-PER -( O O -Taiwan B-LOC B-LOC -) O O -beat O O -In B-PER B-PER -W I-PER I-PER -( O O -Indonesia B-LOC B-LOC -) O O -15 O O -15 O O -Sun B-PER B-PER -Jun I-PER I-PER -( O O -China B-LOC B-LOC -) O O -beat O O -Allan B-PER B-PER -Bud I-PER I-PER -Ku I-PER I-PER -( O O -Indonesia B-LOC B-LOC -) O O -15 O O -15 O O -Women O O -' O O -singles O O -Su B-PER B-PER -Susan I-PER I-PER -( O O -Indonesia B-LOC B-LOC -) O O -beat O O -Cam B-PER B-PER -Martin I-PER I-PER -( O O -Denmark B-LOC B-LOC -) O O -11 O O -11 O O -Ye B-PER 
B-PER -Zhao I-PER I-PER -( O O -China B-LOC B-LOC -) O O -beat O O -Gong B-PER B-PER -Z I-PER I-PER -( O O -China B-LOC B-LOC -) O O -11 O O -11 O O -SP O O -SK O O -CH B-LOC B-LOC -, O O -South B-LOC B-LOC -Korea I-LOC I-LOC -1996 O O -Results O O -on O O -the O O -first O O -day O O -of O O -the O O -World B-MISC B-MISC -Cup I-MISC I-MISC -speed O O -skating O O -races O O -here O O -on O O -Saturday O O -. O O -Men O O -' O O -500 O O -metres O O -first O O -round O O -: O O -1 O O -. O O -Ho B-PER B-PER -Man I-PER I-PER -( O O -Japan B-LOC B-LOC -) O O -37 O O -seconds O O -; O O -2 O O -. O O -J B-PER B-PER -Sung I-PER I-PER -( O O -South B-LOC B-LOC -Korea I-LOC I-LOC -) O O -37 O O -; O O -3 O O -. O O -G B-PER B-PER -N I-PER I-PER -( O O -Norway B-LOC B-LOC -) O O -37 O O -; O O -4 O O -. O O -Shi B-PER B-PER -Hi I-PER I-PER -( O O -Japan B-LOC B-LOC -) O O -37 O O -; O O -5 O O -. O O -Sergey B-PER B-PER -K I-PER I-PER -( O O -Russia B-LOC B-LOC -) O O -37 O O -; O O -6 O O -. O O -Ya B-PER B-PER -Hi I-PER I-PER -( O O -Japan B-LOC B-LOC -) O O -37 O O -; O O -7 O O -. O O -Casey B-PER B-PER -Fi I-PER I-PER -( O O -US B-LOC B-LOC -) O O -37 O O -; O O -8 O O -. O O -S B-PER B-PER -Bo I-PER I-PER -( O O -Canada B-LOC B-LOC -) O O -38 O O -; O O -9 O O -. O O -Kim B-PER B-PER -Yo I-PER I-PER -( O O -South B-LOC B-LOC -Korea I-LOC I-LOC -) O O -38 O O -; O O -10 O O -. O O -In B-PER B-PER -Jun I-PER I-PER -( O O -Japan B-LOC B-LOC -) O O -38 O O -. O O -Women O O -' O O -500 O O -metres O O -first O O -round O O -: O O -1 O O -. O O -Xu B-PER B-PER -R I-PER I-PER -( O O -China B-LOC B-LOC -) O O -40 O O -; O O -2 O O -. O O -S B-PER B-PER -J I-PER I-PER -( O O -Russia B-LOC B-LOC -) O O -41 O O -; O O -3 O O -. O O -Franz B-PER B-PER -Sc I-PER I-PER -( O O -Germany B-LOC B-LOC -) O O -41 O O -; O O -4 O O -. O O -Ok B-PER B-PER -Tom I-PER I-PER -( O O -Japan B-LOC B-LOC -) O O -41 O O -; O O -5 O O -. O O -Shi B-PER B-PER -K B-PER I-PER -( O O -Japan B-LOC B-LOC -) O O -41 O O -; O O -6 O O -. O O -Marianne B-PER B-PER -Tim I-PER I-PER -( O O -Netherlands B-LOC B-LOC -) O O -41 O O -; O O -7 O O -. O O -Jin B-PER B-PER -Hu I-PER I-PER -( O O -China B-LOC B-LOC -) O O -41 O O -; O O -8 O O -. O O -Al B-PER B-PER -Ko I-PER I-PER -( O O -Russia B-LOC B-LOC -) O O -41 O O -; O O -9 O O -. O O -Chris B-PER B-PER -W I-PER I-PER -( O O -US B-LOC B-LOC -) O O -41 O O -; O O -10 O O -. O O -An B-PER B-PER -Ba I-PER I-PER -( O O -Germany B-LOC B-LOC -) O O -41 O O -. 
O O -Men O O -' O O -1 O O -metres O O -first O O -round O O -: O O -1 O O -S B-PER B-PER -Bo I-PER I-PER -( O O -Canada B-LOC B-LOC -) O O -1 O O -minute O O -16 O O -seconds O O -2 O O -Sergey B-PER B-PER -K I-PER I-PER -( O O -Russia B-LOC B-LOC -) O O -1 O O -3 O O -Jan B-PER B-PER -Bo I-PER I-PER -( O O -Netherlands B-LOC B-LOC -) O O -1 O O -4 O O -G B-PER B-PER -N I-PER I-PER -( O O -Norway B-LOC B-LOC -) O O -1 O O -5 O O -Lee B-PER B-PER -K I-PER I-PER -( O O -South B-LOC B-LOC -Korea I-LOC I-LOC -) O O -1 O O -6 O O -In B-PER B-PER -Jun I-PER I-PER -( O O -Japan B-LOC B-LOC -) O O -1 O O -7 O O -Gerard B-PER B-PER -Van I-PER I-PER -V I-PER I-PER -( O O -Netherlands B-LOC B-LOC -) O O -1 O O -8 O O -Kim B-PER B-PER -Yo I-PER I-PER -W B-LOC B-LOC -, O O -British B-LOC B-LOC -Columbia I-LOC I-LOC -1996 O O -World B-MISC B-MISC -Cup I-MISC I-MISC -ski O O -officials O O -hope O O -to O O -be O O -able O O -to O O -get O O -in O O -at O O -least O O -one O O -men O O -' O O -downhill O O -training O O -run O O -on O O -Saturday O O -in O O -an O O -effort O O -to O O -salvage O O -the O O -weekend O O -racing O O -programme O O -. O O -For O O -the O O -third O O -consecutive O O -day O O -, O O -Friday O O -' O O -scheduled O O -training O O -runs O O -were O O -cancelled O O -due O O -to O O -heavy O O -wet O O -snow O O -and O O -fog O O -on O O -W B-LOC B-LOC -Mountain I-LOC I-LOC -, O O -leaving O O -the O O -scheduled O O -World B-MISC B-MISC -Cup I-MISC I-MISC -events O O -in O O -j O O -. O O -Rules O O -call O O -for O O -at O O -least O O -one O O -training O O -run O O -to O O -be O O -completed O O -before O O -a O O -World B-MISC B-MISC -Cup I-MISC I-MISC -downhill O O -race O O -can O O -be O O -staged O O -. O O -Organ O O -hope O O -to O O -get O O -that O O -run O O -in O O -on O O -Saturday O O -morning O O -, O O -conditions O O -permitting O O -, O O -and O O -stage O O -the O O -race O O -later O O -in O O -the O O -day O O -or O O -on O O -Sunday O O -. O O -" O O -There O O -was O O -no O O -possibility O O -today O O -to O O -make O O -a O O -training O O -run O O -, O O -" O O -said O O -Bern B-PER B-PER -Z I-PER I-PER -, O O -the O O -Canadian B-MISC B-MISC -men O O -' O O -national O O -coach O O -, O O -citing O O -too O O -much O O -new O O -snow O O -and O O -poor O O -visibility O O -. O O -If O O -organise O O -are O O -forced O O -to O O -run O O -the O O -downhill O O -on O O -Sunday O O -, O O -the O O -super O O -slalom O O -originally O O -scheduled O O -for O O -Sunday O O -would O O -likely O O -be O O -abandoned O O -. O O -S O O -- O O -L O O -SC B-MISC B-MISC -PR O I-MISC -D O I-MISC -SC O O -. 
O O -G B-LOC B-LOC -1996 O O -Leading O O -goals O O -in O O -the O O -Scottish B-MISC B-MISC -premier O I-MISC -division O O -after O O -Saturday O O -' O O -matches O O -: O O -10 O O -- O O -Billy B-PER B-PER -Dodd I-PER I-PER -( O O -Aberdeen B-ORG B-ORG -) O O -, O O -Pierre B-PER B-PER -Van I-PER I-PER -Ho I-PER I-PER -( O O -Celtic B-ORG B-ORG -) O O -9 O O -- O O -Paul B-PER B-PER -Gas I-PER I-PER -( O O -Rangers B-ORG B-ORG -) O O -7 O O -- O O -Paul B-PER B-PER -Wright I-PER I-PER -( O O -Ki B-ORG B-ORG -) O O -, O O -Ally B-PER B-PER -M I-PER I-PER -( O O -Rangers B-ORG B-ORG -) O O -6 O O -- O O -Andreas B-PER B-PER -Thom I-PER I-PER -( O O -Celtic B-ORG B-ORG -) O O -, O O -Dean B-PER B-PER -Wind I-PER I-PER -( O O -Aberdeen B-ORG B-ORG -) O O -, O O -Brian B-PER B-PER -Lau I-PER I-PER -( O O -Rangers B-ORG B-ORG -) O O -, O O -Darren B-PER B-PER -Jackson I-PER I-PER -( O O -Hi B-ORG B-ORG -) O O -5 O O -- O O -Peter B-PER B-PER -van I-PER I-PER -V I-PER I-PER -( O O -Rangers B-ORG B-ORG -) O O -, O O -Gerry B-PER B-PER -B I-PER I-PER -( O O -Du B-ORG B-ORG -) O O -, O O -Colin B-PER B-PER -S O O -- O O -L O O -E B-MISC B-MISC -GO O O -. O O -L B-LOC B-LOC -1996 O O -Leading O O -goals O O -in O O -the O O -English B-MISC B-MISC -premier O O -league O O -after O O -Saturday O O -' O O -matches O O -: O O -13 O O -- O O -Ian B-PER B-PER -Wright I-PER I-PER -( O O -Arsenal B-ORG B-ORG -) O O -9 O O -- O O -F B-PER B-PER -Ra I-PER I-PER -( O O -Middlesbrough B-ORG B-ORG -) O O -, O O -Alan B-PER B-PER -Shea I-PER I-PER -( O O -Newcastle B-ORG B-ORG -) O O -8 O O -- O O -Matthew B-PER B-PER -Le I-PER I-PER -T I-PER I-PER -( O O -Southampton B-ORG B-ORG -) O O -, O O -Dwight B-PER B-PER -York I-PER I-PER -( O O -Aston B-ORG B-ORG -Villa I-ORG B-ORG -) O O -, O O -Les B-PER B-PER -Ferdinand I-PER I-PER -( O O -Newcastle B-ORG B-ORG -) O O -, O O -E B-PER B-PER -E I-PER I-PER -( O O -Wimbledon B-ORG B-LOC -) O O -, O O -G B-PER B-PER -Via I-PER I-PER -( O O -Chelsea B-ORG B-ORG -) O O -7 O O -- O O -Robbie B-PER B-PER -Earle I-PER I-PER -( O O -Wimbledon B-ORG B-LOC -) O O -, O O -Les B-PER B-PER -Ferdinand I-PER I-PER -( O O -Newcastle B-ORG B-ORG -) O O -6 O O -- O O -Marcus B-PER B-PER -Gay I-PER I-PER -( O O -Wimbledon B-ORG B-LOC -) O O -, O O -Gary B-PER B-PER -Speed I-PER I-PER -( O O -Everton B-ORG B-ORG -) O O -, O O -Chris B-PER B-PER -Sutton I-PER B-PER -( O O -Blackburn B-ORG B-ORG -) O O -5 O O -S O O -- O O -NO B-MISC B-LOC -I B-MISC I-LOC -PR O O -D O O -R O O -/ O O -ST O O -. 
O O -L B-LOC B-LOC -1996 O O -Results O O -of O O -Northern B-LOC B-LOC -Ireland I-LOC I-LOC -premier O O -division O O -matches O O -on O O -Saturday O O -: O O -A B-ORG B-ORG -0 O O -Crusaders B-ORG B-ORG -0 O O -Clifton B-ORG B-ORG -1 O O -Port B-ORG B-ORG -1 O O -Glen B-ORG B-ORG -2 O O -Lin B-ORG B-ORG -1 O O -Glen B-ORG B-ORG -1 O O -Cole B-ORG B-ORG -0 O O -Standing O O -( O O -ta O O -- O O -played O O -, O O -won O O -, O O -drawn O O -, O O -lost O O -, O O -goals O O -for O O -, O O -goals O O -against O O -, O O -points O O -) O O -: O O -Cole B-ORG B-ORG -10 O O -7 O O -1 O O -2 O O -18 O O -11 O O -22 O O -Lin B-ORG B-ORG -10 O O -4 O O -3 O O -3 O O -13 O O -10 O O -15 O O -Crusaders B-ORG B-ORG -10 O O -3 O O -4 O O -3 O O -11 O O -9 O O -13 O O -Glen B-ORG B-ORG -10 O O -3 O O -4 O O -3 O O -15 O O -14 O O -13 O O -Glen B-ORG B-ORG -10 O O -3 O O -3 O O -4 O O -18 O O -18 O O -12 O O -Port B-ORG B-ORG -9 O O -3 O O -3 O O -3 O O -11 O O -12 O O -12 O O -L B-LOC B-LOC -1996 O O -Results O O -of O O -British B-MISC B-MISC -rugby O O -union O O -matches O O -on O O -Saturday O O -: O O -Pi B-MISC B-MISC -Cup I-MISC I-MISC -fourth O O -round O O -Reading B-ORG B-ORG -50 O O -W B-ORG B-ORG -3 O O -English B-MISC B-MISC -division O O -one O O -Bath B-ORG B-ORG -35 O O -Ha B-ORG B-ORG -20 O O -Gloucester B-ORG B-ORG -29 O O -London B-ORG B-ORG -Irish I-ORG I-ORG -19 O O -Or B-ORG B-ORG -22 O O -West B-ORG B-ORG -Hart I-ORG I-ORG -15 O O -Was B-ORG B-ORG -15 O O -Bristol B-ORG B-ORG -13 O O -Welsh B-MISC B-MISC -division O O -one O O -C B-ORG B-ORG -20 O O -Cardiff B-ORG B-ORG -34 O O -L B-ORG B-ORG -97 O O -New B-ORG B-ORG -10 O O -Newport B-ORG B-ORG -45 O O -Du B-ORG B-ORG -22 O O -Pont B-ORG B-ORG -53 O O -Bridge B-ORG B-ORG -9 O O -Swansea B-ORG B-ORG -49 O O -N B-ORG B-ORG -10 O O -T B-ORG B-ORG -13 O O -E B-ORG B-ORG -Vale I-ORG I-ORG -17 O O -Scottish B-MISC B-MISC -division O O -one O O -Borough B-ORG B-ORG -31 O O -Watson B-ORG B-ORG -35 O O -S O O -- O O -SC B-ORG B-MISC -G B-LOC B-LOC -1996 O O -Su O O -of O O -Scottish B-MISC B-MISC -premier O O -division O O -matches O O -played O O -on O O -Saturday O O -: O O -Du B-ORG B-ORG -2 O O -( O O -Mill B-PER B-PER -43 O O -, O O -46 O O -penalty O O -) O O -Aberdeen B-ORG B-ORG -3 O O -( O O -Miller B-PER B-PER -10 O O -, O O -Row B-PER B-PER -55 O O -, O O -Wind B-PER O -78 O O -) O O -. O O -Half O O -1 O O -. O O -Attendance O O -: O O -5 O O -Hearts B-ORG B-ORG -0 O O -Rai B-ORG B-ORG -0 O O -. O O -10 O O -Ki B-ORG B-ORG -0 O O -Dundee B-ORG B-ORG -United I-ORG I-ORG -2 O O -( O O -Olaf B-PER B-PER -22 O O -, O O -51 O O -) O O -. O O -0 O O -. O O -5 O O -Mother B-ORG B-ORG -2 O O -( O O -Davies B-PER B-PER -39 O O -, O O -Ross B-PER B-PER -89 O O -) O O -Celtic B-ORG B-ORG -1 O O -( O O -Hay B-PER B-PER -83 O O -) O O -. O O -1 O O -. O O -11 O O -Rangers B-ORG B-ORG -4 O O -( O O -Ferguson B-PER B-PER -34 O O -, O O -M B-PER B-PER -71 O O -74 O O -, O O -Lau B-PER B-PER -83 O O -) O O -Hi B-ORG B-ORG -3 O O -( O O -Wright B-PER B-PER -21 O O -, O O -Jackson B-PER B-PER -41 O O -, O O -M B-PER B-PER -86 O O -) O O -. O O -1 O O -. O O -48 O O -. O O -R B-ORG B-ORG -UN I-ORG I-ORG -- O O -R B-PER O -CA O B-PER -W O O -UP O O -O O O -. 
O O -L B-LOC B-LOC -1996 O O -David B-PER B-PER -Camp I-PER I-PER -will O O -consider O O -offers O O -to O O -play O O -club O O -rugby O O -in O O -England B-LOC B-LOC -but O O -looks O O -more O O -likely O O -to O O -spend O O -the O O -next O O -year O O -chasing O O -business O O -opportunities O O -in O O -Australia B-LOC B-LOC -. O O -The O O -34 O O -winger O O -played O O -his O O -final O O -game O O -in O O -a O O -Wall B-MISC B-ORG -jersey O O -on O O -Saturday O O -but O O -is O O -currently O O -a O O -target O O -for O O -clubs O O -eager O O -to O O -match O O -London B-LOC B-LOC -side O O -Sara B-ORG B-ORG -who O O -have O O -already O O -snapped O O -up O O -Franco B-PER B-PER -Pie I-PER I-PER -, O O -Michael B-PER B-PER -L I-PER I-PER -and O O -Philippe B-PER B-PER -Se I-PER I-PER -. O O -" O O -If O O -the O O -opportunity O O -is O O -there O O -I O O -' O O -obviously O O -think O O -about O O -it O O -but O O -the O O -thing O O -that O O -holds O O -me O O -back O O -is O O -business O O -, O O -" O O -said O O -Camp B-PER B-PER -. O O -" O O -I O O -' O O -like O O -to O O -come O O -over O O -but O O -there O O -are O O -a O O -lot O O -of O O -things O O -happening O O -at O O -home O O -. O O -I O O -' O O -also O O -got O O -a O O -contract O O -to O O -play O O -for O O -New B-LOC B-ORG -South I-LOC I-ORG -Wales I-LOC I-ORG -in O O -the O O -Super B-MISC O -12 I-MISC O -next O O -year O O -. O O -" O O -Former O O -Wall B-MISC B-ORG -captain O O -Nick B-PER B-PER -Far I-PER I-PER -believes O O -Camp B-PER B-PER -may O O -yet O O -be O O -tempted O O -to O O -England B-LOC B-LOC -. O O -" O O -I O O -' O O -sure O O -there O O -are O O -a O O -few O O -people O O -in O O -England B-LOC B-LOC -who O O -' O O -be O O -delighted O O -to O O -have O O -David B-PER B-PER -Camp I-PER I-PER -in O O -their O O -club O O -' O O -jersey O O -, O O -" O O -he O O -said O O -. O O -S O O -- O O -E B-MISC B-MISC -PR O O -L O O -S O O -. O O -L B-LOC B-LOC -1996 O O -Su O O -of O O -English B-MISC B-MISC -premier O O -le O O -matches O O -on O O -Saturday O O -: O O -Arsenal B-ORG B-ORG -2 O O -( O O -Adams B-PER B-PER -45 O O -, O O -V B-PER B-PER -90 O O -) O O -Derby B-ORG B-ORG -2 O O -( O O -St B-PER B-PER -62 O O -, O O -Powell B-PER B-PER -71 O O -) O O -. O O -Half O O -1 O O -. O O -Attendance O O -: O O -38 O O -Chelsea B-ORG B-ORG -2 O O -( O O -Z B-PER B-PER -12 O O -, O O -Via B-PER B-PER -55 O O -) O O -Everton B-ORG B-ORG -2 O O -( O O -Branch B-PER B-PER -17 O O -, O O -Ka B-PER B-PER -28 O O -) O O -. O O -1 O O -. O O -28 O O -Coventry B-ORG B-ORG -1 O O -( O O -W B-PER B-PER -60 O O -) O O -Tottenham B-ORG B-ORG -2 O O -( O O -She B-PER B-PER -27 O O -, O O -Sin B-ORG B-PER -75 O O -) O O -. O O -0 O O -. O O -19 O O -Leicester B-ORG B-ORG -1 O O -( O O -Marshall B-PER B-PER -78 O O -) O O -Blackburn B-ORG B-ORG -1 O O -( O O -Sutton B-PER B-PER -34 O O -) O O -. O O -0 O O -. O O -19 O O -Liverpool B-ORG B-ORG -0 O O -Sheffield B-ORG B-ORG -Wednesday I-ORG I-ORG -1 O O -( O O -W B-PER B-PER -22 O O -) O O -. O O -0 O O -. O O -39 O O -Middlesbrough B-ORG B-ORG -0 O O -Leeds B-ORG B-ORG -0 O O -. O O -30 O O -Southampton B-ORG B-ORG -0 O O -Aston B-ORG B-ORG -Villa I-ORG I-ORG -1 O O -( O O -Townsend B-PER B-PER -34 O O -) O O -. O O -0 O O -. O O -15 O O -Sunderland B-ORG B-ORG -1 O O -( O O -Melville B-PER B-PER -83 O O -) O O -Wimbledon B-ORG B-LOC -3 O O -( O O -E B-PER B-PER -8 O O -, O O -29 O O -, O O -Hold B-PER B-PER -89 O O -) O O -. 
O O -0 O O -. O O -19 O O -. O O -S O O -- O O -SC B-MISC B-MISC -L O O -ST O O -. O O -G B-LOC B-LOC -1996 O O -Scottish B-MISC B-MISC -league O O -standings O O -after O O -Saturday O O -' O O -matches O O -( O O -ta O O -- O O -played O O -, O O -won O O -, O O -drawn O O -, O O -lost O O -, O O -goals O O -for O O -, O O -goals O O -against O O -, O O -points O O -) O O -: O O -Premier O O -division O O -Rangers B-ORG B-ORG -14 O O -11 O O -2 O O -1 O O -35 O O -12 O O -35 O O -Celtic B-ORG B-ORG -14 O O -8 O O -3 O O -3 O O -32 O O -15 O O -27 O O -Aberdeen B-ORG B-ORG -15 O O -7 O O -4 O O -4 O O -28 O O -19 O O -25 O O -Hearts B-ORG B-ORG -15 O O -5 O O -6 O O -4 O O -18 O O -19 O O -21 O O -Hi B-ORG B-ORG -15 O O -5 O O -3 O O -7 O O -16 O O -25 O O -18 O O -Dundee B-ORG B-ORG -United I-ORG I-ORG -15 O O -4 O O -5 O O -6 O O -17 O O -17 O O -17 O O -Mother B-ORG B-ORG -15 O O -4 O O -5 O O -6 O O -17 O O -23 O O -17 O O -Du B-ORG B-ORG -14 O O -4 O O -5 O O -5 O O -19 O O -27 O O -17 O O -Rai B-ORG B-ORG -15 O O -3 O O -3 O O -9 O O -14 O O -27 O O -12 O O -Ki B-ORG B-ORG -L B-LOC B-LOC -1996 O O -Standing O O -in O O -English B-MISC B-MISC -league O O -soccer O O -after O O -Saturday O O -' O O -matches O O -( O O -ta O O -- O O -played O O -, O O -won O O -, O O -drawn O O -, O O -lost O O -, O O -goals O O -for O O -, O O -goals O O -against O O -, O O -points O O -) O O -: O O -Premier O B-MISC -league O I-MISC -Arsenal B-ORG B-ORG -17 O O -10 O O -5 O O -2 O O -34 O O -16 O O -35 O O -Wimbledon B-ORG B-ORG -16 O O -9 O O -4 O O -3 O O -29 O O -17 O O -31 O O -Liverpool B-ORG B-ORG -16 O O -9 O O -4 O O -3 O O -26 O O -14 O O -31 O O -Aston B-ORG B-ORG -Villa I-ORG I-ORG -17 O O -9 O O -3 O O -5 O O -22 O O -15 O O -30 O O -Newcastle B-ORG B-ORG -15 O O -9 O O -2 O O -4 O O -26 O O -17 O O -29 O O -Manchester B-ORG B-ORG -United I-ORG I-ORG -15 O O -7 O O -5 O O -3 O O -29 O O -22 O O -26 O O -Chelsea B-ORG B-ORG -16 O O -6 O O -7 O O -3 O O -25 O O -23 O O -25 O O -Everton B-ORG B-ORG -16 O O -6 O O -6 O O -4 O O -25 O O -20 O O -24 O O -Sheffield B-ORG B-ORG -Wednesday I-ORG I-ORG -16 O O -6 O O -6 O O -4 O O -17 O O -18 O O -24 O O -Tottenham B-ORG B-ORG -16 O O -7 O O -2 O O -7 O O -17 O O -17 O O -23 O O -L B-LOC B-LOC -1996 O O -French B-MISC B-MISC -Patrick B-PER B-PER -V I-PER I-PER -blasted O O -a O O -last O O -goal O O -to O O -salvage O O -a O O -2 O O -draw O O -for O O -English B-MISC B-MISC -premier O O -league O O -leaders O O -Arsenal B-ORG B-ORG -at O O -home O O -to O O -Derby B-ORG B-ORG -on O O -Saturday O O -. O O -The O O -London B-LOC B-LOC -club O O -had O O -been O O -rocked O O -by O O -a O O -two O O -burst O O -from O O -forwards O O -Dean B-PER B-PER -St I-PER I-PER -and O O -Darryl B-PER B-PER -Powell I-PER I-PER -in O O -the O O -62 O O -and O O -71 O O -minutes O O -which O O -overturned O O -Arsenal B-ORG B-ORG -' O O -1 O O -lead O O -from O O -a O O -diving O O -header O O -by O O -captain O O -Tony B-PER B-PER -Adams I-PER I-PER -on O O -the O O -stroke O O -of O O -halftime O O -. O O -Liverpool B-ORG B-ORG -suffered O O -an O O -upset O O -first O O -home O O -league O O -defeat O O -of O O -the O O -season O O -, O O -beaten O O -1 O O -by O O -a O O -Guy B-PER B-PER -W I-PER I-PER -goal O O -for O O -Sheffield B-ORG B-ORG -Wednesday I-ORG I-ORG -. 
O O -Wimbledon B-ORG B-ORG -leap O O -over O O -Liverpool B-ORG B-ORG -into O O -second O O -place O O -by O O -winning O O -3 O O -at O O -Sunderland B-ORG B-ORG -to O O -extend O O -their O O -unbeaten O O -league O O -and O O -cup O O -run O O -to O O -18 O O -games O O -. O O -Two O O -strikes O O -by O O -E B-PER B-PER -E I-PER I-PER -in O O -the O O -first O O -half O O -and O O -a O O -late O O -goal O O -from O O -fellow O O -forward O O -Dean B-PER B-PER -Hold I-PER I-PER -secured O O -victory O O -for O O -Wimbledon B-ORG B-ORG -, O O -who O O -trail O O -pace O O -Arsenal B-ORG B-ORG -by O O -four O O -points O O -. O O -S O O -- O O -SC B-MISC B-MISC -L O O -AND O O -C B-MISC O -R O O -. O O -G B-LOC B-LOC -1996 O O -Results O O -of O O -Scottish B-MISC B-MISC -league O O -and O O -cup O O -matches O O -played O O -on O O -Saturday O O -: O O -Premier O O -division O O -Du B-ORG B-ORG -2 O O -Aberdeen B-ORG B-ORG -3 O O -Hearts B-ORG B-ORG -0 O O -Rai B-ORG B-ORG -0 O O -Ki B-ORG B-ORG -0 O O -Dundee B-ORG B-ORG -United I-ORG I-ORG -2 O O -Mother B-ORG B-ORG -2 O O -Celtic B-ORG B-ORG -1 O O -Rangers B-ORG B-ORG -4 O O -Hi B-ORG B-ORG -3 O O -Division O O -one O O -Dundee B-ORG B-ORG -2 O O -F B-ORG B-ORG -0 O O -Green B-ORG B-ORG -Morton I-ORG I-ORG -0 O O -St B-ORG B-ORG -Johnstone I-ORG I-ORG -2 O O -Post O O -: O O -Air B-ORG B-ORG -v O O -Clyde B-ORG B-ORG -( O O -to O O -Wednesday O O -) O O -, O O -East B-ORG B-ORG -Fife I-ORG B-ORG -v O O -Part B-ORG B-ORG -, O O -Stirling B-ORG B-ORG -v O O -St B-ORG B-ORG -Mir I-ORG I-ORG -( O O -to O O -Tuesday O O -) O O -Division O O -two O O -Livingston B-ORG B-ORG -2 O O -St B-ORG B-ORG -1 O O -St B-ORG B-ORG -0 O O -B B-ORG B-ORG -1 O O -Division O O -three O O -Ross B-ORG B-ORG -County I-ORG I-ORG -4 O O -Mont B-ORG B-ORG -L B-LOC B-LOC -1996 O O -Results O O -of O O -English B-MISC B-MISC -league O O -and O O -cup O O -matches O O -on O O -Saturday O O -: O O -Premier O B-MISC -league O I-MISC -Arsenal B-ORG B-ORG -2 O O -Derby B-ORG B-ORG -2 O O -Chelsea B-ORG B-ORG -2 O O -Everton B-ORG B-ORG -2 O O -Coventry B-ORG B-ORG -1 O O -Tottenham B-ORG B-ORG -2 O O -Leicester B-ORG B-ORG -1 O O -Blackburn B-ORG B-ORG -1 O O -Liverpool B-ORG B-ORG -0 O O -Sheffield B-ORG B-ORG -Wednesday I-ORG I-ORG -1 O O -Middlesbrough B-ORG B-ORG -0 O O -Leeds B-ORG B-ORG -0 O O -Southampton B-ORG B-ORG -0 O O -Aston B-ORG B-ORG -Villa I-ORG I-ORG -1 O O -Sunderland B-ORG B-ORG -1 O O -Wimbledon B-ORG B-ORG -3 O O -Division O O -one O O -Bar B-ORG B-ORG -3 O O -South B-ORG B-ORG -0 O O -Birmingham B-ORG B-ORG -0 O O -Grimsby B-ORG B-ORG -0 O O -Charlton B-ORG B-ORG -2 O O -Swindon B-ORG B-ORG -0 O O -Crystal B-ORG B-ORG -Palace I-ORG I-ORG -2 O O -Oxford B-ORG B-ORG -2 O O -Huddersfield B-ORG B-ORG -2 O O -Norwich B-ORG B-ORG -0 O O -Ipswich B-ORG B-ORG -0 O O -Wolverhampton B-ORG B-ORG -0 O O -Manchester B-ORG B-ORG -City I-ORG I-ORG -3 O O -Bradford B-ORG B-ORG -2 O O -Oldham B-ORG B-ORG -0 O O -Queens B-ORG B-ORG -Park I-ORG I-ORG -Rangers I-ORG I-ORG -2 O O -Reading B-ORG B-ORG -0 O O -Port B-ORG B-ORG -Vale I-ORG I-ORG -1 O O -Sheffield B-ORG B-ORG -United I-ORG I-ORG -1 O O -Portsmouth B-ORG B-ORG -0 O O -Stoke B-ORG B-ORG -2 O O -T B-ORG B-ORG -0 O O -Playing O O -Sunday O O -: O O -West B-ORG B-ORG -B I-ORG I-ORG -v O O -Bolton B-ORG B-ORG -F B-ORG B-MISC -Challenge B-MISC I-MISC -Cup I-MISC I-MISC -second O O -L B-LOC B-LOC -1996 O O -Australia B-LOC B-LOC -bad O O -farewell O O -to O O -David B-PER B-PER -Camp I-PER I-PER -in O O 
-spectacular O O -fashion O O -by O O -overwhelming O O -the O O -Bar B-ORG B-ORG -39 O O -in O O -the O O -final O O -match O O -of O O -their O O -European B-MISC B-MISC -tour O O -at O O -T B-LOC B-LOC -on O O -Saturday O O -. O O -The O O -Wall B-ORG B-ORG -ran O O -in O O -five O O -tries O O -with O O -Camp B-PER B-PER -, O O -who O O -has O O -retired O O -from O O -test O O -rugby O O -after O O -collecting O O -101 O O -caps O O -and O O -a O O -world O O -record O O -64 O O -tries O O -, O O -adding O O -one O O -last O O -touchdown O O -in O O -a O O -Wall B-MISC B-ORG -jersey O O -before O O -departing O O -the O O -international O O -game O O -. O O -The O O -Bar B-ORG B-ORG -included O O -14 O O -international O O -but O O -, O O -with O O -only O O -two O O -pre O O -practice O O -sessions O O -behind O O -them O O -, O O -proved O O -no O O -real O O -match O O -for O O -a O O -Wall B-MISC B-ORG -side O O -determined O O -to O O -finish O O -their O O -12 O O -tour O O -unbeaten O O -. O O -The O O -touring O O -team O O -were O O -27 O O -ahead O O -by O O -half O O -before O O -e O O -up O O -in O O -the O O -second O O -. O O -Full O O -Matthew B-PER B-PER -Burke I-PER I-PER -finished O O -with O O -a O O -personal O O -haul O O -of O O -24 O O -points O O -to O O -take O O -his O O -tour O O -aggregate O O -to O O -136 O O -. O O -R B-ORG B-ORG -UN I-ORG I-ORG -- O O -AU B-ORG B-LOC -B O O -BA B-LOC B-ORG -39 O O -. O O -L B-LOC B-LOC -1996 O O -Australia B-LOC B-LOC -beat O O -the O O -Bar B-ORG B-ORG -39 O O -( O O -halftime O O -27 O O -) O O -in O O -the O O -final O O -match O O -of O O -their O O -European B-MISC B-MISC -tour O O -on O O -Saturday O O -: O O -Score O O -: O O -Australia B-LOC B-LOC -- O O -Tri O O -: O O -Matthew B-PER B-PER -Burke I-PER I-PER -( O O -2 O O -) O O -, O O -Joe B-PER B-PER -R I-PER I-PER -, O O -David B-PER B-PER -Camp I-PER I-PER -, O O -Tim B-PER B-PER -Ho I-PER I-PER -. O O -Pen O O -: O O -Burke B-PER B-PER -( O O -2 O O -) O O -. O O -Con O O -: O O -Burke B-PER B-PER -( O O -4 O O -) O O -. O O -Bar O B-ORG -- O O -Tri O O -: O O -Alan B-PER B-PER -Bat I-PER I-PER -, O O -Scott B-PER B-PER -Quinn I-PER I-PER -. O O -Con O O -: O O -Rob B-PER B-PER -Andrew I-PER I-PER -. O O -GO O O -- O O -Z B-LOC B-MISC -O I-MISC I-MISC -T O O -R O O -SC O O -. 
O O -H B-LOC B-LOC -1996 O O -Leading O O -third O O -round O O -scores O O -in O O -the O O -Zimbabwe B-MISC B-MISC -Open I-MISC I-MISC -on O O -Saturday O O -( O O -South B-MISC B-MISC -African I-MISC I-MISC -unless O O -stated O O -) O O -: O O -201 O O -Mark B-PER B-PER -M I-PER I-PER -( O O -Zimbabwe B-LOC B-LOC -) O O -72 O O -61 O O -68 O O -205 O O -Des B-PER B-PER -Te I-PER I-PER -65 O O -67 O O -73 O O -206 O O -Nick B-PER B-PER -Price I-PER I-PER -( O O -Zimbabwe B-LOC B-LOC -) O O -68 O O -68 O O -70 O O -207 O O -Clinton B-PER B-PER -White I-PER I-PER -70 O O -70 O O -67 O O -, O O -Mark B-PER B-PER -C I-PER I-PER -( O O -Zimbabwe B-LOC B-LOC -) O O -69 O O -69 O O -69 O O -, O O -Justin B-PER B-PER -Ho I-PER I-PER -71 O O -65 O O -71 O O -209 O O -Steve B-PER B-PER -van I-PER I-PER -V I-PER I-PER -65 O O -69 O O -75 O O -210 O O -Brett B-PER B-PER -Li I-PER I-PER -75 O O -65 O O -70 O O -211 O O -Hugh B-PER B-PER -Bai I-PER I-PER -73 O O -67 O O -71 O O -, O O -Greg B-PER B-PER -Reid I-PER I-PER -72 O O -68 O O -71 O O -, O O -Mark B-PER B-PER -Mu I-PER B-PER -71 O O -67 O O -73 O O -212 O O -Trevor B-PER B-PER -Dodd I-PER I-PER -( O O -Namibia B-LOC B-LOC -) O O -S O O -- O O -R B-MISC O -AL B-MISC B-LOC -N O O -S O O -TO O O -P O O -N B-ORG B-LOC -. O O -T B-LOC B-LOC -1996 O O -Albanian B-MISC B-MISC -coach O O -As B-PER B-PER -Ha I-PER I-PER -said O O -on O O -Saturday O O -it O O -was O O -important O O -that O O -his O O -players O O -brush O O -aside O O -the O O -country O O -' O O -short O O -ban O O -by O O -FIFA B-ORG B-ORG -in O O -order O O -to O O -concentrate O O -on O O -next O O -Saturday O B-MISC -Cup I-MISC I-MISC -group O O -nine O O -qualifier O O -against O O -Northern B-LOC B-LOC -Ireland I-LOC I-LOC -. O O -World O O -soccer O O -' O O -governing O O -body O O -reinstated O O -Albania B-LOC B-LOC -last O O -Tuesday O O -after O O -the O O -Balkan B-LOC B-LOC -country O O -' O O -government O O -lifted O O -suspension O O -on O O -various O O -soccer O O -officials O O -. O O -FIFA B-ORG B-ORG -had O O -banned O O -Albania B-LOC B-LOC -indefinitely O O -after O O -its O O -sports O O -ministry O O -had O O -ordered O O -the O O -suspension O O -of O O -Albanian B-ORG B-ORG -Football I-ORG I-ORG -Association I-ORG I-ORG -general O O -secretary O O -Eduard B-PER B-PER -Der I-PER I-PER -and O O -dissolved O O -the O O -executive O O -committee O O -. O O -" O O -We O O -would O O -be O O -very O O -happy O O -with O O -a O O -draw O O -in O O -Belfast B-LOC B-LOC -, O O -" O O -said O O -Ha B-PER B-PER -. O O -" O O -Especially O O -if O O -one O O -takes O O -into O O -consideration O O -our O O -difficult O O -post O O -situation O O -and O O -the O O -fact O O -Northern B-LOC B-LOC -Ireland I-LOC I-LOC -is O O -very O O -keen O O -to O O -win O O -. O O -" O O -Regular O O -defender O O -Art B-PER B-PER -Le I-PER I-PER -, O O -who O O -is O O -injured O O -, O O -was O O -missing O O -from O O -Ha B-PER B-PER -' O O -squad O O -named O O -on O O -Saturday O O -for O O -the O O -Belfast B-LOC B-LOC -match O O -. 
O O -Squad O O -: O O -Goal O O -- O O -B B-PER B-PER -Na I-PER I-PER -, O O -Arm B-PER B-PER -G I-PER I-PER -De O O -- O O -R B-PER B-PER -V I-PER I-PER -, O O -Sai B-PER B-PER -Mal I-PER I-PER -, O O -A B-PER B-PER -X I-PER I-PER -, O O -Il B-PER B-PER -Shu I-PER I-PER -, O O -A B-PER B-PER -To I-PER I-PER -, O O -N B-PER B-PER -De I-PER I-PER -, O O -A B-PER B-PER -Bella I-PER I-PER -Mid O O -- O O -B B-PER B-PER -Ko I-PER I-PER -, O O -Alt B-PER B-PER -Ha I-PER I-PER -, O O -So B-PER B-PER -Pre I-PER I-PER -, O O -E B-PER B-PER -F I-PER I-PER -Forward O O -- O O -Alt B-PER B-PER -R I-PER I-PER -, O O -Viktor B-PER B-PER -Pac I-PER I-PER -, O O -Fat B-PER B-PER -V I-PER I-PER -, O O -E B-PER B-PER -Bo I-PER I-PER -. O O -CR O O -- O O -J B-PER B-PER -H O O -CE O O -AS O O -VI B-MISC B-ORG -F O O -BA O O -. O O -H B-LOC B-LOC -, O O -Australia B-LOC B-LOC -1996 O O -Former O O -Australia B-LOC B-LOC -test O O -batsman O O -Dean B-PER B-PER -Jones I-PER I-PER -hit O O -an O O -unbeaten O O -130 O O -to O O -lead O O -Victoria B-LOC B-LOC -' O O -fight O O -in O O -their O O -Sheffield B-MISC B-MISC -Shield I-MISC I-MISC -match O O -against O O -Tasmania B-LOC B-ORG -on O O -Saturday O O -. O O -Rep O O -to O O -the O O -home O O -side O O -' O O -first O O -innings O O -48 O O -for O O -eight O O -declared O O -, O O -Victoria B-ORG B-LOC -reached O O -220 O O -for O O -three O O -at O O -close O O -of O O -play O O -on O O -the O O -second O O -day O O -of O O -the O O -four O O -match O O -at O O -Hobart B-LOC B-LOC -' O O -Belle B-LOC B-LOC -Oval I-LOC I-LOC -. O O -Jones B-PER B-PER -became O O -the O O -fourth O O -century O O -of O O -the O O -match O O -, O O -equal O O -the O O -feat O O -of O O -Tasmanian B-MISC B-MISC -trio O O -David B-PER B-PER -Bo I-PER I-PER -, O O -Shaun B-PER B-PER -Young I-PER I-PER -and O O -Michael B-PER B-PER -Di I-PER I-PER -. O O -Jones B-PER B-PER -, O O -who O O -took O O -over O O -as O O -captain O O -for O O -the O O -match O O -in O O -the O O -absence O O -of O O -Australia B-LOC B-LOC -test O O -leg O O -Shane B-PER B-PER -War I-PER I-PER -, O O -added O O -195 O O -runs O O -for O O -the O O -third O O -wicket O O -with O O -left O O -Laurie B-PER B-PER -Harper I-PER I-PER -. O O -Harper B-PER B-PER -was O O -eventually O O -dismissed O O -for O O -77 O O -after O O -the O O -pair O O -joined O O -forces O O -with O O -their O O -side O O -reel O O -on O O -nine O O -for O O -two O O -. O O -Earlier O O -, O O -former O O -Australia B-LOC B-LOC -test O O -batsman O O -David B-PER B-PER -Bo I-PER I-PER -scored O O -118 O O -and O O -all O O -Shaun B-PER B-PER -Young I-PER I-PER -hit O O -113 O O -. O O -The O O -pair O O -hammer O O -36 O O -boundaries O O -between O O -them O O -. O O -Pace O B-PER -bowler O O -Ian B-PER B-PER -Harvey I-PER I-PER -claimed O O -three O O -for O O -81 O O -for O O -Victoria B-LOC B-LOC -. O O -CR O O -- O O -SH B-PER B-MISC -SH O I-MISC -SC O O -. 
O O -H B-LOC B-LOC -, O O -Australia B-LOC B-LOC -1996 O O -Close O O -of O O -play O O -score O O -on O O -the O O -second O O -day O O -of O O -the O O -four O O -Sheffield B-MISC B-MISC -Shield I-MISC I-MISC -cricket O O -match O O -between O O -Tasmania B-LOC B-ORG -and O O -Victoria B-LOC B-ORG -at O O -Belle B-LOC B-LOC -Oval I-LOC I-LOC -on O O -Saturday O O -: O O -Tasmania B-LOC B-ORG -48 O O -for O O -eight O O -declared O O -( O O -Michael B-PER B-PER -Di I-PER I-PER -119 O O -, O O -David B-PER B-PER -Bo I-PER I-PER -118 O O -, O O -Shaun B-PER B-PER -Young I-PER I-PER -113 O O -) O O -; O O -Victoria B-ORG B-ORG -220 O O -for O O -three O O -( O O -Dean B-PER B-PER -Jones I-PER I-PER -130 O O -not O O -out O O -) O O -. O O -S O O -- O O -S B-PER B-LOC -K B-PER I-LOC -M O O -C O O -TO O O -Q O O -B O O -. O O -AB B-LOC B-LOC -D I-LOC I-LOC -1996 O O -South B-LOC B-LOC -Korea I-LOC I-LOC -made O O -virtually O O -certain O O -of O O -an O O -Asian B-MISC B-MISC -Cup I-MISC I-MISC -quarter O O -spot O O -with O O -a O O -4 O O -win O O -over O O -Indonesia B-LOC B-LOC -in O O -a O O -Group O O -A O O -match O O -on O O -Saturday O O -. O O -After O O -going O O -four O O -up O O -in O O -the O O -first O O -55 O O -minutes O O -South B-LOC B-LOC -Korea I-LOC I-LOC -allowed O O -Indonesia B-LOC B-LOC -, O O -newcomer O O -to O O -Asian B-MISC B-MISC -Cup I-MISC I-MISC -finals O O -, O O -back O O -into O O -the O O -match O O -, O O -con O O -two O O -goals O O -from O O -rare O O -counter O O -attacks O O -. O O -Kim B-PER B-PER -Do I-PER I-PER -Ho I-PER I-PER -opened O O -the O O -scoring O O -for O O -South B-LOC B-LOC -Korea I-LOC I-LOC -in O O -only O O -the O O -fifth O O -minute O O -, O O -turning O O -un O O -on O O -the O O -penalty O O -spot O O -to O O -fire O O -a O O -shot O O -into O O -the O O -top O O -corner O O -. O O -It O O -looked O O -like O O -turning O O -into O O -a O O -r O O -as O O -H B-PER B-PER -Sun I-PER I-PER -Hong I-PER I-PER -rapidly O O -added O O -two O O -more O O -in O O -the O O -seventh O O -and O O -15th O O -minutes O O -but O O -although O O -the O O -Koreans B-MISC B-MISC -continued O O -to O O -dominate O O -they O O -failed O O -to O O -add O O -to O O -the O O -score O O -before O O -the O O -interval O O -. O O -But O O -they O O -started O O -the O O -second O O -half O O -where O O -they O O -had O O -left O O -off O O -and O O -it O O -was O O -not O O -long O O -before O O -they O O -went O O -four O O -up O O -, O O -Ko B-PER B-PER -Je I-PER I-PER -W I-PER I-PER -heading O O -in O O -from O O -a O O -free O O -kick O O -in O O -the O O -55 O O -minute O O -. O O -The O O -Koreans B-MISC B-MISC -then O O -appeared O O -to O O -relax O O -, O O -allowing O O -the O O -Indonesian B-MISC B-MISC -to O O -get O O -back O O -into O O -the O O -match O O -. O O -Ron B-PER B-PER -W I-PER I-PER -scored O O -for O O -Indonesia B-LOC B-LOC -three O O -minutes O O -later O O -direct O O -from O O -a O O -a O O -corner O O -kick O O -that O O -Korean B-MISC B-MISC -goalkeeper O O -Kim B-PER B-PER -By I-PER I-PER -reached O O -with O O -one O O -hand O O -but O O -failed O O -to O O -keep O O -out O O -. 
[Deleted NER evaluation data: CoNLL-style records, one token per line in the form "token gold-tag predicted-tag" with BIO entity labels (B-/I- PER, LOC, ORG, MISC, plus O). The underlying text is December 1996 sports wire copy: Asian Cup group A soccer (South Korea 4 Indonesia 2, United Arab Emirates 3 Kuwait 2), Israeli first-division results, NBA and NHL standings and results, a one-game NHL suspension, Real Madrid 2 Barcelona 0, a Dutch first-division roundup, and Jack Charlton receiving Irish citizenship.]
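For reference, the records summarized above follow a plain three-column layout. The sketch below is an illustrative token-level scorer for that layout; it is not part of this change set, and the file name "label_test.txt" and the skip-malformed-lines behavior are assumptions.

# Illustrative only: score a "token gold pred" CoNLL-style prediction file.
from collections import Counter

def score_conll(path):
    """Return (token accuracy, Counter of (gold, pred) tag pairs)."""
    total = correct = 0
    confusion = Counter()
    with open(path, encoding="utf-8") as f:
        for line in f:
            parts = line.split()
            if len(parts) != 3:  # blank lines / sentence separators
                continue
            _token, gold, pred = parts
            total += 1
            correct += gold == pred
            confusion[(gold, pred)] += 1
    return correct / max(total, 1), confusion

if __name__ == "__main__":
    acc, _ = score_conll("label_test.txt")  # hypothetical file name
    print(f"token accuracy: {acc:.4f}")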
diff --git a/TensorFlow/built-in/nlp/Bert-NER_ID0797_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/built-in/nlp/Bert-NER_ID0797_for_TensorFlow/test/train_performance_1p.sh
index 7363f037e71ccdb895481ea1991eafe60bf39a28..4628ac50ae9f16e6a6ba569a49b3d9bbe8f1d06a 100644
--- a/TensorFlow/built-in/nlp/Bert-NER_ID0797_for_TensorFlow/test/train_performance_1p.sh
+++ b/TensorFlow/built-in/nlp/Bert-NER_ID0797_for_TensorFlow/test/train_performance_1p.sh
@@ -89,6 +89,9 @@ fi
 
 #BERT NER performance-test specific: set the number of training steps
 sed -i "s|#num_train_steps = 100|num_train_steps = 25|g" $cur_path/../BERT_NER.py
+mkdir -p $cur_path/../output
+cp -r $data_path/result_dir $cur_path/../output/
+
 #Training start time; no need to modify
 start_time=$(date +%s)
 

diff --git a/TensorFlow/built-in/nlp/Bert-base_ID0060_for_TensorFlow/configs/bert_base_vocab.txt b/TensorFlow/built-in/nlp/Bert-base_ID0060_for_TensorFlow/configs/bert_base_vocab.txt
deleted file mode 100644
index ca4f9781030019ab9b253c6dcb8c7878b6dc87a5..0000000000000000000000000000000000000000
--- a/TensorFlow/built-in/nlp/Bert-base_ID0060_for_TensorFlow/configs/bert_base_vocab.txt
+++ /dev/null
@@ -1,21128 +0,0 @@
[Deleted vocabulary file: 21,128 WordPiece entries, one token per line, comprising special tokens ([PAD], [unused1] through [unused99], [UNK], [CLS], [SEP], [MASK]), ASCII and full-width punctuation, digits, Latin letters, Greek, Cyrillic, Arabic, Thai, kana and hangul characters, several thousand CJK ideographs, whole alphanumeric tokens (yam, 2017, lofter, ...) and ##-prefixed subwords (##s, ##ing, ...).]
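For reference, a vocabulary file of this form is a plain UTF-8 list with one WordPiece token per line, and the 0-based line index is the token id; given the ordering listed above, [PAD] = 0, [UNK] = 100, [CLS] = 101, [SEP] = 102, [MASK] = 103. A minimal loader is sketched below; the path is the file named in this diff, the rest is illustrative.

# Illustrative only: load a one-token-per-line WordPiece vocabulary.
def load_vocab(path="bert_base_vocab.txt"):
    # The 0-based line number is the token id.
    with open(path, encoding="utf-8") as f:
        return {line.rstrip("\n"): idx for idx, line in enumerate(f)}

# Example: load_vocab()["[CLS]"] == 101 for the ordering shown above.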
-なとはお -##wu -##かあります -##あ -note7 -single -##340 -せからこ -してくたさい♪この -しにはとんとんワークケートを -するとあなたにもっとマッチした -ならワークケートへ -もみつかっちゃうかも -ワークケートの -##bel -window -##dio -##ht -union -age -382 -14 -##ivity -##y -コメント -domain -neo -##isa -##lter -5k -f5 -steven -##cts -powerpoint -tft -self -g2 -ft -##テル -zol -##act -mwc -381 -343 -もう -nbapop -408 -てある -eds -ace -##room -previous -author -tomtom -il -##ets -hu -financial -☆☆☆ -っています -bp -5t -chi -1gb -##hg -fairmont -cross -008 -gay -h2 -function -##けて -356 -also -1b -625 -##ータ -##raph -1894 -3~5 -##ils -i3 -334 -avenue -##host -による -##bon -##tsu -message -navigation -50g -fintech -h6 -##ことを -8cm -##ject -##vas -##firm -credit -##wf -xxxx -form -##nor -##space -huawei -plan -json -sbl -##dc -machine -921 -392 -wish -##120 -##sol -windows7 -edward -##ために -development -washington -##nsis -lo -818 -##sio -##ym -##bor -planet -##~8 -##wt -ieee -gpa -##めて -camp -ann -gm -##tw -##oka -connect -##rss -##work -##atus -wall -chicken -soul -2mm -##times -fa -##ather -##cord -009 -##eep -hitachi -gui -harry -##pan -e1 -disney -##press -##ーション -wind -386 -frigidaire -##tl -liu -hsu -332 -basic -von -ev -いた -てきる -スホンサーサイト -learning -##ull -expedia -archives -change -##wei -santa -cut -ins -6gb -turbo -brand -cf1 -508 -004 -return -747 -##rip -h1 -##nis -##をこ -128gb -##にお -3t -application -しており -emc -rx -##oon -384 -quick -412 -15058 -wilson -wing -chapter -##bug -beyond -##cms -##dar -##oh -zoom -e2 -trip -sb -##nba -rcep -342 -aspx -ci -080 -gc -gnu -める -##count -advanced -dance -dv -##url -##ging -367 -8591 -am09 -shadow -battle -346 -##i -##cia -##という -emily -##のてす -##tation -host -ff -techorz -sars -##mini -##mporary -##ering -nc -4200 -798 -##next -cma -##mbps -##gas -##ift -##dot -##ィ -455 -##~17 -amana -##りの -426 -##ros -ir -00㎡1 -##eet -##ible -##↓ -710 -ˋ▽ˊ -##aka -dcs -iq -##v -l1 -##lor -maggie -##011 -##iu -588 -##~1 -830 -##gt -1tb -articles -create -##burg -##iki -database -fantasy -##rex -##cam -dlc -dean -##you -hard -path -gaming -victoria -maps -cb -##lee -##itor -overchicstoretvhome -systems -##xt -416 -p3 -sarah -760 -##nan -407 -486 -x9 -install -second -626 -##ann -##ph -##rcle -##nic -860 -##nar -ec -##とう -768 -metro -chocolate -##rian -~4 -##table -##しています -skin -##sn -395 -mountain -##0mm -inparadise -6m -7x24 -ib -4800 -##jia -eeworld -creative -g5 -g3 -357 -parker -ecfa -village -からの -18000 -sylvia -サーヒス -hbl -##ques -##onsored -##x2 -##きます -##v4 -##tein -ie6 -383 -##stack -389 -ver -##ads -##baby -sound -bbe -##110 -##lone -##uid -ads -022 -gundam -351 -thinkpad -006 -scrum -match -##ave -mems -##470 -##oy -##なりました -##talk -glass -lamigo -span -##eme -job -##a5 -jay -wade -kde -498 -##lace -ocean -tvg -##covery -##r3 -##ners -##rea -junior -think -##aine -cover -##ision -##sia -↓↓ -##bow -msi -413 -458 -406 -##love -711 -801 -soft -z2 -##pl -456 -1840 -mobil -mind -##uy -427 -nginx -##oi -めた -##rr -6221 -##mple -##sson -##ーシてす -371 -##nts -91tv -comhd -crv3000 -##uard -1868 -397 -deep -lost -field -gallery -##bia -rate -spf -redis -traction -930 -icloud -011 -なら -fe -jose -372 -##tory -into -sohu -fx -899 -379 -kicstart2 -##hia -すく -##~3 -##sit -ra -24 -##walk -##xure -500g -##pact -pacific -xa -natural -carlo -##250 -##walker -1850 -##can -cto -gigi -516 -##サー -pen -##hoo -ob -matlab -##b -##yy -13913459 -##iti -mango -##bbs -sense -c5 -oxford -##ニア -walker -jennifer -##ola -course -##bre -701 -##pus -##rder -lucky -075 -##ぁ -ivy -なお -##nia -sotheby -side -##ugh -joy -##orage -##ush -##bat -##dt -364 -r9 -##2d -##gio -511 -country -wear 
-##lax -##~7 -##moon -393 -seven -study -411 -348 -lonzo -8k -##ェ -evolution -##イフ -##kk -gs -kd -##レス -arduino -344 -b12 -##lux -arpg -##rdon -cook -##x5 -dark -five -##als -##ida -とても -sign -362 -##ちの -something -20mm -##nda -387 -##posted -fresh -tf -1870 -422 -cam -##mine -##skip -##form -##ssion -education -394 -##tee -dyson -stage -##jie -want -##night -epson -pack -あります -##ppy -テリヘル -##█ -wd -##eh -##rence -left -##lvin -golden -mhz -discovery -##trix -##n2 -loft -##uch -##dra -##sse -speed -~1 -1mdb -sorry -welcome -##urn -wave -gaga -##lmer -teddy -##160 -トラックハック -せよ -611 -##f2016 -378 -rp -##sha -rar -##あなたに -##きた -840 -holiday -##ュー -373 -074 -##vg -##nos -##rail -gartner -gi -6p -##dium -kit -488 -b3 -eco -##ろう -20g -sean -##stone -autocad -nu -##np -f16 -write -029 -m5 -##ias -images -atp -##dk -fsm -504 -1350 -ve -52kb -##xxx -##のに -##cake -414 -unit -lim -ru -1v -##ification -published -angela -16g -analytics -ak -##q -##nel -gmt -##icon -again -##₂ -##bby -ios11 -445 -かこさいます -waze -いてす -##ハ -9985 -##ust -##ティー -framework -##007 -iptv -delete -52sykb -cl -wwdc -027 -30cm -##fw -##ての -1389 -##xon -brandt -##ses -##dragon -tc -vetements -anne -monte -modern -official -##へて -##ere -##nne -##oud -もちろん -50 -etnews -##a2 -##graphy -421 -863 -##ちゃん -444 -##rtex -##てお -l2 -##gma -mount -ccd -たと -archive -morning -tan -ddos -e7 -##ホ -day4 -##ウ -gis -453 -its -495 -factory -bruce -pg -##ito -ってくたさい -guest -cdma -##lling -536 -n3 -しかし -3~4 -mega -eyes -ro -13 -women -dac -church -##jun -singapore -##facebook -6991 -starbucks -##tos -##stin -##shine -zen -##mu -tina -20℃ -1893 -##たけて -503 -465 -request -##gence -qt -##っ -1886 -347 -363 -q7 -##zzi -diary -##tore -409 -##ead -468 -cst -##osa -canada -agent -va -##jiang -##ちは -##ーク -##lam -sg -##nix -##sday -##よって -g6 -##master -bing -##zl -charlie -16 -8mm -nb40 -##ーン -thai -##ルフ -ln284ct -##itz -##2f -bonnie -##food -##lent -originals -##stro -##lts -418 -∟∣ -##bscribe -children -ntd -yesstyle -##かも -hmv -##tment -d5 -2cm -arts -sms -##pn -##я -##いい -topios9 -539 -lifestyle -virtual -##ague -xz -##deo -muji -024 -unt -##nnis -##ᅩ -faq1 -1884 -396 -##ette -fly -64㎡ -はしめまして -441 -curry -##pop -のこ -release -##← -##◆◆ -##cast -073 -ありな -500ml -##ews -5c -##stle -ios7 -##ima -787 -dog -lenovo -##r4 -roger -013 -cbs -vornado -100m -417 -##desk -##クok -##ald -1867 -9595 -2900 -##van -oil -##x -some -break -common -##jy -##lines -g7 -twice -419 -ella -nano -belle -にこ -##mes -##self -##note -jb -##ことかてきます -benz -##との -##ova -451 -save -##wing -##ますのて -kai -りは -##hua -##rect -rainer -##unge -448 -##0m -adsl -##かな -guestname -##uma -##kins -##zu -tokichoi -##price -county -##med -##mus -rmk -391 -address -vm -えて -openload -##group -##hin -##iginal -amg -urban -##oz -jobs -emi -##public -beautiful -##sch -album -##dden -##bell -jerry -works -hostel -miller -##drive -##rmin -##10 -376 -boot -828 -##370 -##fx -##cm~ -1885 -##nome -##ctionary -##oman -##lish -##cr -##hm -433 -##how -432 -francis -xi -c919 -b5 -evernote -##uc -vga -##3000 -coupe -##urg -##cca -##uality -019 -6g -れる -multi -##また -##ett -em -hey -##ani -##tax -##rma -inside -than -740 -leonnhurt -##jin -ict -れた -bird -notes -200mm -くの -##dical -##lli -result -442 -iu -ee -438 -smap -gopro -##last -yin -pure -998 -32g -けた -5kg -##dan -##rame -mama -##oot -bean -marketing -##hur -2l -bella -sync -xuite -##ground -515 -discuz -##getrelax -##ince -##bay -##5s -cj -##イス -gmat -apt -##pass -jing -##rix -c4 -rich -##とても -niusnews -##ello -bag -770 -##eting -##mobile -18 -culture -015 -##のてすか 
-377 -1020 -area -##ience -616 -details -gp -universal -silver -dit -はお -private -ddd -u11 -kanshu -##ified -fung -##nny -dx -##520 -tai -475 -023 -##fr -##lean -3s -##pin -429 -##rin -25000 -ly -rick -##bility -usb3 -banner -##baru -##gion -metal -dt -vdf -1871 -karl -qualcomm -bear -1010 -oldid -ian -jo -##tors -population -##ernel -1882 -mmorpg -##mv -##bike -603 -##© -ww -friend -##ager -exhibition -##del -##pods -fpx -structure -##free -##tings -kl -##rley -##copyright -##mma -california -3400 -orange -yoga -4l -canmake -honey -##anda -##コメント -595 -nikkie -##ルハイト -dhl -publishing -##mall -##gnet -20cm -513 -##クセス -##┅ -e88 -970 -##dog -fishbase -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##+ -##, -##- -##. -##/ -##: -##; -##< -##= -##> -##? -##@ -##[ -##\ -##] -##^ -##_ -##{ -##| -##} -##~ -##£ -##¤ -##¥ -##§ -##« -##± -##³ -##µ -##· -##¹ -##º -##» -##¼ -##ß -##æ -##÷ -##ø -##đ -##ŋ -##ɔ -##ə -##ɡ -##ʰ -##ˇ -##ˈ -##ˊ -##ˋ -##ˍ -##ː -##˙ -##˚ -##ˢ -##α -##β -##γ -##δ -##ε -##η -##θ -##ι -##κ -##λ -##μ -##ν -##ο -##π -##ρ -##ς -##σ -##τ -##υ -##φ -##χ -##ψ -##б -##в -##г -##д -##е -##ж -##з -##к -##л -##м -##н -##о -##п -##р -##с -##т -##у -##ф -##х -##ц -##ч -##ш -##ы -##ь -##і -##ا -##ب -##ة -##ت -##د -##ر -##س -##ع -##ل -##م -##ن -##ه -##و -##ي -##۩ -##ก -##ง -##น -##ม -##ย -##ร -##อ -##า -##เ -##๑ -##་ -##ღ -##ᄀ -##ᄁ -##ᄂ -##ᄃ -##ᄅ -##ᄆ -##ᄇ -##ᄈ -##ᄉ -##ᄋ -##ᄌ -##ᄎ -##ᄏ -##ᄐ -##ᄑ -##ᄒ -##ᅢ -##ᅣ -##ᅥ -##ᅦ -##ᅧ -##ᅨ -##ᅪ -##ᅬ -##ᅭ -##ᅮ -##ᅯ -##ᅲ -##ᅳ -##ᅴ -##ᆷ -##ᆸ -##ᆺ -##ᆻ -##ᗜ -##ᵃ -##ᵉ -##ᵍ -##ᵏ -##ᵐ -##ᵒ -##ᵘ -##‖ -##„ -##† -##• -##‥ -##‧ -##
 -##‰ -##′ -##″ -##‹ -##› -##※ -##‿ -##⁄ -##ⁱ -##⁺ -##ⁿ -##₁ -##₃ -##₄ -##€ -##№ -##ⅰ -##ⅱ -##ⅲ -##ⅳ -##ⅴ -##↔ -##↗ -##↘ -##⇒ -##∀ -##− -##∕ -##∙ -##√ -##∞ -##∟ -##∠ -##∣ -##∩ -##∮ -##∶ -##∼ -##∽ -##≈ -##≒ -##≡ -##≤ -##≥ -##≦ -##≧ -##≪ -##≫ -##⊙ -##⋅ -##⋈ -##⋯ -##⌒ -##① -##② -##③ -##④ -##⑤ -##⑥ -##⑦ -##⑧ -##⑨ -##⑩ -##⑴ -##⑵ -##⑶ -##⑷ -##⑸ -##⒈ -##⒉ -##⒊ -##⒋ -##ⓒ -##ⓔ -##ⓘ -##━ -##┃ -##┆ -##┊ -##┌ -##└ -##├ -##┣ -##═ -##║ -##╚ -##╞ -##╠ -##╭ -##╮ -##╯ -##╰ -##╱ -##╳ -##▂ -##▃ -##▅ -##▇ -##▉ -##▋ -##▌ -##▍ -##▎ -##□ -##▪ -##▫ -##▬ -##△ -##▶ -##► -##▽ -##◇ -##◕ -##◠ -##◢ -##◤ -##☀ -##☕ -##☞ -##☺ -##☼ -##♀ -##♂ -##♠ -##♡ -##♣ -##♦ -##♫ -##♬ -##✈ -##✔ -##✕ -##✖ -##✦ -##✨ -##✪ -##✰ -##✿ -##❀ -##➜ -##➤ -##⦿ -##、 -##。 -##〃 -##々 -##〇 -##〈 -##〉 -##《 -##》 -##「 -##」 -##『 -##』 -##【 -##】 -##〓 -##〔 -##〕 -##〖 -##〗 -##〜 -##〝 -##〞 -##ぃ -##ぇ -##ぬ -##ふ -##ほ -##む -##ゃ -##ゅ -##ゆ -##ょ -##゜ -##ゝ -##ァ -##ゥ -##エ -##ォ -##ケ -##サ -##セ -##ソ -##ッ -##ニ -##ヌ -##ネ -##ノ -##ヘ -##モ -##ャ -##ヤ -##ュ -##ユ -##ョ -##ヨ -##ワ -##ヲ -##・ -##ヽ -##ㄅ -##ㄆ -##ㄇ -##ㄉ -##ㄋ -##ㄌ -##ㄍ -##ㄎ -##ㄏ -##ㄒ -##ㄚ -##ㄛ -##ㄞ -##ㄟ -##ㄢ -##ㄤ -##ㄥ -##ㄧ -##ㄨ -##ㆍ -##㈦ -##㊣ -##㗎 -##一 -##丁 -##七 -##万 -##丈 -##三 -##上 -##下 -##不 -##与 -##丐 -##丑 -##专 -##且 -##丕 -##世 -##丘 -##丙 -##业 -##丛 -##东 -##丝 -##丞 -##丟 -##両 -##丢 -##两 -##严 -##並 -##丧 -##丨 -##个 -##丫 -##中 -##丰 -##串 -##临 -##丶 -##丸 -##丹 -##为 -##主 -##丼 -##丽 -##举 -##丿 -##乂 -##乃 -##久 -##么 -##义 -##之 -##乌 -##乍 -##乎 -##乏 -##乐 -##乒 -##乓 -##乔 -##乖 -##乗 -##乘 -##乙 -##乜 -##九 -##乞 -##也 -##习 -##乡 -##书 -##乩 -##买 -##乱 -##乳 -##乾 -##亀 -##亂 -##了 -##予 -##争 -##事 -##二 -##于 -##亏 -##云 -##互 -##五 -##井 -##亘 -##亙 -##亚 -##些 -##亜 -##亞 -##亟 -##亡 -##亢 -##交 -##亥 -##亦 -##产 -##亨 -##亩 -##享 -##京 -##亭 -##亮 -##亲 -##亳 -##亵 -##人 -##亿 -##什 -##仁 -##仃 -##仄 -##仅 -##仆 -##仇 -##今 -##介 -##仍 -##从 -##仏 -##仑 -##仓 -##仔 -##仕 -##他 -##仗 -##付 -##仙 -##仝 -##仞 -##仟 -##代 -##令 -##以 -##仨 -##仪 -##们 -##仮 -##仰 -##仲 -##件 -##价 -##任 -##份 -##仿 -##企 -##伉 -##伊 -##伍 -##伎 -##伏 -##伐 -##休 -##伕 -##众 -##优 -##伙 -##会 -##伝 -##伞 -##伟 -##传 -##伢 -##伤 -##伦 -##伪 -##伫 -##伯 -##估 -##伴 -##伶 -##伸 -##伺 -##似 -##伽 -##佃 -##但 -##佇 -##佈 -##位 -##低 -##住 -##佐 -##佑 -##体 -##佔 -##何 -##佗 -##佘 -##余 -##佚 -##佛 -##作 -##佝 -##佞 -##佟 -##你 -##佢 -##佣 -##佤 -##佥 -##佩 -##佬 -##佯 -##佰 -##佳 -##併 -##佶 -##佻 -##佼 -##使 -##侃 -##侄 -##來 -##侈 -##例 -##侍 -##侏 -##侑 -##侖 -##侗 -##供 -##依 -##侠 -##価 -##侣 -##侥 -##侦 -##侧 -##侨 -##侬 -##侮 -##侯 -##侵 -##侶 -##侷 -##便 -##係 -##促 -##俄 -##俊 -##俎 -##俏 -##俐 -##俑 -##俗 -##俘 -##俚 -##保 -##俞 -##俟 -##俠 -##信 -##俨 -##俩 -##俪 -##俬 -##俭 -##修 -##俯 -##俱 -##俳 -##俸 -##俺 -##俾 -##倆 -##倉 -##個 -##倌 -##倍 -##倏 -##們 -##倒 -##倔 -##倖 -##倘 -##候 -##倚 -##倜 -##借 -##倡 -##値 -##倦 -##倩 -##倪 -##倫 -##倬 -##倭 -##倶 -##债 -##值 -##倾 -##偃 -##假 -##偈 -##偉 -##偌 -##偎 -##偏 -##偕 -##做 -##停 -##健 -##側 -##偵 -##偶 -##偷 -##偻 -##偽 -##偿 -##傀 -##傅 -##傍 -##傑 -##傘 -##備 -##傚 -##傢 -##傣 -##傥 -##储 -##傩 -##催 -##傭 -##傲 -##傳 -##債 -##傷 -##傻 -##傾 -##僅 -##働 -##像 -##僑 -##僕 -##僖 -##僚 -##僥 -##僧 -##僭 -##僮 -##僱 -##僵 -##價 -##僻 -##儀 -##儂 -##億 -##儆 -##儉 -##儋 -##儒 -##儕 -##儘 -##償 -##儡 -##優 -##儲 -##儷 -##儼 -##儿 -##兀 -##允 -##元 -##兄 -##充 -##兆 -##兇 -##先 -##光 -##克 -##兌 -##免 -##児 -##兑 -##兒 -##兔 -##兖 -##党 -##兜 -##兢 -##入 -##內 -##全 -##兩 -##八 -##公 -##六 -##兮 -##兰 -##共 -##兲 -##关 -##兴 -##兵 -##其 -##具 -##典 -##兹 -##养 -##兼 -##兽 -##冀 -##内 -##円 -##冇 -##冈 -##冉 -##冊 -##册 -##再 -##冏 -##冒 -##冕 -##冗 -##写 -##军 -##农 -##冠 -##冢 -##冤 -##冥 -##冨 -##冪 -##冬 -##冯 -##冰 -##冲 -##决 -##况 -##冶 -##冷 -##冻 -##冼 -##冽 -##冾 -##净 -##凄 -##准 -##凇 -##凈 -##凉 -##凋 -##凌 -##凍 -##减 -##凑 -##凛 -##凜 -##凝 -##几 -##凡 -##凤 -##処 -##凪 -##凭 -##凯 -##凰 -##凱 -##凳 -##凶 -##凸 -##凹 -##出 -##击 -##函 -##凿 -##刀 -##刁 -##刃 -##分 -##切 -##刈 -##刊 -##刍 -##刎 -##刑 -##划 -##列 -##刘 
-##则 -##刚 -##创 -##初 -##删 -##判 -##別 -##刨 -##利 -##刪 -##别 -##刮 -##到 -##制 -##刷 -##券 -##刹 -##刺 -##刻 -##刽 -##剁 -##剂 -##剃 -##則 -##剉 -##削 -##剋 -##剌 -##前 -##剎 -##剐 -##剑 -##剔 -##剖 -##剛 -##剜 -##剝 -##剣 -##剤 -##剥 -##剧 -##剩 -##剪 -##副 -##割 -##創 -##剷 -##剽 -##剿 -##劃 -##劇 -##劈 -##劉 -##劊 -##劍 -##劏 -##劑 -##力 -##劝 -##办 -##功 -##加 -##务 -##劣 -##动 -##助 -##努 -##劫 -##劭 -##励 -##劲 -##劳 -##労 -##劵 -##効 -##劾 -##势 -##勁 -##勃 -##勇 -##勉 -##勋 -##勐 -##勒 -##動 -##勖 -##勘 -##務 -##勛 -##勝 -##勞 -##募 -##勢 -##勤 -##勧 -##勳 -##勵 -##勸 -##勺 -##勻 -##勾 -##勿 -##匀 -##包 -##匆 -##匈 -##匍 -##匐 -##匕 -##化 -##北 -##匙 -##匝 -##匠 -##匡 -##匣 -##匪 -##匮 -##匯 -##匱 -##匹 -##区 -##医 -##匾 -##匿 -##區 -##十 -##千 -##卅 -##升 -##午 -##卉 -##半 -##卍 -##华 -##协 -##卑 -##卒 -##卓 -##協 -##单 -##卖 -##南 -##単 -##博 -##卜 -##卞 -##卟 -##占 -##卡 -##卢 -##卤 -##卦 -##卧 -##卫 -##卮 -##卯 -##印 -##危 -##即 -##却 -##卵 -##卷 -##卸 -##卻 -##卿 -##厂 -##厄 -##厅 -##历 -##厉 -##压 -##厌 -##厕 -##厘 -##厚 -##厝 -##原 -##厢 -##厥 -##厦 -##厨 -##厩 -##厭 -##厮 -##厲 -##厳 -##去 -##县 -##叁 -##参 -##參 -##又 -##叉 -##及 -##友 -##双 -##反 -##収 -##发 -##叔 -##取 -##受 -##变 -##叙 -##叛 -##叟 -##叠 -##叡 -##叢 -##口 -##古 -##句 -##另 -##叨 -##叩 -##只 -##叫 -##召 -##叭 -##叮 -##可 -##台 -##叱 -##史 -##右 -##叵 -##叶 -##号 -##司 -##叹 -##叻 -##叼 -##叽 -##吁 -##吃 -##各 -##吆 -##合 -##吉 -##吊 -##吋 -##同 -##名 -##后 -##吏 -##吐 -##向 -##吒 -##吓 -##吕 -##吖 -##吗 -##君 -##吝 -##吞 -##吟 -##吠 -##吡 -##否 -##吧 -##吨 -##吩 -##含 -##听 -##吭 -##吮 -##启 -##吱 -##吳 -##吴 -##吵 -##吶 -##吸 -##吹 -##吻 -##吼 -##吽 -##吾 -##呀 -##呂 -##呃 -##呆 -##呈 -##告 -##呋 -##呎 -##呐 -##呓 -##呕 -##呗 -##员 -##呛 -##呜 -##呢 -##呤 -##呦 -##周 -##呱 -##呲 -##味 -##呵 -##呷 -##呸 -##呻 -##呼 -##命 -##咀 -##咁 -##咂 -##咄 -##咆 -##咋 -##和 -##咎 -##咏 -##咐 -##咒 -##咔 -##咕 -##咖 -##咗 -##咘 -##咙 -##咚 -##咛 -##咣 -##咤 -##咦 -##咧 -##咨 -##咩 -##咪 -##咫 -##咬 -##咭 -##咯 -##咱 -##咲 -##咳 -##咸 -##咻 -##咽 -##咿 -##哀 -##品 -##哂 -##哄 -##哆 -##哇 -##哈 -##哉 -##哋 -##哌 -##响 -##哎 -##哏 -##哐 -##哑 -##哒 -##哔 -##哗 -##哟 -##員 -##哥 -##哦 -##哧 -##哨 -##哩 -##哪 -##哭 -##哮 -##哲 -##哺 -##哼 -##哽 -##唁 -##唄 -##唆 -##唇 -##唉 -##唏 -##唐 -##唑 -##唔 -##唠 -##唤 -##唧 -##唬 -##售 -##唯 -##唰 -##唱 -##唳 -##唷 -##唸 -##唾 -##啃 -##啄 -##商 -##啉 -##啊 -##問 -##啓 -##啕 -##啖 -##啜 -##啞 -##啟 -##啡 -##啤 -##啥 -##啦 -##啧 -##啪 -##啫 -##啬 -##啮 -##啰 -##啱 -##啲 -##啵 -##啶 -##啷 -##啸 -##啻 -##啼 -##啾 -##喀 -##喂 -##喃 -##善 -##喆 -##喇 -##喉 -##喊 -##喋 -##喎 -##喏 -##喔 -##喘 -##喙 -##喚 -##喜 -##喝 -##喟 -##喧 -##喪 -##喫 -##喬 -##單 -##喰 -##喱 -##喲 -##喳 -##喵 -##営 -##喷 -##喹 -##喺 -##喻 -##喽 -##嗅 -##嗆 -##嗇 -##嗎 -##嗑 -##嗒 -##嗓 -##嗔 -##嗖 -##嗚 -##嗜 -##嗝 -##嗟 -##嗡 -##嗣 -##嗤 -##嗦 -##嗨 -##嗪 -##嗬 -##嗯 -##嗰 -##嗲 -##嗳 -##嗶 -##嗷 -##嗽 -##嘀 -##嘅 -##嘆 -##嘈 -##嘉 -##嘌 -##嘍 -##嘎 -##嘔 -##嘖 -##嘗 -##嘘 -##嘚 -##嘛 -##嘜 -##嘞 -##嘟 -##嘢 -##嘣 -##嘤 -##嘧 -##嘩 -##嘭 -##嘮 -##嘯 -##嘰 -##嘱 -##嘲 -##嘴 -##嘶 -##嘸 -##嘹 -##嘻 -##嘿 -##噁 -##噌 -##噎 -##噓 -##噔 -##噗 -##噙 -##噜 -##噠 -##噢 -##噤 -##器 -##噩 -##噪 -##噬 -##噱 -##噴 -##噶 -##噸 -##噹 -##噻 -##噼 -##嚀 -##嚇 -##嚎 -##嚏 -##嚐 -##嚓 -##嚕 -##嚟 -##嚣 -##嚥 -##嚨 -##嚮 -##嚴 -##嚷 -##嚼 -##囂 -##囉 -##囊 -##囍 -##囑 -##囔 -##囗 -##囚 -##四 -##囝 -##回 -##囟 -##因 -##囡 -##团 -##団 -##囤 -##囧 -##囪 -##囫 -##园 -##困 -##囱 -##囲 -##図 -##围 -##囹 -##固 -##国 -##图 -##囿 -##圃 -##圄 -##圆 -##圈 -##國 -##圍 -##圏 -##園 -##圓 -##圖 -##團 -##圜 -##土 -##圣 -##圧 -##在 -##圩 -##圭 -##地 -##圳 -##场 -##圻 -##圾 -##址 -##坂 -##均 -##坊 -##坍 -##坎 -##坏 -##坐 -##坑 -##块 -##坚 -##坛 -##坝 -##坞 -##坟 -##坠 -##坡 -##坤 -##坦 -##坨 -##坪 -##坯 -##坳 -##坵 -##坷 -##垂 -##垃 -##垄 -##型 -##垒 -##垚 -##垛 -##垠 -##垢 -##垣 -##垦 -##垩 -##垫 -##垭 -##垮 -##垵 -##埂 -##埃 -##埋 -##城 -##埔 -##埕 -##埗 -##域 -##埠 -##埤 -##埵 -##執 -##埸 -##培 -##基 -##埼 -##堀 -##堂 -##堃 -##堅 -##堆 -##堇 -##堑 -##堕 -##堙 -##堡 -##堤 -##堪 -##堯 -##堰 -##報 -##場 -##堵 -##堺 -##堿 -##塊 -##塌 -##塑 -##塔 -##塗 -##塘 -##塚 -##塞 -##塢 -##塩 -##填 -##塬 -##塭 -##塵 -##塾 -##墀 -##境 -##墅 -##墉 -##墊 -##墒 
-##墓 -##増 -##墘 -##墙 -##墜 -##增 -##墟 -##墨 -##墩 -##墮 -##墳 -##墻 -##墾 -##壁 -##壅 -##壆 -##壇 -##壊 -##壑 -##壓 -##壕 -##壘 -##壞 -##壟 -##壢 -##壤 -##壩 -##士 -##壬 -##壮 -##壯 -##声 -##売 -##壳 -##壶 -##壹 -##壺 -##壽 -##处 -##备 -##変 -##复 -##夏 -##夔 -##夕 -##外 -##夙 -##多 -##夜 -##够 -##夠 -##夢 -##夥 -##大 -##天 -##太 -##夫 -##夭 -##央 -##夯 -##失 -##头 -##夷 -##夸 -##夹 -##夺 -##夾 -##奂 -##奄 -##奇 -##奈 -##奉 -##奋 -##奎 -##奏 -##奐 -##契 -##奔 -##奕 -##奖 -##套 -##奘 -##奚 -##奠 -##奢 -##奥 -##奧 -##奪 -##奬 -##奮 -##女 -##奴 -##奶 -##奸 -##她 -##好 -##如 -##妃 -##妄 -##妆 -##妇 -##妈 -##妊 -##妍 -##妒 -##妓 -##妖 -##妘 -##妙 -##妝 -##妞 -##妣 -##妤 -##妥 -##妨 -##妩 -##妪 -##妮 -##妲 -##妳 -##妹 -##妻 -##妾 -##姆 -##姉 -##姊 -##始 -##姍 -##姐 -##姑 -##姒 -##姓 -##委 -##姗 -##姚 -##姜 -##姝 -##姣 -##姥 -##姦 -##姨 -##姪 -##姫 -##姬 -##姹 -##姻 -##姿 -##威 -##娃 -##娄 -##娅 -##娆 -##娇 -##娉 -##娑 -##娓 -##娘 -##娛 -##娜 -##娟 -##娠 -##娣 -##娥 -##娩 -##娱 -##娲 -##娴 -##娶 -##娼 -##婀 -##婁 -##婆 -##婉 -##婊 -##婕 -##婚 -##婢 -##婦 -##婧 -##婪 -##婭 -##婴 -##婵 -##婶 -##婷 -##婺 -##婿 -##媒 -##媚 -##媛 -##媞 -##媧 -##媲 -##媳 -##媽 -##媾 -##嫁 -##嫂 -##嫉 -##嫌 -##嫑 -##嫔 -##嫖 -##嫘 -##嫚 -##嫡 -##嫣 -##嫦 -##嫩 -##嫲 -##嫵 -##嫻 -##嬅 -##嬉 -##嬌 -##嬗 -##嬛 -##嬢 -##嬤 -##嬪 -##嬰 -##嬴 -##嬷 -##嬸 -##嬿 -##孀 -##孃 -##子 -##孑 -##孔 -##孕 -##孖 -##字 -##存 -##孙 -##孚 -##孛 -##孜 -##孝 -##孟 -##孢 -##季 -##孤 -##学 -##孩 -##孪 -##孫 -##孬 -##孰 -##孱 -##孳 -##孵 -##學 -##孺 -##孽 -##孿 -##宁 -##它 -##宅 -##宇 -##守 -##安 -##宋 -##完 -##宏 -##宓 -##宕 -##宗 -##官 -##宙 -##定 -##宛 -##宜 -##宝 -##实 -##実 -##宠 -##审 -##客 -##宣 -##室 -##宥 -##宦 -##宪 -##宫 -##宮 -##宰 -##害 -##宴 -##宵 -##家 -##宸 -##容 -##宽 -##宾 -##宿 -##寂 -##寄 -##寅 -##密 -##寇 -##富 -##寐 -##寒 -##寓 -##寛 -##寝 -##寞 -##察 -##寡 -##寢 -##寥 -##實 -##寧 -##寨 -##審 -##寫 -##寬 -##寮 -##寰 -##寵 -##寶 -##寸 -##对 -##寺 -##寻 -##导 -##対 -##寿 -##封 -##専 -##射 -##将 -##將 -##專 -##尉 -##尊 -##尋 -##對 -##導 -##小 -##少 -##尔 -##尕 -##尖 -##尘 -##尚 -##尝 -##尤 -##尧 -##尬 -##就 -##尴 -##尷 -##尸 -##尹 -##尺 -##尻 -##尼 -##尽 -##尾 -##尿 -##局 -##屁 -##层 -##屄 -##居 -##屆 -##屈 -##屉 -##届 -##屋 -##屌 -##屍 -##屎 -##屏 -##屐 -##屑 -##展 -##屜 -##属 -##屠 -##屡 -##屢 -##層 -##履 -##屬 -##屯 -##山 -##屹 -##屿 -##岀 -##岁 -##岂 -##岌 -##岐 -##岑 -##岔 -##岖 -##岗 -##岘 -##岙 -##岚 -##岛 -##岡 -##岩 -##岫 -##岬 -##岭 -##岱 -##岳 -##岷 -##岸 -##峇 -##峋 -##峒 -##峙 -##峡 -##峤 -##峥 -##峦 -##峨 -##峪 -##峭 -##峯 -##峰 -##峴 -##島 -##峻 -##峽 -##崁 -##崂 -##崆 -##崇 -##崎 -##崑 -##崔 -##崖 -##崗 -##崙 -##崛 -##崧 -##崩 -##崭 -##崴 -##崽 -##嵇 -##嵊 -##嵋 -##嵌 -##嵐 -##嵘 -##嵩 -##嵬 -##嵯 -##嶂 -##嶄 -##嶇 -##嶋 -##嶙 -##嶺 -##嶼 -##嶽 -##巅 -##巍 -##巒 -##巔 -##巖 -##川 -##州 -##巡 -##巢 -##工 -##左 -##巧 -##巨 -##巩 -##巫 -##差 -##己 -##已 -##巳 -##巴 -##巷 -##巻 -##巽 -##巾 -##巿 -##币 -##市 -##布 -##帅 -##帆 -##师 -##希 -##帐 -##帑 -##帕 -##帖 -##帘 -##帚 -##帛 -##帜 -##帝 -##帥 -##带 -##帧 -##師 -##席 -##帮 -##帯 -##帰 -##帳 -##帶 -##帷 -##常 -##帼 -##帽 -##幀 -##幂 -##幄 -##幅 -##幌 -##幔 -##幕 -##幟 -##幡 -##幢 -##幣 -##幫 -##干 -##平 -##年 -##并 -##幸 -##幹 -##幺 -##幻 -##幼 -##幽 -##幾 -##广 -##庁 -##広 -##庄 -##庆 -##庇 -##床 -##序 -##庐 -##库 -##应 -##底 -##庖 -##店 -##庙 -##庚 -##府 -##庞 -##废 -##庠 -##度 -##座 -##庫 -##庭 -##庵 -##庶 -##康 -##庸 -##庹 -##庾 -##廁 -##廂 -##廃 -##廈 -##廉 -##廊 -##廓 -##廖 -##廚 -##廝 -##廟 -##廠 -##廢 -##廣 -##廬 -##廳 -##延 -##廷 -##建 -##廿 -##开 -##弁 -##异 -##弃 -##弄 -##弈 -##弊 -##弋 -##式 -##弑 -##弒 -##弓 -##弔 -##引 -##弗 -##弘 -##弛 -##弟 -##张 -##弥 -##弦 -##弧 -##弩 -##弭 -##弯 -##弱 -##張 -##強 -##弹 -##强 -##弼 -##弾 -##彅 -##彆 -##彈 -##彌 -##彎 -##归 -##当 -##录 -##彗 -##彙 -##彝 -##形 -##彤 -##彥 -##彦 -##彧 -##彩 -##彪 -##彫 -##彬 -##彭 -##彰 -##影 -##彷 -##役 -##彻 -##彼 -##彿 -##往 -##征 -##径 -##待 -##徇 -##很 -##徉 -##徊 -##律 -##後 -##徐 -##徑 -##徒 -##従 -##徕 -##得 -##徘 -##徙 -##徜 -##從 -##徠 -##御 -##徨 -##復 -##循 -##徬 -##微 -##徳 -##徴 -##徵 -##德 -##徹 -##徼 -##徽 -##心 -##必 -##忆 -##忌 -##忍 -##忏 -##忐 -##忑 -##忒 -##忖 -##志 -##忘 -##忙 -##応 -##忠 -##忡 -##忤 -##忧 -##忪 -##快 -##忱 -##念 -##忻 -##忽 -##忿 -##怀 
-##态 -##怂 -##怅 -##怆 -##怎 -##怏 -##怒 -##怔 -##怕 -##怖 -##怙 -##怜 -##思 -##怠 -##怡 -##急 -##怦 -##性 -##怨 -##怪 -##怯 -##怵 -##总 -##怼 -##恁 -##恃 -##恆 -##恋 -##恍 -##恐 -##恒 -##恕 -##恙 -##恚 -##恢 -##恣 -##恤 -##恥 -##恨 -##恩 -##恪 -##恫 -##恬 -##恭 -##息 -##恰 -##恳 -##恵 -##恶 -##恸 -##恺 -##恻 -##恼 -##恿 -##悄 -##悅 -##悉 -##悌 -##悍 -##悔 -##悖 -##悚 -##悟 -##悠 -##患 -##悦 -##您 -##悩 -##悪 -##悬 -##悯 -##悱 -##悲 -##悴 -##悵 -##悶 -##悸 -##悻 -##悼 -##悽 -##情 -##惆 -##惇 -##惊 -##惋 -##惑 -##惕 -##惘 -##惚 -##惜 -##惟 -##惠 -##惡 -##惦 -##惧 -##惨 -##惩 -##惫 -##惬 -##惭 -##惮 -##惯 -##惰 -##惱 -##想 -##惴 -##惶 -##惹 -##惺 -##愁 -##愆 -##愈 -##愉 -##愍 -##意 -##愕 -##愚 -##愛 -##愜 -##感 -##愣 -##愤 -##愧 -##愫 -##愷 -##愿 -##慄 -##慈 -##態 -##慌 -##慎 -##慑 -##慕 -##慘 -##慚 -##慟 -##慢 -##慣 -##慧 -##慨 -##慫 -##慮 -##慰 -##慳 -##慵 -##慶 -##慷 -##慾 -##憂 -##憊 -##憋 -##憎 -##憐 -##憑 -##憔 -##憚 -##憤 -##憧 -##憨 -##憩 -##憫 -##憬 -##憲 -##憶 -##憾 -##懂 -##懇 -##懈 -##應 -##懊 -##懋 -##懑 -##懒 -##懦 -##懲 -##懵 -##懶 -##懷 -##懸 -##懺 -##懼 -##懾 -##懿 -##戀 -##戈 -##戊 -##戌 -##戍 -##戎 -##戏 -##成 -##我 -##戒 -##戕 -##或 -##战 -##戚 -##戛 -##戟 -##戡 -##戦 -##截 -##戬 -##戮 -##戰 -##戲 -##戳 -##戴 -##戶 -##户 -##戸 -##戻 -##戾 -##房 -##所 -##扁 -##扇 -##扈 -##扉 -##手 -##才 -##扎 -##扑 -##扒 -##打 -##扔 -##払 -##托 -##扛 -##扣 -##扦 -##执 -##扩 -##扪 -##扫 -##扬 -##扭 -##扮 -##扯 -##扰 -##扱 -##扳 -##扶 -##批 -##扼 -##找 -##承 -##技 -##抄 -##抉 -##把 -##抑 -##抒 -##抓 -##投 -##抖 -##抗 -##折 -##抚 -##抛 -##抜 -##択 -##抟 -##抠 -##抡 -##抢 -##护 -##报 -##抨 -##披 -##抬 -##抱 -##抵 -##抹 -##押 -##抽 -##抿 -##拂 -##拄 -##担 -##拆 -##拇 -##拈 -##拉 -##拋 -##拌 -##拍 -##拎 -##拐 -##拒 -##拓 -##拔 -##拖 -##拗 -##拘 -##拙 -##拚 -##招 -##拜 -##拟 -##拡 -##拢 -##拣 -##拥 -##拦 -##拧 -##拨 -##择 -##括 -##拭 -##拮 -##拯 -##拱 -##拳 -##拴 -##拷 -##拼 -##拽 -##拾 -##拿 -##持 -##挂 -##指 -##挈 -##按 -##挎 -##挑 -##挖 -##挙 -##挚 -##挛 -##挝 -##挞 -##挟 -##挠 -##挡 -##挣 -##挤 -##挥 -##挨 -##挪 -##挫 -##振 -##挲 -##挹 -##挺 -##挽 -##挾 -##捂 -##捅 -##捆 -##捉 -##捋 -##捌 -##捍 -##捎 -##捏 -##捐 -##捕 -##捞 -##损 -##捡 -##换 -##捣 -##捧 -##捨 -##捩 -##据 -##捱 -##捲 -##捶 -##捷 -##捺 -##捻 -##掀 -##掂 -##掃 -##掇 -##授 -##掉 -##掌 -##掏 -##掐 -##排 -##掖 -##掘 -##掙 -##掛 -##掠 -##採 -##探 -##掣 -##接 -##控 -##推 -##掩 -##措 -##掬 -##掰 -##掲 -##掳 -##掴 -##掷 -##掸 -##掺 -##揀 -##揃 -##揄 -##揆 -##揉 -##揍 -##描 -##提 -##插 -##揖 -##揚 -##換 -##握 -##揣 -##揩 -##揪 -##揭 -##揮 -##援 -##揶 -##揸 -##揹 -##揽 -##搀 -##搁 -##搂 -##搅 -##損 -##搏 -##搐 -##搓 -##搔 -##搖 -##搗 -##搜 -##搞 -##搡 -##搪 -##搬 -##搭 -##搵 -##搶 -##携 -##搽 -##摀 -##摁 -##摄 -##摆 -##摇 -##摈 -##摊 -##摒 -##摔 -##摘 -##摞 -##摟 -##摧 -##摩 -##摯 -##摳 -##摸 -##摹 -##摺 -##摻 -##撂 -##撃 -##撅 -##撇 -##撈 -##撐 -##撑 -##撒 -##撓 -##撕 -##撚 -##撞 -##撤 -##撥 -##撩 -##撫 -##撬 -##播 -##撮 -##撰 -##撲 -##撵 -##撷 -##撸 -##撻 -##撼 -##撿 -##擀 -##擁 -##擂 -##擄 -##擅 -##擇 -##擊 -##擋 -##操 -##擎 -##擒 -##擔 -##擘 -##據 -##擞 -##擠 -##擡 -##擢 -##擦 -##擬 -##擰 -##擱 -##擲 -##擴 -##擷 -##擺 -##擼 -##擾 -##攀 -##攏 -##攒 -##攔 -##攘 -##攙 -##攜 -##攝 -##攞 -##攢 -##攣 -##攤 -##攥 -##攪 -##攫 -##攬 -##支 -##收 -##攸 -##改 -##攻 -##放 -##政 -##故 -##效 -##敌 -##敍 -##敎 -##敏 -##救 -##敕 -##敖 -##敗 -##敘 -##教 -##敛 -##敝 -##敞 -##敢 -##散 -##敦 -##敬 -##数 -##敲 -##整 -##敵 -##敷 -##數 -##斂 -##斃 -##文 -##斋 -##斌 -##斎 -##斐 -##斑 -##斓 -##斗 -##料 -##斛 -##斜 -##斟 -##斡 -##斤 -##斥 -##斧 -##斩 -##斫 -##斬 -##断 -##斯 -##新 -##斷 -##方 -##於 -##施 -##旁 -##旃 -##旅 -##旋 -##旌 -##旎 -##族 -##旖 -##旗 -##无 -##既 -##日 -##旦 -##旧 -##旨 -##早 -##旬 -##旭 -##旮 -##旱 -##时 -##旷 -##旺 -##旻 -##昀 -##昂 -##昆 -##昇 -##昉 -##昊 -##昌 -##明 -##昏 -##易 -##昔 -##昕 -##昙 -##星 -##映 -##春 -##昧 -##昨 -##昭 -##是 -##昱 -##昴 -##昵 -##昶 -##昼 -##显 -##晁 -##時 -##晃 -##晉 -##晋 -##晌 -##晏 -##晒 -##晓 -##晔 -##晕 -##晖 -##晗 -##晚 -##晝 -##晞 -##晟 -##晤 -##晦 -##晨 -##晩 -##普 -##景 -##晰 -##晴 -##晶 -##晷 -##智 -##晾 -##暂 -##暄 -##暇 -##暈 -##暉 -##暌 -##暐 -##暑 -##暖 -##暗 -##暝 -##暢 -##暧 -##暨 -##暫 -##暮 -##暱 -##暴 -##暸 -##暹 -##曄 -##曆 -##曇 -##曉 -##曖 -##曙 -##曜 -##曝 -##曠 -##曦 -##曬 -##曰 
-##曲 -##曳 -##更 -##書 -##曹 -##曼 -##曾 -##替 -##最 -##會 -##月 -##有 -##朋 -##服 -##朐 -##朔 -##朕 -##朗 -##望 -##朝 -##期 -##朦 -##朧 -##木 -##未 -##末 -##本 -##札 -##朮 -##术 -##朱 -##朴 -##朵 -##机 -##朽 -##杀 -##杂 -##权 -##杆 -##杈 -##杉 -##李 -##杏 -##材 -##村 -##杓 -##杖 -##杜 -##杞 -##束 -##杠 -##条 -##来 -##杨 -##杭 -##杯 -##杰 -##東 -##杳 -##杵 -##杷 -##杼 -##松 -##板 -##极 -##构 -##枇 -##枉 -##枋 -##析 -##枕 -##林 -##枚 -##果 -##枝 -##枢 -##枣 -##枪 -##枫 -##枭 -##枯 -##枰 -##枱 -##枳 -##架 -##枷 -##枸 -##柄 -##柏 -##某 -##柑 -##柒 -##染 -##柔 -##柘 -##柚 -##柜 -##柞 -##柠 -##柢 -##查 -##柩 -##柬 -##柯 -##柱 -##柳 -##柴 -##柵 -##査 -##柿 -##栀 -##栃 -##栄 -##栅 -##标 -##栈 -##栉 -##栋 -##栎 -##栏 -##树 -##栓 -##栖 -##栗 -##校 -##栩 -##株 -##样 -##核 -##根 -##格 -##栽 -##栾 -##桀 -##桁 -##桂 -##桃 -##桅 -##框 -##案 -##桉 -##桌 -##桎 -##桐 -##桑 -##桓 -##桔 -##桜 -##桠 -##桡 -##桢 -##档 -##桥 -##桦 -##桧 -##桨 -##桩 -##桶 -##桿 -##梁 -##梅 -##梆 -##梏 -##梓 -##梗 -##條 -##梟 -##梢 -##梦 -##梧 -##梨 -##梭 -##梯 -##械 -##梳 -##梵 -##梶 -##检 -##棂 -##棄 -##棉 -##棋 -##棍 -##棒 -##棕 -##棗 -##棘 -##棚 -##棟 -##棠 -##棣 -##棧 -##森 -##棱 -##棲 -##棵 -##棹 -##棺 -##椁 -##椅 -##椋 -##植 -##椎 -##椒 -##検 -##椪 -##椭 -##椰 -##椹 -##椽 -##椿 -##楂 -##楊 -##楓 -##楔 -##楚 -##楝 -##楞 -##楠 -##楣 -##楨 -##楫 -##業 -##楮 -##極 -##楷 -##楸 -##楹 -##楼 -##楽 -##概 -##榄 -##榆 -##榈 -##榉 -##榔 -##榕 -##榖 -##榛 -##榜 -##榨 -##榫 -##榭 -##榮 -##榱 -##榴 -##榷 -##榻 -##槁 -##槃 -##構 -##槌 -##槍 -##槎 -##槐 -##槓 -##様 -##槛 -##槟 -##槤 -##槭 -##槲 -##槳 -##槻 -##槽 -##槿 -##樁 -##樂 -##樊 -##樑 -##樓 -##標 -##樞 -##樟 -##模 -##樣 -##権 -##横 -##樫 -##樯 -##樱 -##樵 -##樸 -##樹 -##樺 -##樽 -##樾 -##橄 -##橇 -##橋 -##橐 -##橘 -##橙 -##機 -##橡 -##橢 -##橫 -##橱 -##橹 -##橼 -##檀 -##檄 -##檎 -##檐 -##檔 -##檗 -##檜 -##檢 -##檬 -##檯 -##檳 -##檸 -##檻 -##櫃 -##櫚 -##櫛 -##櫥 -##櫸 -##櫻 -##欄 -##權 -##欒 -##欖 -##欠 -##次 -##欢 -##欣 -##欧 -##欲 -##欸 -##欺 -##欽 -##款 -##歆 -##歇 -##歉 -##歌 -##歎 -##歐 -##歓 -##歙 -##歛 -##歡 -##止 -##正 -##此 -##步 -##武 -##歧 -##歩 -##歪 -##歯 -##歲 -##歳 -##歴 -##歷 -##歸 -##歹 -##死 -##歼 -##殁 -##殃 -##殆 -##殇 -##殉 -##殊 -##残 -##殒 -##殓 -##殖 -##殘 -##殞 -##殡 -##殤 -##殭 -##殯 -##殲 -##殴 -##段 -##殷 -##殺 -##殼 -##殿 -##毀 -##毁 -##毂 -##毅 -##毆 -##毋 -##母 -##毎 -##每 -##毒 -##毓 -##比 -##毕 -##毗 -##毘 -##毙 -##毛 -##毡 -##毫 -##毯 -##毽 -##氈 -##氏 -##氐 -##民 -##氓 -##气 -##氖 -##気 -##氙 -##氛 -##氟 -##氡 -##氢 -##氣 -##氤 -##氦 -##氧 -##氨 -##氪 -##氫 -##氮 -##氯 -##氰 -##氲 -##水 -##氷 -##永 -##氹 -##氾 -##汀 -##汁 -##求 -##汆 -##汇 -##汉 -##汎 -##汐 -##汕 -##汗 -##汙 -##汛 -##汝 -##汞 -##江 -##池 -##污 -##汤 -##汨 -##汩 -##汪 -##汰 -##汲 -##汴 -##汶 -##汹 -##決 -##汽 -##汾 -##沁 -##沂 -##沃 -##沅 -##沈 -##沉 -##沌 -##沏 -##沐 -##沒 -##沓 -##沖 -##沙 -##沛 -##沟 -##没 -##沢 -##沣 -##沥 -##沦 -##沧 -##沪 -##沫 -##沭 -##沮 -##沱 -##河 -##沸 -##油 -##治 -##沼 -##沽 -##沾 -##沿 -##況 -##泄 -##泉 -##泊 -##泌 -##泓 -##法 -##泗 -##泛 -##泞 -##泠 -##泡 -##波 -##泣 -##泥 -##注 -##泪 -##泫 -##泮 -##泯 -##泰 -##泱 -##泳 -##泵 -##泷 -##泸 -##泻 -##泼 -##泽 -##泾 -##洁 -##洄 -##洋 -##洒 -##洗 -##洙 -##洛 -##洞 -##津 -##洩 -##洪 -##洮 -##洱 -##洲 -##洵 -##洶 -##洸 -##洹 -##活 -##洼 -##洽 -##派 -##流 -##浃 -##浄 -##浅 -##浆 -##浇 -##浊 -##测 -##济 -##浏 -##浑 -##浒 -##浓 -##浔 -##浙 -##浚 -##浜 -##浣 -##浦 -##浩 -##浪 -##浬 -##浮 -##浯 -##浴 -##海 -##浸 -##涂 -##涅 -##涇 -##消 -##涉 -##涌 -##涎 -##涓 -##涔 -##涕 -##涙 -##涛 -##涝 -##涞 -##涟 -##涠 -##涡 -##涣 -##涤 -##润 -##涧 -##涨 -##涩 -##涪 -##涮 -##涯 -##液 -##涵 -##涸 -##涼 -##涿 -##淀 -##淄 -##淅 -##淆 -##淇 -##淋 -##淌 -##淑 -##淒 -##淖 -##淘 -##淙 -##淚 -##淞 -##淡 -##淤 -##淦 -##淨 -##淩 -##淪 -##淫 -##淬 -##淮 -##深 -##淳 -##淵 -##混 -##淹 -##淺 -##添 -##淼 -##清 -##済 -##渉 -##渊 -##渋 -##渍 -##渎 -##渐 -##渔 -##渗 -##渙 -##渚 -##減 -##渝 -##渠 -##渡 -##渣 -##渤 -##渥 -##渦 -##温 -##測 -##渭 -##港 -##渲 -##渴 -##游 -##渺 -##渾 -##湃 -##湄 -##湊 -##湍 -##湖 -##湘 -##湛 -##湟 -##湧 -##湫 -##湮 -##湯 -##湳 -##湾 -##湿 -##満 -##溃 -##溅 -##溉 -##溏 -##源 -##準 -##溜 -##溝 -##溟 -##溢 -##溥 -##溧 -##溪 -##溫 -##溯 -##溱 -##溴 -##溶 -##溺 -##溼 -##滁 -##滂 -##滄 -##滅 -##滇 -##滋 -##滌 -##滑 -##滓 
-##滔 -##滕 -##滙 -##滚 -##滝 -##滞 -##滟 -##满 -##滢 -##滤 -##滥 -##滦 -##滨 -##滩 -##滬 -##滯 -##滲 -##滴 -##滷 -##滸 -##滾 -##滿 -##漁 -##漂 -##漆 -##漉 -##漏 -##漓 -##演 -##漕 -##漠 -##漢 -##漣 -##漩 -##漪 -##漫 -##漬 -##漯 -##漱 -##漲 -##漳 -##漸 -##漾 -##漿 -##潆 -##潇 -##潋 -##潍 -##潑 -##潔 -##潘 -##潛 -##潜 -##潞 -##潟 -##潢 -##潤 -##潦 -##潧 -##潭 -##潮 -##潰 -##潴 -##潸 -##潺 -##潼 -##澀 -##澄 -##澆 -##澈 -##澍 -##澎 -##澗 -##澜 -##澡 -##澤 -##澧 -##澱 -##澳 -##澹 -##激 -##濁 -##濂 -##濃 -##濑 -##濒 -##濕 -##濘 -##濛 -##濟 -##濠 -##濡 -##濤 -##濫 -##濬 -##濮 -##濯 -##濱 -##濺 -##濾 -##瀅 -##瀆 -##瀉 -##瀋 -##瀏 -##瀑 -##瀕 -##瀘 -##瀚 -##瀛 -##瀝 -##瀞 -##瀟 -##瀧 -##瀨 -##瀬 -##瀰 -##瀾 -##灌 -##灏 -##灑 -##灘 -##灝 -##灞 -##灣 -##火 -##灬 -##灭 -##灯 -##灰 -##灵 -##灶 -##灸 -##灼 -##災 -##灾 -##灿 -##炀 -##炁 -##炅 -##炉 -##炊 -##炎 -##炒 -##炔 -##炕 -##炖 -##炙 -##炜 -##炫 -##炬 -##炭 -##炮 -##炯 -##炳 -##炷 -##炸 -##点 -##為 -##炼 -##炽 -##烁 -##烂 -##烃 -##烈 -##烊 -##烏 -##烘 -##烙 -##烛 -##烟 -##烤 -##烦 -##烧 -##烨 -##烩 -##烫 -##烬 -##热 -##烯 -##烷 -##烹 -##烽 -##焉 -##焊 -##焕 -##焖 -##焗 -##焘 -##焙 -##焚 -##焜 -##無 -##焦 -##焯 -##焰 -##焱 -##然 -##焼 -##煅 -##煉 -##煊 -##煌 -##煎 -##煒 -##煖 -##煙 -##煜 -##煞 -##煤 -##煥 -##煦 -##照 -##煨 -##煩 -##煮 -##煲 -##煸 -##煽 -##熄 -##熊 -##熏 -##熒 -##熔 -##熙 -##熟 -##熠 -##熨 -##熬 -##熱 -##熵 -##熹 -##熾 -##燁 -##燃 -##燄 -##燈 -##燉 -##燊 -##燎 -##燒 -##燔 -##燕 -##燙 -##燜 -##營 -##燥 -##燦 -##燧 -##燭 -##燮 -##燴 -##燻 -##燼 -##燿 -##爆 -##爍 -##爐 -##爛 -##爪 -##爬 -##爭 -##爰 -##爱 -##爲 -##爵 -##父 -##爷 -##爸 -##爹 -##爺 -##爻 -##爽 -##爾 -##牆 -##片 -##版 -##牌 -##牍 -##牒 -##牙 -##牛 -##牝 -##牟 -##牠 -##牡 -##牢 -##牦 -##牧 -##物 -##牯 -##牲 -##牴 -##牵 -##特 -##牺 -##牽 -##犀 -##犁 -##犄 -##犊 -##犍 -##犒 -##犢 -##犧 -##犬 -##犯 -##状 -##犷 -##犸 -##犹 -##狀 -##狂 -##狄 -##狈 -##狎 -##狐 -##狒 -##狗 -##狙 -##狞 -##狠 -##狡 -##狩 -##独 -##狭 -##狮 -##狰 -##狱 -##狸 -##狹 -##狼 -##狽 -##猎 -##猕 -##猖 -##猗 -##猙 -##猛 -##猜 -##猝 -##猥 -##猩 -##猪 -##猫 -##猬 -##献 -##猴 -##猶 -##猷 -##猾 -##猿 -##獄 -##獅 -##獎 -##獐 -##獒 -##獗 -##獠 -##獣 -##獨 -##獭 -##獰 -##獲 -##獵 -##獷 -##獸 -##獺 -##獻 -##獼 -##獾 -##玄 -##率 -##玉 -##王 -##玑 -##玖 -##玛 -##玟 -##玠 -##玥 -##玩 -##玫 -##玮 -##环 -##现 -##玲 -##玳 -##玷 -##玺 -##玻 -##珀 -##珂 -##珅 -##珈 -##珉 -##珊 -##珍 -##珏 -##珐 -##珑 -##珙 -##珞 -##珠 -##珣 -##珥 -##珩 -##珪 -##班 -##珮 -##珲 -##珺 -##現 -##球 -##琅 -##理 -##琇 -##琉 -##琊 -##琍 -##琏 -##琐 -##琛 -##琢 -##琥 -##琦 -##琨 -##琪 -##琬 -##琮 -##琰 -##琲 -##琳 -##琴 -##琵 -##琶 -##琺 -##琼 -##瑀 -##瑁 -##瑄 -##瑋 -##瑕 -##瑗 -##瑙 -##瑚 -##瑛 -##瑜 -##瑞 -##瑟 -##瑠 -##瑣 -##瑤 -##瑩 -##瑪 -##瑯 -##瑰 -##瑶 -##瑾 -##璀 -##璁 -##璃 -##璇 -##璉 -##璋 -##璎 -##璐 -##璜 -##璞 -##璟 -##璧 -##璨 -##環 -##璽 -##璿 -##瓊 -##瓏 -##瓒 -##瓜 -##瓢 -##瓣 -##瓤 -##瓦 -##瓮 -##瓯 -##瓴 -##瓶 -##瓷 -##甄 -##甌 -##甕 -##甘 -##甙 -##甚 -##甜 -##生 -##產 -##産 -##甥 -##甦 -##用 -##甩 -##甫 -##甬 -##甭 -##甯 -##田 -##由 -##甲 -##申 -##电 -##男 -##甸 -##町 -##画 -##甾 -##畀 -##畅 -##界 -##畏 -##畑 -##畔 -##留 -##畜 -##畝 -##畢 -##略 -##畦 -##番 -##畫 -##異 -##畲 -##畳 -##畴 -##當 -##畸 -##畹 -##畿 -##疆 -##疇 -##疊 -##疏 -##疑 -##疔 -##疖 -##疗 -##疙 -##疚 -##疝 -##疟 -##疡 -##疣 -##疤 -##疥 -##疫 -##疮 -##疯 -##疱 -##疲 -##疳 -##疵 -##疸 -##疹 -##疼 -##疽 -##疾 -##痂 -##病 -##症 -##痈 -##痉 -##痊 -##痍 -##痒 -##痔 -##痕 -##痘 -##痙 -##痛 -##痞 -##痠 -##痢 -##痣 -##痤 -##痧 -##痨 -##痪 -##痫 -##痰 -##痱 -##痴 -##痹 -##痺 -##痼 -##痿 -##瘀 -##瘁 -##瘋 -##瘍 -##瘓 -##瘘 -##瘙 -##瘟 -##瘠 -##瘡 -##瘢 -##瘤 -##瘦 -##瘧 -##瘩 -##瘪 -##瘫 -##瘴 -##瘸 -##瘾 -##療 -##癇 -##癌 -##癒 -##癖 -##癜 -##癞 -##癡 -##癢 -##癣 -##癥 -##癫 -##癬 -##癮 -##癱 -##癲 -##癸 -##発 -##登 -##發 -##白 -##百 -##皂 -##的 -##皆 -##皇 -##皈 -##皋 -##皎 -##皑 -##皓 -##皖 -##皙 -##皚 -##皮 -##皰 -##皱 -##皴 -##皺 -##皿 -##盂 -##盃 -##盅 -##盆 -##盈 -##益 -##盎 -##盏 -##盐 -##监 -##盒 -##盔 -##盖 -##盗 -##盘 -##盛 -##盜 -##盞 -##盟 -##盡 -##監 -##盤 -##盥 -##盧 -##盪 -##目 -##盯 -##盱 -##盲 -##直 -##相 -##盹 -##盼 -##盾 -##省 -##眈 -##眉 -##看 -##県 -##眙 -##眞 -##真 -##眠 -##眦 -##眨 -##眩 -##眯 -##眶 -##眷 -##眸 -##眺 -##眼 -##眾 -##着 -##睁 -##睇 
-##睏 -##睐 -##睑 -##睛 -##睜 -##睞 -##睡 -##睢 -##督 -##睥 -##睦 -##睨 -##睪 -##睫 -##睬 -##睹 -##睽 -##睾 -##睿 -##瞄 -##瞅 -##瞇 -##瞋 -##瞌 -##瞎 -##瞑 -##瞒 -##瞓 -##瞞 -##瞟 -##瞠 -##瞥 -##瞧 -##瞩 -##瞪 -##瞬 -##瞭 -##瞰 -##瞳 -##瞻 -##瞼 -##瞿 -##矇 -##矍 -##矗 -##矚 -##矛 -##矜 -##矢 -##矣 -##知 -##矩 -##矫 -##短 -##矮 -##矯 -##石 -##矶 -##矽 -##矾 -##矿 -##码 -##砂 -##砌 -##砍 -##砒 -##研 -##砖 -##砗 -##砚 -##砝 -##砣 -##砥 -##砧 -##砭 -##砰 -##砲 -##破 -##砷 -##砸 -##砺 -##砼 -##砾 -##础 -##硅 -##硐 -##硒 -##硕 -##硝 -##硫 -##硬 -##确 -##硯 -##硼 -##碁 -##碇 -##碉 -##碌 -##碍 -##碎 -##碑 -##碓 -##碗 -##碘 -##碚 -##碛 -##碟 -##碣 -##碧 -##碩 -##碰 -##碱 -##碳 -##碴 -##確 -##碼 -##碾 -##磁 -##磅 -##磊 -##磋 -##磐 -##磕 -##磚 -##磡 -##磨 -##磬 -##磯 -##磲 -##磷 -##磺 -##礁 -##礎 -##礙 -##礡 -##礦 -##礪 -##礫 -##礴 -##示 -##礼 -##社 -##祀 -##祁 -##祂 -##祇 -##祈 -##祉 -##祎 -##祐 -##祕 -##祖 -##祗 -##祚 -##祛 -##祜 -##祝 -##神 -##祟 -##祠 -##祢 -##祥 -##票 -##祭 -##祯 -##祷 -##祸 -##祺 -##祿 -##禀 -##禁 -##禄 -##禅 -##禍 -##禎 -##福 -##禛 -##禦 -##禧 -##禪 -##禮 -##禱 -##禹 -##禺 -##离 -##禽 -##禾 -##禿 -##秀 -##私 -##秃 -##秆 -##秉 -##秋 -##种 -##科 -##秒 -##秘 -##租 -##秣 -##秤 -##秦 -##秧 -##秩 -##秭 -##积 -##称 -##秸 -##移 -##秽 -##稀 -##稅 -##程 -##稍 -##税 -##稔 -##稗 -##稚 -##稜 -##稞 -##稟 -##稠 -##稣 -##種 -##稱 -##稲 -##稳 -##稷 -##稹 -##稻 -##稼 -##稽 -##稿 -##穀 -##穂 -##穆 -##穌 -##積 -##穎 -##穗 -##穢 -##穩 -##穫 -##穴 -##究 -##穷 -##穹 -##空 -##穿 -##突 -##窃 -##窄 -##窈 -##窍 -##窑 -##窒 -##窓 -##窕 -##窖 -##窗 -##窘 -##窜 -##窝 -##窟 -##窠 -##窥 -##窦 -##窨 -##窩 -##窪 -##窮 -##窯 -##窺 -##窿 -##竄 -##竅 -##竇 -##竊 -##立 -##竖 -##站 -##竜 -##竞 -##竟 -##章 -##竣 -##童 -##竭 -##端 -##競 -##竹 -##竺 -##竽 -##竿 -##笃 -##笆 -##笈 -##笋 -##笏 -##笑 -##笔 -##笙 -##笛 -##笞 -##笠 -##符 -##笨 -##第 -##笹 -##笺 -##笼 -##筆 -##等 -##筊 -##筋 -##筍 -##筏 -##筐 -##筑 -##筒 -##答 -##策 -##筛 -##筝 -##筠 -##筱 -##筲 -##筵 -##筷 -##筹 -##签 -##简 -##箇 -##箋 -##箍 -##箏 -##箐 -##箔 -##箕 -##算 -##箝 -##管 -##箩 -##箫 -##箭 -##箱 -##箴 -##箸 -##節 -##篁 -##範 -##篆 -##篇 -##築 -##篑 -##篓 -##篙 -##篝 -##篠 -##篡 -##篤 -##篩 -##篪 -##篮 -##篱 -##篷 -##簇 -##簌 -##簍 -##簡 -##簦 -##簧 -##簪 -##簫 -##簷 -##簸 -##簽 -##簾 -##簿 -##籁 -##籃 -##籌 -##籍 -##籐 -##籟 -##籠 -##籤 -##籬 -##籮 -##籲 -##米 -##类 -##籼 -##籽 -##粄 -##粉 -##粑 -##粒 -##粕 -##粗 -##粘 -##粟 -##粤 -##粥 -##粧 -##粪 -##粮 -##粱 -##粲 -##粳 -##粵 -##粹 -##粼 -##粽 -##精 -##粿 -##糅 -##糊 -##糍 -##糕 -##糖 -##糗 -##糙 -##糜 -##糞 -##糟 -##糠 -##糧 -##糬 -##糯 -##糰 -##糸 -##系 -##糾 -##紀 -##紂 -##約 -##紅 -##紉 -##紊 -##紋 -##納 -##紐 -##紓 -##純 -##紗 -##紘 -##紙 -##級 -##紛 -##紜 -##素 -##紡 -##索 -##紧 -##紫 -##紮 -##累 -##細 -##紳 -##紹 -##紺 -##終 -##絃 -##組 -##絆 -##経 -##結 -##絕 -##絞 -##絡 -##絢 -##給 -##絨 -##絮 -##統 -##絲 -##絳 -##絵 -##絶 -##絹 -##綁 -##綏 -##綑 -##經 -##継 -##続 -##綜 -##綠 -##綢 -##綦 -##綫 -##綬 -##維 -##綱 -##網 -##綴 -##綵 -##綸 -##綺 -##綻 -##綽 -##綾 -##綿 -##緊 -##緋 -##総 -##緑 -##緒 -##緘 -##線 -##緝 -##緞 -##締 -##緣 -##編 -##緩 -##緬 -##緯 -##練 -##緹 -##緻 -##縁 -##縄 -##縈 -##縛 -##縝 -##縣 -##縫 -##縮 -##縱 -##縴 -##縷 -##總 -##績 -##繁 -##繃 -##繆 -##繇 -##繋 -##織 -##繕 -##繚 -##繞 -##繡 -##繩 -##繪 -##繫 -##繭 -##繳 -##繹 -##繼 -##繽 -##纂 -##續 -##纍 -##纏 -##纓 -##纔 -##纖 -##纜 -##纠 -##红 -##纣 -##纤 -##约 -##级 -##纨 -##纪 -##纫 -##纬 -##纭 -##纯 -##纰 -##纱 -##纲 -##纳 -##纵 -##纶 -##纷 -##纸 -##纹 -##纺 -##纽 -##纾 -##线 -##绀 -##练 -##组 -##绅 -##细 -##织 -##终 -##绊 -##绍 -##绎 -##经 -##绑 -##绒 -##结 -##绔 -##绕 -##绘 -##给 -##绚 -##绛 -##络 -##绝 -##绞 -##统 -##绡 -##绢 -##绣 -##绥 -##绦 -##继 -##绩 -##绪 -##绫 -##续 -##绮 -##绯 -##绰 -##绳 -##维 -##绵 -##绶 -##绷 -##绸 -##绻 -##综 -##绽 -##绾 -##绿 -##缀 -##缄 -##缅 -##缆 -##缇 -##缈 -##缉 -##缎 -##缓 -##缔 -##缕 -##编 -##缘 -##缙 -##缚 -##缜 -##缝 -##缠 -##缢 -##缤 -##缥 -##缨 -##缩 -##缪 -##缭 -##缮 -##缰 -##缱 -##缴 -##缸 -##缺 -##缽 -##罂 -##罄 -##罌 -##罐 -##网 -##罔 -##罕 -##罗 -##罚 -##罡 -##罢 -##罩 -##罪 -##置 -##罰 -##署 -##罵 -##罷 -##罹 -##羁 -##羅 -##羈 -##羊 -##羌 -##美 -##羔 -##羚 -##羞 -##羟 -##羡 -##羣 -##群 -##羥 -##羧 -##羨 -##義 -##羯 -##羲 -##羸 -##羹 -##羽 -##羿 -##翁 -##翅 -##翊 
-##翌 -##翎 -##習 -##翔 -##翘 -##翟 -##翠 -##翡 -##翦 -##翩 -##翰 -##翱 -##翳 -##翹 -##翻 -##翼 -##耀 -##老 -##考 -##耄 -##者 -##耆 -##耋 -##而 -##耍 -##耐 -##耒 -##耕 -##耗 -##耘 -##耙 -##耦 -##耨 -##耳 -##耶 -##耷 -##耸 -##耻 -##耽 -##耿 -##聂 -##聆 -##聊 -##聋 -##职 -##聒 -##联 -##聖 -##聘 -##聚 -##聞 -##聪 -##聯 -##聰 -##聲 -##聳 -##聴 -##聶 -##職 -##聽 -##聾 -##聿 -##肃 -##肄 -##肅 -##肆 -##肇 -##肉 -##肋 -##肌 -##肏 -##肓 -##肖 -##肘 -##肚 -##肛 -##肝 -##肠 -##股 -##肢 -##肤 -##肥 -##肩 -##肪 -##肮 -##肯 -##肱 -##育 -##肴 -##肺 -##肽 -##肾 -##肿 -##胀 -##胁 -##胃 -##胄 -##胆 -##背 -##胍 -##胎 -##胖 -##胚 -##胛 -##胜 -##胝 -##胞 -##胡 -##胤 -##胥 -##胧 -##胫 -##胭 -##胯 -##胰 -##胱 -##胳 -##胴 -##胶 -##胸 -##胺 -##能 -##脂 -##脅 -##脆 -##脇 -##脈 -##脉 -##脊 -##脍 -##脏 -##脐 -##脑 -##脓 -##脖 -##脘 -##脚 -##脛 -##脣 -##脩 -##脫 -##脯 -##脱 -##脲 -##脳 -##脸 -##脹 -##脾 -##腆 -##腈 -##腊 -##腋 -##腌 -##腎 -##腐 -##腑 -##腓 -##腔 -##腕 -##腥 -##腦 -##腩 -##腫 -##腭 -##腮 -##腰 -##腱 -##腳 -##腴 -##腸 -##腹 -##腺 -##腻 -##腼 -##腾 -##腿 -##膀 -##膈 -##膊 -##膏 -##膑 -##膘 -##膚 -##膛 -##膜 -##膝 -##膠 -##膦 -##膨 -##膩 -##膳 -##膺 -##膻 -##膽 -##膾 -##膿 -##臀 -##臂 -##臃 -##臆 -##臉 -##臊 -##臍 -##臓 -##臘 -##臟 -##臣 -##臥 -##臧 -##臨 -##自 -##臬 -##臭 -##至 -##致 -##臺 -##臻 -##臼 -##臾 -##舀 -##舂 -##舅 -##舆 -##與 -##興 -##舉 -##舊 -##舌 -##舍 -##舎 -##舐 -##舒 -##舔 -##舖 -##舗 -##舛 -##舜 -##舞 -##舟 -##航 -##舫 -##般 -##舰 -##舱 -##舵 -##舶 -##舷 -##舸 -##船 -##舺 -##舾 -##艇 -##艋 -##艘 -##艙 -##艦 -##艮 -##良 -##艰 -##艱 -##色 -##艳 -##艷 -##艹 -##艺 -##艾 -##节 -##芃 -##芈 -##芊 -##芋 -##芍 -##芎 -##芒 -##芙 -##芜 -##芝 -##芡 -##芥 -##芦 -##芩 -##芪 -##芫 -##芬 -##芭 -##芮 -##芯 -##花 -##芳 -##芷 -##芸 -##芹 -##芻 -##芽 -##芾 -##苁 -##苄 -##苇 -##苋 -##苍 -##苏 -##苑 -##苒 -##苓 -##苔 -##苕 -##苗 -##苛 -##苜 -##苞 -##苟 -##苡 -##苣 -##若 -##苦 -##苫 -##苯 -##英 -##苷 -##苹 -##苻 -##茁 -##茂 -##范 -##茄 -##茅 -##茉 -##茎 -##茏 -##茗 -##茜 -##茧 -##茨 -##茫 -##茬 -##茭 -##茯 -##茱 -##茲 -##茴 -##茵 -##茶 -##茸 -##茹 -##茼 -##荀 -##荃 -##荆 -##草 -##荊 -##荏 -##荐 -##荒 -##荔 -##荖 -##荘 -##荚 -##荞 -##荟 -##荠 -##荡 -##荣 -##荤 -##荥 -##荧 -##荨 -##荪 -##荫 -##药 -##荳 -##荷 -##荸 -##荻 -##荼 -##荽 -##莅 -##莆 -##莉 -##莊 -##莎 -##莒 -##莓 -##莖 -##莘 -##莞 -##莠 -##莢 -##莧 -##莪 -##莫 -##莱 -##莲 -##莴 -##获 -##莹 -##莺 -##莽 -##莿 -##菀 -##菁 -##菅 -##菇 -##菈 -##菊 -##菌 -##菏 -##菓 -##菖 -##菘 -##菜 -##菟 -##菠 -##菡 -##菩 -##華 -##菱 -##菲 -##菸 -##菽 -##萁 -##萃 -##萄 -##萊 -##萋 -##萌 -##萍 -##萎 -##萘 -##萝 -##萤 -##营 -##萦 -##萧 -##萨 -##萩 -##萬 -##萱 -##萵 -##萸 -##萼 -##落 -##葆 -##葉 -##著 -##葚 -##葛 -##葡 -##董 -##葦 -##葩 -##葫 -##葬 -##葭 -##葯 -##葱 -##葳 -##葵 -##葷 -##葺 -##蒂 -##蒋 -##蒐 -##蒔 -##蒙 -##蒜 -##蒞 -##蒟 -##蒡 -##蒨 -##蒲 -##蒸 -##蒹 -##蒻 -##蒼 -##蒿 -##蓁 -##蓄 -##蓆 -##蓉 -##蓋 -##蓑 -##蓓 -##蓖 -##蓝 -##蓟 -##蓦 -##蓬 -##蓮 -##蓼 -##蓿 -##蔑 -##蔓 -##蔔 -##蔗 -##蔘 -##蔚 -##蔡 -##蔣 -##蔥 -##蔫 -##蔬 -##蔭 -##蔵 -##蔷 -##蔺 -##蔻 -##蔼 -##蔽 -##蕁 -##蕃 -##蕈 -##蕉 -##蕊 -##蕎 -##蕙 -##蕤 -##蕨 -##蕩 -##蕪 -##蕭 -##蕲 -##蕴 -##蕻 -##蕾 -##薄 -##薅 -##薇 -##薈 -##薊 -##薏 -##薑 -##薔 -##薙 -##薛 -##薦 -##薨 -##薩 -##薪 -##薬 -##薯 -##薰 -##薹 -##藉 -##藍 -##藏 -##藐 -##藓 -##藕 -##藜 -##藝 -##藤 -##藥 -##藩 -##藹 -##藻 -##藿 -##蘆 -##蘇 -##蘊 -##蘋 -##蘑 -##蘚 -##蘭 -##蘸 -##蘼 -##蘿 -##虎 -##虏 -##虐 -##虑 -##虔 -##處 -##虚 -##虛 -##虜 -##虞 -##號 -##虢 -##虧 -##虫 -##虬 -##虱 -##虹 -##虻 -##虽 -##虾 -##蚀 -##蚁 -##蚂 -##蚊 -##蚌 -##蚓 -##蚕 -##蚜 -##蚝 -##蚣 -##蚤 -##蚩 -##蚪 -##蚯 -##蚱 -##蚵 -##蛀 -##蛆 -##蛇 -##蛊 -##蛋 -##蛎 -##蛐 -##蛔 -##蛙 -##蛛 -##蛟 -##蛤 -##蛭 -##蛮 -##蛰 -##蛳 -##蛹 -##蛻 -##蛾 -##蜀 -##蜂 -##蜃 -##蜆 -##蜇 -##蜈 -##蜊 -##蜍 -##蜒 -##蜓 -##蜕 -##蜗 -##蜘 -##蜚 -##蜜 -##蜡 -##蜢 -##蜥 -##蜱 -##蜴 -##蜷 -##蜻 -##蜿 -##蝇 -##蝈 -##蝉 -##蝌 -##蝎 -##蝕 -##蝗 -##蝙 -##蝟 -##蝠 -##蝦 -##蝨 -##蝴 -##蝶 -##蝸 -##蝼 -##螂 -##螃 -##融 -##螞 -##螢 -##螨 -##螯 -##螳 -##螺 -##蟀 -##蟄 -##蟆 -##蟋 -##蟎 -##蟑 -##蟒 -##蟠 -##蟬 -##蟲 -##蟹 -##蟻 -##蟾 -##蠅 -##蠍 -##蠔 -##蠕 -##蠛 -##蠟 -##蠡 -##蠢 -##蠣 -##蠱 -##蠶 -##蠹 -##蠻 -##血 -##衄 -##衅 -##衆 -##行 -##衍 -##術 -##衔 -##街 -##衙 -##衛 -##衝 -##衞 -##衡 -##衢 -##衣 
-##补 -##表 -##衩 -##衫 -##衬 -##衮 -##衰 -##衲 -##衷 -##衹 -##衾 -##衿 -##袁 -##袂 -##袄 -##袅 -##袈 -##袋 -##袍 -##袒 -##袖 -##袜 -##袞 -##袤 -##袪 -##被 -##袭 -##袱 -##裁 -##裂 -##装 -##裆 -##裊 -##裏 -##裔 -##裕 -##裘 -##裙 -##補 -##裝 -##裟 -##裡 -##裤 -##裨 -##裱 -##裳 -##裴 -##裸 -##裹 -##製 -##裾 -##褂 -##複 -##褐 -##褒 -##褓 -##褔 -##褚 -##褥 -##褪 -##褫 -##褲 -##褶 -##褻 -##襁 -##襄 -##襟 -##襠 -##襪 -##襬 -##襯 -##襲 -##西 -##要 -##覃 -##覆 -##覇 -##見 -##規 -##覓 -##視 -##覚 -##覦 -##覧 -##親 -##覬 -##観 -##覷 -##覺 -##覽 -##觀 -##见 -##观 -##规 -##觅 -##视 -##览 -##觉 -##觊 -##觎 -##觐 -##觑 -##角 -##觞 -##解 -##觥 -##触 -##觸 -##言 -##訂 -##計 -##訊 -##討 -##訓 -##訕 -##訖 -##託 -##記 -##訛 -##訝 -##訟 -##訣 -##訥 -##訪 -##設 -##許 -##訳 -##訴 -##訶 -##診 -##註 -##証 -##詆 -##詐 -##詔 -##評 -##詛 -##詞 -##詠 -##詡 -##詢 -##詣 -##試 -##詩 -##詫 -##詬 -##詭 -##詮 -##詰 -##話 -##該 -##詳 -##詹 -##詼 -##誅 -##誇 -##誉 -##誌 -##認 -##誓 -##誕 -##誘 -##語 -##誠 -##誡 -##誣 -##誤 -##誥 -##誦 -##誨 -##說 -##説 -##読 -##誰 -##課 -##誹 -##誼 -##調 -##諄 -##談 -##請 -##諏 -##諒 -##論 -##諗 -##諜 -##諡 -##諦 -##諧 -##諫 -##諭 -##諮 -##諱 -##諳 -##諷 -##諸 -##諺 -##諾 -##謀 -##謁 -##謂 -##謄 -##謊 -##謎 -##謐 -##謔 -##謗 -##謙 -##講 -##謝 -##謠 -##謨 -##謬 -##謹 -##謾 -##譁 -##證 -##譎 -##譏 -##識 -##譙 -##譚 -##譜 -##警 -##譬 -##譯 -##議 -##譲 -##譴 -##護 -##譽 -##讀 -##變 -##讓 -##讚 -##讞 -##计 -##订 -##认 -##讥 -##讧 -##讨 -##让 -##讪 -##讫 -##训 -##议 -##讯 -##记 -##讲 -##讳 -##讴 -##讶 -##讷 -##许 -##讹 -##论 -##讼 -##讽 -##设 -##访 -##诀 -##证 -##诃 -##评 -##诅 -##识 -##诈 -##诉 -##诊 -##诋 -##词 -##诏 -##译 -##试 -##诗 -##诘 -##诙 -##诚 -##诛 -##话 -##诞 -##诟 -##诠 -##诡 -##询 -##诣 -##诤 -##该 -##详 -##诧 -##诩 -##诫 -##诬 -##语 -##误 -##诰 -##诱 -##诲 -##说 -##诵 -##诶 -##请 -##诸 -##诺 -##读 -##诽 -##课 -##诿 -##谀 -##谁 -##调 -##谄 -##谅 -##谆 -##谈 -##谊 -##谋 -##谌 -##谍 -##谎 -##谏 -##谐 -##谑 -##谒 -##谓 -##谔 -##谕 -##谗 -##谘 -##谙 -##谚 -##谛 -##谜 -##谟 -##谢 -##谣 -##谤 -##谥 -##谦 -##谧 -##谨 -##谩 -##谪 -##谬 -##谭 -##谯 -##谱 -##谲 -##谴 -##谶 -##谷 -##豁 -##豆 -##豇 -##豈 -##豉 -##豊 -##豌 -##豎 -##豐 -##豔 -##豚 -##象 -##豢 -##豪 -##豫 -##豬 -##豹 -##豺 -##貂 -##貅 -##貌 -##貓 -##貔 -##貘 -##貝 -##貞 -##負 -##財 -##貢 -##貧 -##貨 -##販 -##貪 -##貫 -##責 -##貯 -##貰 -##貳 -##貴 -##貶 -##買 -##貸 -##費 -##貼 -##貽 -##貿 -##賀 -##賁 -##賂 -##賃 -##賄 -##資 -##賈 -##賊 -##賑 -##賓 -##賜 -##賞 -##賠 -##賡 -##賢 -##賣 -##賤 -##賦 -##質 -##賬 -##賭 -##賴 -##賺 -##購 -##賽 -##贅 -##贈 -##贊 -##贍 -##贏 -##贓 -##贖 -##贛 -##贝 -##贞 -##负 -##贡 -##财 -##责 -##贤 -##败 -##账 -##货 -##质 -##贩 -##贪 -##贫 -##贬 -##购 -##贮 -##贯 -##贰 -##贱 -##贲 -##贴 -##贵 -##贷 -##贸 -##费 -##贺 -##贻 -##贼 -##贾 -##贿 -##赁 -##赂 -##赃 -##资 -##赅 -##赈 -##赊 -##赋 -##赌 -##赎 -##赏 -##赐 -##赓 -##赔 -##赖 -##赘 -##赚 -##赛 -##赝 -##赞 -##赠 -##赡 -##赢 -##赣 -##赤 -##赦 -##赧 -##赫 -##赭 -##走 -##赳 -##赴 -##赵 -##赶 -##起 -##趁 -##超 -##越 -##趋 -##趕 -##趙 -##趟 -##趣 -##趨 -##足 -##趴 -##趵 -##趸 -##趺 -##趾 -##跃 -##跄 -##跆 -##跋 -##跌 -##跎 -##跑 -##跖 -##跚 -##跛 -##距 -##跟 -##跡 -##跤 -##跨 -##跩 -##跪 -##路 -##跳 -##践 -##跷 -##跹 -##跺 -##跻 -##踉 -##踊 -##踌 -##踏 -##踐 -##踝 -##踞 -##踟 -##踢 -##踩 -##踪 -##踮 -##踱 -##踴 -##踵 -##踹 -##蹂 -##蹄 -##蹇 -##蹈 -##蹉 -##蹊 -##蹋 -##蹑 -##蹒 -##蹙 -##蹟 -##蹣 -##蹤 -##蹦 -##蹩 -##蹬 -##蹭 -##蹲 -##蹴 -##蹶 -##蹺 -##蹼 -##蹿 -##躁 -##躇 -##躉 -##躊 -##躋 -##躍 -##躏 -##躪 -##身 -##躬 -##躯 -##躲 -##躺 -##軀 -##車 -##軋 -##軌 -##軍 -##軒 -##軟 -##転 -##軸 -##軼 -##軽 -##軾 -##較 -##載 -##輒 -##輓 -##輔 -##輕 -##輛 -##輝 -##輟 -##輩 -##輪 -##輯 -##輸 -##輻 -##輾 -##輿 -##轄 -##轅 -##轆 -##轉 -##轍 -##轎 -##轟 -##车 -##轧 -##轨 -##轩 -##转 -##轭 -##轮 -##软 -##轰 -##轲 -##轴 -##轶 -##轻 -##轼 -##载 -##轿 -##较 -##辄 -##辅 -##辆 -##辇 -##辈 -##辉 -##辊 -##辍 -##辐 -##辑 -##输 -##辕 -##辖 -##辗 -##辘 -##辙 -##辛 -##辜 -##辞 -##辟 -##辣 -##辦 -##辨 -##辩 -##辫 -##辭 -##辮 -##辯 -##辰 -##辱 -##農 -##边 -##辺 -##辻 -##込 -##辽 -##达 -##迁 -##迂 -##迄 -##迅 -##过 -##迈 -##迎 -##运 -##近 -##返 -##还 -##这 -##进 -##远 -##违 -##连 -##迟 -##迢 -##迤 -##迥 -##迦 -##迩 -##迪 -##迫 -##迭 -##述 -##迴 -##迷 -##迸 -##迹 -##迺 -##追 -##退 -##送 
-##适 -##逃 -##逅 -##逆 -##选 -##逊 -##逍 -##透 -##逐 -##递 -##途 -##逕 -##逗 -##這 -##通 -##逛 -##逝 -##逞 -##速 -##造 -##逢 -##連 -##逮 -##週 -##進 -##逵 -##逶 -##逸 -##逻 -##逼 -##逾 -##遁 -##遂 -##遅 -##遇 -##遊 -##運 -##遍 -##過 -##遏 -##遐 -##遑 -##遒 -##道 -##達 -##違 -##遗 -##遙 -##遛 -##遜 -##遞 -##遠 -##遢 -##遣 -##遥 -##遨 -##適 -##遭 -##遮 -##遲 -##遴 -##遵 -##遶 -##遷 -##選 -##遺 -##遼 -##遽 -##避 -##邀 -##邁 -##邂 -##邃 -##還 -##邇 -##邈 -##邊 -##邋 -##邏 -##邑 -##邓 -##邕 -##邛 -##邝 -##邢 -##那 -##邦 -##邨 -##邪 -##邬 -##邮 -##邯 -##邰 -##邱 -##邳 -##邵 -##邸 -##邹 -##邺 -##邻 -##郁 -##郅 -##郊 -##郎 -##郑 -##郜 -##郝 -##郡 -##郢 -##郤 -##郦 -##郧 -##部 -##郫 -##郭 -##郴 -##郵 -##郷 -##郸 -##都 -##鄂 -##鄉 -##鄒 -##鄔 -##鄙 -##鄞 -##鄢 -##鄧 -##鄭 -##鄰 -##鄱 -##鄲 -##鄺 -##酉 -##酊 -##酋 -##酌 -##配 -##酐 -##酒 -##酗 -##酚 -##酝 -##酢 -##酣 -##酥 -##酩 -##酪 -##酬 -##酮 -##酯 -##酰 -##酱 -##酵 -##酶 -##酷 -##酸 -##酿 -##醃 -##醇 -##醉 -##醋 -##醍 -##醐 -##醒 -##醚 -##醛 -##醜 -##醞 -##醣 -##醪 -##醫 -##醬 -##醮 -##醯 -##醴 -##醺 -##釀 -##釁 -##采 -##釉 -##释 -##釋 -##里 -##重 -##野 -##量 -##釐 -##金 -##釗 -##釘 -##釜 -##針 -##釣 -##釦 -##釧 -##釵 -##鈀 -##鈉 -##鈍 -##鈎 -##鈔 -##鈕 -##鈞 -##鈣 -##鈦 -##鈪 -##鈴 -##鈺 -##鈾 -##鉀 -##鉄 -##鉅 -##鉉 -##鉑 -##鉗 -##鉚 -##鉛 -##鉤 -##鉴 -##鉻 -##銀 -##銃 -##銅 -##銑 -##銓 -##銖 -##銘 -##銜 -##銬 -##銭 -##銮 -##銳 -##銷 -##銹 -##鋁 -##鋅 -##鋒 -##鋤 -##鋪 -##鋰 -##鋸 -##鋼 -##錄 -##錐 -##錘 -##錚 -##錠 -##錢 -##錦 -##錨 -##錫 -##錮 -##錯 -##録 -##錳 -##錶 -##鍊 -##鍋 -##鍍 -##鍛 -##鍥 -##鍰 -##鍵 -##鍺 -##鍾 -##鎂 -##鎊 -##鎌 -##鎏 -##鎔 -##鎖 -##鎗 -##鎚 -##鎧 -##鎬 -##鎮 -##鎳 -##鏈 -##鏖 -##鏗 -##鏘 -##鏞 -##鏟 -##鏡 -##鏢 -##鏤 -##鏽 -##鐘 -##鐮 -##鐲 -##鐳 -##鐵 -##鐸 -##鐺 -##鑄 -##鑊 -##鑑 -##鑒 -##鑣 -##鑫 -##鑰 -##鑲 -##鑼 -##鑽 -##鑾 -##鑿 -##针 -##钉 -##钊 -##钎 -##钏 -##钒 -##钓 -##钗 -##钙 -##钛 -##钜 -##钝 -##钞 -##钟 -##钠 -##钡 -##钢 -##钣 -##钤 -##钥 -##钦 -##钧 -##钨 -##钩 -##钮 -##钯 -##钰 -##钱 -##钳 -##钴 -##钵 -##钺 -##钻 -##钼 -##钾 -##钿 -##铀 -##铁 -##铂 -##铃 -##铄 -##铅 -##铆 -##铉 -##铎 -##铐 -##铛 -##铜 -##铝 -##铠 -##铡 -##铢 -##铣 -##铤 -##铨 -##铩 -##铬 -##铭 -##铮 -##铰 -##铲 -##铵 -##银 -##铸 -##铺 -##链 -##铿 -##销 -##锁 -##锂 -##锄 -##锅 -##锆 -##锈 -##锉 -##锋 -##锌 -##锏 -##锐 -##锑 -##错 -##锚 -##锟 -##锡 -##锢 -##锣 -##锤 -##锥 -##锦 -##锭 -##键 -##锯 -##锰 -##锲 -##锵 -##锹 -##锺 -##锻 -##镀 -##镁 -##镂 -##镇 -##镉 -##镌 -##镍 -##镐 -##镑 -##镕 -##镖 -##镗 -##镛 -##镜 -##镣 -##镭 -##镯 -##镰 -##镳 -##镶 -##長 -##长 -##門 -##閃 -##閉 -##開 -##閎 -##閏 -##閑 -##閒 -##間 -##閔 -##閘 -##閡 -##関 -##閣 -##閥 -##閨 -##閩 -##閱 -##閲 -##閹 -##閻 -##閾 -##闆 -##闇 -##闊 -##闌 -##闍 -##闔 -##闕 -##闖 -##闘 -##關 -##闡 -##闢 -##门 -##闪 -##闫 -##闭 -##问 -##闯 -##闰 -##闲 -##间 -##闵 -##闷 -##闸 -##闹 -##闺 -##闻 -##闽 -##闾 -##阀 -##阁 -##阂 -##阅 -##阆 -##阇 -##阈 -##阉 -##阎 -##阐 -##阑 -##阔 -##阕 -##阖 -##阙 -##阚 -##阜 -##队 -##阡 -##阪 -##阮 -##阱 -##防 -##阳 -##阴 -##阵 -##阶 -##阻 -##阿 -##陀 -##陂 -##附 -##际 -##陆 -##陇 -##陈 -##陋 -##陌 -##降 -##限 -##陕 -##陛 -##陝 -##陞 -##陟 -##陡 -##院 -##陣 -##除 -##陨 -##险 -##陪 -##陰 -##陲 -##陳 -##陵 -##陶 -##陷 -##陸 -##険 -##陽 -##隅 -##隆 -##隈 -##隊 -##隋 -##隍 -##階 -##随 -##隐 -##隔 -##隕 -##隘 -##隙 -##際 -##障 -##隠 -##隣 -##隧 -##隨 -##險 -##隱 -##隴 -##隶 -##隸 -##隻 -##隼 -##隽 -##难 -##雀 -##雁 -##雄 -##雅 -##集 -##雇 -##雉 -##雋 -##雌 -##雍 -##雎 -##雏 -##雑 -##雒 -##雕 -##雖 -##雙 -##雛 -##雜 -##雞 -##離 -##難 -##雨 -##雪 -##雯 -##雰 -##雲 -##雳 -##零 -##雷 -##雹 -##電 -##雾 -##需 -##霁 -##霄 -##霆 -##震 -##霈 -##霉 -##霊 -##霍 -##霎 -##霏 -##霑 -##霓 -##霖 -##霜 -##霞 -##霧 -##霭 -##霰 -##露 -##霸 -##霹 -##霽 -##霾 -##靂 -##靄 -##靈 -##青 -##靓 -##靖 -##静 -##靚 -##靛 -##靜 -##非 -##靠 -##靡 -##面 -##靥 -##靦 -##革 -##靳 -##靴 -##靶 -##靼 -##鞅 -##鞋 -##鞍 -##鞏 -##鞑 -##鞘 -##鞠 -##鞣 -##鞦 -##鞭 -##韆 -##韋 -##韌 -##韓 -##韜 -##韦 -##韧 -##韩 -##韬 -##韭 -##音 -##韵 -##韶 -##韻 -##響 -##頁 -##頂 -##頃 -##項 -##順 -##須 -##頌 -##預 -##頑 -##頒 -##頓 -##頗 -##領 -##頜 -##頡 -##頤 -##頫 -##頭 -##頰 -##頷 -##頸 -##頹 -##頻 -##頼 -##顆 -##題 -##額 -##顎 -##顏 -##顔 -##願 -##顛 -##類 -##顧 -##顫 -##顯 -##顱 -##顴 -##页 -##顶 -##顷 
-##项 -##顺 -##须 -##顼 -##顽 -##顾 -##顿 -##颁 -##颂 -##预 -##颅 -##领 -##颇 -##颈 -##颉 -##颊 -##颌 -##颍 -##颐 -##频 -##颓 -##颔 -##颖 -##颗 -##题 -##颚 -##颛 -##颜 -##额 -##颞 -##颠 -##颡 -##颢 -##颤 -##颦 -##颧 -##風 -##颯 -##颱 -##颳 -##颶 -##颼 -##飄 -##飆 -##风 -##飒 -##飓 -##飕 -##飘 -##飙 -##飚 -##飛 -##飞 -##食 -##飢 -##飨 -##飩 -##飪 -##飯 -##飲 -##飼 -##飽 -##飾 -##餃 -##餅 -##餉 -##養 -##餌 -##餐 -##餒 -##餓 -##餘 -##餚 -##餛 -##餞 -##餡 -##館 -##餮 -##餵 -##餾 -##饅 -##饈 -##饋 -##饌 -##饍 -##饑 -##饒 -##饕 -##饗 -##饞 -##饥 -##饨 -##饪 -##饬 -##饭 -##饮 -##饯 -##饰 -##饱 -##饲 -##饴 -##饵 -##饶 -##饷 -##饺 -##饼 -##饽 -##饿 -##馀 -##馁 -##馄 -##馅 -##馆 -##馈 -##馋 -##馍 -##馏 -##馒 -##馔 -##首 -##馗 -##香 -##馥 -##馨 -##馬 -##馭 -##馮 -##馳 -##馴 -##駁 -##駄 -##駅 -##駆 -##駐 -##駒 -##駕 -##駛 -##駝 -##駭 -##駱 -##駿 -##騁 -##騎 -##騏 -##験 -##騙 -##騨 -##騰 -##騷 -##驀 -##驅 -##驊 -##驍 -##驒 -##驕 -##驗 -##驚 -##驛 -##驟 -##驢 -##驥 -##马 -##驭 -##驮 -##驯 -##驰 -##驱 -##驳 -##驴 -##驶 -##驷 -##驸 -##驹 -##驻 -##驼 -##驾 -##驿 -##骁 -##骂 -##骄 -##骅 -##骆 -##骇 -##骈 -##骊 -##骋 -##验 -##骏 -##骐 -##骑 -##骗 -##骚 -##骛 -##骜 -##骞 -##骠 -##骡 -##骤 -##骥 -##骧 -##骨 -##骯 -##骰 -##骶 -##骷 -##骸 -##骼 -##髂 -##髅 -##髋 -##髏 -##髒 -##髓 -##體 -##髖 -##高 -##髦 -##髪 -##髮 -##髯 -##髻 -##鬃 -##鬆 -##鬍 -##鬓 -##鬚 -##鬟 -##鬢 -##鬣 -##鬥 -##鬧 -##鬱 -##鬼 -##魁 -##魂 -##魄 -##魅 -##魇 -##魍 -##魏 -##魔 -##魘 -##魚 -##魯 -##魷 -##鮑 -##鮨 -##鮪 -##鮭 -##鮮 -##鯉 -##鯊 -##鯖 -##鯛 -##鯨 -##鯰 -##鯽 -##鰍 -##鰓 -##鰭 -##鰲 -##鰻 -##鰾 -##鱈 -##鱉 -##鱔 -##鱗 -##鱷 -##鱸 -##鱼 -##鱿 -##鲁 -##鲈 -##鲍 -##鲑 -##鲛 -##鲜 -##鲟 -##鲢 -##鲤 -##鲨 -##鲫 -##鲱 -##鲲 -##鲶 -##鲷 -##鲸 -##鳃 -##鳄 -##鳅 -##鳌 -##鳍 -##鳕 -##鳖 -##鳗 -##鳝 -##鳞 -##鳥 -##鳩 -##鳳 -##鳴 -##鳶 -##鴉 -##鴕 -##鴛 -##鴦 -##鴨 -##鴻 -##鴿 -##鵑 -##鵜 -##鵝 -##鵡 -##鵬 -##鵰 -##鵲 -##鶘 -##鶩 -##鶯 -##鶴 -##鷗 -##鷲 -##鷹 -##鷺 -##鸚 -##鸞 -##鸟 -##鸠 -##鸡 -##鸢 -##鸣 -##鸥 -##鸦 -##鸨 -##鸪 -##鸭 -##鸯 -##鸳 -##鸵 -##鸽 -##鸾 -##鸿 -##鹂 -##鹃 -##鹄 -##鹅 -##鹈 -##鹉 -##鹊 -##鹌 -##鹏 -##鹑 -##鹕 -##鹘 -##鹜 -##鹞 -##鹤 -##鹦 -##鹧 -##鹫 -##鹭 -##鹰 -##鹳 -##鹵 -##鹹 -##鹼 -##鹽 -##鹿 -##麂 -##麋 -##麒 -##麓 -##麗 -##麝 -##麟 -##麥 -##麦 -##麩 -##麴 -##麵 -##麸 -##麺 -##麻 -##麼 -##麽 -##麾 -##黃 -##黄 -##黍 -##黎 -##黏 -##黑 -##黒 -##黔 -##默 -##黛 -##黜 -##黝 -##點 -##黠 -##黨 -##黯 -##黴 -##鼋 -##鼎 -##鼐 -##鼓 -##鼠 -##鼬 -##鼹 -##鼻 -##鼾 -##齁 -##齊 -##齋 -##齐 -##齒 -##齡 -##齢 -##齣 -##齦 -##齿 -##龄 -##龅 -##龈 -##龊 -##龋 -##龌 -##龍 -##龐 -##龔 -##龕 -##龙 -##龚 -##龛 -##龜 -##龟 -##︰ -##︱ -##︶ -##︿ -##﹁ -##﹂ -##﹍ -##﹏ -##﹐ -##﹑ -##﹒ -##﹔ -##﹕ -##﹖ -##﹗ -##﹙ -##﹚ -##﹝ -##﹞ -##﹡ -##﹣ -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##, -##- -##. -##/ -##: -##; -##< -##? 
-##@ -##[ -##\ -##] -##^ -##_ -##` -##f -##h -##j -##u -##w -##z -##{ -##} -##。 -##「 -##」 -##、 -##・ -##ッ -##ー -##イ -##ク -##シ -##ス -##ト -##ノ -##フ -##ラ -##ル -##ン -##゙ -##゚ -## ̄ -##¥ -##👍 -##🔥 -##😂 -##😎
diff --git a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_full_1p.sh b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_full_1p.sh
index ff70a546a5c55fdee7ed394da559b91cfa09b37a..771488ea15bc6699199b550c8a6f682f8c4fe7c8 100644
--- a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_full_1p.sh
+++ b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_full_1p.sh
@@ -105,8 +105,8 @@ do
 --do_predict=True \
 --do_train=True \
 --predict_file=$predict_file \
- --train_batch_size=32 \
- --num_train_epochs=2 \
+ --train_batch_size=${batch_size} \
+ --num_train_epochs=${epoch} \
 --learning_rate=3e-5 \
 --max_seq_length=384 \
 --doc_stride=128 \
diff --git a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_full_8p.sh b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_full_8p.sh
index 40a41dc7c2cc1f04971cea46a06458a061c29e34..a12eeef6d5f5d1c2f593e65235c7de3686d35d10 100644
--- a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_full_8p.sh
+++ b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_full_8p.sh
@@ -107,8 +107,8 @@ do
 --do_predict=True \
 --do_train=True \
 --predict_file=$predict_file \
- --train_batch_size=32 \
- --num_train_epochs=2 \
+ --train_batch_size=${batch_size} \
+ --num_train_epochs=${epoch} \
 --learning_rate=3e-5 \
 --max_seq_length=384 \
 --doc_stride=128 \
diff --git a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_performance_1p.sh b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_performance_1p.sh
index b11e96d6a07b00abf03721f947ecd89cd256d4ef..0d67fba344620ab05afedff404e4acbe7f2869e6 100644
--- a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_performance_1p.sh
+++ b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_performance_1p.sh
@@ -105,8 +105,8 @@ do
 --do_predict=True \
 --do_train=True \
 --predict_file=$predict_file \
- --train_batch_size=32 \
- --num_train_epochs=1 \
+ --train_batch_size=${batch_size} \
+ --num_train_epochs=${epoch} \
 --num_train_steps=1000 \
 --learning_rate=3e-5 \
 --max_seq_length=384 \
diff --git a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_performance_8p.sh b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_performance_8p.sh
index 55f784e2491f1563e089ad33796b6a2cfc346ebf..c13ecf75593eb3f23e3c3ac5523ba4dca3226426 100644
--- a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_performance_8p.sh
+++ b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID0495_Bert-Squad_performance_8p.sh
@@ -107,8 +107,8 @@ do
 --do_predict=True \
 --do_train=True \
 --predict_file=$predict_file \
- --train_batch_size=32 \
- --num_train_epochs=1 \
+ --train_batch_size=${batch_size} \
+ --num_train_epochs=${epoch} \
 --num_train_steps=1000 \
 --learning_rate=3e-5 \
 --max_seq_length=384 \
diff --git a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_full_1p.sh b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_full_1p.sh
index a41c04a3c36ab0824140f851d6f931836aaa4f1c..0fc7048a3bacbe715583d1d2c1e6615cd280d035 100644
--- a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_full_1p.sh
+++ b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_full_1p.sh
@@ -15,7 +15,7 @@ data_path=""
 #Basic parameters, review and modify per model
 #Network name, same as the directory name
 Network="BertLarge-Squad_ID3082_for_TensorFlow"
-batch_size=24
+batch_size=32
 epoch=2
 #Fixed parameters, no modification needed
@@ -104,8 +104,8 @@ do
 --do_predict=True \
 --do_train=True \
 --predict_file=$predict_file \
- --train_batch_size=24 \
- --num_train_epochs=2 \
+ --train_batch_size=${batch_size} \
+ --num_train_epochs=${epoch} \
 --learning_rate=3e-5 \
 --max_seq_length=384 \
 --doc_stride=128 \
diff --git a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_full_8p.sh b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_full_8p.sh
index 943c52c52dacbf40f1dcb7d9ae5287514e1097bd..4e53b8c0c86c5c45f30c8bc26965f2c3cd04308e 100644
--- a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_full_8p.sh
+++ b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_full_8p.sh
@@ -16,7 +16,7 @@ data_path=""
 #Basic parameters, review and modify per model
 #Network name, same as the directory name
 Network="BertLarge-Squad_ID3082_for_TensorFlow"
-batch_size=24
+batch_size=32
 epoch=2
 #Fixed parameters, no modification needed
@@ -106,8 +106,8 @@ do
 --do_predict=True \
 --do_train=True \
 --predict_file=$predict_file \
- --train_batch_size=24 \
- --num_train_epochs=2 \
+ --train_batch_size=${batch_size} \
+ --num_train_epochs=${epoch} \
 --learning_rate=3e-5 \
 --max_seq_length=384 \
 --doc_stride=128 \
diff --git a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_performance_1p.sh b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_performance_1p.sh
index ef213866b21021eaec6769f97d28e8d9effaecd6..25c14fab37aa242a15ba09fb3878389539eb9ac1 100644
--- a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_performance_1p.sh
+++ b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_performance_1p.sh
@@ -15,7 +15,7 @@ data_path=""
 #Basic parameters, review and modify per model
 #Network name, same as the directory name
 Network="BertLarge-Squad_ID3082_for_TensorFlow"
-batch_size=24
+batch_size=32
 epoch=1
@@ -105,8 +105,8 @@ do
 --do_predict=True \
 --do_train=True \
 --predict_file=$predict_file \
- --train_batch_size=24 \
- --num_train_epochs=1 \
+ --train_batch_size=${batch_size} \
+ --num_train_epochs=${epoch} \
 --num_train_steps=1000 \
 --learning_rate=3e-5 \
 --max_seq_length=384 \
diff --git a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_performance_8p.sh b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_performance_8p.sh
index 28c3e9bf564244e3c3c682e2de59d579bd269b60..7634bcad70fc146c04c165a8682f8638f3f17f99 100644
--- a/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_performance_8p.sh
+++ b/TensorFlow/built-in/nlp/BertGoogle_Series_for_TensorFlow/test/train_ID3082_BertLarge-Squad_performance_8p.sh
@@ -16,7 +16,7 @@ data_path=""
 #Basic parameters, review and modify per model
 #Network name, same as the directory name
 Network="BertLarge-Squad_ID3082_for_TensorFlow"
-batch_size=24
+batch_size=32
 epoch=1
 #Fixed parameters, no modification needed
@@ -106,8 +106,8 @@ do
 --do_predict=True
\ --do_train=True \ --predict_file=$predict_file \ - --train_batch_size=24 \ - --num_train_epochs=1 \ + --train_batch_size=${batch_size} \ + --num_train_epochs=${epoch} \ --num_train_steps=1000 \ --learning_rate=3e-5 \ --max_seq_length=384 \ diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/create_pretraining_data.py b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/create_pretraining_data.py deleted file mode 100644 index ec94c765bde139fe6826f1b37236aa7950c05544..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/create_pretraining_data.py +++ /dev/null @@ -1,457 +0,0 @@ -# coding=utf-8 -# Copyright 2018 The Google AI Language Team Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -============================================================================== -# -# Copyright 2020 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Create masked LM/next sentence masked_lm TF examples for BERT.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import collections -import random -import tokenization -import tensorflow as tf - -flags = tf.flags - -FLAGS = flags.FLAGS - -flags.DEFINE_string("input_file", None, - "Input raw text file (or comma-separated list of files).") - -flags.DEFINE_string( - "output_file", None, - "Output TF example file (or comma-separated list of files).") - -flags.DEFINE_string("vocab_file", None, - "The vocabulary file that the BERT model was trained on.") - -flags.DEFINE_bool( - "do_lower_case", True, - "Whether to lower case the input text. 
Should be True for uncased " - "models and False for cased models.") - -flags.DEFINE_integer("max_seq_length", 128, "Maximum sequence length.") - -flags.DEFINE_integer("max_predictions_per_seq", 20, - "Maximum number of masked LM predictions per sequence.") - -flags.DEFINE_integer("random_seed", 12345, "Random seed for data generation.") - -flags.DEFINE_integer( - "dupe_factor", 10, - "Number of times to duplicate the input data (with different masks).") - -flags.DEFINE_float("masked_lm_prob", 0.15, "Masked LM probability.") - -flags.DEFINE_float( - "short_seq_prob", 0.1, - "Probability of creating sequences which are shorter than the " - "maximum length.") - - -class TrainingInstance(object): - """A single training instance (sentence pair).""" - - def __init__(self, tokens, segment_ids, masked_lm_positions, masked_lm_labels, - is_random_next): - self.tokens = tokens - self.segment_ids = segment_ids - self.is_random_next = is_random_next - self.masked_lm_positions = masked_lm_positions - self.masked_lm_labels = masked_lm_labels - - def __str__(self): - s = "" - s += "tokens: %s\n" % (" ".join( - [tokenization.printable_text(x) for x in self.tokens])) - s += "segment_ids: %s\n" % (" ".join([str(x) for x in self.segment_ids])) - s += "is_random_next: %s\n" % self.is_random_next - s += "masked_lm_positions: %s\n" % (" ".join( - [str(x) for x in self.masked_lm_positions])) - s += "masked_lm_labels: %s\n" % (" ".join( - [tokenization.printable_text(x) for x in self.masked_lm_labels])) - s += "\n" - return s - - def __repr__(self): - return self.__str__() - - -def write_instance_to_example_files(instances, tokenizer, max_seq_length, - max_predictions_per_seq, output_files): - """Create TF example files from `TrainingInstance`s.""" - writers = [] - for output_file in output_files: - writers.append(tf.python_io.TFRecordWriter(output_file)) - - writer_index = 0 - - total_written = 0 - for (inst_index, instance) in enumerate(instances): - input_ids = tokenizer.convert_tokens_to_ids(instance.tokens) - input_mask = [1] * len(input_ids) - segment_ids = list(instance.segment_ids) - assert len(input_ids) <= max_seq_length - - while len(input_ids) < max_seq_length: - input_ids.append(0) - input_mask.append(0) - segment_ids.append(0) - - assert len(input_ids) == max_seq_length - assert len(input_mask) == max_seq_length - assert len(segment_ids) == max_seq_length - - masked_lm_positions = list(instance.masked_lm_positions) - masked_lm_ids = tokenizer.convert_tokens_to_ids(instance.masked_lm_labels) - masked_lm_weights = [1.0] * len(masked_lm_ids) - - while len(masked_lm_positions) < max_predictions_per_seq: - masked_lm_positions.append(0) - masked_lm_ids.append(0) - masked_lm_weights.append(0.0) - - next_sentence_label = 1 if instance.is_random_next else 0 - - features = collections.OrderedDict() - features["input_ids"] = create_int_feature(input_ids) - features["input_mask"] = create_int_feature(input_mask) - features["segment_ids"] = create_int_feature(segment_ids) - features["masked_lm_positions"] = create_int_feature(masked_lm_positions) - features["masked_lm_ids"] = create_int_feature(masked_lm_ids) - features["masked_lm_weights"] = create_float_feature(masked_lm_weights) - features["next_sentence_labels"] = create_int_feature([next_sentence_label]) - - tf_example = tf.train.Example(features=tf.train.Features(feature=features)) - - writers[writer_index].write(tf_example.SerializeToString()) - writer_index = (writer_index + 1) % len(writers) - - total_written += 1 - - if inst_index < 20: - 
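# Only the first 20 instances are logged below, as a spot check of the tokenization, masking and padding.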
tf.logging.info("*** Example ***") - tf.logging.info("tokens: %s" % " ".join( - [tokenization.printable_text(x) for x in instance.tokens])) - - for feature_name in features.keys(): - feature = features[feature_name] - values = [] - if feature.int64_list.value: - values = feature.int64_list.value - elif feature.float_list.value: - values = feature.float_list.value - tf.logging.info( - "%s: %s" % (feature_name, " ".join([str(x) for x in values]))) - - for writer in writers: - writer.close() - - tf.logging.info("Wrote %d total instances", total_written) - - -def create_int_feature(values): - feature = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values))) - return feature - - -def create_float_feature(values): - feature = tf.train.Feature(float_list=tf.train.FloatList(value=list(values))) - return feature - - -def create_training_instances(input_files, tokenizer, max_seq_length, - dupe_factor, short_seq_prob, masked_lm_prob, - max_predictions_per_seq, rng): - """Create `TrainingInstance`s from raw text.""" - all_documents = [[]] - - # Input file format: - # (1) One sentence per line. These should ideally be actual sentences, not - # entire paragraphs or arbitrary spans of text. (Because we use the - # sentence boundaries for the "next sentence prediction" task). - # (2) Blank lines between documents. Document boundaries are needed so - # that the "next sentence prediction" task doesn't span between documents. - for input_file in input_files: - with tf.gfile.GFile(input_file, "r") as reader: - while True: - line = tokenization.convert_to_unicode(reader.readline()) - if not line: - break - line = line.strip() - - # Empty lines are used as document delimiters - if not line: - all_documents.append([]) - tokens = tokenizer.tokenize(line) - if tokens: - all_documents[-1].append(tokens) - - # Remove empty documents - all_documents = [x for x in all_documents if x] - rng.shuffle(all_documents) - - vocab_words = list(tokenizer.vocab.keys()) - instances = [] - for _ in range(dupe_factor): - for document_index in range(len(all_documents)): - instances.extend( - create_instances_from_document( - all_documents, document_index, max_seq_length, short_seq_prob, - masked_lm_prob, max_predictions_per_seq, vocab_words, rng)) - - rng.shuffle(instances) - return instances - - -def create_instances_from_document( - all_documents, document_index, max_seq_length, short_seq_prob, - masked_lm_prob, max_predictions_per_seq, vocab_words, rng): - """Creates `TrainingInstance`s for a single document.""" - document = all_documents[document_index] - - # Account for [CLS], [SEP], [SEP] - max_num_tokens = max_seq_length - 3 - - # We *usually* want to fill up the entire sequence since we are padding - # to `max_seq_length` anyways, so short sequences are generally wasted - # computation. However, we *sometimes* - # (i.e., short_seq_prob == 0.1 == 10% of the time) want to use shorter - # sequences to minimize the mismatch between pre-training and fine-tuning. - # The `target_seq_length` is just a rough target however, whereas - # `max_seq_length` is a hard limit. - target_seq_length = max_num_tokens - if rng.random() < short_seq_prob: - target_seq_length = rng.randint(2, max_num_tokens) - - # We DON'T just concatenate all of the tokens from a document into a long - # sequence and choose an arbitrary split point because this would make the - # next sentence prediction task too easy. Instead, we split the input into - # segments "A" and "B" based on the actual "sentences" provided by the user - # input. 
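# A hedged illustration of the A/B split described above, using invented toy
# tokens (nothing here comes from a real corpus):
#   current_chunk = [["the", "cat", "sat"], ["on", "the", "mat"], ["it", "slept"]]
#   With a_end = 2, tokens_a = ["the", "cat", "sat", "on", "the", "mat"], and
#   tokens_b is either the remaining ["it", "slept"] (actual next,
#   is_random_next=False) or a span copied from a randomly chosen other
#   document (is_random_next=True); the loop below implements both branches.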
- instances = [] - current_chunk = [] - current_length = 0 - i = 0 - while i < len(document): - segment = document[i] - current_chunk.append(segment) - current_length += len(segment) - if i == len(document) - 1 or current_length >= target_seq_length: - if current_chunk: - # `a_end` is how many segments from `current_chunk` go into the `A` - # (first) sentence. - a_end = 1 - if len(current_chunk) >= 2: - a_end = rng.randint(1, len(current_chunk) - 1) - - tokens_a = [] - for j in range(a_end): - tokens_a.extend(current_chunk[j]) - - tokens_b = [] - # Random next - is_random_next = False - if len(current_chunk) == 1 or rng.random() < 0.5: - is_random_next = True - target_b_length = target_seq_length - len(tokens_a) - - # This should rarely go for more than one iteration for large - # corpora. However, just to be careful, we try to make sure that - # the random document is not the same as the document - # we're processing. - for _ in range(10): - random_document_index = rng.randint(0, len(all_documents) - 1) - if random_document_index != document_index: - break - - random_document = all_documents[random_document_index] - random_start = rng.randint(0, len(random_document) - 1) - for j in range(random_start, len(random_document)): - tokens_b.extend(random_document[j]) - if len(tokens_b) >= target_b_length: - break - # We didn't actually use these segments so we "put them back" so - # they don't go to waste. - num_unused_segments = len(current_chunk) - a_end - i -= num_unused_segments - # Actual next - else: - is_random_next = False - for j in range(a_end, len(current_chunk)): - tokens_b.extend(current_chunk[j]) - truncate_seq_pair(tokens_a, tokens_b, max_num_tokens, rng) - - assert len(tokens_a) >= 1 - assert len(tokens_b) >= 1 - - tokens = [] - segment_ids = [] - tokens.append("[CLS]") - segment_ids.append(0) - for token in tokens_a: - tokens.append(token) - segment_ids.append(0) - - tokens.append("[SEP]") - segment_ids.append(0) - - for token in tokens_b: - tokens.append(token) - segment_ids.append(1) - tokens.append("[SEP]") - segment_ids.append(1) - - (tokens, masked_lm_positions, - masked_lm_labels) = create_masked_lm_predictions( - tokens, masked_lm_prob, max_predictions_per_seq, vocab_words, rng) - instance = TrainingInstance( - tokens=tokens, - segment_ids=segment_ids, - is_random_next=is_random_next, - masked_lm_positions=masked_lm_positions, - masked_lm_labels=masked_lm_labels) - instances.append(instance) - current_chunk = [] - current_length = 0 - i += 1 - - return instances - - -MaskedLmInstance = collections.namedtuple("MaskedLmInstance", - ["index", "label"]) - - -def create_masked_lm_predictions(tokens, masked_lm_prob, - max_predictions_per_seq, vocab_words, rng): - """Creates the predictions for the masked LM objective.""" - - cand_indexes = [] - for (i, token) in enumerate(tokens): - if token == "[CLS]" or token == "[SEP]": - continue - cand_indexes.append(i) - - rng.shuffle(cand_indexes) - - output_tokens = list(tokens) - - num_to_predict = min(max_predictions_per_seq, - max(1, int(round(len(tokens) * masked_lm_prob)))) - - masked_lms = [] - covered_indexes = set() - for index in cand_indexes: - if len(masked_lms) >= num_to_predict: - break - if index in covered_indexes: - continue - covered_indexes.add(index) - - masked_token = None - # 80% of the time, replace with [MASK] - if rng.random() < 0.8: - masked_token = "[MASK]" - else: - # 10% of the time, keep original - if rng.random() < 0.5: - masked_token = tokens[index] - # 10% of the time, replace with random word - else: - 
masked_token = vocab_words[rng.randint(0, len(vocab_words) - 1)] - - output_tokens[index] = masked_token - - masked_lms.append(MaskedLmInstance(index=index, label=tokens[index])) - - masked_lms = sorted(masked_lms, key=lambda x: x.index) - - masked_lm_positions = [] - masked_lm_labels = [] - for p in masked_lms: - masked_lm_positions.append(p.index) - masked_lm_labels.append(p.label) - - return (output_tokens, masked_lm_positions, masked_lm_labels) - - -def truncate_seq_pair(tokens_a, tokens_b, max_num_tokens, rng): - """Truncates a pair of sequences to a maximum sequence length.""" - while True: - total_length = len(tokens_a) + len(tokens_b) - if total_length <= max_num_tokens: - break - - trunc_tokens = tokens_a if len(tokens_a) > len(tokens_b) else tokens_b - assert len(trunc_tokens) >= 1 - - # We want to sometimes truncate from the front and sometimes from the - # back to add more randomness and avoid biases. - if rng.random() < 0.5: - del trunc_tokens[0] - else: - trunc_tokens.pop() - - -def main(_): - tf.logging.set_verbosity(tf.logging.INFO) - - tokenizer = tokenization.FullTokenizer( - vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case) - - input_files = [] - for input_pattern in FLAGS.input_file.split(","): - input_files.extend(tf.gfile.Glob(input_pattern)) - - tf.logging.info("*** Reading from input files ***") - for input_file in input_files: - tf.logging.info(" %s", input_file) - - rng = random.Random(FLAGS.random_seed) - instances = create_training_instances( - input_files, tokenizer, FLAGS.max_seq_length, FLAGS.dupe_factor, - FLAGS.short_seq_prob, FLAGS.masked_lm_prob, FLAGS.max_predictions_per_seq, - rng) - - output_files = FLAGS.output_file.split(",") - tf.logging.info("*** Writing to output files ***") - for output_file in output_files: - tf.logging.info(" %s", output_file) - - write_instance_to_example_files(instances, tokenizer, FLAGS.max_seq_length, - FLAGS.max_predictions_per_seq, output_files) - - -if __name__ == "__main__": - flags.mark_flag_as_required("input_file") - flags.mark_flag_as_required("output_file") - flags.mark_flag_as_required("vocab_file") - tf.app.run() diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/modeling.py b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/modeling.py index c32e2e67e4d99879ed5ec3c8aa33de04a75beeb2..01a1c8d251a661766a92afbc418c6bc88cbdefe6 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/modeling.py +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/modeling.py @@ -31,7 +31,6 @@ from gpu_environment import get_custom_getter from npu_bridge.estimator.npu_unary_ops import npu_unary_ops from npu_bridge.estimator import npu_ops -from npu_bridge.estimator.npu_aicore_ops import npu_aicore_ops class BertConfig(object): """Configuration for `BertModel`.""" @@ -289,6 +288,7 @@ def gelu(x): if tf.flags.FLAGS.npu_bert_fused_gelu: if tf.flags.FLAGS.use_fast_gelu: + from npu_bridge.estimator.npu_aicore_ops import npu_aicore_ops return npu_aicore_ops.fast_gelu(x) else: return npu_unary_ops.gelu(x) @@ -388,6 +388,7 @@ def dropout(input_tensor, dropout_prob): if tf.flags.FLAGS.npu_bert_npu_dropout: output = npu_ops.dropout(input_tensor, 1.0 - dropout_prob) elif tf.flags.FLAGS.npu_bert_npu_dropout_v3: + from npu_bridge.estimator import npu_aicore_ops output = npu_aicore_ops.dropout_v3(input_tensor, 1.0 - dropout_prob) else: output = tf.nn.dropout(input_tensor, 1.0 - dropout_prob) diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/run_classifier.py 
b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/run_classifier.py new file mode 100644 index 0000000000000000000000000000000000000000..5ae6f4eedaf571c414ebb1ce716a44c15a3e0bcc --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/run_classifier.py @@ -0,0 +1,851 @@ +# coding=utf-8 +# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved. +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""BERT finetuning runner.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from npu_bridge.npu_init import * + +import collections +import csv +import os +import modeling +import optimization +import tokenization +import tensorflow as tf +#import horovod.tensorflow as hvd +import time, sys +from npu_bridge.estimator.npu.npu_estimator import NPUEstimator,NPUEstimatorSpec +from utils.utils import LogEvalRunHook, LogTrainRunHook, setup_xla_flags +#from utils.gpu_affinity import set_affinity +import utils.dllogger_class +from dllogger import Verbosity +from utils.create_glue_data import * +import numpy as np +import tf_metrics + +os.environ['GE_USE_STATIC_MEMORY'] = '1' + +rank_size = int(os.getenv('RANK_SIZE')) +rank_id = int(os.getenv('RANK_ID')) + +flags = tf.flags + +FLAGS = flags.FLAGS + +## Required parameters +flags.DEFINE_string( + "data_dir", None, + "The input data dir. Should contain the .tsv files (or other data files) " + "for the task.") + +flags.DEFINE_string( + "bert_config_file", None, + "The config json file corresponding to the pre-trained BERT model.
" + "This specifies the model architecture.") + +flags.DEFINE_string("task_name", None, "The name of the task to train.") + +flags.DEFINE_string("vocab_file", None, + "The vocabulary file that the BERT model was trained on.") + +flags.DEFINE_string( + "output_dir", None, + "The output directory where the model checkpoints will be written.") + +# npu parameter +flags.DEFINE_bool('npu_bert_debug', False, 'If True, dropout and shuffle are disabled.') +flags.DEFINE_integer('init_loss_scale_value', 2 ** 32, 'Initial loss scale value for loss scale optimizer') +flags.DEFINE_integer("iterations_per_loop", 100, "How many steps to make in each estimator call.") +flags.DEFINE_bool("use_fp16_cls", False, "Whether to use fp16 in cls and pooler.") +flags.DEFINE_bool('npu_bert_fused_gelu', True, 'Whether to use npu defined gelu op') +flags.DEFINE_integer("npu_bert_loss_scale", 0, + "Loss scale mode: -1 disables loss scaling, 0 uses dynamic loss scaling, >=1 sets a static loss scale") +flags.DEFINE_bool("npu_bert_clip_by_global_norm", False, + "Use clip_by_global_norm if True, or use clip_by_norm for each gradient") + +flags.DEFINE_bool('npu_bert_npu_dropout', True, 'Whether to use npu defined dropout op') + +flags.DEFINE_bool('npu_bert_npu_dropout_v3', False, 'Whether to use npu defined dropout_v3 op') + +flags.DEFINE_bool('npu_bert_tail_optimize', False, 'Whether to use npu allreduce tail optimization') + +flags.DEFINE_bool('npu_gather', True, 'Whether to use gather_npu whose backward propagation avoids IndexedSlices') +flags.DEFINE_bool("distributed", False, "Whether to train for multi-npu runs") +flags.DEFINE_bool('hcom_parallel', True, 'Whether to use parallel allreduce') + +flags.DEFINE_bool('use_fast_gelu', True, 'Use fast_gelu instead of gelu') + +flags.DEFINE_bool('npu_bert_use_fused_adam_momentum', False, 'Whether to use fused apply and assign in adam') + +flags.DEFINE_bool('npu_bert_use_fused_lamb_momentum', False, 'Whether to use fused apply and assign in lamb') +flags.DEFINE_bool("enable_exception_dump", False, "Whether to enable exception dump.") +flags.DEFINE_bool("data_dump_flag", False, "Whether to dump data.") +flags.DEFINE_string("data_dump_step", "0", "How many steps to dump data.") +flags.DEFINE_string("data_dump_path", "./output/data_dump", "path to dump data.") +flags.DEFINE_bool("over_dump", False, "Whether to enable overflow dump.") +flags.DEFINE_string("over_dump_path", "./output/overflow_dump", "path to dump overflow data.") + +## Other parameters +flags.DEFINE_string( + "dllog_path", "bert_dllog.json", + "filename where dllogger writes to") + +flags.DEFINE_string( + "optimizer_type", "lamb", + "Optimizer type: adam or lamb") + +flags.DEFINE_string( + "init_checkpoint", None, + "Initial checkpoint (usually from a pre-trained BERT model).") + +flags.DEFINE_bool( + "do_lower_case", True, + "Whether to lower case the input text. Should be True for uncased " + "models and False for cased models.") + +flags.DEFINE_integer( + "max_seq_length", 128, + "The maximum total input sequence length after WordPiece tokenization.
" + "Sequences longer than this will be truncated, and sequences shorter " + "than this will be padded.") + +flags.DEFINE_bool("do_train", False, "Whether to run training.") + +flags.DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.") + +flags.DEFINE_bool( + "do_predict", False, + "Whether to run the model in inference mode on the test set.") + +flags.DEFINE_integer("train_batch_size", 32, "Total batch size for training.") + +flags.DEFINE_integer("eval_batch_size", 8, "Total batch size for eval.") + +flags.DEFINE_integer("predict_batch_size", 8, "Total batch size for predict.") + +flags.DEFINE_float("learning_rate", 5e-5, "The initial learning rate for Adam.") + +flags.DEFINE_bool("use_trt", False, "Whether to use TF-TRT") + +flags.DEFINE_float("num_train_epochs", 3.0, + "Total number of training epochs to perform.") + +flags.DEFINE_float( + "warmup_proportion", 0.1, + "Proportion of training to perform linear learning rate warmup for. " + "E.g., 0.1 = 10% of training.") + +flags.DEFINE_integer("save_checkpoints_steps", 1000, + "How often to save the model checkpoint.") +flags.DEFINE_integer("display_loss_steps", 10, + "How often to print loss from estimator") + +flags.DEFINE_integer("num_accumulation_steps", 1, + "Number of accumulation steps before gradient update. " + "Global batch size = num_accumulation_steps * train_batch_size") +flags.DEFINE_bool("amp", True, "Whether to enable AMP ops. When false, uses TF32 on A100 and FP32 on V100 GPUs.") +flags.DEFINE_bool("use_xla", True, "Whether to enable XLA JIT compilation.") +flags.DEFINE_bool("horovod", False, "Whether to use Horovod for multi-gpu runs") + +flags.DEFINE_bool( + "verbose_logging", False, + "If true, all of the warnings related to data processing will be printed. " + "A number of warnings are expected for a normal evaluation.") + +#npu parameter +flags.DEFINE_string("precision_mode", "allow_mix_precision", "NPU precision mode") + +##################NPU_modify start############################# +flags.DEFINE_bool("autotune", False, "Whether to enable autotune") +flags.DEFINE_bool("profiling", False, "Whether to enable profiling") +flags.DEFINE_string("profiling_dump_path", "test/output/profiling_path", "Only used if `profiling` is True") + +def file_based_input_fn_builder(input_file, batch_size, seq_length, is_training, + drop_remainder, hvd=None): + """Creates an `input_fn` closure to be passed to Estimator.""" + + name_to_features = { + "input_ids": tf.io.FixedLenFeature([seq_length], tf.int64), + "input_mask": tf.io.FixedLenFeature([seq_length], tf.int64), + "segment_ids": tf.io.FixedLenFeature([seq_length], tf.int64), + "label_ids": tf.io.FixedLenFeature([], tf.int64), + } + + def _decode_record(record, name_to_features): + """Decodes a record to a TensorFlow example.""" + example = tf.parse_single_example(record, name_to_features) + + # tf.Example only supports tf.int64, but the TPU only supports tf.int32. + # So cast all int64 to int32. + for name in list(example.keys()): + t = example[name] + if t.dtype == tf.int64: + t = tf.to_int32(t) + example[name] = t + + return example + + def input_fn(): + """The actual input function.""" + + # For training, we want a lot of parallel reading and shuffling. + # For eval, we want no shuffling and parallel reading doesn't matter.
+ d = tf.data.TFRecordDataset(input_file) + if is_training: + if FLAGS.distributed: + d = d.shard(rank_size, rank_id) + d = d.repeat() + d = d.shuffle(buffer_size=100) + + d = d.apply( + tf.contrib.data.map_and_batch( + lambda record: _decode_record(record, name_to_features), + batch_size=batch_size, + drop_remainder=True)) + + return d + + return input_fn + +def create_model(bert_config, is_training, input_ids, input_mask, segment_ids, + labels, num_labels, use_one_hot_embeddings): + """Creates a classification model.""" + model = modeling.BertModel( + config=bert_config, + is_training=is_training, + input_ids=input_ids, + input_mask=input_mask, + token_type_ids=segment_ids, + use_one_hot_embeddings=use_one_hot_embeddings, + compute_type=tf.float16 if FLAGS.precision_mode == "allow_mix_precision" else tf.float32) + + # In the demo, we are doing a simple classification task on the entire + # segment. + # + # If you want to use the token-level output, use model.get_sequence_output() + # instead. + output_layer = model.get_pooled_output() + + hidden_size = output_layer.shape[-1].value + + output_weights = tf.get_variable( + "output_weights", [num_labels, hidden_size], + initializer=tf.truncated_normal_initializer(stddev=0.02)) + + output_bias = tf.get_variable( + "output_bias", [num_labels], initializer=tf.zeros_initializer()) + + with tf.variable_scope("loss"): + if is_training: + # I.e., 0.1 dropout + output_layer = npu_ops.dropout(output_layer, keep_prob=0.9) + + logits = tf.matmul(output_layer, output_weights, transpose_b=True) + logits = tf.nn.bias_add(logits, output_bias, name='cls_logits') + probabilities = tf.nn.softmax(logits, axis=-1, name='cls_probabilities') + log_probs = tf.nn.log_softmax(logits, axis=-1) + + one_hot_labels = tf.one_hot(labels, depth=num_labels, dtype=tf.float32) + + per_example_loss = -tf.reduce_sum(one_hot_labels * log_probs, axis=-1, name='cls_per_example_loss') + loss = tf.reduce_mean(per_example_loss, name='cls_loss') + + return (loss, per_example_loss, logits, probabilities) + +def get_frozen_tftrt_model(bert_config, shape, num_labels, use_one_hot_embeddings, init_checkpoint): + tf_config = tf.compat.v1.ConfigProto() + tf_config.gpu_options.allow_growth = True + output_node_names = ['loss/cls_loss', 'loss/cls_per_example_loss', 'loss/cls_logits', 'loss/cls_probabilities'] + + with tf.Session(config=npu_config_proto(config_proto=tf_config)) as tf_sess: + input_ids = tf.placeholder(tf.int32, shape, 'input_ids') + input_mask = tf.placeholder(tf.int32, shape, 'input_mask') + segment_ids = tf.placeholder(tf.int32, shape, 'segment_ids') + label_ids = tf.placeholder(tf.int32, (None), 'label_ids') + + create_model(bert_config, False, input_ids, input_mask, segment_ids, label_ids, + num_labels, use_one_hot_embeddings) + + tvars = tf.trainable_variables() + (assignment_map, initialized_variable_names) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint) + tf.train.init_from_checkpoint(init_checkpoint, assignment_map) + tf_sess.run(tf.global_variables_initializer()) + print("LOADED!") + tf.compat.v1.logging.info("**** Trainable Variables ****") + for var in tvars: + init_string = "" + if var.name in initialized_variable_names: + init_string = ", *INIT_FROM_CKPT*" + else: + init_string = ", *NOTTTTTTTTTTTTTTTTTTTTT" + tf.compat.v1.logging.info(" name = %s, shape = %s%s", var.name, var.shape, init_string) + + frozen_graph = tf.graph_util.convert_variables_to_constants(tf_sess, + tf_sess.graph.as_graph_def(), output_node_names) + + num_nodes = 
len(frozen_graph.node) + print('Converting graph using TensorFlow-TensorRT...') + from tensorflow.python.compiler.tensorrt import trt_convert as trt + converter = trt.TrtGraphConverter( + input_graph_def=frozen_graph, + nodes_blacklist=output_node_names, + max_workspace_size_bytes=(4096 << 20) - 1000, + precision_mode = "FP16" if FLAGS.amp else "FP32", + minimum_segment_size=4, + is_dynamic_op=True, + maximum_cached_engines=1000 + ) + frozen_graph = converter.convert() + + print('Total node count before and after TF-TRT conversion:', + num_nodes, '->', len(frozen_graph.node)) + print('TRT node count:', + len([1 for n in frozen_graph.node if str(n.op) == 'TRTEngineOp'])) + + with tf.io.gfile.GFile("frozen_modelTRT.pb", "wb") as f: + f.write(frozen_graph.SerializeToString()) + + return frozen_graph + + + +def model_fn_builder(task_name, bert_config, num_labels, init_checkpoint, learning_rate, + num_train_steps, num_warmup_steps, + use_one_hot_embeddings, distributed, hvd=None): + """Returns `model_fn` closure for Estimator.""" + + def model_fn(features, labels, mode, params): # pylint: disable=unused-argument + """The `model_fn` for Estimator.""" + + def metric_fn(per_example_loss, label_ids, logits): + predictions = tf.argmax(logits, axis=-1, output_type=tf.int32) + if task_name == "cola": + FN, FN_op = tf.metrics.false_negatives(labels=label_ids, predictions=predictions) + FP, FP_op = tf.metrics.false_positives(labels=label_ids, predictions=predictions) + TP, TP_op = tf.metrics.true_positives(labels=label_ids, predictions=predictions) + TN, TN_op = tf.metrics.true_negatives(labels=label_ids, predictions=predictions) + + MCC = (TP * TN - FP * FN) / ((TP + FP) * (TP + FN) * (TN + FP) * (TN + FN)) ** 0.5 + MCC_op = tf.group(FN_op, TN_op, TP_op, FP_op, tf.identity(MCC, name="MCC")) + return {"MCC": (MCC, MCC_op)} + elif task_name == "mrpc": + accuracy = tf.metrics.accuracy( + labels=label_ids, predictions=predictions) + loss = tf.metrics.mean(values=per_example_loss) + f1 = tf_metrics.f1(labels=label_ids, predictions=predictions, num_classes=2, pos_indices=[1]) + return { + "eval_accuracy": accuracy, + "eval_f1": f1, + "eval_loss": loss, + } + else: + accuracy = tf.metrics.accuracy( + labels=label_ids, predictions=predictions) + loss = tf.metrics.mean(values=per_example_loss) + return { + "eval_accuracy": accuracy, + "eval_loss": loss, + } + tf.compat.v1.logging.info("*** Features ***") + for name in sorted(features.keys()): + tf.compat.v1.logging.info(" name = %s, shape = %s" % (name, features[name].shape)) + + input_ids = features["input_ids"] + input_mask = features["input_mask"] + segment_ids = features["segment_ids"] + label_ids = features["label_ids"] + + is_training = (mode == tf.estimator.ModeKeys.TRAIN) + + if not is_training and FLAGS.use_trt: + trt_graph = get_frozen_tftrt_model(bert_config, input_ids.shape, num_labels, use_one_hot_embeddings, init_checkpoint) + (total_loss, per_example_loss, logits, probabilities) = tf.import_graph_def(trt_graph, + input_map={'input_ids':input_ids, 'input_mask':input_mask, 'segment_ids':segment_ids, 'label_ids':label_ids}, + return_elements=['loss/cls_loss:0', 'loss/cls_per_example_loss:0', 'loss/cls_logits:0', 'loss/cls_probabilities:0'], + name='') + if mode == tf.estimator.ModeKeys.PREDICT: + predictions = {"probabilities": probabilities} + output_spec = tf.estimator.EstimatorSpec( + mode=mode, predictions=predictions) + elif mode == tf.estimator.ModeKeys.EVAL: + eval_metric_ops =
metric_fn(per_example_loss, label_ids, logits) + output_spec = tf.estimator.EstimatorSpec( + mode=mode, + loss=total_loss, + eval_metric_ops=eval_metric_ops) + return output_spec + (total_loss, per_example_loss, logits, probabilities) = create_model( + bert_config, is_training, input_ids, input_mask, segment_ids, label_ids, + num_labels, use_one_hot_embeddings) + + tvars = tf.trainable_variables() + initialized_variable_names = {} + if init_checkpoint and (hvd is None or rank_id == 0): + (assignment_map, initialized_variable_names + ) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint) + tf.train.init_from_checkpoint(init_checkpoint, assignment_map) + + if FLAGS.verbose_logging: + tf.compat.v1.logging.info("**** Trainable Variables ****") + for var in tvars: + init_string = "" + if var.name in initialized_variable_names: + init_string = ", *INIT_FROM_CKPT*" + tf.compat.v1.logging.info(" name = %s, shape = %s%s", var.name, var.shape, + init_string) + + output_spec = None + if mode == tf.estimator.ModeKeys.TRAIN: + + train_op = optimization.create_optimizer( + total_loss, learning_rate, num_train_steps, num_warmup_steps, + hvd, False, FLAGS.amp, FLAGS.num_accumulation_steps, FLAGS.optimizer_type) + output_spec = tf.estimator.EstimatorSpec( + mode=mode, + loss=total_loss, + train_op=train_op) + elif mode == tf.estimator.ModeKeys.EVAL: + dummy_op = tf.no_op() + # Need to call mixed precision graph rewrite if fp16 to enable graph rewrite + if FLAGS.amp: + loss_scaler = FixedLossScaleManager(1) + dummy_op = tf.train.experimental.enable_mixed_precision_graph_rewrite( + optimization.LAMBOptimizer(learning_rate=0.0), loss_scaler) + eval_metric_ops = metric_fn(per_example_loss, label_ids, logits) + output_spec = tf.estimator.EstimatorSpec( + mode=mode, + loss=total_loss, + eval_metric_ops=eval_metric_ops) + else: + dummy_op = tf.no_op() + # Need to call mixed precision graph rewrite if fp16 to enable graph rewrite + if FLAGS.amp: + dummy_op = tf.train.experimental.enable_mixed_precision_graph_rewrite( + optimization.LAMBOptimizer(learning_rate=0.0)) + output_spec = tf.estimator.EstimatorSpec( + mode=mode, predictions=probabilities) + return output_spec + + return model_fn + + +# This function is not used by this file but is still used by the Colab and +# people who depend on it. +def input_fn_builder(features, batch_size, seq_length, is_training, drop_remainder, hvd=None): + """Creates an `input_fn` closure to be passed to Estimator.""" + + all_input_ids = [] + all_input_mask = [] + all_segment_ids = [] + all_label_ids = [] + + for feature in features: + all_input_ids.append(feature.input_ids) + all_input_mask.append(feature.input_mask) + all_segment_ids.append(feature.segment_ids) + all_label_ids.append(feature.label_id) + + def input_fn(): + """The actual input function.""" + + num_examples = len(features) + + # This is for demo purposes and does NOT scale to large data sets. We do + # not use Dataset.from_generator() because that uses tf.py_func which is + # not TPU compatible. The right way to load data is with TFRecordReader. 
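# A hedged sketch of the scalable path mentioned above, assuming the examples
# were already serialized to TFRecord (tfrecord_path is a hypothetical name):
#   d = tf.data.TFRecordDataset(tfrecord_path)
#   d = d.map(lambda rec: tf.parse_single_example(rec, name_to_features))
# file_based_input_fn_builder earlier in this file follows that pattern.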
+ d = tf.data.Dataset.from_tensor_slices({ + "input_ids": + tf.constant( + all_input_ids, shape=[num_examples, seq_length], + dtype=tf.int32), + "input_mask": + tf.constant( + all_input_mask, + shape=[num_examples, seq_length], + dtype=tf.int32), + "segment_ids": + tf.constant( + all_segment_ids, + shape=[num_examples, seq_length], + dtype=tf.int32), + "label_ids": + tf.constant(all_label_ids, shape=[num_examples], dtype=tf.int32), + }) + + if is_training: + if hvd is not None: d = d.shard(get_rank_size(), get_rank_id()) + d = d.repeat() + d = d.shuffle(buffer_size=100) + + d = d.batch(batch_size=batch_size, drop_remainder=True) + return d + + return input_fn + + +def main(_): + + setup_xla_flags() + + tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO) + dllogging = utils.dllogger_class.dllogger_class(FLAGS.dllog_path) + + if FLAGS.horovod: + print() + + processors = { + "cola": ColaProcessor, + "mnli": MnliProcessor, + "mrpc": MrpcProcessor, + "xnli": XnliProcessor, + } + + if not FLAGS.do_train and not FLAGS.do_eval and not FLAGS.do_predict: + raise ValueError( + "At least one of `do_train`, `do_eval` or `do_predict` must be True.") + + bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file) + + if FLAGS.max_seq_length > bert_config.max_position_embeddings: + raise ValueError( + "Cannot use sequence length %d because the BERT model " + "was only trained up to sequence length %d" % + (FLAGS.max_seq_length, bert_config.max_position_embeddings)) + + tf.io.gfile.makedirs(FLAGS.output_dir) + + task_name = FLAGS.task_name.lower() + + if task_name not in processors: + raise ValueError("Task not found: %s" % (task_name)) + + processor = processors[task_name]() + + label_list = processor.get_labels() + + tokenizer = tokenization.FullTokenizer( + vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case) + + master_process = True + training_hooks = [] + global_batch_size = FLAGS.train_batch_size * FLAGS.num_accumulation_steps + hvd_rank = 0 + + config_proto = tf.compat.v1.ConfigProto() + + if FLAGS.distributed: + tf.compat.v1.logging.info("Multi-NPU training with NPU") + tf.compat.v1.logging.info("rank.size() = %d rank.id() = %d", rank_size, rank_id) + global_batch_size = FLAGS.train_batch_size * FLAGS.num_accumulation_steps * rank_size + master_process = (rank_id == 0) + hvd_rank = rank_id + config_proto.gpu_options.visible_device_list = str(rank_id) + # set_affinity(get_npu_local_rank_id()) + #if get_npu_rank_size() > 1: + # training_hooks.append(NpuEmptyHook()) + + if FLAGS.use_xla: + config_proto.graph_options.optimizer_options.global_jit_level = tf.compat.v1.OptimizerOptions.ON_1 + if FLAGS.amp: + tf.enable_resource_variables() + + auto_tune_mode = None + profiling_config = ProfilingConfig(enable_profiling=False, profiling_options=None) + dump_config = DumpConfig(enable_dump=False, dump_path=None, dump_step=None, dump_mode="output", + enable_dump_debug=False, dump_debug_mode="all") + + if FLAGS.autotune: + auto_tune_mode = "RL,GA" + if FLAGS.profiling: + profiling_config = ProfilingConfig( + enable_profiling = True, + profiling_options = '{"output":"%s",\ + "training_trace":"on",\ + "task_trace":"on",\ + "aicpu":"on",\ + "aic_metrics":"PipeUtilization"}' % FLAGS.profiling_dump_path + ) + + if FLAGS.over_dump: + dump_config = DumpConfig(dump_path=FLAGS.over_dump_path, enable_dump_debug=True, dump_debug_mode="all") + if FLAGS.data_dump_flag: + dump_config = DumpConfig(enable_dump=True, dump_path=FLAGS.data_dump_path, dump_step=FLAGS.data_dump_step,
dump_mode="all") + + run_config = NPURunConfig( + model_dir=FLAGS.output_dir, + save_checkpoints_steps=FLAGS.save_checkpoints_steps if master_process else 0, + iterations_per_loop=FLAGS.iterations_per_loop, + session_config=config_proto, + hcom_parallel=FLAGS.hcom_parallel, + is_tailing_optimization=FLAGS.npu_bert_tail_optimize, + precision_mode=FLAGS.precision_mode, + keep_checkpoint_max=5, + log_step_count_steps=10, + auto_tune_mode=auto_tune_mode, + profiling_config=profiling_config, + dump_config=dump_config + ) + + if master_process: + tf.compat.v1.logging.info("***** Configuration *****") + for key in FLAGS.__flags.keys(): + tf.compat.v1.logging.info(' {}: {}'.format(key, getattr(FLAGS, key))) + tf.compat.v1.logging.info("**************************") + + train_examples = None + num_train_steps = None + num_warmup_steps = None + training_hooks.append(LogTrainRunHook(global_batch_size, hvd_rank, FLAGS.save_checkpoints_steps, num_steps_ignore_xla=25)) + + if FLAGS.do_train: + train_examples = processor.get_train_examples(FLAGS.data_dir) + + num_train_steps = int( + len(train_examples) / global_batch_size * FLAGS.num_train_epochs) + num_warmup_steps = int(num_train_steps * FLAGS.warmup_proportion) + + start_index = 0 + end_index = len(train_examples) + tmp_filenames = [os.path.join(FLAGS.output_dir, "train.tf_record")] + + if FLAGS.distributed: + tmp_filenames = [os.path.join(FLAGS.output_dir, "train.tf_record{}".format(i)) for i in range(rank_size)] + num_examples_per_rank = len(train_examples) // rank_size + remainder = len(train_examples) % rank_size + if rank_id < remainder: + start_index = rank_id * (num_examples_per_rank + 1) + end_index = start_index + num_examples_per_rank + 1 + else: + start_index = rank_id * num_examples_per_rank + remainder + end_index = start_index + (num_examples_per_rank) + + model_fn = model_fn_builder( + task_name=task_name, + bert_config=bert_config, + num_labels=len(label_list), + init_checkpoint=FLAGS.init_checkpoint, + learning_rate=FLAGS.learning_rate if not FLAGS.distributed else FLAGS.learning_rate * rank_size, + num_train_steps=num_train_steps, + num_warmup_steps=num_warmup_steps, + use_one_hot_embeddings=False, + distributed=FLAGS.distributed, + hvd=None if not FLAGS.horovod else hvd) + + + estimator = NPUEstimator( + model_fn=model_fn, + config=run_config) + + if FLAGS.do_train: + file_based_convert_examples_to_features( + train_examples[start_index:end_index], label_list, FLAGS.max_seq_length, tokenizer, tmp_filenames[hvd_rank]) + + tf.compat.v1.logging.info("***** Running training *****") + tf.compat.v1.logging.info(" Num examples = %d", len(train_examples)) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.train_batch_size) + tf.compat.v1.logging.info(" Num steps = %d", num_train_steps) + train_input_fn = file_based_input_fn_builder( + input_file=tmp_filenames, + batch_size=FLAGS.train_batch_size, + seq_length=FLAGS.max_seq_length, + is_training=True, + drop_remainder=True, + hvd=None if not FLAGS.horovod else hvd) + + train_start_time = time.time() + estimator.train(input_fn=train_input_fn, max_steps=num_train_steps, hooks=npu_hooks_append(hooks_list=training_hooks)) + train_time_elapsed = time.time() - train_start_time + #train_time_wo_overhead = training_hooks[-2].total_time + avg_sentences_per_second = num_train_steps * global_batch_size * 1.0 / train_time_elapsed + #ss_sentences_per_second = (training_hooks[-2].count - training_hooks[-2].skipped) * global_batch_size * 1.0 / train_time_wo_overhead + + if master_process: +
tf.compat.v1.logging.info("-----------------------------") + tf.compat.v1.logging.info("Total Training Time = %0.2f for Sentences = %d", train_time_elapsed, + num_train_steps * global_batch_size) + #tf.compat.v1.logging.info("Total Training Time W/O Overhead = %0.2f for Sentences = %d", train_time_wo_overhead, + # (training_hooks[-2].count - training_hooks[-2].skipped) * global_batch_size) + tf.compat.v1.logging.info("Throughput Average (sentences/sec) with overhead = %0.2f", avg_sentences_per_second) + #tf.compat.v1.logging.info("Throughput Average (sentences/sec) = %0.2f", ss_sentences_per_second) + tf.compat.v1.logging.info("-----------------------------") + + if FLAGS.do_eval and master_process: + eval_examples = processor.get_dev_examples(FLAGS.data_dir) + eval_file = os.path.join(FLAGS.output_dir, "eval.tf_record") + file_based_convert_examples_to_features( + eval_examples, label_list, FLAGS.max_seq_length, tokenizer, eval_file) + + tf.compat.v1.logging.info("***** Running evaluation *****") + tf.compat.v1.logging.info(" Num examples = %d", len(eval_examples)) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.eval_batch_size) + + eval_drop_remainder = False + eval_input_fn = file_based_input_fn_builder( + input_file=eval_file, + batch_size=FLAGS.eval_batch_size, + seq_length=FLAGS.max_seq_length, + is_training=False, + drop_remainder=eval_drop_remainder) + + eval_hooks = [LogEvalRunHook(FLAGS.eval_batch_size)] + eval_start_time = time.time() + result = estimator.evaluate(input_fn=eval_input_fn, hooks=eval_hooks) + + eval_time_elapsed = time.time() - eval_start_time + + time_list = eval_hooks[-1].time_list + time_list.sort() + # Removing outliers (init/warmup) in throughput computation. + eval_time_wo_overhead = sum(time_list[:int(len(time_list) * 0.8)]) + num_sentences = (int(len(time_list) * 0.8)) * FLAGS.eval_batch_size + + avg = np.mean(time_list) + cf_50 = max(time_list[:int(len(time_list) * 0.50)]) + cf_90 = max(time_list[:int(len(time_list) * 0.90)]) + cf_95 = max(time_list[:int(len(time_list) * 0.95)]) + cf_99 = max(time_list[:int(len(time_list) * 0.99)]) + cf_100 = max(time_list[:int(len(time_list) * 1)]) + ss_sentences_per_second = num_sentences * 1.0 / eval_time_wo_overhead + + tf.compat.v1.logging.info("-----------------------------") + tf.compat.v1.logging.info("Total Inference Time = %0.2f for Sentences = %d", eval_time_elapsed, + eval_hooks[-1].count * FLAGS.eval_batch_size) + tf.compat.v1.logging.info("Total Inference Time W/O Overhead = %0.2f for Sentences = %d", eval_time_wo_overhead, + num_sentences) + tf.compat.v1.logging.info("Summary Inference Statistics on EVAL set") + tf.compat.v1.logging.info("Batch size = %d", FLAGS.eval_batch_size) + tf.compat.v1.logging.info("Sequence Length = %d", FLAGS.max_seq_length) + tf.compat.v1.logging.info("Precision = %s", "fp16" if FLAGS.amp else "fp32") + tf.compat.v1.logging.info("Latency Confidence Level 50 (ms) = %0.2f", cf_50 * 1000) + tf.compat.v1.logging.info("Latency Confidence Level 90 (ms) = %0.2f", cf_90 * 1000) + tf.compat.v1.logging.info("Latency Confidence Level 95 (ms) = %0.2f", cf_95 * 1000) + tf.compat.v1.logging.info("Latency Confidence Level 99 (ms) = %0.2f", cf_99 * 1000) + tf.compat.v1.logging.info("Latency Confidence Level 100 (ms) = %0.2f", cf_100 * 1000) + tf.compat.v1.logging.info("Latency Average (ms) = %0.2f", avg * 1000) + tf.compat.v1.logging.info("Throughput Average (sentences/sec) = %0.2f", ss_sentences_per_second) + dllogging.logger.log(step=(), data={"throughput_val": ss_sentences_per_second}, 
verbosity=Verbosity.DEFAULT) + tf.compat.v1.logging.info("-----------------------------") + + + output_eval_file = os.path.join(FLAGS.output_dir, "eval_results.txt") + with tf.io.gfile.GFile(output_eval_file, "w") as writer: + tf.compat.v1.logging.info("***** Eval results *****") + for key in sorted(result.keys()): + dllogging.logger.log(step=(), data={key: float(result[key])}, verbosity=Verbosity.DEFAULT) + tf.compat.v1.logging.info(" %s = %s", key, str(result[key])) + writer.write("%s = %s\n" % (key, str(result[key]))) + + if FLAGS.do_predict and master_process: + predict_examples = processor.get_test_examples(FLAGS.data_dir) + predict_file = os.path.join(FLAGS.output_dir, "predict.tf_record") + file_based_convert_examples_to_features(predict_examples, label_list, + FLAGS.max_seq_length, tokenizer, + predict_file) + + tf.compat.v1.logging.info("***** Running prediction *****") + tf.compat.v1.logging.info(" Num examples = %d", len(predict_examples)) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.predict_batch_size) + + predict_drop_remainder = False + predict_input_fn = file_based_input_fn_builder( + input_file=predict_file, + batch_size=FLAGS.predict_batch_size, + seq_length=FLAGS.max_seq_length, + is_training=False, + drop_remainder=predict_drop_remainder) + + predict_hooks = [LogEvalRunHook(FLAGS.predict_batch_size)] + predict_start_time = time.time() + + output_predict_file = os.path.join(FLAGS.output_dir, "test_results.tsv") + with tf.io.gfile.GFile(output_predict_file, "w") as writer: + tf.compat.v1.logging.info("***** Predict results *****") + for prediction in estimator.predict(input_fn=predict_input_fn, hooks=predict_hooks, + yield_single_examples=False): + output_line = "\t".join( + str(class_probability) for class_probability in prediction) + "\n" + writer.write(output_line) + + + predict_time_elapsed = time.time() - predict_start_time + + time_list = predict_hooks[-1].time_list + time_list.sort() + # Removing outliers (init/warmup) in throughput computation.
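# Note on the trimming below: time_list is sorted ascending, so summing the
# fastest 80% of per-batch times drops the slow init/warmup batches, and each
# cf_XX value (the max of the fastest XX% of batches) approximates the XXth
# percentile of batch latency.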
+ predict_time_wo_overhead = sum(time_list[:int(len(time_list) * 0.8)]) + num_sentences = (int(len(time_list) * 0.8)) * FLAGS.predict_batch_size + + avg = np.mean(time_list) + cf_50 = max(time_list[:int(len(time_list) * 0.50)]) + cf_90 = max(time_list[:int(len(time_list) * 0.90)]) + cf_95 = max(time_list[:int(len(time_list) * 0.95)]) + cf_99 = max(time_list[:int(len(time_list) * 0.99)]) + cf_100 = max(time_list[:int(len(time_list) * 1)]) + ss_sentences_per_second = num_sentences * 1.0 / predict_time_wo_overhead + + tf.compat.v1.logging.info("-----------------------------") + tf.compat.v1.logging.info("Total Inference Time = %0.2f for Sentences = %d", predict_time_elapsed, + predict_hooks[-1].count * FLAGS.predict_batch_size) + tf.compat.v1.logging.info("Total Inference Time W/O Overhead = %0.2f for Sentences = %d", predict_time_wo_overhead, + num_sentences) + tf.compat.v1.logging.info("Summary Inference Statistics on TEST SET") + tf.compat.v1.logging.info("Batch size = %d", FLAGS.predict_batch_size) + tf.compat.v1.logging.info("Sequence Length = %d", FLAGS.max_seq_length) + tf.compat.v1.logging.info("Precision = %s", "fp16" if FLAGS.amp else "fp32") + tf.compat.v1.logging.info("Latency Confidence Level 50 (ms) = %0.2f", cf_50 * 1000) + tf.compat.v1.logging.info("Latency Confidence Level 90 (ms) = %0.2f", cf_90 * 1000) + tf.compat.v1.logging.info("Latency Confidence Level 95 (ms) = %0.2f", cf_95 * 1000) + tf.compat.v1.logging.info("Latency Confidence Level 99 (ms) = %0.2f", cf_99 * 1000) + tf.compat.v1.logging.info("Latency Confidence Level 100 (ms) = %0.2f", cf_100 * 1000) + tf.compat.v1.logging.info("Latency Average (ms) = %0.2f", avg * 1000) + tf.compat.v1.logging.info("Throughput Average (sentences/sec) = %0.2f", ss_sentences_per_second) + dllogging.logger.log(step=(), data={"throughput_val": ss_sentences_per_second}, verbosity=Verbosity.DEFAULT) + tf.compat.v1.logging.info("-----------------------------") + + +if __name__ == "__main__": + if FLAGS.horovod: + (npu_sess, npu_shutdown) = init_resource() + flags.mark_flag_as_required("data_dir") + flags.mark_flag_as_required("task_name") + flags.mark_flag_as_required("vocab_file") + flags.mark_flag_as_required("bert_config_file") + flags.mark_flag_as_required("output_dir") + tf.compat.v1.app.run() + if FLAGS.horovod: + shutdown_resource(npu_sess, npu_shutdown) + close_session(npu_sess) + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/run_pretraining.py b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/run_pretraining.py index fdb3eda48c3e6c5c8d8f1153766fdb0ed7d02bd8..0ed1e2525b11a507dc7a3186e0e4803b6caf3f08 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/run_pretraining.py +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/run_pretraining.py @@ -26,7 +26,7 @@ import modeling import optimization import tensorflow as tf import glob -from utils import LogEvalRunHook +from utils.utils import LogEvalRunHook from tensorflow.core.protobuf import rewriter_config_pb2 from gpu_environment import get_custom_getter @@ -37,6 +37,9 @@ from npu_bridge.estimator.npu.npu_estimator import NPUEstimator os.environ['GE_USE_STATIC_MEMORY'] = '1' +rank_size = int(os.getenv('RANK_SIZE')) +rank_id = int(os.getenv('RANK_ID')) + flags = tf.flags FLAGS = flags.FLAGS @@ -147,7 +150,7 @@ flags.DEFINE_integer("npu_bert_loss_scale", 0, "Whether to use loss scale, -1 is flags.DEFINE_bool("npu_bert_clip_by_global_norm", False, "Use clip_by_global_norm if True, or use clip_by_norm for each gradient") 
-flags.DEFINE_bool('npu_bert_npu_dropout', True, 'Whether to use npu defined dropout op') +flags.DEFINE_bool('npu_bert_npu_dropout', False, 'Whether to use npu defined dropout op') flags.DEFINE_bool('npu_bert_npu_dropout_v3', True, 'Whether to use npu defined dropout_v3 op') @@ -163,6 +166,9 @@ flags.DEFINE_bool('npu_bert_use_fused_adam_momentum', True, 'Whether to use fuse flags.DEFINE_bool('npu_bert_use_fused_lamb_momentum', True, 'Whether to use fused apply and assign in lamb') +flags.DEFINE_integer("graph_memory_max_size", 26 * 1024 * 1024 * 1024, "feature map memory max size.") +flags.DEFINE_integer("variable_memory_max_size", 5 * 1024 * 1024 * 1024, "variable memory max size.") + # report samples/sec, total loss and learning rate during training class _LogSessionRunHook(tf.train.SessionRunHook): def __init__(self, global_batch_size, num_accumulation_steps, display_every=10, hvd_rank=-1): @@ -290,7 +296,7 @@ def model_fn_builder(bert_config, init_checkpoint, learning_rate, tvars = tf.trainable_variables() initialized_variable_names = {} - if init_checkpoint and (hvd is None or hvd.rank() == 0): + if init_checkpoint and (rank_id == 0): print("Loading checkpoint", init_checkpoint) (assignment_map, initialized_variable_names ) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint) @@ -500,9 +506,7 @@ def input_fn_builder(input_files, # For eval, we want no shuffling and parallel reading doesn't matter. if is_training: d = tf.data.Dataset.from_tensor_slices(tf.constant(input_files)) - if FLAGS.distributed: - rank_size = int(os.getenv('RANK_SIZE')) - rank_id = int(os.getenv('RANK_ID')) + if FLAGS.distributed: print('RANK_SIZE=', rank_size, ' RANK_ID=', rank_id) d = d.shard(rank_size, rank_id) d = d.repeat() @@ -633,7 +637,7 @@ def main(_): model_dir=FLAGS.output_dir, save_summary_steps=0, session_config=config, - save_checkpoints_steps=FLAGS.save_checkpoints_steps if not FLAGS.horovod or hvd.rank() == 0 else None, + save_checkpoints_steps=FLAGS.save_checkpoints_steps if rank_id == 0 else 0, # This variable controls how often estimator reports examples/sec. # Default value is every 100 steps. 
# When --report_loss is True, we set to very large value to prevent @@ -643,14 +647,14 @@ def main(_): enable_data_pre_proc=FLAGS.npu_bert_use_tdt, iterations_per_loop=FLAGS.iterations_per_loop, is_tailing_optimization=FLAGS.npu_bert_tail_optimize, + graph_memory_max_size=FLAGS.graph_memory_max_size, + variable_memory_max_size=FLAGS.variable_memory_max_size, hcom_parallel=FLAGS.hcom_parallel) - if FLAGS.distributed: - rank_size = int(os.getenv('RANK_SIZE')) model_fn = model_fn_builder( bert_config=bert_config, init_checkpoint=FLAGS.init_checkpoint, - learning_rate=FLAGS.learning_rate * rank_size if FLAGS.distributed else FLAGS.learning_rate, + learning_rate=FLAGS.learning_rate, num_train_steps=FLAGS.num_train_steps, num_warmup_steps=FLAGS.num_warmup_steps, use_one_hot_embeddings=False, @@ -688,7 +692,7 @@ def main(_): estimator.train(input_fn=train_input_fn, hooks=training_hooks, max_steps=FLAGS.num_train_steps) - if FLAGS.do_eval and (not FLAGS.horovod or hvd.rank() == 0): + if FLAGS.do_eval and (rank_id == 0): tf.logging.info("***** Running evaluation *****") tf.logging.info(" Batch size = %d", FLAGS.eval_batch_size) @@ -710,9 +714,10 @@ def main(_): input_fn=eval_input_fn, steps=FLAGS.max_eval_steps, hooks=eval_hooks) eval_time_elapsed = time.time() - eval_start_time - eval_time_wo_overhead = eval_hooks[-1].total_time - - num_sentences = (eval_hooks[-1].count - eval_hooks[-1].skipped) * FLAGS.eval_batch_size + time_list = eval_hooks[-1].time_list + time_list.sort() + eval_time_wo_overhead = sum(time_list[:int(len(time_list) * 0.99)]) + num_sentences = (int(len(time_list) * 0.99)) * FLAGS.eval_batch_size ss_sentences_per_second = num_sentences * 1.0 / eval_time_wo_overhead @@ -720,7 +725,7 @@ def main(_): tf.logging.info("Total Inference Time = %0.2f for Sentences = %d", eval_time_elapsed, eval_hooks[-1].count * FLAGS.eval_batch_size) tf.logging.info("Total Inference Time W/O Overhead = %0.2f for Sentences = %d", eval_time_wo_overhead, - (eval_hooks[-1].count - eval_hooks[-1].skipped) * FLAGS.eval_batch_size) + num_sentences) tf.logging.info("Summary Inference Statistics on EVAL set") tf.logging.info("Batch size = %d", FLAGS.eval_batch_size) tf.logging.info("Sequence Length = %d", FLAGS.max_seq_length) diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/run_squad.py b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/run_squad.py new file mode 100644 index 0000000000000000000000000000000000000000..0d21f03d2242e77081ff04244ff7352504b9ca0e --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/run_squad.py @@ -0,0 +1,1327 @@ +# coding=utf-8 +# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved. +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""Run BERT on SQuAD 1.1 and SQuAD 2.0.""" + +from __future__ import absolute_import, division, print_function +from npu_bridge.npu_init import * + +import collections +import json +import math +import os +import random +import shutil +import time + +#import horovod.tensorflow as hvd +import numpy as np +import six +import tensorflow as tf +from tensorflow.python.client import device_lib + +import modeling +import optimization +import tokenization +from utils.create_squad_data import * +from utils.utils import LogEvalRunHook, LogTrainRunHook, ExamplesPerSecondHook +#from utils.gpu_affinity import set_affinity +# import utils.dllogger_class +# from dllogger import Verbosity + +flags = tf.flags +FLAGS = None + +rank_size = int(os.getenv('RANK_SIZE')) +rank_id = int(os.getenv('RANK_ID')) + +def extract_run_squad_flags(): + + ## Required parameters + flags.DEFINE_string( + "bert_config_file", None, + "The config json file corresponding to the pre-trained BERT model. " + "This specifies the model architecture.") + + flags.DEFINE_string("vocab_file", None, + "The vocabulary file that the BERT model was trained on.") + + flags.DEFINE_string( + "output_dir", None, + "The output directory where the model checkpoints will be written.") + + ## Other parameters + + flags.DEFINE_string( + "dllog_path", "/results/bert_dllog.json", + "filename where dllogger writes to") + + flags.DEFINE_string("train_file", None, + "SQuAD json for training. E.g., train-v1.1.json") + + flags.DEFINE_string( + "predict_file", None, + "SQuAD json for predictions. E.g., dev-v1.1.json or test-v1.1.json") + flags.DEFINE_string( + "eval_script", None, + "SQuAD evaluate.py file to compute f1 and exact_match E.g., evaluate-v1.1.py") + + flags.DEFINE_string( + "init_checkpoint", None, + "Initial checkpoint (usually from a pre-trained BERT model).") + + flags.DEFINE_bool( + "do_lower_case", True, + "Whether to lower case the input text. Should be True for uncased " + "models and False for cased models.") + + flags.DEFINE_integer( + "max_seq_length", 384, + "The maximum total input sequence length after WordPiece tokenization. " + "Sequences longer than this will be truncated, and sequences shorter " + "than this will be padded.") + + flags.DEFINE_integer( + "doc_stride", 128, + "When splitting up a long document into chunks, how much stride to " + "take between chunks.") + + flags.DEFINE_integer( + "max_query_length", 64, + "The maximum number of tokens for the question. 
Questions longer than " + "this will be truncated to this length.") + + flags.DEFINE_bool("do_train", False, "Whether to run training.") + + flags.DEFINE_bool("do_predict", False, "Whether to run eval on the dev set.") + + flags.DEFINE_integer("train_batch_size", 8, "Total batch size for training.") + + flags.DEFINE_integer("predict_batch_size", 8, + "Total batch size for predictions.") + + flags.DEFINE_float("learning_rate", 5e-6, "The initial learning rate for Adam.") + + flags.DEFINE_bool("use_trt", False, "Whether to use TF-TRT") + + flags.DEFINE_bool("horovod", False, "Whether to use Horovod for multi-gpu runs") + + flags.DEFINE_float("num_train_epochs", 3.0, + "Total number of training epochs to perform.") + + flags.DEFINE_integer("num_train_steps", 0, + "How many steps to train.") + + flags.DEFINE_float( + "warmup_proportion", 0.1, + "Proportion of training to perform linear learning rate warmup for. " + "E.g., 0.1 = 10% of training.") + + flags.DEFINE_integer("save_checkpoints_steps", 5000, + "How often to save the model checkpoint.") + flags.DEFINE_integer("display_loss_steps", 100, + "How often to print loss from estimator") + + flags.DEFINE_integer("num_accumulation_steps", 1, + "Number of accumulation steps before gradient update" + "Global batch size = num_accumulation_steps * train_batch_size") + + flags.DEFINE_integer( + "n_best_size", 20, + "The total number of n-best predictions to generate in the " + "nbest_predictions.json output file.") + + flags.DEFINE_integer( + "max_answer_length", 30, + "The maximum length of an answer that can be generated. This is needed " + "because the start and end predictions are not conditioned on one another.") + + + flags.DEFINE_bool( + "verbose_logging", False, + "If true, all of the warnings related to data processing will be printed. " + "A number of warnings are expected for a normal SQuAD evaluation.") + + flags.DEFINE_bool( + "version_2_with_negative", False, + "If true, the SQuAD examples contain some that do not have an answer.") + + flags.DEFINE_float( + "null_score_diff_threshold", 0.0, + "If null_score - best_non_null is greater than the threshold predict null.") + + flags.DEFINE_bool("amp", True, "Whether to enable AMP ops. 
When false, uses TF32 on A100 and FP32 on V100 GPUS.") + flags.DEFINE_bool("use_xla", False, "Whether to enable XLA JIT compilation.") + flags.DEFINE_integer("num_eval_iterations", None, + "How many eval iterations to run - performs inference on subset") + # npu parameter + flags.DEFINE_bool('npu_bert_debug', False, 'If True, dropout and shuffle is disabled.') + flags.DEFINE_integer('init_loss_scale_value', 2 ** 32, 'Initial loss scale value for loss scale optimizer') + flags.DEFINE_integer("iterations_per_loop", 100, "How many steps to make in each estimator call.") + flags.DEFINE_bool("use_fp16_cls", False, "Whether to use fp16 in cls and pooler.") + flags.DEFINE_bool('npu_bert_fused_gelu', True, 'Whether to use npu defined gelu op') + flags.DEFINE_integer("npu_bert_loss_scale", 0, + "Whether to use loss scale, -1 is disable, 0 is dynamic loss scale, >=1 is static loss scale") + flags.DEFINE_bool("npu_bert_clip_by_global_norm", False, + "Use clip_by_global_norm if True, or use clip_by_norm for each gradient") + + flags.DEFINE_bool('npu_bert_npu_dropout', False, 'Whether to use npu defined dropout op') + + flags.DEFINE_bool('npu_bert_npu_dropout_v3', True, 'Whether to use npu defined dropout_v3 op') + + flags.DEFINE_bool('npu_bert_tail_optimize', False, 'Whether to use npu allreduce tail optimization') + + flags.DEFINE_bool('npu_gather', True, 'Whether to use gather_npu whose backward propagation avoids IndexedSlices') + flags.DEFINE_bool("distributed", False, "Whether to train for multi-npu runs") + flags.DEFINE_bool('hcom_parallel', True, 'Whether to use parallel allreduce') + + flags.DEFINE_bool('use_fast_gelu', True, 'use fast gelu instead gelu') + + flags.DEFINE_bool('npu_bert_use_fused_adam_momentum', False, 'Whether to use fused apply and assign in adam') + + flags.DEFINE_bool('npu_bert_use_fused_lamb_momentum', False, 'Whether to use fused apply and assign in lamb') + flags.DEFINE_string("precision_mode", "allow_mix_precision", "Npu Precision Mode") + flags.DEFINE_bool("enable_exception_dump", False, "Whether to enable excepttion dump.") + flags.DEFINE_bool("data_dump_flag", False, "Whether to dump data.") + flags.DEFINE_string("data_dump_step", "0", "How many steps to dump data.") + flags.DEFINE_string("data_dump_path", "./output/data_dump", "path to dump data.") + flags.DEFINE_bool("over_dump", False, "Whether to over_dump.") + flags.DEFINE_string("over_dump_path", "./output/doverflow_dump", "path to dump overflow data.") + # Triton Specific flags + flags.DEFINE_bool("export_triton", False, "Whether to export saved model or run inference with Triton") + flags.DEFINE_string("triton_model_name", "bert", "exports to appropriate directory for Triton") + flags.DEFINE_integer("triton_model_version", 1, "exports to appropriate directory for Triton") + flags.DEFINE_string("triton_server_url", "localhost:8001", "exports to appropriate directory for Triton") + flags.DEFINE_bool("triton_model_overwrite", False, "If True, will overwrite an existing directory with the specified 'model_name' and 'version_name'") + flags.DEFINE_integer("triton_max_batch_size", 8, "Specifies the 'max_batch_size' in the Triton model config. See the Triton documentation for more info.") + flags.DEFINE_float("triton_dyn_batching_delay", 0, "Determines the dynamic_batching queue delay in milliseconds(ms) for the Triton model config. Use '0' or '-1' to specify static batching. 
See the Triton documentation for more info.") + flags.DEFINE_integer("triton_engine_count", 1, "Specifies the 'instance_group' count value in the Triton model config. See the Triton documentation for more info.") + flags.mark_flag_as_required("vocab_file") + flags.mark_flag_as_required("bert_config_file") + flags.mark_flag_as_required("output_dir") + + return flags.FLAGS + +def create_model(bert_config, is_training, input_ids, input_mask, segment_ids, + use_one_hot_embeddings): + """Creates a classification model.""" + model = modeling.BertModel( + config=bert_config, + is_training=is_training, + input_ids=input_ids, + input_mask=input_mask, + token_type_ids=segment_ids, + use_one_hot_embeddings=use_one_hot_embeddings, + compute_type=tf.float16 if FLAGS.precision_mode == "allow_mix_precision" else tf.float32) + + final_hidden = model.get_sequence_output() + + final_hidden_shape = modeling.get_shape_list(final_hidden, expected_rank=3) + batch_size = final_hidden_shape[0] + seq_length = final_hidden_shape[1] + hidden_size = final_hidden_shape[2] + + output_weights = tf.get_variable( + "cls/squad/output_weights", [2, hidden_size], + initializer=tf.truncated_normal_initializer(stddev=0.02)) + + output_bias = tf.get_variable( + "cls/squad/output_bias", [2], initializer=tf.zeros_initializer()) + + final_hidden_matrix = tf.reshape(final_hidden, + [batch_size * seq_length, hidden_size]) + logits = tf.matmul(final_hidden_matrix, output_weights, transpose_b=True) + logits = tf.nn.bias_add(logits, output_bias) + + logits = tf.reshape(logits, [batch_size, seq_length, 2]) + logits = tf.transpose(logits, [2, 0, 1]) + + unstacked_logits = tf.unstack(logits, axis=0, name='unstack') + + (start_logits, end_logits) = (unstacked_logits[0], unstacked_logits[1]) + + return (start_logits, end_logits) + +def get_frozen_tftrt_model(bert_config, shape, use_one_hot_embeddings, init_checkpoint): + tf_config = tf.compat.v1.ConfigProto() + tf_config.gpu_options.allow_growth = True + output_node_names = ['unstack'] + + with tf.Session(config=tf_config) as tf_sess: + input_ids = tf.placeholder(tf.int32, shape, 'input_ids') + input_mask = tf.placeholder(tf.int32, shape, 'input_mask') + segment_ids = tf.placeholder(tf.int32, shape, 'segment_ids') + + (start_logits, end_logits) = create_model(bert_config=bert_config, + is_training=False, + input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + use_one_hot_embeddings=use_one_hot_embeddings) + + + tvars = tf.trainable_variables() + (assignment_map, initialized_variable_names) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint) + tf.train.init_from_checkpoint(init_checkpoint, assignment_map) + tf_sess.run(tf.global_variables_initializer()) + print("LOADED!") + tf.compat.v1.logging.info("**** Trainable Variables ****") + for var in tvars: + init_string = "" + if var.name in initialized_variable_names: + init_string = ", *INIT_FROM_CKPT*" + else: + init_string = ", *NOTTTTTTTTTTTTTTTTTTTTT" + tf.compat.v1.logging.info(" name = %s, shape = %s%s", var.name, var.shape, init_string) + + frozen_graph = tf.graph_util.convert_variables_to_constants(tf_sess, + tf_sess.graph.as_graph_def(), output_node_names) + + num_nodes = len(frozen_graph.node) + print('Converting graph using TensorFlow-TensorRT...') + from tensorflow.python.compiler.tensorrt import trt_convert as trt + converter = trt.TrtGraphConverter( + input_graph_def=frozen_graph, + nodes_blacklist=output_node_names, + max_workspace_size_bytes=(4096 << 20) - 1000, + precision_mode="FP16" if 
FLAGS.amp else "FP32", + minimum_segment_size=4, + is_dynamic_op=True, + maximum_cached_engines=1000 + ) + frozen_graph = converter.convert() + + print('Total node count before and after TF-TRT conversion:', + num_nodes, '->', len(frozen_graph.node)) + print('TRT node count:', + len([1 for n in frozen_graph.node if str(n.op) == 'TRTEngineOp'])) + + with tf.io.gfile.GFile("frozen_modelTRT.pb", "wb") as f: + f.write(frozen_graph.SerializeToString()) + + return frozen_graph + + +def model_fn_builder(bert_config, init_checkpoint, learning_rate, + num_train_steps, num_warmup_steps, + hvd=None, amp=False, use_one_hot_embeddings=False): + """Returns `model_fn` closure for Estimator.""" + + def model_fn(features, labels, mode, params): # pylint: disable=unused-argument + """The `model_fn` for Estimator.""" + if FLAGS.verbose_logging: + tf.compat.v1.logging.info("*** Features ***") + for name in sorted(features.keys()): + tf.compat.v1.logging.info(" name = %s, shape = %s" % (name, features[name].shape)) + + unique_ids = features["unique_ids"] + input_ids = features["input_ids"] + input_mask = features["input_mask"] + segment_ids = features["segment_ids"] + + is_training = (mode == tf.estimator.ModeKeys.TRAIN) + + if not is_training and FLAGS.use_trt: + trt_graph = get_frozen_tftrt_model(bert_config, input_ids.shape, use_one_hot_embeddings, init_checkpoint) + (start_logits, end_logits) = tf.import_graph_def(trt_graph, + input_map={'input_ids':input_ids, 'input_mask':input_mask, 'segment_ids':segment_ids}, + return_elements=['unstack:0', 'unstack:1'], + name='') + predictions = { + "unique_ids": unique_ids, + "start_logits": start_logits, + "end_logits": end_logits, + } + output_spec = tf.estimator.EstimatorSpec( + mode=mode, predictions=predictions) + return output_spec + + (start_logits, end_logits) = create_model( + bert_config=bert_config, + is_training=is_training, + input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + use_one_hot_embeddings=use_one_hot_embeddings) + + tvars = tf.trainable_variables() + + initialized_variable_names = {} + if init_checkpoint and (rank_id == 0): + (assignment_map, initialized_variable_names) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint) + + tf.train.init_from_checkpoint(init_checkpoint, assignment_map) + + if FLAGS.verbose_logging: + tf.compat.v1.logging.info("**** Trainable Variables ****") + for var in tvars: + init_string = "" + if var.name in initialized_variable_names: + init_string = ", *INIT_FROM_CKPT*" + tf.compat.v1.logging.info(" %d name = %s, shape = %s%s", rank_id, var.name, var.shape, + init_string) + + if mode == tf.estimator.ModeKeys.TRAIN: + seq_length = modeling.get_shape_list(input_ids)[1] + + def compute_loss(logits, positions): + one_hot_positions = tf.one_hot( + positions, depth=seq_length, dtype=tf.float32) + log_probs = tf.nn.log_softmax(logits, axis=-1) + loss = -tf.reduce_mean( + tf.reduce_sum(one_hot_positions * log_probs, axis=-1)) + return loss + + start_positions = features["start_positions"] + end_positions = features["end_positions"] + + start_loss = compute_loss(start_logits, start_positions) + end_loss = compute_loss(end_logits, end_positions) + + total_loss = (start_loss + end_loss) / 2.0 + + total_loss = tf.identity(total_loss, name='total_loss') + + train_op = optimization.create_optimizer( + total_loss, learning_rate, num_train_steps, num_warmup_steps, hvd, False, amp, FLAGS.num_accumulation_steps) + + output_spec = NPUEstimatorSpec( + mode=mode, + loss=total_loss, + 
train_op=train_op) + # output_spec = tf.estimator.EstimatorSpec( + # mode=mode, + # loss=total_loss, + # train_op=train_op) + elif mode == tf.estimator.ModeKeys.PREDICT: + + # dummy_op = tf.no_op() + # # Need to call mixed precision graph rewrite if fp16 to enable graph rewrite + # if amp: + # loss_scaler = FixedLossScaleManager(1) + # dummy_op = tf.train.experimental.enable_mixed_precision_graph_rewrite( + # optimization.LAMBOptimizer(learning_rate=0.0), loss_scaler) + + predictions = { + "unique_ids": tf.identity(unique_ids), + "start_logits": start_logits, + "end_logits": end_logits, + } + output_spec = NPUEstimatorSpec( + mode=mode, predictions=predictions) + # output_spec = tf.estimator.EstimatorSpec( + # mode=mode, predictions=predictions) + else: + raise ValueError( + "Only TRAIN and PREDICT modes are supported: %s" % (mode)) + + return output_spec + + return model_fn + + +def input_fn_builder(input_file, batch_size, seq_length, is_training, drop_remainder, hvd=None): + """Creates an `input_fn` closure to be passed to Estimator.""" + + name_to_features = { + "unique_ids": tf.io.FixedLenFeature([], tf.int64), + "input_ids": tf.io.FixedLenFeature([seq_length], tf.int64), + "input_mask": tf.io.FixedLenFeature([seq_length], tf.int64), + "segment_ids": tf.io.FixedLenFeature([seq_length], tf.int64), + } + + if is_training: + name_to_features["start_positions"] = tf.io.FixedLenFeature([], tf.int64) + name_to_features["end_positions"] = tf.io.FixedLenFeature([], tf.int64) + + def _decode_record(record, name_to_features): + """Decodes a record to a TensorFlow example.""" + example = tf.parse_single_example(record, name_to_features) + + # tf.Example only supports tf.int64, but the TPU only supports tf.int32. + # So cast all int64 to int32. + for name in list(example.keys()): + t = example[name] + if t.dtype == tf.int64: + t = tf.to_int32(t) + example[name] = t + + return example + + def input_fn(): + """The actual input function.""" + + # For training, we want a lot of parallel reading and shuffling. + # For eval, we want no shuffling and parallel reading doesn't matter. 
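`_decode_record` above exists because `tf.train.Example` serializes integers only as int64, while the downstream embedding lookups expect int32, so every parsed int64 tensor is downcast. A condensed standalone sketch of that decode step (TF1-style API, feature spec abbreviated); the training branch that follows then shards and shuffles the dataset:

```python
import tensorflow.compat.v1 as tf

def decode_record_sketch(record, seq_length=384):
    """Parse one serialized tf.Example and downcast int64 features to int32."""
    name_to_features = {
        "unique_ids": tf.io.FixedLenFeature([], tf.int64),
        "input_ids": tf.io.FixedLenFeature([seq_length], tf.int64),
        "input_mask": tf.io.FixedLenFeature([seq_length], tf.int64),
        "segment_ids": tf.io.FixedLenFeature([seq_length], tf.int64),
    }
    example = tf.io.parse_single_example(record, name_to_features)
    # tf.Example only supports int64; most downstream ops want int32.
    return {name: tf.cast(t, tf.int32) if t.dtype == tf.int64 else t
            for name, t in example.items()}
```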
+    if is_training:
+      d = tf.data.TFRecordDataset(input_file, num_parallel_reads=4)
+      if rank_size > 1:
+        d = d.shard(rank_size, rank_id)
+      d = d.apply(tf.data.experimental.ignore_errors())
+      if not FLAGS.npu_bert_debug:
+        d = d.shuffle(buffer_size=100)
+      d = d.repeat()
+    else:
+      d = tf.data.TFRecordDataset(input_file)
+
+    d = d.apply(
+        tf.contrib.data.map_and_batch(
+            lambda record: _decode_record(record, name_to_features),
+            batch_size=batch_size,
+            drop_remainder=True))
+
+    return d
+
+  return input_fn
+
+
+RawResult = collections.namedtuple("RawResult",
+                                   ["unique_id", "start_logits", "end_logits"])
+
+
+def get_predictions(all_examples, all_features, all_results, n_best_size, max_answer_length,
+                    do_lower_case, version_2_with_negative, verbose_logging):
+  """Get final predictions"""
+
+  example_index_to_features = collections.defaultdict(list)
+  for feature in all_features:
+    example_index_to_features[feature.example_index].append(feature)
+
+  unique_id_to_result = {}
+  for result in all_results:
+    unique_id_to_result[result.unique_id] = result
+
+  # process unique id issue
+  max_unique_id = all_results[-1].unique_id
+  print("max_unique_id=%d" % max_unique_id)
+
+  _PrelimPrediction = collections.namedtuple(  # pylint: disable=invalid-name
+      "PrelimPrediction",
+      ["feature_index", "start_index", "end_index", "start_logit", "end_logit"])
+
+  all_predictions = collections.OrderedDict()
+  all_nbest_json = collections.OrderedDict()
+  scores_diff_json = collections.OrderedDict()
+
+  for (example_index, example) in enumerate(all_examples):
+    features = example_index_to_features[example_index]
+
+    prelim_predictions = []
+    # keep track of the minimum score of null start+end of position 0
+    score_null = 1000000  # large and positive
+    min_null_feature_index = 0  # the paragraph slice with min null score
+    null_start_logit = 0  # the start logit at the slice with min null score
+    null_end_logit = 0  # the end logit at the slice with min null score
+    for (feature_index, feature) in enumerate(features):
+      if feature.unique_id > max_unique_id:
+        continue
+      result = unique_id_to_result[feature.unique_id]
+      start_indexes = _get_best_indexes(result.start_logits, n_best_size)
+      end_indexes = _get_best_indexes(result.end_logits, n_best_size)
+      # if we could have irrelevant answers, get the min score of irrelevant
+      if version_2_with_negative:
+        feature_null_score = result.start_logits[0] + result.end_logits[0]
+        if feature_null_score < score_null:
+          score_null = feature_null_score
+          min_null_feature_index = feature_index
+          null_start_logit = result.start_logits[0]
+          null_end_logit = result.end_logits[0]
+      for start_index in start_indexes:
+        for end_index in end_indexes:
+          # We could hypothetically create invalid predictions, e.g., predict
+          # that the start of the span is in the question. We throw out all
+          # invalid predictions.
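The nested checks that follow discard candidate (start, end) pairs that cannot be real answers. Collapsed into a single hypothetical predicate for readability, the filter is:

```python
def is_valid_span(feature, start_index, end_index, max_answer_length):
    """Sketch of the span-filtering rules applied in the loop below."""
    if start_index >= len(feature.tokens) or end_index >= len(feature.tokens):
        return False  # index points past this feature's tokens
    if start_index not in feature.token_to_orig_map:
        return False  # start falls in the question, not the document
    if end_index not in feature.token_to_orig_map:
        return False
    if not feature.token_is_max_context.get(start_index, False):
        return False  # another doc chunk has better context for this token
    if end_index < start_index:
        return False
    return end_index - start_index + 1 <= max_answer_length
```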
+          if start_index >= len(feature.tokens):
+            continue
+          if end_index >= len(feature.tokens):
+            continue
+          if start_index not in feature.token_to_orig_map:
+            continue
+          if end_index not in feature.token_to_orig_map:
+            continue
+          if not feature.token_is_max_context.get(start_index, False):
+            continue
+          if end_index < start_index:
+            continue
+          length = end_index - start_index + 1
+          if length > max_answer_length:
+            continue
+          prelim_predictions.append(
+              _PrelimPrediction(
+                  feature_index=feature_index,
+                  start_index=start_index,
+                  end_index=end_index,
+                  start_logit=result.start_logits[start_index],
+                  end_logit=result.end_logits[end_index]))
+
+    if version_2_with_negative:
+      prelim_predictions.append(
+          _PrelimPrediction(
+              feature_index=min_null_feature_index,
+              start_index=0,
+              end_index=0,
+              start_logit=null_start_logit,
+              end_logit=null_end_logit))
+    prelim_predictions = sorted(
+        prelim_predictions,
+        key=lambda x: (x.start_logit + x.end_logit),
+        reverse=True)
+
+    _NbestPrediction = collections.namedtuple(  # pylint: disable=invalid-name
+        "NbestPrediction", ["text", "start_logit", "end_logit"])
+
+    seen_predictions = {}
+    nbest = []
+    for pred in prelim_predictions:
+      if len(nbest) >= n_best_size:
+        break
+      feature = features[pred.feature_index]
+      if pred.start_index > 0:  # this is a non-null prediction
+        tok_tokens = feature.tokens[pred.start_index:(pred.end_index + 1)]
+        orig_doc_start = feature.token_to_orig_map[pred.start_index]
+        orig_doc_end = feature.token_to_orig_map[pred.end_index]
+        orig_tokens = example.doc_tokens[orig_doc_start:(orig_doc_end + 1)]
+        tok_text = " ".join(tok_tokens)
+
+        # De-tokenize WordPieces that have been split off.
+        tok_text = tok_text.replace(" ##", "")
+        tok_text = tok_text.replace("##", "")
+
+        # Clean whitespace
+        tok_text = tok_text.strip()
+        tok_text = " ".join(tok_text.split())
+        orig_text = " ".join(orig_tokens)
+
+        final_text = get_final_text(tok_text, orig_text, do_lower_case, verbose_logging)
+        if final_text in seen_predictions:
+          continue
+
+        seen_predictions[final_text] = True
+      else:
+        final_text = ""
+        seen_predictions[final_text] = True
+      nbest.append(
+          _NbestPrediction(
+              text=final_text,
+              start_logit=pred.start_logit,
+              end_logit=pred.end_logit))
+
+    # if we didn't include the empty option in the n-best, include it
+    if version_2_with_negative:
+      if "" not in seen_predictions:
+        nbest.append(
+            _NbestPrediction(
+                text="", start_logit=null_start_logit,
+                end_logit=null_end_logit))
+    # In very rare edge cases we could have no valid predictions. So we
+    # just create a nonce prediction in this case to avoid failure.
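The n-best assembly above undoes WordPiece tokenization by joining the tokens, stripping the '##' continuation markers, and normalizing whitespace. As a tiny self-contained sketch (function name hypothetical); the empty-nbest fallback described in the comment above continues right after it:

```python
def wordpiece_to_text(tokens):
    """Join WordPieces back into text, e.g. ["ste", "##ve", "smith"] -> "steve smith"."""
    text = " ".join(tokens)
    text = text.replace(" ##", "").replace("##", "")  # drop continuation markers
    return " ".join(text.strip().split())             # collapse whitespace
```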
+ if not nbest: + nbest.append( + _NbestPrediction(text="empty", start_logit=0.0, end_logit=0.0)) + + assert len(nbest) >= 1 + + total_scores = [] + best_non_null_entry = None + for entry in nbest: + total_scores.append(entry.start_logit + entry.end_logit) + if not best_non_null_entry: + if entry.text: + best_non_null_entry = entry + + probs = _compute_softmax(total_scores) + + nbest_json = [] + for (i, entry) in enumerate(nbest): + output = collections.OrderedDict() + output["text"] = entry.text + output["probability"] = probs[i] + output["start_logit"] = entry.start_logit + output["end_logit"] = entry.end_logit + nbest_json.append(output) + + assert len(nbest_json) >= 1 + + if not version_2_with_negative: + all_predictions[example.qas_id] = nbest_json[0]["text"] + else: + # predict "" iff the null score - the score of best non-null > threshold + score_diff = score_null - best_non_null_entry.start_logit - ( + best_non_null_entry.end_logit) + scores_diff_json[example.qas_id] = score_diff + + try: + null_score_diff_threshold = FLAGS.null_score_diff_threshold + except: + null_score_diff_threshold = 0.0 + if score_diff > null_score_diff_threshold: + all_predictions[example.qas_id] = "" + else: + all_predictions[example.qas_id] = best_non_null_entry.text + + all_nbest_json[example.qas_id] = nbest_json + return all_predictions, all_nbest_json, scores_diff_json + +def write_predictions(all_examples, all_features, all_results, n_best_size, + max_answer_length, do_lower_case, output_prediction_file, + output_nbest_file, output_null_log_odds_file, + version_2_with_negative, verbose_logging): + """Write final predictions to the json file and log-odds of null if needed.""" + + tf.compat.v1.logging.info("Writing predictions to: %s" % (output_prediction_file)) + tf.compat.v1.logging.info("Writing nbest to: %s" % (output_nbest_file)) + + all_predictions, all_nbest_json, scores_diff_json = get_predictions(all_examples, all_features, + all_results, n_best_size, max_answer_length, do_lower_case, version_2_with_negative, verbose_logging) + + with tf.io.gfile.GFile(output_prediction_file, "w") as writer: + writer.write(json.dumps(all_predictions, indent=4) + "\n") + + with tf.io.gfile.GFile(output_nbest_file, "w") as writer: + writer.write(json.dumps(all_nbest_json, indent=4) + "\n") + + if version_2_with_negative: + with tf.io.gfile.GFile(output_null_log_odds_file, "w") as writer: + writer.write(json.dumps(scores_diff_json, indent=4) + "\n") + + +def get_final_text(pred_text, orig_text, do_lower_case, verbose_logging): + """Project the tokenized prediction back to the original text.""" + + # When we created the data, we kept track of the alignment between original + # (whitespace tokenized) tokens and our WordPiece tokenized tokens. So + # now `orig_text` contains the span of our original text corresponding to the + # span that we predicted. + # + # However, `orig_text` may contain extra characters that we don't want in + # our prediction. + # + # For example, let's say: + # pred_text = steve smith + # orig_text = Steve Smith's + # + # We don't want to return `orig_text` because it contains the extra "'s". + # + # We don't want to return `pred_text` because it's already been normalized + # (the SQuAD eval script also does punctuation stripping/lower casing but + # our tokenizer does additional normalization like stripping accent + # characters). + # + # What we really want to return is "Steve Smith". 
+ # + # Therefore, we have to apply a semi-complicated alignment heruistic between + # `pred_text` and `orig_text` to get a character-to-charcter alignment. This + # can fail in certain cases in which case we just return `orig_text`. + + def _strip_spaces(text): + ns_chars = [] + ns_to_s_map = collections.OrderedDict() + for (i, c) in enumerate(text): + if c == " ": + continue + ns_to_s_map[len(ns_chars)] = i + ns_chars.append(c) + ns_text = "".join(ns_chars) + return (ns_text, ns_to_s_map) + + # We first tokenize `orig_text`, strip whitespace from the result + # and `pred_text`, and check if they are the same length. If they are + # NOT the same length, the heuristic has failed. If they are the same + # length, we assume the characters are one-to-one aligned. + tokenizer = tokenization.BasicTokenizer(do_lower_case=do_lower_case) + + tok_text = " ".join(tokenizer.tokenize(orig_text)) + + start_position = tok_text.find(pred_text) + if start_position == -1: + if verbose_logging: + tf.compat.v1.logging.info( + "Unable to find text: '%s' in '%s'" % (pred_text, orig_text)) + return orig_text + end_position = start_position + len(pred_text) - 1 + + (orig_ns_text, orig_ns_to_s_map) = _strip_spaces(orig_text) + (tok_ns_text, tok_ns_to_s_map) = _strip_spaces(tok_text) + + if len(orig_ns_text) != len(tok_ns_text): + if verbose_logging: + tf.compat.v1.logging.info("Length not equal after stripping spaces: '%s' vs '%s'", + orig_ns_text, tok_ns_text) + return orig_text + + # We then project the characters in `pred_text` back to `orig_text` using + # the character-to-character alignment. + tok_s_to_ns_map = {} + for (i, tok_index) in six.iteritems(tok_ns_to_s_map): + tok_s_to_ns_map[tok_index] = i + + orig_start_position = None + if start_position in tok_s_to_ns_map: + ns_start_position = tok_s_to_ns_map[start_position] + if ns_start_position in orig_ns_to_s_map: + orig_start_position = orig_ns_to_s_map[ns_start_position] + + if orig_start_position is None: + if verbose_logging: + tf.compat.v1.logging.info("Couldn't map start position") + return orig_text + + orig_end_position = None + if end_position in tok_s_to_ns_map: + ns_end_position = tok_s_to_ns_map[end_position] + if ns_end_position in orig_ns_to_s_map: + orig_end_position = orig_ns_to_s_map[ns_end_position] + + if orig_end_position is None: + if verbose_logging: + tf.compat.v1.logging.info("Couldn't map end position") + return orig_text + + output_text = orig_text[orig_start_position:(orig_end_position + 1)] + return output_text + + +def _get_best_indexes(logits, n_best_size): + """Get the n-best logits from a list.""" + index_and_score = sorted(enumerate(logits), key=lambda x: x[1], reverse=True) + + best_indexes = [] + for i in range(len(index_and_score)): + if i >= n_best_size: + break + best_indexes.append(index_and_score[i][0]) + return best_indexes + + +def _compute_softmax(scores): + """Compute softmax probability over raw logits.""" + if not scores: + return [] + + max_score = None + for score in scores: + if max_score is None or score > max_score: + max_score = score + + exp_scores = [] + total_sum = 0.0 + for score in scores: + x = math.exp(score - max_score) + exp_scores.append(x) + total_sum += x + + probs = [] + for score in exp_scores: + probs.append(score / total_sum) + return probs + + + +def validate_flags_or_throw(bert_config): + """Validate the input FLAGS or throw an exception.""" + tokenization.validate_case_matches_checkpoint(FLAGS.do_lower_case, + FLAGS.init_checkpoint) + + if not FLAGS.do_train and not FLAGS.do_predict 
and not FLAGS.export_triton: + raise ValueError("At least one of `do_train` or `do_predict` or `export_SavedModel` must be True.") + + if FLAGS.do_train: + if not FLAGS.train_file: + raise ValueError( + "If `do_train` is True, then `train_file` must be specified.") + if FLAGS.do_predict: + if not FLAGS.predict_file: + raise ValueError( + "If `do_predict` is True, then `predict_file` must be specified.") + + if FLAGS.max_seq_length > bert_config.max_position_embeddings: + raise ValueError( + "Cannot use sequence length %d because the BERT model " + "was only trained up to sequence length %d" % + (FLAGS.max_seq_length, bert_config.max_position_embeddings)) + + if FLAGS.max_seq_length <= FLAGS.max_query_length + 3: + raise ValueError( + "The max_seq_length (%d) must be greater than max_query_length " + "(%d) + 3" % (FLAGS.max_seq_length, FLAGS.max_query_length)) + + +def export_model(estimator, export_dir, init_checkpoint): + """Exports a checkpoint in SavedModel format in a directory structure compatible with Triton.""" + def serving_input_fn(): + label_ids = tf.placeholder(tf.int32, [None,], name='unique_ids') + input_ids = tf.placeholder(tf.int32, [None, FLAGS.max_seq_length], name='input_ids') + input_mask = tf.placeholder(tf.int32, [None, FLAGS.max_seq_length], name='input_mask') + segment_ids = tf.placeholder(tf.int32, [None, FLAGS.max_seq_length], name='segment_ids') + input_fn = tf.estimator.export.build_raw_serving_input_receiver_fn({ + 'unique_ids': label_ids, + 'input_ids': input_ids, + 'input_mask': input_mask, + 'segment_ids': segment_ids, + })() + return input_fn + + saved_dir = estimator.export_savedmodel( + export_dir, + serving_input_fn, + assets_extra=None, + as_text=False, + checkpoint_path=init_checkpoint, + strip_default_attrs=False) + + model_name = FLAGS.triton_model_name + + model_folder = export_dir + "/triton_models/" + model_name + version_folder = model_folder + "/" + str(FLAGS.triton_model_version) + final_model_folder = version_folder + "/model.savedmodel" + + if not os.path.exists(version_folder): + os.makedirs(version_folder) + + if (not os.path.exists(final_model_folder)): + os.rename(saved_dir, final_model_folder) + print("Model saved to dir", final_model_folder) + else: + if (FLAGS.triton_model_overwrite): + shutil.rmtree(final_model_folder) + os.rename(saved_dir, final_model_folder) + print("WARNING: Existing model was overwritten. Model dir: {}".format(final_model_folder)) + else: + print("ERROR: Could not save Triton model. Folder already exists. Use '--triton_model_overwrite=True' if you would like to overwrite an existing model. Model dir: {}".format(final_model_folder)) + return + + # Now build the config for Triton. Check to make sure we can overwrite it, if it exists + config_filename = os.path.join(model_folder, "config.pbtxt") + + optimization_str = "" + if FLAGS.amp: + optimization_str = r""" +optimization { + execution_accelerators + { + gpu_execution_accelerator : + [ { + name : "auto_mixed_precision" + } ] + } +}""" + + if (os.path.exists(config_filename) and not FLAGS.triton_model_overwrite): + print("ERROR: Could not save Triton model config. Config file already exists. Use '--triton_model_overwrite=True' if you would like to overwrite an existing model config. 
Model config: {}".format(config_filename)) + return + + config_template = r""" +name: "{model_name}" +platform: "tensorflow_savedmodel" +max_batch_size: {max_batch_size} +{optimization_str} +input [ + {{ + name: "unique_ids" + data_type: TYPE_INT32 + dims: [ 1 ] + reshape: {{ shape: [ ] }} + }}, + {{ + name: "segment_ids" + data_type: TYPE_INT32 + dims: {seq_length} + }}, + {{ + name: "input_ids" + data_type: TYPE_INT32 + dims: {seq_length} + }}, + {{ + name: "input_mask" + data_type: TYPE_INT32 + dims: {seq_length} + }} + ] + output [ + {{ + name: "end_logits" + data_type: TYPE_FP32 + dims: {seq_length} + }}, + {{ + name: "start_logits" + data_type: TYPE_FP32 + dims: {seq_length} + }} +] +{dynamic_batching} +instance_group [ + {{ + count: {engine_count} + }} +]""" + + batching_str = "" + max_batch_size = FLAGS.triton_max_batch_size + + if (FLAGS.triton_dyn_batching_delay > 0): + + # Use only full and half full batches + pref_batch_size = [int(max_batch_size / 2.0), max_batch_size] + + batching_str = r""" +dynamic_batching {{ + preferred_batch_size: [{0}] + max_queue_delay_microseconds: {1} +}}""".format(", ".join([str(x) for x in pref_batch_size]), int(FLAGS.triton_dyn_batching_delay * 1000.0)) + + config_values = { + "model_name": model_name, + "max_batch_size": max_batch_size, + "seq_length": FLAGS.max_seq_length, + "dynamic_batching": batching_str, + "engine_count": FLAGS.triton_engine_count, + "optimization_str":optimization_str, + } + + with open(model_folder + "/config.pbtxt", "w") as file: + + final_config_str = config_template.format_map(config_values) + file.write(final_config_str) + +def main(_): + # setup_xla_flags() + + tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO) + # dllogging = utils.dllogger_class.dllogger_class(FLAGS.dllog_path) + + # if FLAGS.horovod: + # hvd.init() + + bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file) + + validate_flags_or_throw(bert_config) + + tf.io.gfile.makedirs(FLAGS.output_dir) + + tokenizer = tokenization.FullTokenizer( + vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case) + + master_process = True + training_hooks = [] + global_batch_size = FLAGS.train_batch_size * FLAGS.num_accumulation_steps + # hvd_rank = 0 + + config = tf.ConfigProto( + inter_op_parallelism_threads=0, + intra_op_parallelism_threads=0, + allow_soft_placement=True) + + learning_rate = FLAGS.learning_rate + # if FLAGS.horovod: + if FLAGS.distributed: + tf.compat.v1.logging.info("Multi-NPU training...") + tf.compat.v1.logging.info("rank_size = %d rank_id = %d", rank_size, rank_id) + global_batch_size = FLAGS.train_batch_size * rank_size * FLAGS.num_accumulation_steps + learning_rate = learning_rate * rank_size + master_process = (rank_id == 0) + # hvd_rank = rank_id + # config.gpu_options.visible_device_list = str(hvd.local_rank()) + #set_affinity(hvd.local_rank()) + # if rank_size > 1: + # training_hooks.append(hvd.BroadcastGlobalVariablesHook(0)) + if FLAGS.use_xla: + config.graph_options.optimizer_options.global_jit_level = tf.compat.v1.OptimizerOptions.ON_1 + if FLAGS.amp: + tf.enable_resource_variables() + # run_config = tf.estimator.RunConfig( + # model_dir=FLAGS.output_dir if master_process else None, + # session_config=config, + # save_checkpoints_steps=FLAGS.save_checkpoints_steps if master_process else 0, + # save_summary_steps=FLAGS.save_checkpoints_steps if master_process else 0, + # log_step_count_steps=FLAGS.display_loss_steps, + # keep_checkpoint_max=1) + + run_config = NPURunConfig( + model_dir=FLAGS.output_dir 
if master_process else None, + save_checkpoints_steps=FLAGS.save_checkpoints_steps if master_process else 0, + save_summary_steps=0, + iterations_per_loop=FLAGS.iterations_per_loop, + session_config=config, + precision_mode=FLAGS.precision_mode, + log_step_count_steps=None, + is_tailing_optimization=FLAGS.npu_bert_tail_optimize, + hcom_parallel=FLAGS.hcom_parallel) + + if master_process: + tf.compat.v1.logging.info("***** Configuaration *****") + for key in FLAGS.__flags.keys(): + tf.compat.v1.logging.info(' {}: {}'.format(key, getattr(FLAGS, key))) + tf.compat.v1.logging.info("**************************") + + # train_examples = None + num_train_steps = FLAGS.num_train_steps + num_warmup_steps = None + training_hooks.append(ExamplesPerSecondHook(global_batch_size, FLAGS.iterations_per_loop)) + + # Prepare Training Data + if FLAGS.do_train: + # train_examples = read_squad_examples( + # input_file=FLAGS.train_file, is_training=True, + # version_2_with_negative=FLAGS.version_2_with_negative) + # Squad_V1.1 train_examples = 87599 + # Squad_V2.0 train_examples = 130319 + if num_train_steps == 0: + if FLAGS.version_2_with_negative: + num_train_steps = int(130319 / global_batch_size * FLAGS.num_train_epochs) + else: + num_train_steps = int(87599 / global_batch_size * FLAGS.num_train_epochs) + num_warmup_steps = int(num_train_steps * FLAGS.warmup_proportion) + + # Pre-shuffle the input to avoid having to make a very large shuffle + # buffer in in the `input_fn`. + # rng = random.Random(12345) + # rng.shuffle(train_examples) + # + # start_index = 0 + # end_index = len(train_examples) + # tmp_filenames = [os.path.join(FLAGS.output_dir, "train.tf_record")] + + # if FLAGS.horovod: + # if FLAGS.distributed: + # tmp_filenames = [os.path.join(FLAGS.output_dir, "train.tf_record{}".format(i)) for i in range(rank_size)] + # num_examples_per_rank = len(train_examples) // rank_size + # remainder = len(train_examples) % rank_size + # if rank_id < remainder: + # start_index = rank_id * (num_examples_per_rank + 1) + # end_index = start_index + num_examples_per_rank + 1 + # else: + # start_index = rank_id * num_examples_per_rank + remainder + # end_index = start_index + num_examples_per_rank + + + model_fn = model_fn_builder( + bert_config=bert_config, + init_checkpoint=FLAGS.init_checkpoint, + learning_rate=learning_rate, + num_train_steps=num_train_steps, + num_warmup_steps=num_warmup_steps, + hvd=None if not FLAGS.horovod else hvd, + amp=FLAGS.amp) + + # estimator = tf.estimator.Estimator( + # model_fn=model_fn, + # config=npu_run_config_init(run_config=run_config)) + + estimator = NPUEstimator( + model_fn=model_fn, + config=run_config, + model_dir=FLAGS.output_dir, + params={"batch_size": FLAGS.train_batch_size, "predict_batch_size": FLAGS.predict_batch_size}) + + if FLAGS.do_train: + + # We write to a temporary file to avoid storing very large constant tensors + # in memory. 
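Earlier in this hunk, when `--num_train_steps` is left at 0, the step count is derived from the hard-coded SQuAD training-set sizes (87,599 examples for v1.1, 130,319 for v2.0). A worked sketch of that derivation (helper name hypothetical):

```python
def derive_schedule(num_examples, global_batch_size, epochs, warmup_proportion):
    """steps = examples / global batch size * epochs; warmup is a fixed share."""
    num_train_steps = int(num_examples / global_batch_size * epochs)
    num_warmup_steps = int(num_train_steps * warmup_proportion)
    return num_train_steps, num_warmup_steps

# e.g. SQuAD v1.1, global batch 32, 2 epochs, 10% warmup:
# derive_schedule(87599, 32, 2.0, 0.1) -> (5474, 547)
```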
+ # train_writer = FeatureWriter( + # filename=tmp_filenames[hvd_rank], + # is_training=True) + # convert_examples_to_features( + # examples=train_examples[start_index:end_index], + # tokenizer=tokenizer, + # max_seq_length=FLAGS.max_seq_length, + # doc_stride=FLAGS.doc_stride, + # max_query_length=FLAGS.max_query_length, + # is_training=True, + # output_fn=train_writer.process_feature, + # verbose_logging=FLAGS.verbose_logging) + # train_writer.close() + + tf.compat.v1.logging.info("***** Running training *****") + # tf.compat.v1.logging.info(" Num orig examples = %d", end_index - start_index) + # tf.compat.v1.logging.info(" Num split examples = %d", train_writer.num_features) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.train_batch_size) + tf.compat.v1.logging.info(" Num steps = %d", num_train_steps) + tf.compat.v1.logging.info(" LR = %f", learning_rate) + # del train_examples + + train_input_fn = input_fn_builder( + # input_file=tmp_filenames, + input_file=FLAGS.train_file, + batch_size=FLAGS.train_batch_size, + seq_length=FLAGS.max_seq_length, + is_training=True, + drop_remainder=True, + hvd=None if not FLAGS.horovod else hvd) + + # train_start_time = time.time() + estimator.train(input_fn=train_input_fn, hooks=training_hooks, max_steps=num_train_steps) + # train_time_elapsed = time.time() - train_start_time + # train_time_wo_overhead = training_hooks[-1].total_time + # avg_sentences_per_second = num_train_steps * global_batch_size * 1.0 / train_time_elapsed + # ss_sentences_per_second = (num_train_steps - training_hooks[-1].skipped) * global_batch_size * 1.0 / train_time_wo_overhead + + # if master_process: + # tf.compat.v1.logging.info("-----------------------------") + # tf.compat.v1.logging.info("Total Training Time = %0.2f for Sentences = %d", train_time_elapsed, + # num_train_steps * global_batch_size) + # tf.compat.v1.logging.info("Total Training Time W/O Overhead = %0.2f for Sentences = %d", train_time_wo_overhead, + # (num_train_steps - training_hooks[-1].skipped) * global_batch_size) + # tf.compat.v1.logging.info("Throughput Average (sentences/sec) with overhead = %0.2f", avg_sentences_per_second) + # tf.compat.v1.logging.info("Throughput Average (sentences/sec) = %0.2f", ss_sentences_per_second) + # # dllogging.logger.log(step=(), data={"throughput_train": ss_sentences_per_second}, verbosity=Verbosity.DEFAULT) + # tf.compat.v1.logging.info("-----------------------------") + + + if FLAGS.export_triton and master_process: + export_model(estimator, FLAGS.output_dir, FLAGS.init_checkpoint) + + if FLAGS.do_predict and master_process: + eval_examples = read_squad_examples( + input_file=FLAGS.predict_file, is_training=False, + version_2_with_negative=FLAGS.version_2_with_negative) + + # Perform evaluation on subset, useful for profiling + if FLAGS.num_eval_iterations is not None: + eval_examples = eval_examples[:FLAGS.num_eval_iterations*FLAGS.predict_batch_size] + + eval_writer = FeatureWriter( + filename=os.path.join(FLAGS.output_dir, "eval.tf_record"), + is_training=False) + eval_features = [] + + def append_feature(feature): + eval_features.append(feature) + eval_writer.process_feature(feature) + + convert_examples_to_features( + examples=eval_examples, + tokenizer=tokenizer, + max_seq_length=FLAGS.max_seq_length, + doc_stride=FLAGS.doc_stride, + max_query_length=FLAGS.max_query_length, + is_training=False, + output_fn=append_feature, + verbose_logging=FLAGS.verbose_logging) + eval_writer.close() + + tf.compat.v1.logging.info("***** Running predictions *****") + 
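The `append_feature` callback above does double duty: it keeps each feature in memory, because `write_predictions` later needs them to map logits back to answer text, and it also streams the feature into the TFRecord file that `predict_input_fn` reads. A stripped-down sketch of the pattern (the writer class is a hypothetical stand-in for the repo's `FeatureWriter`):

```python
class FeatureWriterStub(object):
    """Hypothetical stand-in: the real FeatureWriter serializes to TFRecord."""
    def __init__(self, filename):
        self.filename = filename
        self.num_features = 0

    def process_feature(self, feature):
        self.num_features += 1  # real implementation writes a tf.Example here

eval_features = []                        # in-memory copy for post-processing
writer = FeatureWriterStub("eval.tf_record")

def append_feature(feature):
    eval_features.append(feature)         # consumed later by write_predictions
    writer.process_feature(feature)       # persisted copy read by the input_fn
```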
tf.compat.v1.logging.info(" Num orig examples = %d", len(eval_examples)) + tf.compat.v1.logging.info(" Num split examples = %d", len(eval_features)) + tf.compat.v1.logging.info(" Batch size = %d", FLAGS.predict_batch_size) + + predict_input_fn = input_fn_builder( + input_file=eval_writer.filename, + batch_size=FLAGS.predict_batch_size, + seq_length=FLAGS.max_seq_length, + is_training=False, + drop_remainder=False) + + all_results = [] + eval_hooks = [LogEvalRunHook(FLAGS.predict_batch_size)] + eval_start_time = time.time() + for result in estimator.predict( + predict_input_fn, yield_single_examples=True, hooks=eval_hooks): + if len(all_results) % 1000 == 0: + tf.compat.v1.logging.info("Processing example: %d" % (len(all_results))) + unique_id = int(result["unique_ids"]) + start_logits = [float(x) for x in result["start_logits"].flat] + end_logits = [float(x) for x in result["end_logits"].flat] + all_results.append( + RawResult( + unique_id=unique_id, + start_logits=start_logits, + end_logits=end_logits)) + + eval_time_elapsed = time.time() - eval_start_time + + time_list = eval_hooks[-1].time_list + time_list.sort() + # Removing outliers (init/warmup) in throughput computation. + eval_time_wo_overhead = sum(time_list[:int(len(time_list) * 0.99)]) + num_sentences = (int(len(time_list) * 0.99)) * FLAGS.predict_batch_size + + avg = np.mean(time_list) + cf_50 = max(time_list[:int(len(time_list) * 0.50)]) + cf_90 = max(time_list[:int(len(time_list) * 0.90)]) + cf_95 = max(time_list[:int(len(time_list) * 0.95)]) + cf_99 = max(time_list[:int(len(time_list) * 0.99)]) + cf_100 = max(time_list[:int(len(time_list) * 1)]) + ss_sentences_per_second = num_sentences * 1.0 / eval_time_wo_overhead + + tf.compat.v1.logging.info("-----------------------------") + tf.compat.v1.logging.info("Total Inference Time = %0.2f for Sentences = %d", eval_time_elapsed, + eval_hooks[-1].count * FLAGS.predict_batch_size) + tf.compat.v1.logging.info("Total Inference Time W/O Overhead = %0.2f for Sentences = %d", eval_time_wo_overhead, + num_sentences) + tf.compat.v1.logging.info("Summary Inference Statistics") + tf.compat.v1.logging.info("Batch size = %d", FLAGS.predict_batch_size) + tf.compat.v1.logging.info("Sequence Length = %d", FLAGS.max_seq_length) + tf.compat.v1.logging.info("Precision = %s", "fp16" if FLAGS.amp else "fp32") + tf.compat.v1.logging.info("Latency Confidence Level 50 (ms) = %0.2f", cf_50 * 1000) + tf.compat.v1.logging.info("Latency Confidence Level 90 (ms) = %0.2f", cf_90 * 1000) + tf.compat.v1.logging.info("Latency Confidence Level 95 (ms) = %0.2f", cf_95 * 1000) + tf.compat.v1.logging.info("Latency Confidence Level 99 (ms) = %0.2f", cf_99 * 1000) + tf.compat.v1.logging.info("Latency Confidence Level 100 (ms) = %0.2f", cf_100 * 1000) + tf.compat.v1.logging.info("Latency Average (ms) = %0.2f", avg * 1000) + tf.compat.v1.logging.info("Throughput Average (sentences/sec) = %0.2f", ss_sentences_per_second) + # dllogging.logger.log(step=(), data={"throughput_val": ss_sentences_per_second}, verbosity=Verbosity.DEFAULT) + tf.compat.v1.logging.info("-----------------------------") + + output_prediction_file = os.path.join(FLAGS.output_dir, "predictions.json") + output_nbest_file = os.path.join(FLAGS.output_dir, "nbest_predictions.json") + output_null_log_odds_file = os.path.join(FLAGS.output_dir, "null_odds.json") + + write_predictions(eval_examples, eval_features, all_results, + FLAGS.n_best_size, FLAGS.max_answer_length, + FLAGS.do_lower_case, output_prediction_file, + output_nbest_file, 
output_null_log_odds_file, + FLAGS.version_2_with_negative, FLAGS.verbose_logging) + + if FLAGS.eval_script: + import sys + import subprocess + eval_out = subprocess.check_output([sys.executable, FLAGS.eval_script, + FLAGS.predict_file, output_prediction_file]) + scores = str(eval_out).strip() + print(str(eval_out)) + exact_match = float(scores.split(":")[1].split(",")[0]) + if FLAGS.version_2_with_negative: + f1 = float(scores.split(":")[2].split(",")[0]) + else: + f1 = float(scores.split(":")[2].split("}")[0]) + tf.compat.v1.logging.info("f1 = %2.7f", f1) + tf.compat.v1.logging.info("exact_match = %2.7f", exact_match) + # dllogging.logger.log(step=(), data={"f1": f1}, verbosity=Verbosity.DEFAULT) + # dllogging.logger.log(step=(), data={"exact_match": exact_match}, verbosity=Verbosity.DEFAULT) + + +if __name__ == "__main__": + FLAGS = extract_run_squad_flags() + tf.app.run() diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/__init__.py b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..effb57b1e893fc03b3782961deb060749083c696 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/__init__.py @@ -0,0 +1,15 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/create_glue_data.py b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/create_glue_data.py new file mode 100644 index 0000000000000000000000000000000000000000..99b70f2a7d7a0878676dd1e06043693d08f278b2 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/create_glue_data.py @@ -0,0 +1,531 @@ +# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import json
+import math
+import os
+import random
+import modeling
+import optimization
+import tokenization
+import six
+import tensorflow as tf
+#import horovod.tensorflow as hvd
+import time
+import csv
+
+flags = tf.flags
+FLAGS = None
+
+def extract_flags():
+
+  ## Required parameters
+  flags.DEFINE_string(
+      "data_dir", None,
+      "The input data dir. Should contain the .tsv files (or other data files) "
+      "for the task.")
+
+  flags.DEFINE_string("task_name", None, "The name of the task to train.")
+
+  flags.DEFINE_string("vocab_file", None,
+                      "The vocabulary file that the BERT model was trained on.")
+
+  flags.DEFINE_bool(
+      "do_lower_case", True,
+      "Whether to lower case the input text. Should be True for uncased "
+      "models and False for cased models.")
+
+  flags.DEFINE_integer(
+      "max_seq_length", 128,
+      "The maximum total input sequence length after WordPiece tokenization. "
+      "Sequences longer than this will be truncated, and sequences shorter "
+      "than this will be padded.")
+
+  flags.DEFINE_bool(
+      "verbose_logging", False,
+      "If true, all of the warnings related to data processing will be printed. "
+      "A number of warnings are expected for a normal SQuAD evaluation.")
+  flags.mark_flag_as_required("data_dir")
+  flags.mark_flag_as_required("task_name")
+  flags.mark_flag_as_required("vocab_file")
+  return flags.FLAGS
+
+
+class InputExample(object):
+  """A single training/test example for simple sequence classification."""
+
+  def __init__(self, guid, text_a, text_b=None, label=None):
+    """Constructs an InputExample.
+    Args:
+      guid: Unique id for the example.
+      text_a: string. The untokenized text of the first sequence. For single
+        sequence tasks, only this sequence must be specified.
+      text_b: (Optional) string. The untokenized text of the second sequence.
+        Must only be specified for sequence pair tasks.
+      label: (Optional) string. The label of the example. This should be
+        specified for train and dev examples, but not for test examples.
+    """
+    self.guid = guid
+    self.text_a = text_a
+    self.text_b = text_b
+    self.label = label
+
+class PaddingInputExample(object):
+  """Fake example so that the number of input examples is a multiple of the batch size.
+
+  When running eval/predict on the TPU, we need to pad the number of examples
+  to be a multiple of the batch size, because the TPU requires a fixed batch
+  size. The alternative is to drop the last batch, which is bad because it means
+  the entire output data won't be generated.
+
+  We use this class instead of `None` because treating `None` as padding
+  batches could cause silent errors.
+ """ + +class InputFeatures(object): + """A single set of features of data.""" + + def __init__(self, + input_ids, + input_mask, + segment_ids, + label_id, + is_real_example=True): + self.input_ids = input_ids + self.input_mask = input_mask + self.segment_ids = segment_ids + self.label_id = label_id + self.is_real_example = is_real_example + + +class DataProcessor(object): + """Base class for data converters for sequence classification data sets.""" + + def get_train_examples(self, data_dir): + """Gets a collection of `InputExample`s for the train set.""" + raise NotImplementedError() + + def get_dev_examples(self, data_dir): + """Gets a collection of `InputExample`s for the dev set.""" + raise NotImplementedError() + + def get_test_examples(self, data_dir): + """Gets a collection of `InputExample`s for prediction.""" + raise NotImplementedError() + + def get_labels(self): + """Gets the list of labels for this data set.""" + raise NotImplementedError() + + @classmethod + def _read_tsv(cls, input_file, quotechar=None): + """Reads a tab separated value file.""" + with tf.gfile.Open(input_file, "r") as f: + reader = csv.reader(f, delimiter="\t", quotechar=quotechar) + lines = [] + for line in reader: + lines.append(line) + return lines + + +class XnliProcessor(DataProcessor): + """Processor for the XNLI data set.""" + + def __init__(self): + self.language = "zh" + + def get_train_examples(self, data_dir): + """See base class.""" + lines = self._read_tsv( + os.path.join(data_dir, "multinli", + "multinli.train.%s.tsv" % self.language)) + examples = [] + for (i, line) in enumerate(lines): + if i == 0: + continue + guid = "train-%d" % (i) + text_a = tokenization.convert_to_unicode(line[0]) + text_b = tokenization.convert_to_unicode(line[1]) + label = tokenization.convert_to_unicode(line[2]) + if label == tokenization.convert_to_unicode("contradictory"): + label = tokenization.convert_to_unicode("contradiction") + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + def get_dev_examples(self, data_dir): + """See base class.""" + lines = self._read_tsv(os.path.join(data_dir, "xnli.dev.tsv")) + examples = [] + for (i, line) in enumerate(lines): + if i == 0: + continue + guid = "dev-%d" % (i) + language = tokenization.convert_to_unicode(line[0]) + if language != tokenization.convert_to_unicode(self.language): + continue + text_a = tokenization.convert_to_unicode(line[6]) + text_b = tokenization.convert_to_unicode(line[7]) + label = tokenization.convert_to_unicode(line[1]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + def get_labels(self): + """See base class.""" + return ["contradiction", "entailment", "neutral"] + + +class MnliProcessor(DataProcessor): + """Processor for the MultiNLI data set (GLUE version).""" + + def get_train_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "train.tsv")), "train") + + def get_dev_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "dev_matched.tsv")), + "dev_matched") + + def get_test_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "test_matched.tsv")), "test") + + def get_labels(self): + """See base class.""" + return ["contradiction", "entailment", "neutral"] + + def _create_examples(self, lines, set_type): + """Creates examples 
for the training and dev sets.""" + examples = [] + for (i, line) in enumerate(lines): + if i == 0: + continue + guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(line[0])) + text_a = tokenization.convert_to_unicode(line[8]) + text_b = tokenization.convert_to_unicode(line[9]) + if set_type == "test": + label = "contradiction" + else: + label = tokenization.convert_to_unicode(line[-1]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + +class MrpcProcessor(DataProcessor): + """Processor for the MRPC data set (GLUE version).""" + + def get_train_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "train.tsv")), "train") + + def get_dev_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev") + + def get_test_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "test.tsv")), "test") + + def get_labels(self): + """See base class.""" + return ["0", "1"] + + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = [] + for (i, line) in enumerate(lines): + if i == 0: + continue + guid = "%s-%s" % (set_type, i) + text_a = tokenization.convert_to_unicode(line[3]) + text_b = tokenization.convert_to_unicode(line[4]) + if set_type == "test": + label = "0" + else: + label = tokenization.convert_to_unicode(line[0]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + +class ColaProcessor(DataProcessor): + """Processor for the CoLA data set (GLUE version).""" + + def get_train_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "train.tsv")), "train") + + def get_dev_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev") + + def get_test_examples(self, data_dir): + """See base class.""" + return self._create_examples( + self._read_tsv(os.path.join(data_dir, "test.tsv")), "test") + + def get_labels(self): + """See base class.""" + return ["0", "1"] + + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = [] + for (i, line) in enumerate(lines): + # Only the test set has a header + if set_type == "test" and i == 0: + continue + guid = "%s-%s" % (set_type, i) + if set_type == "test": + text_a = tokenization.convert_to_unicode(line[1]) + label = "0" + else: + text_a = tokenization.convert_to_unicode(line[3]) + label = tokenization.convert_to_unicode(line[1]) + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=None, label=label)) + return examples + + +def _truncate_seq_pair(tokens_a, tokens_b, max_length): + """Truncates a sequence pair in place to the maximum length.""" + + # This is a simple heuristic which will always truncate the longer sequence + # one token at a time. This makes more sense than truncating an equal percent + # of tokens from each, since if one sequence is very short then each token + # that's truncated likely contains more information than a longer sequence. 
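+  # Illustrative worked example (added comment; hypothetical lengths): with
+  # max_length=8, len(tokens_a)=6 and len(tokens_b)=5, the loop below pops
+  # from tokens_a (6 > 5), then from tokens_b (5 vs. 5, ties go to tokens_b),
+  # then from tokens_a again, ending with len(tokens_a)=4 and len(tokens_b)=4.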
+ while True: + total_length = len(tokens_a) + len(tokens_b) + if total_length <= max_length: + break + if len(tokens_a) > len(tokens_b): + tokens_a.pop() + else: + tokens_b.pop() + +def convert_single_example(ex_index, example, label_list, max_seq_length, + tokenizer, verbose_logging=False): + """Converts a single `InputExample` into a single `InputFeatures`.""" + + if isinstance(example, PaddingInputExample): + return InputFeatures( + input_ids=[0] * max_seq_length, + input_mask=[0] * max_seq_length, + segment_ids=[0] * max_seq_length, + label_id=0, + is_real_example=False) + + label_map = {} + for (i, label) in enumerate(label_list): + label_map[label] = i + + tokens_a = tokenizer.tokenize(example.text_a) + tokens_b = None + if example.text_b: + tokens_b = tokenizer.tokenize(example.text_b) + + if tokens_b: + # Modifies `tokens_a` and `tokens_b` in place so that the total + # length is less than the specified length. + # Account for [CLS], [SEP], [SEP] with "- 3" + _truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3) + else: + # Account for [CLS] and [SEP] with "- 2" + if len(tokens_a) > max_seq_length - 2: + tokens_a = tokens_a[0:(max_seq_length - 2)] + + # The convention in BERT is: + # (a) For sequence pairs: + # tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP] + # type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1 + # (b) For single sequences: + # tokens: [CLS] the dog is hairy . [SEP] + # type_ids: 0 0 0 0 0 0 0 + # + # Where "type_ids" are used to indicate whether this is the first + # sequence or the second sequence. The embedding vectors for `type=0` and + # `type=1` were learned during pre-training and are added to the wordpiece + # embedding vector (and position vector). This is not *strictly* necessary + # since the [SEP] token unambiguously separates the sequences, but it makes + # it easier for the model to learn the concept of sequences. + # + # For classification tasks, the first vector (corresponding to [CLS]) is + # used as the "sentence vector". Note that this only makes sense because + # the entire model is fine-tuned. + tokens = [] + segment_ids = [] + tokens.append("[CLS]") + segment_ids.append(0) + for token in tokens_a: + tokens.append(token) + segment_ids.append(0) + tokens.append("[SEP]") + segment_ids.append(0) + + if tokens_b: + for token in tokens_b: + tokens.append(token) + segment_ids.append(1) + tokens.append("[SEP]") + segment_ids.append(1) + + input_ids = tokenizer.convert_tokens_to_ids(tokens) + + # The mask has 1 for real tokens and 0 for padding tokens. Only real + # tokens are attended to. + input_mask = [1] * len(input_ids) + + # Zero-pad up to the sequence length. 
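+  # Illustrative example (added comment; id_* are hypothetical vocab ids): for
+  # the single sentence ["[CLS]", "the", "dog", "[SEP]"] with max_seq_length=6,
+  # the padded features become:
+  #   input_ids:   [id_CLS, id_the, id_dog, id_SEP, 0, 0]
+  #   input_mask:  [1, 1, 1, 1, 0, 0]
+  #   segment_ids: [0, 0, 0, 0, 0, 0]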
+  while len(input_ids) < max_seq_length:
+    input_ids.append(0)
+    input_mask.append(0)
+    segment_ids.append(0)
+
+  assert len(input_ids) == max_seq_length
+  assert len(input_mask) == max_seq_length
+  assert len(segment_ids) == max_seq_length
+
+  label_id = label_map[example.label]
+  if ex_index < 5 and verbose_logging:
+    tf.compat.v1.logging.info("*** Example ***")
+    tf.compat.v1.logging.info("guid: %s" % (example.guid))
+    tf.compat.v1.logging.info("tokens: %s" % " ".join(
+        [tokenization.printable_text(x) for x in tokens]))
+    tf.compat.v1.logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids]))
+    tf.compat.v1.logging.info("input_mask: %s" % " ".join([str(x) for x in input_mask]))
+    tf.compat.v1.logging.info("segment_ids: %s" % " ".join([str(x) for x in segment_ids]))
+    tf.compat.v1.logging.info("label: %s (id = %d)" % (example.label, label_id))
+
+  feature = InputFeatures(
+      input_ids=input_ids,
+      input_mask=input_mask,
+      segment_ids=segment_ids,
+      label_id=label_id,
+      is_real_example=True)
+  return feature
+
+# This function is not used by this file but is still used by the Colab and
+# people who depend on it.
+def convert_examples_to_features(examples, label_list, max_seq_length,
+                                 tokenizer):
+  """Convert a set of `InputExample`s to a list of `InputFeatures`."""
+
+  features = []
+  for (ex_index, example) in enumerate(examples):
+    if ex_index % 10000 == 0:
+      tf.compat.v1.logging.info("Writing example %d of %d" % (ex_index, len(examples)))
+
+    feature = convert_single_example(ex_index, example, label_list,
+                                     max_seq_length, tokenizer, FLAGS.verbose_logging)
+
+    features.append(feature)
+  return features
+
+def file_based_convert_examples_to_features(
+    examples, label_list, max_seq_length, tokenizer, output_file):
+  """Convert a set of `InputExample`s to a TFRecord file."""
+
+  writer = tf.python_io.TFRecordWriter(output_file)
+
+  for (ex_index, example) in enumerate(examples):
+    if ex_index % 10000 == 0:
+      tf.compat.v1.logging.info("Writing example %d of %d" % (ex_index, len(examples)))
+
+    feature = convert_single_example(ex_index, example, label_list,
+                                     max_seq_length, tokenizer)
+
+    def create_int_feature(values):
+      f = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values)))
+      return f
+
+    features = collections.OrderedDict()
+    features["input_ids"] = create_int_feature(feature.input_ids)
+    features["input_mask"] = create_int_feature(feature.input_mask)
+    features["segment_ids"] = create_int_feature(feature.segment_ids)
+    features["label_ids"] = create_int_feature([feature.label_id])
+    features["is_real_example"] = create_int_feature(
+        [int(feature.is_real_example)])
+
+    tf_example = tf.train.Example(features=tf.train.Features(feature=features))
+    writer.write(tf_example.SerializeToString())
+  writer.close()
+
+def main():
+  processors = {
+      "cola": ColaProcessor,
+      "mnli": MnliProcessor,
+      "mrpc": MrpcProcessor,
+      "xnli": XnliProcessor,
+  }
+  task_name = FLAGS.task_name.lower()
+  if task_name not in processors:
+    raise ValueError("Task not found: %s" % (task_name))
+  processor = processors[task_name]()
+  label_list = processor.get_labels()
+
+  tokenizer = tokenization.FullTokenizer(
+      vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case)
+  tf.gfile.MakeDirs(os.path.join(FLAGS.data_dir, "final_tfrecords_sharded"))
+  train_examples = processor.get_train_examples(FLAGS.data_dir)
+  train_file = os.path.join(FLAGS.data_dir, "final_tfrecords_sharded/" + task_name + "train.tf_record")
+  file_based_convert_examples_to_features(
+      train_examples, label_list, FLAGS.max_seq_length, tokenizer, train_file)
+
+  eval_examples = processor.get_dev_examples(FLAGS.data_dir)
+  eval_file = os.path.join(FLAGS.data_dir, "final_tfrecords_sharded/" + task_name + "eval.tf_record")
+  file_based_convert_examples_to_features(
+      eval_examples, label_list, FLAGS.max_seq_length, tokenizer, eval_file)
+
+  predict_examples = processor.get_test_examples(FLAGS.data_dir)
+  predict_file = os.path.join(FLAGS.data_dir, "final_tfrecords_sharded/" + task_name + "predict.tf_record")
+  file_based_convert_examples_to_features(predict_examples, label_list,
+                                          FLAGS.max_seq_length, tokenizer,
+                                          predict_file)
+
+if __name__ == "__main__":
+  FLAGS = extract_flags()
+  (npu_sess, npu_shutdown) = init_resource()
+  main()
+  shutdown_resource(npu_sess, npu_shutdown)
+  close_session(npu_sess)
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/create_pretraining_data.py b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/create_pretraining_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..79bfc5940e3e253a664472f243ad015a1c6aee38
--- /dev/null
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/create_pretraining_data.py
@@ -0,0 +1,522 @@
+# coding=utf-8
+# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
+# Copyright 2018 The Google AI Language Team Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +"""Create masked LM/next sentence masked_lm TF examples for BERT.""" + +from __future__ import absolute_import, division, print_function, unicode_literals + +import argparse +import logging +import os +import random +from io import open +import h5py +import tensorflow as tf +import numpy as np +from tqdm import tqdm, trange + +from tokenization import BertTokenizer +import tokenization as tokenization + +import random +import collections + +class TrainingInstance(object): + """A single training instance (sentence pair).""" + + def __init__(self, tokens, segment_ids, masked_lm_positions, masked_lm_labels, + is_random_next): + self.tokens = tokens + self.segment_ids = segment_ids + self.is_random_next = is_random_next + self.masked_lm_positions = masked_lm_positions + self.masked_lm_labels = masked_lm_labels + + def __str__(self): + s = "" + s += "tokens: %s\n" % (" ".join( + [tokenization.printable_text(x) for x in self.tokens])) + s += "segment_ids: %s\n" % (" ".join([str(x) for x in self.segment_ids])) + s += "is_random_next: %s\n" % self.is_random_next + s += "masked_lm_positions: %s\n" % (" ".join( + [str(x) for x in self.masked_lm_positions])) + s += "masked_lm_labels: %s\n" % (" ".join( + [tokenization.printable_text(x) for x in self.masked_lm_labels])) + s += "\n" + return s + + def __repr__(self): + return self.__str__() + + +def write_instance_to_example_files(instances, tokenizer, max_seq_length, + max_predictions_per_seq, output_files, output_formats="tfrecord"): + """Create TF example files from `TrainingInstance`s.""" + writers = [] + for output_file in output_files: + writers.append(tf.python_io.TFRecordWriter(output_file)) + + writer_index = 0 + + total_written = 0 + if 'hdf5' in output_formats: + features_hdf5 = collections.OrderedDict() + num_instances = len(instances) + features_hdf5["input_ids"] = np.zeros([num_instances, max_seq_length], dtype="int32") + features_hdf5["input_mask"] = np.zeros([num_instances, max_seq_length], dtype="int32") + features_hdf5["segment_ids"] = np.zeros([num_instances, max_seq_length], dtype="int32") + features_hdf5["masked_lm_positions"] = np.zeros([num_instances, max_predictions_per_seq], dtype="int32") + features_hdf5["masked_lm_ids"] = np.zeros([num_instances, max_predictions_per_seq], dtype="int32") + features_hdf5["next_sentence_labels"] = np.zeros(num_instances, dtype="int32") + + for (inst_index, instance) in enumerate(instances): + input_ids = tokenizer.convert_tokens_to_ids(instance.tokens) + input_mask = [1] * len(input_ids) + segment_ids = list(instance.segment_ids) + assert len(input_ids) <= max_seq_length + + while len(input_ids) < max_seq_length: + input_ids.append(0) + input_mask.append(0) + segment_ids.append(0) + + assert len(input_ids) == max_seq_length + assert len(input_mask) == max_seq_length + assert len(segment_ids) == max_seq_length + + masked_lm_positions = list(instance.masked_lm_positions) + masked_lm_ids = tokenizer.convert_tokens_to_ids(instance.masked_lm_labels) + masked_lm_weights = [1.0] * len(masked_lm_ids) + + while len(masked_lm_positions) < max_predictions_per_seq: + masked_lm_positions.append(0) + masked_lm_ids.append(0) + masked_lm_weights.append(0.0) + + next_sentence_label = 1 if instance.is_random_next else 0 + + features = collections.OrderedDict() + features["input_ids"] = create_int_feature(input_ids) + features["input_mask"] = create_int_feature(input_mask) + features["segment_ids"] = create_int_feature(segment_ids) + features["masked_lm_positions"] = create_int_feature(masked_lm_positions) + 
features["masked_lm_ids"] = create_int_feature(masked_lm_ids) + features["masked_lm_weights"] = create_float_feature(masked_lm_weights) + features["next_sentence_labels"] = create_int_feature([next_sentence_label]) + + if 'tfrecord' in output_formats: + tf_example = tf.train.Example(features=tf.train.Features(feature=features)) + writers[writer_index].write(tf_example.SerializeToString()) + if 'hdf5' in output_formats: + features_hdf5["input_ids"][inst_index] = input_ids + features_hdf5["input_mask"][inst_index] = input_mask + features_hdf5["segment_ids"][inst_index] = segment_ids + features_hdf5["masked_lm_positions"][inst_index] = masked_lm_positions + features_hdf5["masked_lm_ids"][inst_index] = masked_lm_ids + features_hdf5["next_sentence_labels"][inst_index] = next_sentence_label + if 'tfrecord' not in output_formats and 'hdf5' not in output_formats: + assert False, 'Either empty output_formats list or unsupported type specified. Try: tfrecord or hdf5' + + writer_index = (writer_index + 1) % len(writers) + + total_written += 1 + + if inst_index < 20: + tf.compat.v1.logging.info("*** Example ***") + tf.compat.v1.logging.info("tokens: %s" % " ".join( + [tokenization.printable_text(x) for x in instance.tokens])) + + for feature_name in features.keys(): + feature = features[feature_name] + values = [] + if feature.int64_list.value: + values = feature.int64_list.value + elif feature.float_list.value: + values = feature.float_list.value + tf.compat.v1.logging.info( + "%s: %s" % (feature_name, " ".join([str(x) for x in values]))) + + for writer in writers: + writer.close() + + if 'hdf5' in output_formats: + f = h5py.File(output_file, 'w') + f.create_dataset("input_ids", data=features_hdf5["input_ids"], dtype='i4', compression='gzip') + f.create_dataset("input_mask", data=features_hdf5["input_mask"], dtype='i1', compression='gzip') + f.create_dataset("segment_ids", data=features_hdf5["segment_ids"], dtype='i1', compression='gzip') + f.create_dataset("masked_lm_positions", data=features_hdf5["masked_lm_positions"], dtype='i4', compression='gzip') + f.create_dataset("masked_lm_ids", data=features_hdf5["masked_lm_ids"], dtype='i4', compression='gzip') + f.create_dataset("next_sentence_labels", data=features_hdf5["next_sentence_labels"], dtype='i1', compression='gzip') + f.flush() + f.close() + + tf.compat.v1.logging.info("Wrote %d total instances", total_written) + + +def create_int_feature(values): + feature = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values))) + return feature + + +def create_float_feature(values): + feature = tf.train.Feature(float_list=tf.train.FloatList(value=list(values))) + return feature + + +def create_training_instances(input_files, tokenizer, max_seq_length, + dupe_factor, short_seq_prob, masked_lm_prob, + max_predictions_per_seq, rng): + """Create `TrainingInstance`s from raw text.""" + all_documents = [[]] + + # Input file format: + # (1) One sentence per line. These should ideally be actual sentences, not + # entire paragraphs or arbitrary spans of text. (Because we use the + # sentence boundaries for the "next sentence prediction" task). + # (2) Blank lines between documents. Document boundaries are needed so + # that the "next sentence prediction" task doesn't span between documents. 
+ for input_file in input_files: + print("creating instance from {}".format(input_file)) + with open(input_file, "r") as reader: + while True: + line = tokenization.convert_to_unicode(reader.readline()) + if not line: + break + line = line.strip() + + # Empty lines are used as document delimiters + if not line: + all_documents.append([]) + tokens = tokenizer.tokenize(line) + if tokens: + all_documents[-1].append(tokens) + + # Remove empty documents + all_documents = [x for x in all_documents if x] + rng.shuffle(all_documents) + + vocab_words = list(tokenizer.vocab.keys()) + instances = [] + for _ in range(dupe_factor): + for document_index in range(len(all_documents)): + instances.extend( + create_instances_from_document( + all_documents, document_index, max_seq_length, short_seq_prob, + masked_lm_prob, max_predictions_per_seq, vocab_words, rng)) + + rng.shuffle(instances) + return instances + + +def create_instances_from_document( + all_documents, document_index, max_seq_length, short_seq_prob, + masked_lm_prob, max_predictions_per_seq, vocab_words, rng): + """Creates `TrainingInstance`s for a single document.""" + document = all_documents[document_index] + + # Account for [CLS], [SEP], [SEP] + max_num_tokens = max_seq_length - 3 + + # We *usually* want to fill up the entire sequence since we are padding + # to `max_seq_length` anyways, so short sequences are generally wasted + # computation. However, we *sometimes* + # (i.e., short_seq_prob == 0.1 == 10% of the time) want to use shorter + # sequences to minimize the mismatch between pre-training and fine-tuning. + # The `target_seq_length` is just a rough target however, whereas + # `max_seq_length` is a hard limit. + target_seq_length = max_num_tokens + if rng.random() < short_seq_prob: + target_seq_length = rng.randint(2, max_num_tokens) + + # We DON'T just concatenate all of the tokens from a document into a long + # sequence and choose an arbitrary split point because this would make the + # next sentence prediction task too easy. Instead, we split the input into + # segments "A" and "B" based on the actual "sentences" provided by the user + # input. + instances = [] + current_chunk = [] + current_length = 0 + i = 0 + while i < len(document): + segment = document[i] + current_chunk.append(segment) + current_length += len(segment) + if i == len(document) - 1 or current_length >= target_seq_length: + if current_chunk: + # `a_end` is how many segments from `current_chunk` go into the `A` + # (first) sentence. + a_end = 1 + if len(current_chunk) >= 2: + a_end = rng.randint(1, len(current_chunk) - 1) + + tokens_a = [] + for j in range(a_end): + tokens_a.extend(current_chunk[j]) + + tokens_b = [] + # Random next + is_random_next = False + if len(current_chunk) == 1 or rng.random() < 0.5: + is_random_next = True + target_b_length = target_seq_length - len(tokens_a) + + # This should rarely go for more than one iteration for large + # corpora. However, just to be careful, we try to make sure that + # the random document is not the same as the document + # we're processing. 
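+          # Added note: with 10 attempts, the chance that every draw returns
+          # the current document is (1 / len(all_documents)) ** 10, which is
+          # negligible for any realistically sized corpus; the check after
+          # the loop handles the remaining corner case.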
+ for _ in range(10): + random_document_index = rng.randint(0, len(all_documents) - 1) + if random_document_index != document_index: + break + + #If picked random document is the same as the current document + if random_document_index == document_index: + is_random_next = False + + random_document = all_documents[random_document_index] + random_start = rng.randint(0, len(random_document) - 1) + for j in range(random_start, len(random_document)): + tokens_b.extend(random_document[j]) + if len(tokens_b) >= target_b_length: + break + # We didn't actually use these segments so we "put them back" so + # they don't go to waste. + num_unused_segments = len(current_chunk) - a_end + i -= num_unused_segments + # Actual next + else: + is_random_next = False + for j in range(a_end, len(current_chunk)): + tokens_b.extend(current_chunk[j]) + truncate_seq_pair(tokens_a, tokens_b, max_num_tokens, rng) + + assert len(tokens_a) >= 1 + assert len(tokens_b) >= 1 + + tokens = [] + segment_ids = [] + tokens.append("[CLS]") + segment_ids.append(0) + for token in tokens_a: + tokens.append(token) + segment_ids.append(0) + + tokens.append("[SEP]") + segment_ids.append(0) + + for token in tokens_b: + tokens.append(token) + segment_ids.append(1) + tokens.append("[SEP]") + segment_ids.append(1) + + (tokens, masked_lm_positions, + masked_lm_labels) = create_masked_lm_predictions( + tokens, masked_lm_prob, max_predictions_per_seq, vocab_words, rng) + instance = TrainingInstance( + tokens=tokens, + segment_ids=segment_ids, + is_random_next=is_random_next, + masked_lm_positions=masked_lm_positions, + masked_lm_labels=masked_lm_labels) + instances.append(instance) + current_chunk = [] + current_length = 0 + i += 1 + + return instances + + +MaskedLmInstance = collections.namedtuple("MaskedLmInstance", + ["index", "label"]) + + +def create_masked_lm_predictions(tokens, masked_lm_prob, + max_predictions_per_seq, vocab_words, rng): + """Creates the predictions for the masked LM objective.""" + + cand_indexes = [] + for (i, token) in enumerate(tokens): + if token == "[CLS]" or token == "[SEP]": + continue + cand_indexes.append(i) + + rng.shuffle(cand_indexes) + + output_tokens = list(tokens) + + num_to_predict = min(max_predictions_per_seq, + max(1, int(round(len(tokens) * masked_lm_prob)))) + + masked_lms = [] + covered_indexes = set() + for index in cand_indexes: + if len(masked_lms) >= num_to_predict: + break + if index in covered_indexes: + continue + covered_indexes.add(index) + + masked_token = None + # 80% of the time, replace with [MASK] + if rng.random() < 0.8: + masked_token = "[MASK]" + else: + # 10% of the time, keep original + if rng.random() < 0.5: + masked_token = tokens[index] + # 10% of the time, replace with random word + else: + masked_token = vocab_words[rng.randint(0, len(vocab_words) - 1)] + + output_tokens[index] = masked_token + + masked_lms.append(MaskedLmInstance(index=index, label=tokens[index])) + + masked_lms = sorted(masked_lms, key=lambda x: x.index) + + masked_lm_positions = [] + masked_lm_labels = [] + for p in masked_lms: + masked_lm_positions.append(p.index) + masked_lm_labels.append(p.label) + + return (output_tokens, masked_lm_positions, masked_lm_labels) + + +def truncate_seq_pair(tokens_a, tokens_b, max_num_tokens, rng): + """Truncates a pair of sequences to a maximum sequence length.""" + while True: + total_length = len(tokens_a) + len(tokens_b) + if total_length <= max_num_tokens: + break + + trunc_tokens = tokens_a if len(tokens_a) > len(tokens_b) else tokens_b + assert 
len(trunc_tokens) >= 1
+
+    # We want to sometimes truncate from the front and sometimes from the
+    # back to add more randomness and avoid biases.
+    if rng.random() < 0.5:
+      del trunc_tokens[0]
+    else:
+      trunc_tokens.pop()
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    ## Required parameters
+    parser.add_argument("--vocab_file",
+                        default=None,
+                        type=str,
+                        required=True,
+                        help="The vocabulary the BERT model will train on.")
+    parser.add_argument("--input_file",
+                        default=None,
+                        type=str,
+                        required=True,
+                        help="The input train corpus; can be a directory with .txt files or a path to a single file")
+    parser.add_argument("--output_file",
+                        default=None,
+                        type=str,
+                        required=True,
+                        help="Output TF example file (or comma-separated list of files).")
+
+    ## Other parameters
+    # int
+    parser.add_argument("--max_seq_length",
+                        default=128,
+                        type=int,
+                        help="The maximum total input sequence length after WordPiece tokenization. \n"
+                             "Sequences longer than this will be truncated, and sequences shorter \n"
+                             "than this will be padded.")
+    parser.add_argument("--dupe_factor",
+                        default=10,
+                        type=int,
+                        help="Number of times to duplicate the input data (with different masks).")
+    parser.add_argument("--max_predictions_per_seq",
+                        default=20,
+                        type=int,
+                        help="Maximum number of masked LM predictions per sequence.")
+
+    # floats
+
+    parser.add_argument("--masked_lm_prob",
+                        default=0.15,
+                        type=float,
+                        help="Masked LM probability.")
+
+    parser.add_argument("--short_seq_prob",
+                        default=0.1,
+                        type=float,
+                        help="Probability to create a sequence shorter than maximum sequence length")
+
+    parser.add_argument("--do_lower_case",
+                        action='store_true',
+                        default=True,
+                        help="Whether to lower case the input text. True for uncased models, False for cased models.")
+    parser.add_argument('--random_seed',
+                        type=int,
+                        default=12345,
+                        help="random seed for initialization")
+
+    args = parser.parse_args()
+
+    tokenizer = BertTokenizer(args.vocab_file, do_lower_case=args.do_lower_case)
+
+    input_files = []
+    if os.path.isfile(args.input_file):
+      input_files.append(args.input_file)
+    elif os.path.isdir(args.input_file):
+      input_files = [os.path.join(args.input_file, f) for f in os.listdir(args.input_file) if
+                     (os.path.isfile(os.path.join(args.input_file, f)) and f.endswith('.txt'))]
+    else:
+      raise ValueError("{} is not a valid path".format(args.input_file))
+
+    rng = random.Random(args.random_seed)
+    instances = create_training_instances(
+        input_files, tokenizer, args.max_seq_length, args.dupe_factor,
+        args.short_seq_prob, args.masked_lm_prob, args.max_predictions_per_seq,
+        rng)
+
+    output_files = args.output_file.split(",")
+    print("*** Writing to output files ***")
+    for output_file in output_files:
+      print(output_file)
+
+
+    write_instance_to_example_files(instances, tokenizer, args.max_seq_length,
+                                    args.max_predictions_per_seq, output_files)
+
+
+if __name__ == "__main__":
+    main()
+
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/create_squad_data.py b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/create_squad_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..a65748c0ebaca5dea6ca79acb038dc70288d37ab
--- /dev/null
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/create_squad_data.py
@@ -0,0 +1,581 @@
+# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import json
+import math
+import os
+import random
+import modeling
+import optimization
+import tokenization
+import six
+import tensorflow as tf
+#import horovod.tensorflow as hvd
+import time
+
+flags = tf.flags
+FLAGS = None
+
+def extract_flags():
+  flags.DEFINE_integer(
+      "max_seq_length", 384,
+      "The maximum total input sequence length after WordPiece tokenization. "
+      "Sequences longer than this will be truncated, and sequences shorter "
+      "than this will be padded.")
+
+  flags.DEFINE_integer(
+      "doc_stride", 128,
+      "When splitting up a long document into chunks, how much stride to "
+      "take between chunks.")
+
+  flags.DEFINE_integer(
+      "max_query_length", 64,
+      "The maximum number of tokens for the question. Questions longer than "
+      "this will be truncated to this length.")
+
+  flags.DEFINE_bool(
+      "version_2_with_negative", False,
+      "If true, the SQuAD examples contain some that do not have an answer.")
+
+  flags.DEFINE_string("train_file", None,
+                      "SQuAD json for training. E.g., train-v1.1.json")
+
+  flags.DEFINE_string(
+      "predict_file", None,
+      "SQuAD json for predictions. E.g., dev-v1.1.json or test-v1.1.json")
+
+  flags.DEFINE_string(
+      "squad_dir", None,
+      "The output directory where the TFRecord files will be written.")
+
+  flags.DEFINE_string("vocab_file", None,
+                      "The vocabulary file that the BERT model was trained on.")
+
+  flags.DEFINE_bool(
+      "do_lower_case", True,
+      "Whether to lower case the input text. Should be True for uncased "
+      "models and False for cased models.")
+
+  flags.DEFINE_bool(
+      "verbose_logging", False,
+      "If true, all of the warnings related to data processing will be printed. "
+      "A number of warnings are expected for a normal SQuAD evaluation.")
+  flags.mark_flag_as_required("train_file")
+  flags.mark_flag_as_required("predict_file")
+  flags.mark_flag_as_required("squad_dir")
+  flags.mark_flag_as_required("vocab_file")
+  return flags.FLAGS
+
+class SquadExample(object):
+  """A single training/test example for simple sequence classification.
+
+  For examples without an answer, the start and end position are -1.
+ """ + + def __init__(self, + qas_id, + question_text, + doc_tokens, + orig_answer_text=None, + start_position=None, + end_position=None, + is_impossible=False): + self.qas_id = qas_id + self.question_text = question_text + self.doc_tokens = doc_tokens + self.orig_answer_text = orig_answer_text + self.start_position = start_position + self.end_position = end_position + self.is_impossible = is_impossible + + def __str__(self): + return self.__repr__() + + def __repr__(self): + s = "" + s += "qas_id: %s" % (tokenization.printable_text(self.qas_id)) + s += ", question_text: %s" % ( + tokenization.printable_text(self.question_text)) + s += ", doc_tokens: [%s]" % (" ".join(self.doc_tokens)) + if self.start_position: + s += ", start_position: %d" % (self.start_position) + if self.start_position: + s += ", end_position: %d" % (self.end_position) + if self.start_position: + s += ", is_impossible: %r" % (self.is_impossible) + return s + +class InputFeatures(object): + """A single set of features of data.""" + + def __init__(self, + unique_id, + example_index, + doc_span_index, + tokens, + token_to_orig_map, + token_is_max_context, + input_ids, + input_mask, + segment_ids, + start_position=None, + end_position=None, + is_impossible=None): + self.unique_id = unique_id + self.example_index = example_index + self.doc_span_index = doc_span_index + self.tokens = tokens + self.token_to_orig_map = token_to_orig_map + self.token_is_max_context = token_is_max_context + self.input_ids = input_ids + self.input_mask = input_mask + self.segment_ids = segment_ids + self.start_position = start_position + self.end_position = end_position + self.is_impossible = is_impossible + +def read_squad_examples(input_file, is_training, version_2_with_negative=False, input_data=None): + """Return list of SquadExample from input_data or input_file (SQuAD json file)""" + if input_data is None: + with tf.gfile.Open(input_file, "r") as reader: + input_data = json.load(reader)["data"] + + def is_whitespace(c): + if c == " " or c == "\t" or c == "\r" or c == "\n" or ord(c) == 0x202F: + return True + return False + + examples = [] + for entry in input_data: + for paragraph in entry["paragraphs"]: + paragraph_text = paragraph["context"] + doc_tokens = [] + char_to_word_offset = [] + prev_is_whitespace = True + for c in paragraph_text: + if is_whitespace(c): + prev_is_whitespace = True + else: + if prev_is_whitespace: + doc_tokens.append(c) + else: + doc_tokens[-1] += c + prev_is_whitespace = False + char_to_word_offset.append(len(doc_tokens) - 1) + + for qa in paragraph["qas"]: + qas_id = qa["id"] + question_text = qa["question"] + start_position = None + end_position = None + orig_answer_text = None + is_impossible = False + if is_training: + + if version_2_with_negative: + is_impossible = qa["is_impossible"] + if (len(qa["answers"]) != 1) and (not is_impossible): + raise ValueError( + "For training, each question should have exactly 1 answer.") + if not is_impossible: + answer = qa["answers"][0] + orig_answer_text = answer["text"] + answer_offset = answer["answer_start"] + answer_length = len(orig_answer_text) + start_position = char_to_word_offset[answer_offset] + end_position = char_to_word_offset[answer_offset + answer_length - + 1] + # Only add answers where the text can be exactly recovered from the + # document. If this CAN'T happen it's likely due to weird Unicode + # stuff so we will just skip the example. + # + # Note that this means for training mode, every example is NOT + # guaranteed to be preserved. 
+            actual_text = " ".join(
+                doc_tokens[start_position:(end_position + 1)])
+            cleaned_answer_text = " ".join(
+                tokenization.whitespace_tokenize(orig_answer_text))
+            if actual_text.find(cleaned_answer_text) == -1:
+              tf.compat.v1.logging.warning("Could not find answer: '%s' vs. '%s'",
+                                           actual_text, cleaned_answer_text)
+              continue
+          else:
+            start_position = -1
+            end_position = -1
+            orig_answer_text = ""
+
+        example = SquadExample(
+            qas_id=qas_id,
+            question_text=question_text,
+            doc_tokens=doc_tokens,
+            orig_answer_text=orig_answer_text,
+            start_position=start_position,
+            end_position=end_position,
+            is_impossible=is_impossible)
+        examples.append(example)
+
+  return examples
+
+def _check_is_max_context(doc_spans, cur_span_index, position):
+  """Check if this is the 'max context' doc span for the token."""
+
+  # Because of the sliding window approach taken to scoring documents, a single
+  # token can appear in multiple documents. E.g.
+  #  Doc: the man went to the store and bought a gallon of milk
+  #  Span A: the man went to the
+  #  Span B: to the store and bought
+  #  Span C: and bought a gallon of
+  #  ...
+  #
+  # Now the word 'bought' will have two scores from spans B and C. We only
+  # want to consider the score with "maximum context", which we define as
+  # the *minimum* of its left and right context (the *sum* of left and
+  # right context will always be the same, of course).
+  #
+  # In the example the maximum context for 'bought' would be span C since
+  # it has 1 left context and 3 right context, while span B has 4 left context
+  # and 0 right context.
+  best_score = None
+  best_span_index = None
+  for (span_index, doc_span) in enumerate(doc_spans):
+    end = doc_span.start + doc_span.length - 1
+    if position < doc_span.start:
+      continue
+    if position > end:
+      continue
+    num_left_context = position - doc_span.start
+    num_right_context = end - position
+    score = min(num_left_context, num_right_context) + 0.01 * doc_span.length
+    if best_score is None or score > best_score:
+      best_score = score
+      best_span_index = span_index
+
+  return cur_span_index == best_span_index
+
+def _improve_answer_span(doc_tokens, input_start, input_end, tokenizer,
+                         orig_answer_text):
+  """Returns tokenized answer spans that better match the annotated answer."""
+
+  # The SQuAD annotations are character based. We first project them to
+  # whitespace-tokenized words. But then after WordPiece tokenization, we can
+  # often find a "better match". For example:
+  #
+  #   Question: What year was John Smith born?
+  #   Context: The leader was John Smith (1895-1943).
+  #   Answer: 1895
+  #
+  # The original whitespace-tokenized answer will be "(1895-1943).". However
+  # after tokenization, our tokens will be "( 1895 - 1943 ) .". So we can match
+  # the exact answer, 1895.
+  #
+  # However, this is not always possible. Consider the following:
+  #
+  #   Question: What country is the top exporter of electronics?
+  #   Context: The Japanese electronics industry is the largest in the world.
+  #   Answer: Japan
+  #
+  # In this case, the annotator chose "Japan" as a character sub-span of
+  # the word "Japanese". Since our WordPiece tokenizer does not split
+  # "Japanese", we just use "Japanese" as the annotation. This is fairly rare
+  # in SQuAD, but does happen.
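+  # Illustrative trace (added comment; hypothetical tokens): for
+  # doc_tokens = ["(", "1895", "-", "1943", ")", "."], input_start=0,
+  # input_end=5, and an answer that tokenizes to "1895", the nested search
+  # below returns (1, 1), the sub-span that exactly matches the answer.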
+  tok_answer_text = " ".join(tokenizer.tokenize(orig_answer_text))
+
+  for new_start in range(input_start, input_end + 1):
+    for new_end in range(input_end, new_start - 1, -1):
+      text_span = " ".join(doc_tokens[new_start:(new_end + 1)])
+      if text_span == tok_answer_text:
+        return (new_start, new_end)
+
+  return (input_start, input_end)
+
+
+def convert_examples_to_features(examples, tokenizer, max_seq_length,
+                                 doc_stride, max_query_length, is_training,
+                                 output_fn, verbose_logging=False):
+  """Loads a data file into a list of `InputBatch`s."""
+
+  unique_id = 1000000000
+
+  for (example_index, example) in enumerate(examples):
+    query_tokens = tokenizer.tokenize(example.question_text)
+
+    if len(query_tokens) > max_query_length:
+      query_tokens = query_tokens[0:max_query_length]
+
+    tok_to_orig_index = []
+    orig_to_tok_index = []
+    all_doc_tokens = []
+    for (i, token) in enumerate(example.doc_tokens):
+      orig_to_tok_index.append(len(all_doc_tokens))
+      sub_tokens = tokenizer.tokenize(token)
+      for sub_token in sub_tokens:
+        tok_to_orig_index.append(i)
+        all_doc_tokens.append(sub_token)
+
+    tok_start_position = None
+    tok_end_position = None
+    if is_training and example.is_impossible:
+      tok_start_position = -1
+      tok_end_position = -1
+    if is_training and not example.is_impossible:
+      tok_start_position = orig_to_tok_index[example.start_position]
+      if example.end_position < len(example.doc_tokens) - 1:
+        tok_end_position = orig_to_tok_index[example.end_position + 1] - 1
+      else:
+        tok_end_position = len(all_doc_tokens) - 1
+      (tok_start_position, tok_end_position) = _improve_answer_span(
+          all_doc_tokens, tok_start_position, tok_end_position, tokenizer,
+          example.orig_answer_text)
+
+    # The -3 accounts for [CLS], [SEP] and [SEP]
+    max_tokens_for_doc = max_seq_length - len(query_tokens) - 3
+
+    # We can have documents that are longer than the maximum sequence length.
+    # To deal with this we do a sliding window approach, where we take chunks
+    # of up to our max length with a stride of `doc_stride`.
+    _DocSpan = collections.namedtuple(  # pylint: disable=invalid-name
+        "DocSpan", ["start", "length"])
+    doc_spans = []
+    start_offset = 0
+    while start_offset < len(all_doc_tokens):
+      length = len(all_doc_tokens) - start_offset
+      if length > max_tokens_for_doc:
+        length = max_tokens_for_doc
+      doc_spans.append(_DocSpan(start=start_offset, length=length))
+      if start_offset + length == len(all_doc_tokens):
+        break
+      start_offset += min(length, doc_stride)
+
+    for (doc_span_index, doc_span) in enumerate(doc_spans):
+      tokens = []
+      token_to_orig_map = {}
+      token_is_max_context = {}
+      segment_ids = []
+      tokens.append("[CLS]")
+      segment_ids.append(0)
+      for token in query_tokens:
+        tokens.append(token)
+        segment_ids.append(0)
+      tokens.append("[SEP]")
+      segment_ids.append(0)
+
+      for i in range(doc_span.length):
+        split_token_index = doc_span.start + i
+        token_to_orig_map[len(tokens)] = tok_to_orig_index[split_token_index]
+
+        is_max_context = _check_is_max_context(doc_spans, doc_span_index,
+                                               split_token_index)
+        token_is_max_context[len(tokens)] = is_max_context
+        tokens.append(all_doc_tokens[split_token_index])
+        segment_ids.append(1)
+      tokens.append("[SEP]")
+      segment_ids.append(1)
+
+      input_ids = tokenizer.convert_tokens_to_ids(tokens)
+
+      # The mask has 1 for real tokens and 0 for padding tokens. Only real
+      # tokens are attended to.
+      input_mask = [1] * len(input_ids)
+
+      # Zero-pad up to the sequence length.
+ while len(input_ids) < max_seq_length: + input_ids.append(0) + input_mask.append(0) + segment_ids.append(0) + + assert len(input_ids) == max_seq_length + assert len(input_mask) == max_seq_length + assert len(segment_ids) == max_seq_length + + start_position = None + end_position = None + if is_training and not example.is_impossible: + # For training, if our document chunk does not contain an annotation + # we throw it out, since there is nothing to predict. + doc_start = doc_span.start + doc_end = doc_span.start + doc_span.length - 1 + out_of_span = False + if not (tok_start_position >= doc_start and + tok_end_position <= doc_end): + out_of_span = True + if out_of_span: + start_position = 0 + end_position = 0 + else: + doc_offset = len(query_tokens) + 2 + start_position = tok_start_position - doc_start + doc_offset + end_position = tok_end_position - doc_start + doc_offset + + if is_training and example.is_impossible: + start_position = 0 + end_position = 0 + + if verbose_logging and example_index < 20: + tf.compat.v1.logging.info("*** Example ***") + tf.compat.v1.logging.info("unique_id: %s" % (unique_id)) + tf.compat.v1.logging.info("example_index: %s" % (example_index)) + tf.compat.v1.logging.info("doc_span_index: %s" % (doc_span_index)) + tf.compat.v1.logging.info("tokens: %s" % " ".join( + [tokenization.printable_text(x) for x in tokens])) + tf.compat.v1.logging.info("token_to_orig_map: %s" % " ".join( + ["%d:%d" % (x, y) for (x, y) in six.iteritems(token_to_orig_map)])) + tf.compat.v1.logging.info("token_is_max_context: %s" % " ".join([ + "%d:%s" % (x, y) for (x, y) in six.iteritems(token_is_max_context) + ])) + tf.compat.v1.logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) + tf.compat.v1.logging.info( + "input_mask: %s" % " ".join([str(x) for x in input_mask])) + tf.compat.v1.logging.info( + "segment_ids: %s" % " ".join([str(x) for x in segment_ids])) + if is_training and example.is_impossible: + tf.compat.v1.logging.info("impossible example") + if is_training and not example.is_impossible: + answer_text = " ".join(tokens[start_position:(end_position + 1)]) + tf.compat.v1.logging.info("start_position: %d" % (start_position)) + tf.compat.v1.logging.info("end_position: %d" % (end_position)) + tf.compat.v1.logging.info( + "answer: %s" % (tokenization.printable_text(answer_text))) + + feature = InputFeatures( + unique_id=unique_id, + example_index=example_index, + doc_span_index=doc_span_index, + tokens=tokens, + token_to_orig_map=token_to_orig_map, + token_is_max_context=token_is_max_context, + input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + start_position=start_position, + end_position=end_position, + is_impossible=example.is_impossible) + + # Run callback + output_fn(feature) + + unique_id += 1 + +class FeatureWriter(object): + """Writes InputFeature to TF example file.""" + + def __init__(self, filename, is_training): + self.filename = filename + self.is_training = is_training + self.num_features = 0 + self._writer = tf.python_io.TFRecordWriter(filename) + + def process_feature(self, feature): + """Write a InputFeature to the TFRecordWriter as a tf.train.Example.""" + self.num_features += 1 + + def create_int_feature(values): + feature = tf.train.Feature( + int64_list=tf.train.Int64List(value=list(values))) + return feature + + features = collections.OrderedDict() + features["unique_ids"] = create_int_feature([feature.unique_id]) + features["input_ids"] = create_int_feature(feature.input_ids) + features["input_mask"] = 
create_int_feature(feature.input_mask) + features["segment_ids"] = create_int_feature(feature.segment_ids) + + if self.is_training: + features["start_positions"] = create_int_feature([feature.start_position]) + features["end_positions"] = create_int_feature([feature.end_position]) + impossible = 0 + if feature.is_impossible: + impossible = 1 + features["is_impossible"] = create_int_feature([impossible]) + + tf_example = tf.train.Example(features=tf.train.Features(feature=features)) + self._writer.write(tf_example.SerializeToString()) + + def close(self): + self._writer.close() + +def main(): + + FLAGS = extract_flags() + tokenizer = tokenization.FullTokenizer( + vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case) + tf.gfile.MakeDirs(FLAGS.squad_dir + "/final_tfrecords_sharded") + # We write to a temporary file to avoid storing very large constant tensors + # in memory. + train_examples = read_squad_examples( + input_file=FLAGS.train_file, is_training=True, + version_2_with_negative=FLAGS.version_2_with_negative) + train_writer = FeatureWriter( + filename=os.path.join(FLAGS.squad_dir, "final_tfrecords_sharded/train.tf_record"), + is_training=True) + convert_examples_to_features( + examples=train_examples, + tokenizer=tokenizer, + max_seq_length=FLAGS.max_seq_length, + doc_stride=FLAGS.doc_stride, + max_query_length=FLAGS.max_query_length, + is_training=True, + output_fn=train_writer.process_feature, + verbose_logging=FLAGS.verbose_logging) + train_writer.close() + + + eval_examples = read_squad_examples( + input_file=FLAGS.predict_file, is_training=False, + version_2_with_negative=FLAGS.version_2_with_negative) + + eval_writer = FeatureWriter( + filename=os.path.join(FLAGS.squad_dir, "final_tfrecords_sharded/eval.tf_record"), + is_training=False) + eval_features = [] + + def append_feature(feature): + eval_features.append(feature) + eval_writer.process_feature(feature) + + convert_examples_to_features( + examples=eval_examples, + tokenizer=tokenizer, + max_seq_length=FLAGS.max_seq_length, + doc_stride=FLAGS.doc_stride, + max_query_length=FLAGS.max_query_length, + is_training=False, + output_fn=append_feature, + verbose_logging=FLAGS.verbose_logging) + eval_writer.close() + +if __name__ == "__main__": + (npu_sess, npu_shutdown) = init_resource() + main() + shutdown_resource(npu_sess, npu_shutdown) + close_session(npu_sess) diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/dllogger_class.py b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/dllogger_class.py new file mode 100644 index 0000000000000000000000000000000000000000..57b693bbe68b9f3f2e50b5a81f52a617fb498c88 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/dllogger_class.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from dllogger import Logger, StdOutBackend, JSONStreamBackend, Verbosity +import numpy + +class dllogger_class(): + + def format_step(self, step): + if isinstance(step, str): + return step + elif isinstance(step, int): + return "Iteration: {} ".format(step) + elif len(step) > 0: + return "Iteration: {} ".format(step[0]) + else: + return "" + + def __init__(self, log_path="bert_dllog.json"): + self.logger = Logger([ + StdOutBackend(Verbosity.DEFAULT, step_format=self.format_step), + JSONStreamBackend(Verbosity.VERBOSE, log_path), + ]) + self.logger.metadata("mlm_loss", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"}) + self.logger.metadata("nsp_loss", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"}) + self.logger.metadata("avg_loss_step", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"}) + self.logger.metadata("total_loss", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"}) + self.logger.metadata("loss", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"}) + self.logger.metadata("f1", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"}) + self.logger.metadata("precision", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"}) + self.logger.metadata("recall", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"}) + self.logger.metadata("mcc", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"}) + self.logger.metadata("exact_match", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"}) + self.logger.metadata( + "throughput_train", + {"unit": "seq/s", "format": ":.3f", "GOAL": "MAXIMIZE", "STAGE": "TRAIN"}, + ) + self.logger.metadata( + "throughput_inf", + {"unit": "seq/s", "format": ":.3f", "GOAL": "MAXIMIZE", "STAGE": "VAL"}, + ) + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/gpu_affinity.py b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/gpu_affinity.py new file mode 100644 index 0000000000000000000000000000000000000000..01bd3475b80dab20587048116af0287fbd532a90 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/gpu_affinity.py @@ -0,0 +1,80 @@ +# Copyright (c) 2020 NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import math +import os + +import pynvml + +pynvml.nvmlInit() + + +def systemGetDriverVersion(): + return pynvml.nvmlSystemGetDriverVersion() + + +def deviceGetCount(): + return pynvml.nvmlDeviceGetCount() + + +class device: + # assume nvml returns list of 64 bit ints + _nvml_affinity_elements = math.ceil(os.cpu_count() / 64) + + def __init__(self, device_idx): + super().__init__() + self.handle = pynvml.nvmlDeviceGetHandleByIndex(device_idx) + + def getName(self): + return pynvml.nvmlDeviceGetName(self.handle) + + def getCpuAffinity(self): + affinity_string = '' + for j in pynvml.nvmlDeviceGetCpuAffinity( + self.handle, device._nvml_affinity_elements + ): + # assume nvml returns list of 64 bit ints + affinity_string = '{:064b}'.format(j) + affinity_string + affinity_list = [int(x) for x in affinity_string] + affinity_list.reverse() # so core 0 is in 0th element of list + + return [i for i, e in enumerate(affinity_list) if e != 0] + + +def set_affinity(gpu_id=None): + if gpu_id is None: + gpu_id = int(os.getenv('LOCAL_RANK', 0)) + + dev = device(gpu_id) + os.sched_setaffinity(0, dev.getCpuAffinity()) + + # list of ints representing the logical cores this process is now affinitied with + return os.sched_getaffinity(0) + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils.py b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/utils.py similarity index 39% rename from TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils.py rename to TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/utils.py index 56aa173a4054706da7e7064fe7f08336ba5892d7..898b6706958781aecdd532f461925d03128ec82c 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils.py +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/utils.py @@ -1,5 +1,18 @@ -# coding=utf-8 -# Copyright 2018 The TensorFlow Authors. All Rights Reserved. +# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils.py b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/utils.py
similarity index 39%
rename from TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils.py
rename to TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/utils.py
index 56aa173a4054706da7e7064fe7f08336ba5892d7..898b6706958781aecdd532f461925d03128ec82c 100644
--- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils.py
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/src/utils/utils.py
@@ -1,5 +1,18 @@
-# coding=utf-8
-# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,17 +25,31 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+#
+
 import tensorflow as tf
 import time
+import os
+
+def setup_xla_flags():
+  # lazy compilation causes memory fragmentation for BERT, leading to OOM
+  if os.environ.get("TF_XLA_FLAGS", None) is not None:
+    try:
+      os.environ["TF_XLA_FLAGS"] += " --tf_xla_enable_lazy_compilation=false"
+    except Exception:  # mpi 4.0.2 causes a syntax error for '=', fall back to the space-separated form
+      os.environ["TF_XLA_FLAGS"] += " --tf_xla_enable_lazy_compilation false"
+  else:
+    try:
+      os.environ["TF_XLA_FLAGS"] = " --tf_xla_enable_lazy_compilation=false"
+    except Exception:
+      os.environ["TF_XLA_FLAGS"] = " --tf_xla_enable_lazy_compilation false"
 
 # report latency and throughput during eval
-class LogEvalRunHook(tf.train.SessionRunHook):
+class LogEvalRunHook(tf.estimator.SessionRunHook):
   def __init__(self, global_batch_size, hvd_rank=-1):
     self.global_batch_size = global_batch_size
     self.hvd_rank = hvd_rank
-    self.total_time = 0.0
     self.count = 0
-    self.skipped = 0
     self.time_list = []
 
   def before_run(self, run_context):
@@ -31,46 +58,59 @@ class LogEvalRunHook(tf.train.SessionRunHook):
   def after_run(self, run_context, run_values):
     elapsed_secs = time.time() - self.t0
     self.count += 1
-
-    # Removing first 2 (arbitrary) number of startup iterations from perf evaluations
-    if self.count <= 2:
-      print("Skipping time record for ", self.count, " due to overhead")
-      self.skipped += 1
-    else:
-      self.time_list.append(elapsed_secs)
-      self.total_time += elapsed_secs
+    self.time_list.append(elapsed_secs)
 
 # report throughput during training
-class LogTrainRunHook(tf.train.SessionRunHook):
-  def __init__(self, global_batch_size, hvd_rank=-1, save_checkpoints_steps=1000):
+class LogTrainRunHook(tf.estimator.SessionRunHook):
+  def __init__(self, global_batch_size, hvd_rank=-1, save_checkpoints_steps=1000, num_steps_ignore_xla=100):
     self.global_batch_size = global_batch_size
     self.hvd_rank = hvd_rank
     self.save_checkpoints_steps = save_checkpoints_steps
 
     self.total_time = 0.0
     self.count = 0 # Holds number of iterations, including skipped iterations for fp16 loss scaling
+    self.skipped = 0
+    self.num_steps_ignore_xla = num_steps_ignore_xla
+    # initial steps, while XLA is still compiling, need to be ignored in the throughput computation
 
   def after_create_session(self, session, coord):
     self.init_global_step = session.run(tf.train.get_global_step())
 
   def before_run(self, run_context):
     self.t0 = time.time()
-    return tf.train.SessionRunArgs(
-        fetches=['step_update:0'])
+    return tf.estimator.SessionRunArgs(
+        fetches=['global_step:0'])
 
   def after_run(self, run_context, run_values):
     elapsed_secs = time.time() - self.t0
     self.global_step = run_values.results[0]
     self.count += 1
 
-    # Removing first step + first two steps after every checkpoint save
-    if (self.global_step - self.init_global_step) % self.save_checkpoints_steps <= 1:
+    # Skip the first num_steps_ignore_xla steps and the first five steps after every checkpoint save
+    if (self.global_step - self.init_global_step) <= self.num_steps_ignore_xla or (self.global_step - self.init_global_step) % self.save_checkpoints_steps < 5:
       print("Skipping time record for ", self.global_step, " due to checkpoint-saving/warmup overhead")
+      self.skipped += 1
     else:
       self.total_time += elapsed_secs
 
-  def end(self, session):
-    num_global_steps = self.global_step - self.init_global_step
+class ExamplesPerSecondHook(tf.estimator.SessionRunHook):
+  def __init__(self, batch_size, iterations_per_loop=1):
+    self._batch_size = batch_size
+    self._iter_per_loop = iterations_per_loop
+    self.start_time = 0
+    self.end_time = 0
+
+  def before_run(self, run_context):  # pylint: disable=unused-argument
+    self.start_time = time.time()
+    return tf.estimator.SessionRunArgs(fetches=[tf.compat.v1.train.get_global_step(), 'total_loss:0'])
+
+  def after_run(self, run_context, run_values):
+    self.end_time = time.time()
+    elapsed_time = self.end_time - self.start_time
+    global_step, total_loss = run_values.results
+    global_step_per_sec = self._iter_per_loop / elapsed_time
+    examples_per_sec = self._batch_size * global_step_per_sec
+    tf.compat.v1.logging.info('loss = %.7f', total_loss)
+    tf.compat.v1.logging.info('global_step/sec: %g', global_step_per_sec)
+    tf.compat.v1.logging.info('examples/sec: %g', examples_per_sec)
 
-    self.skipped = (num_global_steps // self.save_checkpoints_steps) * 2 + \
-                   min(2, num_global_steps % self.save_checkpoints_steps) - 1
\ No newline at end of file
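A brief usage sketch for the hooks above (illustrative only: it assumes TF1-style Estimator training as used in this repo; `estimator` and `train_input_fn` are placeholders, and 128 is an example global batch size):

    # wire the hook into an Estimator run and derive throughput from its counters
    hook = LogTrainRunHook(global_batch_size=128, save_checkpoints_steps=10000,
                           num_steps_ignore_xla=100)
    estimator.train(input_fn=train_input_fn, max_steps=1000, hooks=[hook])
    steps_timed = hook.count - hook.skipped
    print("seq/s ~", 128 * steps_timed / hook.total_time)  # approximate; warmup steps excluded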
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_full_8p.sh
index 22c6d2c0c807eabf73361a5da501a1faad2ae38f..969311af6d4e42b7683517785d9a6278d8a00926 100644
--- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_full_8p.sh
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_full_8p.sh
@@ -19,8 +19,10 @@ Network="Bert-base_ID0060_for_TensorFlow"
 train_epochs=
 # training batch_size
 batch_size=128
-# training steps
-train_steps=500000
+# training steps: 1140000 / (128*8/256)
+# warmup steps: 10000 / (128*8/256)
+# lr = 1e-4 * (128*8/256)
+train_steps=286000
 # learning rate
 learning_rate=
@@ -115,20 +117,21 @@
         --max_predictions_per_seq=20 \
         --train_batch_size=${batch_size} \
         --learning_rate=1e-4 \
-        --num_warmup_steps=100 \
+        --num_warmup_steps=2500 \
         --num_train_steps=${train_steps} \
         --optimizer_type=adam \
         --manual_fp16=True \
         --use_fp16_cls=True \
-        --input_files_dir=${data_path}/train_phase1 \
-        --eval_files_dir=${data_path}/eval_phase1 \
+        --input_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/training \
+        --eval_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/test \
         --npu_bert_debug=False \
         --npu_bert_use_tdt=True \
         --do_train=True \
+        --do_eval=True \
         --num_accumulation_steps=1 \
         --npu_bert_job_start_file= \
-        --iterations_per_loop=100 \
-        --save_checkpoints_steps=1000 \
+        --iterations_per_loop=1000 \
+        --save_checkpoints_steps=10000 \
         --npu_bert_clip_by_global_norm=False \
         --distributed=True \
         --npu_bert_tail_optimize=True \
@@ -150,7 +153,7 @@ TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${
 echo "Final Performance images/sec : $ActualFPS"
 
 # Output training accuracy; review and modify per model
-train_accuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'`
+train_accuracy=`grep "tensorflow: masked_lm_accuracy" $cur_path/output/0/train_0.log|awk 'END {print $4}'`
 # Print; no modification needed
 echo "Final Train Accuracy : ${train_accuracy}"
 echo "E2E Training Duration sec : $e2e_time"
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_performance_1p.sh
index 186d04e022b2c0caecae8c3c47a0966191fcf4b6..7aad180ab0d2b3b4d3f6f62bbf87e9739a6c2766 100644
--- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_performance_1p.sh
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_performance_1p.sh
@@ -103,13 +103,13 @@
         --max_predictions_per_seq=20 \
         --train_batch_size=${batch_size} \
         --learning_rate=1e-4 \
-        --num_warmup_steps=100 \
+        --num_warmup_steps=0 \
         --num_train_steps=${train_steps} \
         --optimizer_type=adam \
         --manual_fp16=True \
         --use_fp16_cls=True \
-        --input_files_dir=${data_path}/train_phase1 \
-        --eval_files_dir=${data_path}/eval_phase1 \
+        --input_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/training \
+        --eval_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/test \
         --npu_bert_debug=False \
         --npu_bert_use_tdt=True \
         --do_train=True \
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_performance_8p.sh
index bbf3fa24c21ce1358d45f27bc9275efee869645d..fe7f8a287cdfff7ce257f1ba569e64fca4533eca 100644
--- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_performance_8p.sh
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_performance_8p.sh
@@ -115,13 +115,13 @@
         --max_predictions_per_seq=20 \
         --train_batch_size=${batch_size} \
         --learning_rate=1e-4 \
-        --num_warmup_steps=100 \
+        --num_warmup_steps=0 \
         --num_train_steps=${train_steps} \
         --optimizer_type=adam \
         --manual_fp16=True \
         --use_fp16_cls=True \
-        --input_files_dir=${data_path}/train_phase1 \
-        --eval_files_dir=${data_path}/eval_phase1 \
+        --input_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/training \
+        --eval_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/test \
         --npu_bert_debug=False \
         --npu_bert_use_tdt=True \
         --do_train=True \
@@ -163,7 +163,7 @@ CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
 
 # Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
-grep "] loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
 
 # Loss of the last iteration; no modification needed
 ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1641_BertLarge-128_full_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1641_BertLarge-128_full_1p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..9699386e56699c4365a092fc105754927d47c0c1
--- /dev/null
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1641_BertLarge-128_full_1p.sh
@@ -0,0 +1,240 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=1
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertLarge-128_ID1641_for_TensorFlow"
+# Training batch_size
+train_batch_size=32
+# Training epochs
+num_train_epochs=3.0
+# Learning rate
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+# Debug parameters; precision_mode needs review and modification per model
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+# Maintenance parameters; no modification needed below
+over_dump=False
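+# Editor's note: ASCEND_DEVICE_ID is referenced by the two dump paths below but is
+# never exported by this 1p script, so it must be preset in the calling environment.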
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+# Other parameters
+task_name=MRPC
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
+    use_fp16="--noamp"
+fi
+
+
+if [ "$use_xla" = "true" ] ; then
+    use_xla_tag="--use_xla"
+    echo "XLA activated"
+else
+    use_xla_tag="--nouse_xla"
+fi
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage:./train_performance_1P.sh "
+    echo " "
+    echo "parameter explain:
+    --precision_mode         precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump              if or not over detection, default is False
+    --data_dump_flag         data dump flag, default is False
+    --data_dump_step         data dump step, default is 10
+    --profiling              if or not profiling for performance debug, default is False
+    --data_path              source data of training
+    -h/--help                show help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no modification needed
+for para in $*
+do
+    if [[ $para == --task_name* ]];then
+        task_name=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]];then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --ckpt_path* ]];then
+        ckpt_path=`echo ${para#*=}`
+    elif [[ $para == --train_batch_size* ]];then
+        train_batch_size=`echo ${para#*=}`
+    elif [[ $para == --learning_rate* ]];then
+        learning_rate=`echo ${para#*=}`
+    elif [[ $para == --num_train_epochs* ]];then
+        num_train_epochs=`echo ${para#*=}`
+    elif [[ $para == --output_dir* ]];then
+        output_dir=`echo ${para#*=}`
+    elif [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --optimizer_type* ]];then
+        optimizer_type=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+    elif [[ $para == --data_dump_path* ]];then
+        data_dump_path=`echo ${para#*=}`
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --enable_exception_dump* ]];then
+        enable_exception_dump=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    fi
+done
+
+# Verify that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-24_H-1024_A-16
+# Training start time; no modification needed
+start_time=$(date +%s)
+
+# Enter the training script directory; review and modify per model
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $ASCEND_DEVICE_ID"
+    export RANK_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # Core binding; remove for models that do not need it, review and modify for those that do
+    let a=RANK_ID*12
+    let b=RANK_ID+1
+    let c=b*12-1
+
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=False \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Result printing; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review and modify per model
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+# Output training accuracy; review and modify per model
+train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+# Print; no modification needed
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Performance monitoring result summary
+# Training case information; no modification needed
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'acc'
+
+## Get performance data; no modification needed
+# Throughput
+ActualFPS=${FPS}
+# Training time per iteration; no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+# Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information to ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
+
+
+
+
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1641_BertLarge-128_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1641_BertLarge-128_full_8p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..47dcf8501abcb0981054d84a201e156844e4f6c6
--- /dev/null
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1641_BertLarge-128_full_8p.sh
@@ -0,0 +1,250 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=8
+export RANK_TABLE_FILE=$cur_path/../configs/${RANK_SIZE}p.json
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertLarge-128_ID1641_for_TensorFlow"
+# Training batch_size
+train_batch_size=32
+# Training epochs
+num_train_epochs=3.0
+# Learning rate
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+# Debug parameters; precision_mode needs review and modification per model
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+# Maintenance parameters; no modification needed below
+over_dump=False
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+# Other parameters
+task_name=MRPC
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
+    use_fp16="--noamp"
+fi
+
+
+if [ "$use_xla" = "true" ] ; then
+    use_xla_tag="--use_xla"
+    echo "XLA activated"
+else
+    use_xla_tag="--nouse_xla"
+fi
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage:./train_performance_1P.sh "
+    echo " "
+    echo "parameter explain:
+    --precision_mode         precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump              if or not over detection, default is False
+    --data_dump_flag         data dump flag, default is False
+    --data_dump_step         data dump step, default is 10
+    --profiling              if or not profiling for performance debug, default is False
+    --data_path              source data of training
+    -h/--help                show help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no modification needed
+for para in $*
+do
+    if [[ $para == --task_name* ]];then
+        task_name=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]];then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --ckpt_path* ]];then
+        ckpt_path=`echo ${para#*=}`
+    elif [[ $para == --train_batch_size* ]];then
+        train_batch_size=`echo ${para#*=}`
+    elif [[ $para == --learning_rate* ]];then
+        learning_rate=`echo ${para#*=}`
+    elif [[ $para == --num_train_epochs* ]];then
+        num_train_epochs=`echo ${para#*=}`
+    elif [[ $para == --output_dir* ]];then
+        output_dir=`echo ${para#*=}`
+    elif [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --optimizer_type* ]];then
+        optimizer_type=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+    elif [[ $para == --data_dump_path* ]];then
+        data_dump_path=`echo ${para#*=}`
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --enable_exception_dump* ]];then
+        enable_exception_dump=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    fi
+done
+
+# Verify that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-24_H-1024_A-16
+# Training start time; no modification needed
+start_time=$(date +%s)
+
+# Enter the training script directory; review and modify per model
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $RANK_ID"
+    export RANK_ID=$RANK_ID
+    export ASCEND_DEVICE_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # Core binding; remove for models that do not need it, review and modify for those that do
+    let a=RANK_ID*12
+    let b=RANK_ID+1
+    let c=b*12-1
+
+    corenum=`cat /proc/cpuinfo |grep 'processor' | wc -l`
+    let a=RANK_ID*${corenum}/8
+    let b=RANK_ID+1
+    let c=b*${corenum}/8-1
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=True \
+        --npu_bert_tail_optimize=True \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Result printing; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review and modify per model
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+# Output training accuracy; review and modify per model
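+# (eval_results.txt is written by the --do_eval pass; its lines look like
+#  "eval_accuracy = 0.84...", so awk '{print $3}' picks out the numeric value)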
+train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+# Print; no modification needed
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Performance monitoring result summary
+# Training case information; no modification needed
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'acc'
+
+## Get performance data; no modification needed
+# Throughput
+ActualFPS=${FPS}
+# Training time per iteration; no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+# Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information to ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
+
+
+
+
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1641_BertLarge-128_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1641_BertLarge-128_performance_1p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..ca6c54223e9a75c2bcb0a4e38e40b78417dcdad2
--- /dev/null
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1641_BertLarge-128_performance_1p.sh
@@ -0,0 +1,239 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=1
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertLarge-128_ID1641_for_TensorFlow"
+# Training batch_size
+train_batch_size=32
+# Training epochs
+num_train_epochs=1.0
+# Learning rate
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+# Debug parameters; precision_mode needs review and modification per model
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+# Maintenance parameters; no modification needed below
+over_dump=False
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+# Other parameters
+task_name=MRPC
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
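+    # Editor's note: precision is hard-coded to "fp32" above and is not settable
+    # through the argument loop below, so this branch always selects --noamp.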
+    use_fp16="--noamp"
+fi
+
+
+if [ "$use_xla" = "true" ] ; then
+    use_xla_tag="--use_xla"
+    echo "XLA activated"
+else
+    use_xla_tag="--nouse_xla"
+fi
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage:./train_performance_1P.sh "
+    echo " "
+    echo "parameter explain:
+    --precision_mode         precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump              if or not over detection, default is False
+    --data_dump_flag         data dump flag, default is False
+    --data_dump_step         data dump step, default is 10
+    --profiling              if or not profiling for performance debug, default is False
+    --data_path              source data of training
+    -h/--help                show help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no modification needed
+for para in $*
+do
+    if [[ $para == --task_name* ]];then
+        task_name=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]];then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --ckpt_path* ]];then
+        ckpt_path=`echo ${para#*=}`
+    elif [[ $para == --train_batch_size* ]];then
+        train_batch_size=`echo ${para#*=}`
+    elif [[ $para == --learning_rate* ]];then
+        learning_rate=`echo ${para#*=}`
+    elif [[ $para == --num_train_epochs* ]];then
+        num_train_epochs=`echo ${para#*=}`
+    elif [[ $para == --output_dir* ]];then
+        output_dir=`echo ${para#*=}`
+    elif [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --optimizer_type* ]];then
+        optimizer_type=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+    elif [[ $para == --data_dump_path* ]];then
+        data_dump_path=`echo ${para#*=}`
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --enable_exception_dump* ]];then
+        enable_exception_dump=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    fi
+done
+
+# Verify that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-24_H-1024_A-16
+# Training start time; no modification needed
+start_time=$(date +%s)
+
+# Enter the training script directory; review and modify per model
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $ASCEND_DEVICE_ID"
+    export RANK_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # Core binding; remove for models that do not need it, review and modify for those that do
+    let a=RANK_ID*12
+    let b=RANK_ID+1
+    let c=b*12-1
+
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=False \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Result printing; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review and modify per model
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+# Output training accuracy; review and modify per model
+train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+# Print; no modification needed
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Performance monitoring result summary
+# Training case information; no modification needed
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+## Get performance data; no modification needed
+# Throughput
+ActualFPS=${FPS}
+# Training time per iteration; no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+# Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information to ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
+
+
+
+
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1641_BertLarge-128_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1641_BertLarge-128_performance_8p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..4fd17e0612d81ea354a0ca315488a5404dd7f52e
--- /dev/null
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1641_BertLarge-128_performance_8p.sh
@@ -0,0 +1,250 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=8
+export RANK_TABLE_FILE=$cur_path/../configs/${RANK_SIZE}p.json
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertLarge-128_ID1641_for_TensorFlow"
+# Training batch_size
+train_batch_size=32
+# Training epochs
+num_train_epochs=1.0
+# Learning rate
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+# Debug parameters; precision_mode needs review and modification per model
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+# Maintenance parameters; no modification needed below
+over_dump=False
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+# Other parameters
+task_name=MRPC
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
+    use_fp16="--noamp"
+fi
+
+
+if [ "$use_xla" = "true" ] ; then
+    use_xla_tag="--use_xla"
+    echo "XLA activated"
+else
+    use_xla_tag="--nouse_xla"
+fi
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage:./train_performance_1P.sh "
+    echo " "
+    echo "parameter explain:
+    --precision_mode         precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump              if or not over detection, default is False
+    --data_dump_flag         data dump flag, default is False
+    --data_dump_step         data dump step, default is 10
+    --profiling              if or not profiling for performance debug, default is False
+    --data_path              source data of training
+    -h/--help                show help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no modification needed
+for para in $*
+do
+    if [[ $para == --task_name* ]];then
+        task_name=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]];then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --ckpt_path* ]];then
+        ckpt_path=`echo ${para#*=}`
+    elif [[ $para == --train_batch_size* ]];then
+        train_batch_size=`echo ${para#*=}`
+    elif [[ $para == --learning_rate* ]];then
+        learning_rate=`echo ${para#*=}`
+    elif [[ $para == --num_train_epochs* ]];then
+        num_train_epochs=`echo ${para#*=}`
+    elif [[ $para == --output_dir* ]];then
+        output_dir=`echo ${para#*=}`
+    elif [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --optimizer_type* ]];then
+        optimizer_type=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+    elif [[ $para == --data_dump_path* ]];then
+        data_dump_path=`echo ${para#*=}`
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --enable_exception_dump* ]];then
+        enable_exception_dump=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    fi
+done
+
+# Verify that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-24_H-1024_A-16
+# Training start time; no modification needed
+start_time=$(date +%s)
+
+# Enter the training script directory; review and modify per model
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $RANK_ID"
+    export RANK_ID=$RANK_ID
+    export ASCEND_DEVICE_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # Core binding; remove for models that do not need it, review and modify for those that do
+    let a=RANK_ID*12
+    let b=RANK_ID+1
+    let c=b*12-1
+
+    corenum=`cat /proc/cpuinfo |grep 'processor' | wc -l`
+    let a=RANK_ID*${corenum}/8
+    let b=RANK_ID+1
+    let c=b*${corenum}/8-1
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=True \
+        --npu_bert_tail_optimize=True \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Result printing; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review and modify per model
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+# Output training accuracy; review and modify per model
+train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+# Print; no modification needed
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Performance monitoring result summary
+# Training case information; no modification needed
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+## Get performance data; no modification needed
+# Throughput
+ActualFPS=${FPS}
+# Training time per iteration; no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+# Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information to ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
+
+
+
+
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1642_BertLarge-128_full_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1642_BertLarge-128_full_1p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..1c37a56df55fe3b684e00bf5632c3e56d2dfe016
--- /dev/null
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1642_BertLarge-128_full_1p.sh
@@ -0,0 +1,240 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=1
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertLarge-128_ID1642_for_TensorFlow"
+# Training batch_size
+train_batch_size=32
+# Training epochs
+num_train_epochs=3.0
+# Learning rate
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+# Debug parameters; precision_mode needs review and modification per model
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+# Maintenance parameters; no modification needed below
+over_dump=False
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+# Other parameters
+task_name=MNLI
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
+ use_fp16="--noamp" +fi + + +if [ "$use_xla" = "true" ] ; then + use_xla_tag="--use_xla" + echo "XLA activated" +else + use_xla_tag="--nouse_xla" +fi + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]];then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --optimizer_type* ]];then + optimizer_type=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --autotune* ]];then + autotune=`echo ${para#*=}` + mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak + mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak + autotune_dump_path=${cur_path}/output/autotune_dump + mkdir -p ${autotune_dump_path}/GA + mkdir -p ${autotune_dump_path}/rl + cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/ + cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/ + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +bertmodelpath=$ckpt_path/uncased_L-24_H-1024_A-16 +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 +cd $cur_path/../ +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID} + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + fi + + # 绑核,不需要的绑核的模型删除,需要的模型审视修改 + let a=RANK_ID*12 + 
let b=RANK_ID+1 + let c=b*12-1 + + + nohup ${bind_core} python3 ./src/run_classifier.py \ + --task_name=$task_name \ + --do_train=true \ + --do_eval=true \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path \ + --precision_mode=$precision_mode \ + --data_dir=${data_path}/Glue/${task_name} \ + --vocab_file=$bertmodelpath/vocab.txt \ + --bert_config_file=$bertmodelpath/bert_config.json \ + --init_checkpoint=$bertmodelpath/bert_model.ckpt \ + --max_seq_length=128 \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --output_dir=${cur_path}/${output_dir} \ + --horovod=false "$use_fp16" \ + --distributed=False \ + --npu_bert_loss_scale=0 \ + --optimizer_type= $optimizer_type \ + $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#结果打印,不需要修改 +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'` +FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" +#输出训练精度,需要模型审视修改 +train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#性能看护结果汇总 +#训练用例信息,不需要修改 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' + +##获取性能数据,不需要修改 +#吞吐量 +ActualFPS=${FPS} +#单迭代训练时长,不需要修改 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'` + +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1642_BertLarge-128_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1642_BertLarge-128_full_8p.sh new file mode 100644 
index 0000000000000000000000000000000000000000..ae2de14f1a7c5a2dcb02f5c14219825bde4cb4e5 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1642_BertLarge-128_full_8p.sh @@ -0,0 +1,250 @@ +#!/bin/bash +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export RANK_TABLE_FILE=$cur_path/../configs/${RANK_SIZE}p.json +export JOB_ID=10087 +export GE_USE_STATIC_MEMORY=1 +export HCCL_CONNECT_TIMEOUT=600 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge-128_ID1642_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=3.0 +#学习率 +learning_rate=1e-6 +warmup_proportion=0.1 +precision="fp32" +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +optimizer_type="adam" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +#其他参数 +task_name=MNLI +output_dir=ckpt +type=official +use_xla=false +use_fp16="" +if [ "$precision" = "fp16" ] ; then + echo "fp16 activated!" + use_fp16="--amp" +else + echo "fp32/tf32 activated!" + use_fp16="--noamp" +fi + + +if [ "$use_xla" = "true" ] ; then + use_xla_tag="--use_xla" + echo "XLA activated" +else + use_xla_tag="--nouse_xla" +fi + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]];then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --optimizer_type* ]];then + optimizer_type=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --autotune* ]];then + autotune=`echo ${para#*=}` + mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak + mv 
$install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak + autotune_dump_path=${cur_path}/output/autotune_dump + mkdir -p ${autotune_dump_path}/GA + mkdir -p ${autotune_dump_path}/rl + cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/ + cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/ + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +bertmodelpath=$ckpt_path/uncased_L-24_H-1024_A-16 +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 +cd $cur_path/../ +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID} + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + fi + + # 绑核,不需要的绑核的模型删除,需要的模型审视修改 + let a=RANK_ID*12 + let b=RANK_ID+1 + let c=b*12-1 + + corenum=`cat /proc/cpuinfo |grep 'processor' | wc -l` + let a=RANK_ID*${corenum}/8 + let b=RANK_ID+1 + let c=b*${corenum}/8-1 + if [ "x${bind_core}" != x ];then + bind_core="taskset -c $a-$c" + fi + + nohup ${bind_core} python3 ./src/run_classifier.py \ + --task_name=$task_name \ + --do_train=true \ + --do_eval=true \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path \ + --precision_mode=$precision_mode \ + --data_dir=${data_path}/Glue/${task_name} \ + --vocab_file=$bertmodelpath/vocab.txt \ + --bert_config_file=$bertmodelpath/bert_config.json \ + --init_checkpoint=$bertmodelpath/bert_model.ckpt \ + --max_seq_length=128 \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --output_dir=${cur_path}/${output_dir} \ + --horovod=false "$use_fp16" \ + --distributed=True \ + --npu_bert_tail_optimize=True \ + --npu_bert_loss_scale=0 \ + --optimizer_type= $optimizer_type \ + $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#结果打印,不需要修改 +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'` +FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" +#输出训练精度,需要模型审视修改 +train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#性能看护结果汇总 +#训练用例信息,不需要修改 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' + +##获取性能数据,不需要修改 +#吞吐量 +ActualFPS=${FPS} +#单迭代训练时长,不需要修改 +TrainingTime=`awk 'BEGIN{printf 
"%.2f\n",'${BatchSize}'*1000/'${FPS}'}'` + +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1642_BertLarge-128_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1642_BertLarge-128_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..69cf072cf7e89b19a2216521572dd85167cb979c --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1642_BertLarge-128_performance_1p.sh @@ -0,0 +1,239 @@ +#!/bin/bash +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=1 +export JOB_ID=10087 +export GE_USE_STATIC_MEMORY=1 +export HCCL_CONNECT_TIMEOUT=600 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge-128_ID1642_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=1e-6 +warmup_proportion=0.1 +precision="fp32" +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +optimizer_type="adam" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +#其他参数 +task_name=MNLI +output_dir=ckpt +type=official +use_xla=false +use_fp16="" +if [ "$precision" = "fp16" ] ; then + echo "fp16 activated!" + use_fp16="--amp" +else + echo "fp32/tf32 activated!" 
+ use_fp16="--noamp" +fi + + +if [ "$use_xla" = "true" ] ; then + use_xla_tag="--use_xla" + echo "XLA activated" +else + use_xla_tag="--nouse_xla" +fi + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]];then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --optimizer_type* ]];then + optimizer_type=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --autotune* ]];then + autotune=`echo ${para#*=}` + mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak + mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak + autotune_dump_path=${cur_path}/output/autotune_dump + mkdir -p ${autotune_dump_path}/GA + mkdir -p ${autotune_dump_path}/rl + cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/ + cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/ + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +bertmodelpath=$ckpt_path/uncased_L-24_H-1024_A-16 +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 +cd $cur_path/../ +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID} + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + fi + + # 绑核,不需要的绑核的模型删除,需要的模型审视修改 + let a=RANK_ID*12 + 
let b=RANK_ID+1 + let c=b*12-1 + + + nohup ${bind_core} python3 ./src/run_classifier.py \ + --task_name=$task_name \ + --do_train=true \ + --do_eval=true \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path \ + --precision_mode=$precision_mode \ + --data_dir=${data_path}/Glue/${task_name} \ + --vocab_file=$bertmodelpath/vocab.txt \ + --bert_config_file=$bertmodelpath/bert_config.json \ + --init_checkpoint=$bertmodelpath/bert_model.ckpt \ + --max_seq_length=128 \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --output_dir=${cur_path}/${output_dir} \ + --horovod=false "$use_fp16" \ + --distributed=False \ + --npu_bert_loss_scale=0 \ + --optimizer_type= $optimizer_type \ + $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#结果打印,不需要修改 +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'` +FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" +#输出训练精度,需要模型审视修改 +train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#性能看护结果汇总 +#训练用例信息,不需要修改 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取性能数据,不需要修改 +#吞吐量 +ActualFPS=${FPS} +#单迭代训练时长,不需要修改 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'` + +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1642_BertLarge-128_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1642_BertLarge-128_performance_8p.sh new file mode 100644 index 
0000000000000000000000000000000000000000..3643786b982413af500a6299600d215c2c342ed8 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1642_BertLarge-128_performance_8p.sh @@ -0,0 +1,250 @@ +#!/bin/bash +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export RANK_TABLE_FILE=$cur_path/../configs/${RANK_SIZE}p.json +export JOB_ID=10087 +export GE_USE_STATIC_MEMORY=1 +export HCCL_CONNECT_TIMEOUT=600 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge-128_ID1642_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=1e-6 +warmup_proportion=0.1 +precision="fp32" +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +optimizer_type="adam" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +#其他参数 +task_name=MNLI +output_dir=ckpt +type=official +use_xla=false +use_fp16="" +if [ "$precision" = "fp16" ] ; then + echo "fp16 activated!" + use_fp16="--amp" +else + echo "fp32/tf32 activated!" + use_fp16="--noamp" +fi + + +if [ "$use_xla" = "true" ] ; then + use_xla_tag="--use_xla" + echo "XLA activated" +else + use_xla_tag="--nouse_xla" +fi + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]];then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --optimizer_type* ]];then + optimizer_type=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --autotune* ]];then + autotune=`echo ${para#*=}` + mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak + mv 
$install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak + autotune_dump_path=${cur_path}/output/autotune_dump + mkdir -p ${autotune_dump_path}/GA + mkdir -p ${autotune_dump_path}/rl + cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/ + cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/ + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +bertmodelpath=$ckpt_path/uncased_L-24_H-1024_A-16 +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 +cd $cur_path/../ +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID} + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + fi + + # 绑核,不需要的绑核的模型删除,需要的模型审视修改 + let a=RANK_ID*12 + let b=RANK_ID+1 + let c=b*12-1 + + corenum=`cat /proc/cpuinfo |grep 'processor' | wc -l` + let a=RANK_ID*${corenum}/8 + let b=RANK_ID+1 + let c=b*${corenum}/8-1 + if [ "x${bind_core}" != x ];then + bind_core="taskset -c $a-$c" + fi + + nohup ${bind_core} python3 ./src/run_classifier.py \ + --task_name=$task_name \ + --do_train=true \ + --do_eval=true \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path \ + --precision_mode=$precision_mode \ + --data_dir=${data_path}/Glue/${task_name} \ + --vocab_file=$bertmodelpath/vocab.txt \ + --bert_config_file=$bertmodelpath/bert_config.json \ + --init_checkpoint=$bertmodelpath/bert_model.ckpt \ + --max_seq_length=128 \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --output_dir=${cur_path}/${output_dir} \ + --horovod=false "$use_fp16" \ + --distributed=True \ + --npu_bert_tail_optimize=True \ + --npu_bert_loss_scale=0 \ + --optimizer_type= $optimizer_type \ + $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#结果打印,不需要修改 +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'` +FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" +#输出训练精度,需要模型审视修改 +train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#性能看护结果汇总 +#训练用例信息,不需要修改 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取性能数据,不需要修改 +#吞吐量 +ActualFPS=${FPS} +#单迭代训练时长,不需要修改 +TrainingTime=`awk 'BEGIN{printf 
"%.2f\n",'${BatchSize}'*1000/'${FPS}'}'` + +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1643_BertLarge-128_full_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1643_BertLarge-128_full_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..7ae0e26c1e3077a7f1596d36a4259e1b3dcfd02e --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1643_BertLarge-128_full_1p.sh @@ -0,0 +1,240 @@ +#!/bin/bash +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=1 +export JOB_ID=10087 +export GE_USE_STATIC_MEMORY=1 +export HCCL_CONNECT_TIMEOUT=600 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge-128_ID1643_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=3.0 +#学习率 +learning_rate=1e-6 +warmup_proportion=0.1 +precision="fp32" +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +optimizer_type="adam" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +#其他参数 +task_name=CoLA +output_dir=ckpt +type=official +use_xla=false +use_fp16="" +if [ "$precision" = "fp16" ] ; then + echo "fp16 activated!" + use_fp16="--amp" +else + echo "fp32/tf32 activated!" 
+ use_fp16="--noamp" +fi + + +if [ "$use_xla" = "true" ] ; then + use_xla_tag="--use_xla" + echo "XLA activated" +else + use_xla_tag="--nouse_xla" +fi + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]];then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --optimizer_type* ]];then + optimizer_type=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --autotune* ]];then + autotune=`echo ${para#*=}` + mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak + mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak + autotune_dump_path=${cur_path}/output/autotune_dump + mkdir -p ${autotune_dump_path}/GA + mkdir -p ${autotune_dump_path}/rl + cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/ + cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/ + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +bertmodelpath=$ckpt_path/uncased_L-24_H-1024_A-16 +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 +cd $cur_path/../ +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID} + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + fi + + # 绑核,不需要的绑核的模型删除,需要的模型审视修改 + let a=RANK_ID*12 + 
let b=RANK_ID+1 + let c=b*12-1 + + + nohup ${bind_core} python3 ./src/run_classifier.py \ + --task_name=$task_name \ + --do_train=true \ + --do_eval=true \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path \ + --precision_mode=$precision_mode \ + --data_dir=${data_path}/Glue/${task_name} \ + --vocab_file=$bertmodelpath/vocab.txt \ + --bert_config_file=$bertmodelpath/bert_config.json \ + --init_checkpoint=$bertmodelpath/bert_model.ckpt \ + --max_seq_length=128 \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --output_dir=${cur_path}/${output_dir} \ + --horovod=false "$use_fp16" \ + --distributed=False \ + --npu_bert_loss_scale=0 \ + --optimizer_type= $optimizer_type \ + $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#结果打印,不需要修改 +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'` +FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" +#输出训练精度,需要模型审视修改 +train_accuracy=`grep -a 'MCC' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#性能看护结果汇总 +#训练用例信息,不需要修改 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' + +##获取性能数据,不需要修改 +#吞吐量 +ActualFPS=${FPS} +#单迭代训练时长,不需要修改 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'` + +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1643_BertLarge-128_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1643_BertLarge-128_full_8p.sh new file mode 100644 index 
0000000000000000000000000000000000000000..6db26b34a398b43fbdfd6e99c4d069ea1d471d34
--- /dev/null
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1643_BertLarge-128_full_8p.sh
@@ -0,0 +1,250 @@
+#!/bin/bash
+#当前路径,不需要修改
+cur_path=`pwd`
+
+#集合通信参数,不需要修改
+export RANK_SIZE=8
+export RANK_TABLE_FILE=$cur_path/../configs/${RANK_SIZE}p.json
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# 数据集路径,保持为空,不需要修改
+data_path=""
+
+#基础参数,需要模型审视修改
+#网络名称,同目录名称
+Network="BertLarge-128_ID1643_for_TensorFlow"
+#训练batch_size
+train_batch_size=32
+#训练epoch
+num_train_epochs=3.0
+#学习率
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+#维测参数,precision_mode需要模型审视修改
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+#维持参数,以下不需要修改
+over_dump=False
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+#其他参数
+task_name=CoLA
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
+    use_fp16="--noamp"
+fi
+
+
+if [ "$use_xla" = "true" ] ; then
+    use_xla_tag="--use_xla"
+    echo "XLA activated"
+else
+    use_xla_tag="--nouse_xla"
+fi
+
+# 帮助信息,不需要修改
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_ID1643_BertLarge-128_full_8p.sh <args>"
+    echo " "
+    echo "parameter explain:
+    --precision_mode         precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump              whether to enable overflow detection, default is False
+    --data_dump_flag         data dump flag, default is False
+    --data_dump_step         data dump step, default is 0
+    --profiling              whether to enable profiling for performance debugging, default is False
+    --data_path              source data of training
+    -h/--help                show help message
+    "
+    exit 1
+fi
+
+#参数校验,不需要修改
+for para in $*
+do
+    if [[ $para == --task_name* ]];then
+        task_name=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]];then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --ckpt_path* ]];then
+        ckpt_path=`echo ${para#*=}`
+    elif [[ $para == --train_batch_size* ]];then
+        train_batch_size=`echo ${para#*=}`
+    elif [[ $para == --learning_rate* ]];then
+        learning_rate=`echo ${para#*=}`
+    elif [[ $para == --num_train_epochs* ]];then
+        num_train_epochs=`echo ${para#*=}`
+    elif [[ $para == --output_dir* ]];then
+        output_dir=`echo ${para#*=}`
+    elif [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --optimizer_type* ]];then
+        optimizer_type=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+    elif [[ $para == --data_dump_path* ]];then
+        data_dump_path=`echo ${para#*=}`
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --enable_exception_dump* ]];then
+        enable_exception_dump=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv
$install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    fi
+done
+
+#校验是否传入data_path,不需要修改
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-24_H-1024_A-16
+#训练开始时间,不需要修改
+start_time=$(date +%s)
+
+#进入训练脚本目录,需要模型审视修改
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    #设置环境变量,不需要修改
+    echo "Device ID: $RANK_ID"
+    export RANK_ID=$RANK_ID
+    export ASCEND_DEVICE_ID=$RANK_ID
+
+    #创建DeviceID输出目录,不需要修改
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+    fi
+    mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    mkdir -p ${data_dump_path}
+    mkdir -p ${over_dump_path}
+
+    # 绑核,不需要绑核的模型删除,需要的模型审视修改
+    corenum=`cat /proc/cpuinfo |grep 'processor' | wc -l`
+    let a=RANK_ID*${corenum}/8
+    let b=RANK_ID+1
+    let c=b*${corenum}/8-1
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=True \
+        --npu_bert_tail_optimize=True \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+#训练结束时间,不需要修改
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+#结果打印,不需要修改
+echo "------------------ Final result ------------------"
+#输出性能FPS,需要模型审视修改
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+#打印,不需要修改
+echo "Final Performance images/sec : $FPS"
+#输出训练精度,需要模型审视修改
+train_accuracy=`grep -a 'MCC' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+#打印,不需要修改
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+#性能看护结果汇总
+#训练用例信息,不需要修改
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'acc'
+
+##获取性能数据,不需要修改
+#吞吐量
+ActualFPS=${FPS}
+#单迭代训练时长,不需要修改
+TrainingTime=`awk 'BEGIN{printf
"%.2f\n",'${BatchSize}'*1000/'${FPS}'}'` + +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1643_BertLarge-128_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1643_BertLarge-128_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..272bcefe06057cadd3c6934c1566509fcb07d77e --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1643_BertLarge-128_performance_1p.sh @@ -0,0 +1,239 @@ +#!/bin/bash +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=1 +export JOB_ID=10087 +export GE_USE_STATIC_MEMORY=1 +export HCCL_CONNECT_TIMEOUT=600 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge-128_ID1643_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=1e-6 +warmup_proportion=0.1 +precision="fp32" +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +optimizer_type="adam" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +#其他参数 +task_name=CoLA +output_dir=ckpt +type=official +use_xla=false +use_fp16="" +if [ "$precision" = "fp16" ] ; then + echo "fp16 activated!" + use_fp16="--amp" +else + echo "fp32/tf32 activated!" 
+ use_fp16="--noamp" +fi + + +if [ "$use_xla" = "true" ] ; then + use_xla_tag="--use_xla" + echo "XLA activated" +else + use_xla_tag="--nouse_xla" +fi + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]];then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --optimizer_type* ]];then + optimizer_type=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --autotune* ]];then + autotune=`echo ${para#*=}` + mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak + mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak + autotune_dump_path=${cur_path}/output/autotune_dump + mkdir -p ${autotune_dump_path}/GA + mkdir -p ${autotune_dump_path}/rl + cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/ + cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/ + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +bertmodelpath=$ckpt_path/uncased_L-24_H-1024_A-16 +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 +cd $cur_path/../ +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID} + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + fi + + # 绑核,不需要的绑核的模型删除,需要的模型审视修改 + let a=RANK_ID*12 + 
let b=RANK_ID+1 + let c=b*12-1 + + + nohup ${bind_core} python3 ./src/run_classifier.py \ + --task_name=$task_name \ + --do_train=true \ + --do_eval=true \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path \ + --precision_mode=$precision_mode \ + --data_dir=${data_path}/Glue/${task_name} \ + --vocab_file=$bertmodelpath/vocab.txt \ + --bert_config_file=$bertmodelpath/bert_config.json \ + --init_checkpoint=$bertmodelpath/bert_model.ckpt \ + --max_seq_length=128 \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --output_dir=${cur_path}/${output_dir} \ + --horovod=false "$use_fp16" \ + --distributed=False \ + --npu_bert_loss_scale=0 \ + --optimizer_type= $optimizer_type \ + $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#结果打印,不需要修改 +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'` +FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" +#输出训练精度,需要模型审视修改 +train_accuracy=`grep -a 'MCC' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#性能看护结果汇总 +#训练用例信息,不需要修改 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取性能数据,不需要修改 +#吞吐量 +ActualFPS=${FPS} +#单迭代训练时长,不需要修改 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'` + +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1643_BertLarge-128_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1643_BertLarge-128_performance_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..800f730cf15b7acd50ff77a0946ed2b230c4d5bf --- 
/dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID1643_BertLarge-128_performance_8p.sh @@ -0,0 +1,250 @@ +#!/bin/bash +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export RANK_TABLE_FILE=$cur_path/../configs/${RANK_SIZE}p.json +export JOB_ID=10087 +export GE_USE_STATIC_MEMORY=1 +export HCCL_CONNECT_TIMEOUT=600 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge-128_ID1643_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=1e-6 +warmup_proportion=0.1 +precision="fp32" +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +optimizer_type="adam" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +#其他参数 +task_name=CoLA +output_dir=ckpt +type=official +use_xla=false +use_fp16="" +if [ "$precision" = "fp16" ] ; then + echo "fp16 activated!" + use_fp16="--amp" +else + echo "fp32/tf32 activated!" + use_fp16="--noamp" +fi + + +if [ "$use_xla" = "true" ] ; then + use_xla_tag="--use_xla" + echo "XLA activated" +else + use_xla_tag="--nouse_xla" +fi + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]];then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --optimizer_type* ]];then + optimizer_type=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --autotune* ]];then + autotune=`echo ${para#*=}` + mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak + mv $install_path/fwkacllib/data/tiling/Ascend910/custom 
$install_path/fwkacllib/data/tiling/Ascend910/custom_bak + autotune_dump_path=${cur_path}/output/autotune_dump + mkdir -p ${autotune_dump_path}/GA + mkdir -p ${autotune_dump_path}/rl + cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/ + cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/ + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +bertmodelpath=$ckpt_path/uncased_L-24_H-1024_A-16 +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 +cd $cur_path/../ +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID} + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + fi + + # 绑核,不需要的绑核的模型删除,需要的模型审视修改 + let a=RANK_ID*12 + let b=RANK_ID+1 + let c=b*12-1 + + corenum=`cat /proc/cpuinfo |grep 'processor' | wc -l` + let a=RANK_ID*${corenum}/8 + let b=RANK_ID+1 + let c=b*${corenum}/8-1 + if [ "x${bind_core}" != x ];then + bind_core="taskset -c $a-$c" + fi + + nohup ${bind_core} python3 ./src/run_classifier.py \ + --task_name=$task_name \ + --do_train=true \ + --do_eval=true \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path \ + --precision_mode=$precision_mode \ + --data_dir=${data_path}/Glue/${task_name} \ + --vocab_file=$bertmodelpath/vocab.txt \ + --bert_config_file=$bertmodelpath/bert_config.json \ + --init_checkpoint=$bertmodelpath/bert_model.ckpt \ + --max_seq_length=128 \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --output_dir=${cur_path}/${output_dir} \ + --horovod=false "$use_fp16" \ + --distributed=True \ + --npu_bert_tail_optimize=True \ + --npu_bert_loss_scale=0 \ + --optimizer_type= $optimizer_type \ + $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#结果打印,不需要修改 +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'` +FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" +#输出训练精度,需要模型审视修改 +train_accuracy=`grep -a 'MCC' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#性能看护结果汇总 +#训练用例信息,不需要修改 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取性能数据,不需要修改 +#吞吐量 +ActualFPS=${FPS} +#单迭代训练时长,不需要修改 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'` + 
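+# (editor's note) TrainingTime above is the average wall time per training step in
+# milliseconds: ms/step = BatchSize * 1000 / FPS. With hypothetical numbers
+# BatchSize=32 and FPS=400, this prints 32*1000/400 = 80.00.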
+#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_full_8p.sh index 6bd45a8f17431dc892bfb8a26dee2dd5497e23ef..a1fc1f5d2c503ca96bd5de14848cd4e42445df0e 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_full_8p.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_full_8p.sh @@ -18,9 +18,10 @@ Network="BertLarge-128_ID3067_for_TensorFlow" #训练epoch train_epochs=1 #训练batch_size -batch_size=24 -#训练step -train_steps=32000 +batch_size=128 +#训练step 1140000 / (128*8/256) +# warmup step 10000 / (128*8/256) +train_steps=286000 #学习率 learning_rate= @@ -113,26 +114,26 @@ do --max_seq_length=128 \ --max_predictions_per_seq=20 \ --train_batch_size=${batch_size} \ - --learning_rate=1e-4 \ - --num_warmup_steps=1000 \ + --learning_rate=5e-5 \ + --num_warmup_steps=2500 \ --num_train_steps=${train_steps} \ --optimizer_type=adam \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train_phase1 \ - --eval_files_dir=${data_path}/eval_phase1 \ + --input_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ + --do_eval=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ --iterations_per_loop=1000 \ - --save_checkpoints_steps=1000 \ + --save_checkpoints_steps=10000 \ --npu_bert_clip_by_global_norm=False \ --distributed=True \ --npu_bert_tail_optimize=True \ --npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ --over_dump=${over_dump} \ --over_dump_path=${over_dump_path} \ --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & @@ -152,7 +153,7 @@ TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ echo "Final Performance images/sec : $ActualFPS" #输出训练精度,需要模型审视修改 -TrainAccuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END 
{print $3}'` +TrainAccuracy=`grep "tensorflow: masked_lm_accuracy" $cur_path/output/0/train_0.log|awk 'END {print $4}'` #打印,不需要修改 echo "Final Train Accuracy : ${TrainAccuracy}" echo "E2E Training Duration sec : $e2e_time" diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_performance_1p.sh index 94281301911bd64cb7f906bc27567a437663f8d7..422f2a8abd7af4bd77de6533971e1ee9821779ea 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_performance_1p.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_performance_1p.sh @@ -17,9 +17,9 @@ Network="BertLarge-128_ID3067_for_TensorFlow" #训练epoch train_epochs=1 #训练batch_size -batch_size=24 +batch_size=128 #训练step -train_steps=100 +train_steps=1000 #学习率 learning_rate= @@ -102,25 +102,24 @@ do --max_seq_length=128 \ --max_predictions_per_seq=20 \ --train_batch_size=${batch_size} \ - --learning_rate=1e-4 \ + --learning_rate=5e-5 \ --num_warmup_steps=0 \ --num_train_steps=${train_steps} \ --optimizer_type=adam \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train_phase1 \ - --eval_files_dir=${data_path}/eval_phase1 \ + --input_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ - --iterations_per_loop=10 \ - --save_checkpoints_steps=100 \ + --iterations_per_loop=100 \ + --save_checkpoints_steps=1000 \ --npu_bert_clip_by_global_norm=False \ --distributed=False \ --npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ --over_dump=${over_dump} \ --over_dump_path=${over_dump_path} \ --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_performance_8p.sh index eaf3efa0848298b46b2e75f77632e802e47f8a56..e0113d2fc722d29cab9daf9322183d80edb6a06a 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_performance_8p.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_performance_8p.sh @@ -18,7 +18,7 @@ Network="BertLarge-128_ID3067_for_TensorFlow" #训练epoch train_epochs=1 #训练batch_size -batch_size=24 +batch_size=128 #训练step train_steps=1000 #学习率 @@ -113,14 +113,14 @@ do --max_seq_length=128 \ --max_predictions_per_seq=20 \ --train_batch_size=${batch_size} \ - --learning_rate=1e-4 \ - --num_warmup_steps=100 \ + --learning_rate=5e-5 \ + --num_warmup_steps=0 \ --num_train_steps=${train_steps} \ --optimizer_type=adam \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train_phase1 \ - --eval_files_dir=${data_path}/eval_phase1 \ + --input_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ @@ -132,7 +132,6 @@ do --distributed=True \ --npu_bert_tail_optimize=True \ 
--npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ --over_dump=${over_dump} \ --over_dump_path=${over_dump_path} \ --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_full_8p_lamb_phase2.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_full_8p_lamb_phase2.sh index bbd7ab172323decd0044080e173448a5e8efa658..498bdac94311b19233a44b6a7f3d539df89bdeda 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_full_8p_lamb_phase2.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_full_8p_lamb_phase2.sh @@ -20,7 +20,7 @@ train_epochs=1 #训练batch_size batch_size=24 #训练step -train_steps=32000 +train_steps=50000 #学习率 learning_rate= @@ -71,6 +71,8 @@ do mkdir -p ${profiling_dump_path} elif [[ $para == --data_path* ]];then data_path=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` fi done @@ -111,28 +113,29 @@ do fi nohup python3.7 ${cur_path}/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_large_config.json \ --max_seq_length=512 \ - --max_predictions_per_seq=76 \ + --max_predictions_per_seq=80 \ --train_batch_size=${batch_size} \ - --learning_rate=5e-5 \ + --learning_rate=1e-4 \ + --init_checkpoint=${ckpt_path}/mlpref_ckpt/bs64k_32k_ckpt_model.ckpt-28252 \ --num_warmup_steps=1000 \ --num_train_steps=${train_steps} \ --optimizer_type=lamb \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ + --input_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ + --do_eval=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ --iterations_per_loop=1000 \ - --save_checkpoints_steps=1000 \ + --save_checkpoints_steps=10000 \ --npu_bert_clip_by_global_norm=False \ --distributed=True \ --npu_bert_tail_optimize=True \ --npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ --over_dump=${over_dump} \ --over_dump_path=${over_dump_path} \ --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & @@ -152,7 +155,7 @@ TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ echo "Final Performance images/sec : $ActualFPS" #输出训练精度,需要模型审视修改 -TrainAccuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` +TrainAccuracy=`grep "tensorflow: masked_lm_accuracy" $cur_path/output/0/train_0.log|awk 'END {print $4}'` #打印,不需要修改 echo "Final Train Accuracy : ${TrainAccuracy}" echo "E2E Training Duration sec : $e2e_time" diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_performance_1p_lamb_phase2.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_performance_1p_lamb_phase2.sh index b443d8f9e1d3ad02f5b33bc1b499ae56937e35a7..0b4b153fdf2437db87debcf9b7f4b50d76df5b6f 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_performance_1p_lamb_phase2.sh +++ 
b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_performance_1p_lamb_phase2.sh @@ -19,7 +19,7 @@ train_epochs=1 #训练batch_size batch_size=24 #训练step -train_steps=100 +train_steps=200 #学习率 learning_rate= @@ -70,6 +70,8 @@ do mkdir -p ${profiling_dump_path} elif [[ $para == --data_path* ]];then data_path=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` fi done @@ -100,23 +102,24 @@ do #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune nohup python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_large_config.json \ --max_seq_length=512 \ - --max_predictions_per_seq=76 \ + --max_predictions_per_seq=80 \ --train_batch_size=${batch_size} \ --learning_rate=5e-5 \ + --init_checkpoint=${ckpt_path}/mlpref_ckpt/bs64k_32k_ckpt_model.ckpt-28252 \ --num_warmup_steps=0 \ --num_train_steps=${train_steps} \ --optimizer_type=lamb \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ + --input_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ - --iterations_per_loop=10 \ - --save_checkpoints_steps=100 \ + --iterations_per_loop=100 \ + --save_checkpoints_steps=200 \ --npu_bert_clip_by_global_norm=False \ --distributed=False \ --npu_bert_loss_scale=0 \ diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_performance_8p_lamb_phase2.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_performance_8p_lamb_phase2.sh index 052606c33900ae44598ac9f1342a12bfd6622b24..cd9c8e7ef1a0e772f50b30fd2a331a21467d79b3 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_performance_8p_lamb_phase2.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_performance_8p_lamb_phase2.sh @@ -20,7 +20,7 @@ train_epochs=1 #训练batch_size batch_size=24 #训练step -train_steps=100 +train_steps=200 #学习率 learning_rate= @@ -71,6 +71,8 @@ do mkdir -p ${profiling_dump_path} elif [[ $para == --data_path* ]];then data_path=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` fi done @@ -111,23 +113,24 @@ do fi nohup ${bind_core} python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_large_config.json \ --max_seq_length=512 \ - --max_predictions_per_seq=76 \ + --max_predictions_per_seq=80 \ --train_batch_size=${batch_size} \ - --learning_rate=5e-5 \ + --learning_rate=1e-4 \ + --init_checkpoint=${ckpt_path}/mlpref_ckpt/bs64k_32k_ckpt_model.ckpt-28252 \ --num_warmup_steps=0 \ --num_train_steps=${train_steps} \ --optimizer_type=lamb \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ + --input_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ - --iterations_per_loop=10 \ - 
--save_checkpoints_steps=100 \ + --iterations_per_loop=100 \ + --save_checkpoints_steps=200 \ --npu_bert_clip_by_global_norm=False \ --distributed=True \ --npu_bert_tail_optimize=True \ diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_full_8p_lamb_phase2.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_full_8p_lamb_phase2.sh index 71b9cc9758999bacaab1c57de8f9398730e49625..a96fcd5b8629bb23166c55da1e815eb76d2417ce 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_full_8p_lamb_phase2.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_full_8p_lamb_phase2.sh @@ -20,7 +20,7 @@ train_epochs= #训练batch_size batch_size=64 #训练step -train_steps=32000 +train_steps=100000 #学习率 learning_rate= @@ -71,6 +71,8 @@ do mkdir -p ${profiling_dump_path} elif [[ $para == --data_path* ]];then data_path=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` fi done @@ -111,23 +113,25 @@ do fi nohup python3.7 ${cur_path}/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_base_config.json \ --max_seq_length=512 \ - --max_predictions_per_seq=76 \ + --max_predictions_per_seq=80 \ --train_batch_size=${batch_size} \ - --learning_rate=5e-5 \ + --learning_rate=1e-4 \ + --init_checkpoint=${ckpt_path}/bertbase_phase1_npu_ckpt/model.ckpt-660497 \ --num_warmup_steps=1000 \ --num_train_steps=${train_steps} \ --optimizer_type=lamb \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ + --input_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ + --do_eval=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ --iterations_per_loop=1000 \ - --save_checkpoints_steps=1000 \ + --save_checkpoints_steps=10000 \ --npu_bert_clip_by_global_norm=False \ --distributed=True \ --npu_bert_tail_optimize=True \ @@ -151,7 +155,7 @@ TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ echo "Final Performance images/sec : $ActualFPS" #输出训练精度,需要模型审视修改 -TrainAccuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` +TrainAccuracy=`grep "tensorflow: masked_lm_accuracy" $cur_path/output/0/train_0.log|awk 'END {print $4}'` #打印,不需要修改 echo "Final Train Accuracy : ${TrainAccuracy}" echo "E2E Training Duration sec : $e2e_time" diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_performance_1p_lamb_phase2.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_performance_1p_lamb_phase2.sh index c4211aa680962e4a6397800fdc8947eeb950daca..8f58f91459a73a8bd985edc86f836e5fb61ba697 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_performance_1p_lamb_phase2.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_performance_1p_lamb_phase2.sh @@ -70,6 +70,8 @@ do mkdir -p ${profiling_dump_path} elif [[ $para == --data_path* ]];then data_path=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` fi done @@ -100,16 +102,17 @@ do #--data_dir, --model_dir, --precision_mode, --over_dump, 
--over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune nohup python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_base_config.json \ --max_seq_length=512 \ - --max_predictions_per_seq=76 \ + --max_predictions_per_seq=80 \ --train_batch_size=${batch_size} \ --learning_rate=5e-5 \ - --num_warmup_steps=100 \ + --init_checkpoint=${ckpt_path}/bertbase_phase1_npu_ckpt/model.ckpt-660497 \ + --num_warmup_steps=0 \ --num_train_steps=${train_steps} \ --optimizer_type=lamb \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ + --input_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_performance_8p_lamb_phase2.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_performance_8p_lamb_phase2.sh index c46b250a81e04c8721eec06edd11a27e0a1238b1..b9177621709dee0c532bc357298cb74fcef1b042 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_performance_8p_lamb_phase2.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_performance_8p_lamb_phase2.sh @@ -71,6 +71,8 @@ do mkdir -p ${profiling_dump_path} elif [[ $para == --data_path* ]];then data_path=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` fi done @@ -111,16 +113,17 @@ do fi nohup ${bind_core} python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_base_config.json \ --max_seq_length=512 \ - --max_predictions_per_seq=76 \ + --max_predictions_per_seq=80 \ --train_batch_size=${batch_size} \ - --learning_rate=5e-5 \ - --num_warmup_steps=100 \ + --learning_rate=1e-4 \ + --init_checkpoint=${ckpt_path}/bertbase_phase1_npu_ckpt/model.ckpt-660497 \ + --num_warmup_steps=0 \ --num_train_steps=${train_steps} \ --optimizer_type=lamb \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ + --input_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_full_8p.sh index 2da89be7d8602e38a7c3a1e70d3fa50ceda5a4d3..7edb01b264f542152b84c3118fed6e6d4c1b6a47 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_full_8p.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_full_8p.sh @@ -19,8 +19,9 @@ Network="BertBase-512_ID3206_for_TensorFlow" train_epochs= #训练batch_size batch_size=64 -#训练step -train_steps=32000 +#训练step 1144000 / (64*8/256) +# warmup step 10000 / (64*8/256) +train_steps=572000 #学习率 learning_rate= @@ -111,23 +112,26 @@ do fi nohup python3.7 ${cur_path}/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_base_config.json \ --max_seq_length=512 \ - --max_predictions_per_seq=76 \ + 
--max_predictions_per_seq=80 \ --train_batch_size=${batch_size} \ --learning_rate=5e-5 \ - --num_warmup_steps=1000 \ + --num_warmup_steps=5000 \ --num_train_steps=${train_steps} \ --optimizer_type=adam \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ + --input_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ + --graph_memory_max_size=28991029248 \ + --variable_memory_max_size=4294967296 \ --do_train=True \ + --do_eval=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ --iterations_per_loop=1000 \ - --save_checkpoints_steps=1000 \ + --save_checkpoints_steps=10000 \ --npu_bert_clip_by_global_norm=False \ --distributed=True \ --npu_bert_tail_optimize=True \ @@ -151,7 +155,7 @@ TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ echo "Final Performance images/sec : $ActualFPS" #输出训练精度,需要模型审视修改 -TrainAccuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` +TrainAccuracy=`grep "tensorflow: masked_lm_accuracy" $cur_path/output/0/train_0.log|awk 'END {print $4}'` #打印,不需要修改 echo "Final Train Accuracy : ${TrainAccuracy}" echo "E2E Training Duration sec : $e2e_time" diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_performance_1p.sh index 5174684914cc707ec2f2550f4800b6ee19339f4d..dbf695e1e2eb915e0b15416bea04a4ccc94808a8 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_performance_1p.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_performance_1p.sh @@ -100,18 +100,20 @@ do #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune nohup python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_base_config.json \ --max_seq_length=512 \ - --max_predictions_per_seq=76 \ + --max_predictions_per_seq=80 \ --train_batch_size=${batch_size} \ --learning_rate=5e-5 \ - --num_warmup_steps=100 \ + --num_warmup_steps=0 \ --num_train_steps=${train_steps} \ --optimizer_type=adam \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ + --input_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ + --graph_memory_max_size=28991029248 \ + --variable_memory_max_size=4294967296 \ --do_train=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_performance_8p.sh index ae232a8bb20a6c4d0ea0981bf68a05cc3d01fc35..50e2299eaf2775c6a72c8e8e28a28b7812bad360 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_performance_8p.sh +++ 
b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_performance_8p.sh @@ -111,18 +111,20 @@ do fi nohup ${bind_core} python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_base_config.json \ --max_seq_length=512 \ - --max_predictions_per_seq=76 \ + --max_predictions_per_seq=80 \ --train_batch_size=${batch_size} \ --learning_rate=5e-5 \ - --num_warmup_steps=100 \ + --num_warmup_steps=0 \ --num_train_steps=${train_steps} \ --optimizer_type=adam \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ + --input_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ + --graph_memory_max_size=28991029248 \ + --variable_memory_max_size=4294967296 \ --do_train=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_full_8p.sh index 34219b6ed76322d1158c08d180afb6f21ff67faf..3163167dd8b2775db0c2f07e128962b64796f771 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_full_8p.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_full_8p.sh @@ -19,8 +19,9 @@ Network="BertLarge-512_ID3207_for_TensorFlow" train_epochs=1 #训练batch_size batch_size=24 -#训练step -train_steps=32000 +#训练step 1144000 / (24*8/256) +# warmup step 10000 / (24*8/256) +train_steps=1144000 #学习率 learning_rate= @@ -111,28 +112,30 @@ do fi nohup python3.7 ${cur_path}/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_large_config.json \ --max_seq_length=512 \ - --max_predictions_per_seq=76 \ + --max_predictions_per_seq=80 \ --train_batch_size=${batch_size} \ --learning_rate=5e-5 \ - --num_warmup_steps=1000 \ + --num_warmup_steps=10000 \ --num_train_steps=${train_steps} \ --optimizer_type=adam \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ + --input_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ + --npu_bert_npu_dropout=True \ + --npu_bert_npu_dropout_v3=False \ --do_train=True \ + --do_eval=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ --iterations_per_loop=1000 \ - --save_checkpoints_steps=1000 \ + --save_checkpoints_steps=10000 \ --npu_bert_clip_by_global_norm=False \ --distributed=True \ --npu_bert_tail_optimize=True \ --npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ --over_dump=${over_dump} \ --over_dump_path=${over_dump_path} \ --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & @@ -152,7 +155,7 @@ TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ echo "Final Performance images/sec : $ActualFPS" #输出训练精度,需要模型审视修改 -TrainAccuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` +TrainAccuracy=`grep "tensorflow: masked_lm_accuracy" $cur_path/output/0/train_0.log|awk 'END {print 
$4}'` #打印,不需要修改 echo "Final Train Accuracy : ${TrainAccuracy}" echo "E2E Training Duration sec : $e2e_time" diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_performance_1p.sh index c6d99dbfc186c7368f55d9921b774afb9cac7150..29fba085d9de7128f8b2fa6be7fc55e14ec822ce 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_performance_1p.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_performance_1p.sh @@ -19,7 +19,7 @@ train_epochs=1 #训练batch_size batch_size=24 #训练step -train_steps=100 +train_steps=200 #学习率 learning_rate= @@ -100,7 +100,7 @@ do #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune nohup python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_large_config.json \ --max_seq_length=512 \ - --max_predictions_per_seq=76 \ + --max_predictions_per_seq=80 \ --train_batch_size=${batch_size} \ --learning_rate=5e-5 \ --num_warmup_steps=0 \ @@ -108,19 +108,20 @@ do --optimizer_type=adam \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ + --input_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ + --npu_bert_npu_dropout=True \ + --npu_bert_npu_dropout_v3=False \ --do_train=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ - --iterations_per_loop=10 \ - --save_checkpoints_steps=100 \ + --iterations_per_loop=100 \ + --save_checkpoints_steps=200 \ --npu_bert_clip_by_global_norm=False \ --distributed=False \ --npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ --over_dump=${over_dump} \ --over_dump_path=${over_dump_path} \ --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_performance_8p.sh index 413222136ed0b8bd761eabd3c31c6fbb612d9184..760f7ed4b9210978cec6a35db53f9deacae484a8 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_performance_8p.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_performance_8p.sh @@ -20,7 +20,7 @@ train_epochs=1 #训练batch_size batch_size=24 #训练step -train_steps=100 +train_steps=200 #学习率 learning_rate= @@ -111,7 +111,7 @@ do fi nohup ${bind_core} python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_large_config.json \ --max_seq_length=512 \ - --max_predictions_per_seq=76 \ + --max_predictions_per_seq=80 \ --train_batch_size=${batch_size} \ --learning_rate=5e-5 \ --num_warmup_steps=0 \ @@ -119,20 +119,21 @@ do --optimizer_type=adam \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ + --input_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/training \ + 
--eval_files_dir=${data_path}/tfrecord/seq_len_512_max_pred_80/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ + --npu_bert_npu_dropout=True \ + --npu_bert_npu_dropout_v3=False \ --do_train=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ - --iterations_per_loop=10 \ - --save_checkpoints_steps=100 \ + --iterations_per_loop=100 \ + --save_checkpoints_steps=200 \ --npu_bert_clip_by_global_norm=False \ --distributed=True \ --npu_bert_tail_optimize=True \ --npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ --over_dump=${over_dump} \ --over_dump_path=${over_dump_path} \ --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_full_8p_lamb_phase1.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_full_8p_lamb_phase1.sh index 140a44570e3ac90a1338d7f656319818aec9b3ed..669bab2a1cb1a17901680505e5ecd0f4eaa1dc11 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_full_8p_lamb_phase1.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_full_8p_lamb_phase1.sh @@ -19,8 +19,10 @@ Network="BertBase-128_ID3208_for_TensorFlow" train_epochs= #训练batch_size batch_size=128 -#训练step -train_steps=500000 +#训练step 1140000 / (128*8/256) +# warmup step 10000 / (128*8/256) +# lr = 1e-4 * (128*8/256) +train_steps=286000 #学习率 learning_rate= @@ -114,21 +116,22 @@ do --max_seq_length=128 \ --max_predictions_per_seq=20 \ --train_batch_size=${batch_size} \ - --learning_rate=1e-4 \ - --num_warmup_steps=100 \ + --learning_rate=5e-4 \ + --num_warmup_steps=2500 \ --num_train_steps=${train_steps} \ --optimizer_type=lamb \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train_phase1 \ - --eval_files_dir=${data_path}/eval_phase1 \ + --input_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ + --do_eval=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ - --iterations_per_loop=100 \ - --save_checkpoints_steps=1000 \ + --iterations_per_loop=1000 \ + --save_checkpoints_steps=10000 \ --npu_bert_clip_by_global_norm=False \ --distributed=True \ --npu_bert_tail_optimize=True \ @@ -150,7 +153,7 @@ TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ echo "Final Performance images/sec : $ActualFPS" #输出训练精度,需要模型审视修改 -train_accuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` +train_accuracy=`grep "tensorflow: masked_lm_accuracy" $cur_path/output/0/train_0.log|awk 'END {print $4}'` #打印,不需要修改 echo "Final Train Accuracy : ${train_accuracy}" echo "E2E Training Duration sec : $e2e_time" diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_performance_1p_lamb_phase1.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_performance_1p_lamb_phase1.sh index 852c9164a61fb63c4fcafe3a2bf64db31832bc4d..1d2bad58ea09856c3f4f93be4f53847ec2f5b42d 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_performance_1p_lamb_phase1.sh +++ 
b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_performance_1p_lamb_phase1.sh @@ -102,14 +102,14 @@ do --max_seq_length=128 \ --max_predictions_per_seq=20 \ --train_batch_size=${batch_size} \ - --learning_rate=1e-4 \ - --num_warmup_steps=100 \ + --learning_rate=5e-4 \ + --num_warmup_steps=0 \ --num_train_steps=${train_steps} \ --optimizer_type=lamb \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train_phase1 \ - --eval_files_dir=${data_path}/eval_phase1 \ + --input_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_performance_8p_lamb_phase1.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_performance_8p_lamb_phase1.sh index a7270327ad511ea6ec94e9f1ca81bf88ba53e1ed..0dab73c7c101eb1517dc6752bf3d70c5d3497557 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_performance_8p_lamb_phase1.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_performance_8p_lamb_phase1.sh @@ -114,14 +114,14 @@ do --max_seq_length=128 \ --max_predictions_per_seq=20 \ --train_batch_size=${batch_size} \ - --learning_rate=1e-4 \ - --num_warmup_steps=100 \ + --learning_rate=5e-4 \ + --num_warmup_steps=0 \ --num_train_steps=${train_steps} \ --optimizer_type=lamb \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train_phase1 \ - --eval_files_dir=${data_path}/eval_phase1 \ + --input_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ @@ -163,7 +163,7 @@ CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' #从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 -grep "] loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt #最后一个迭代loss值,不需要修改 ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_full_1p_lamb_phase1.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_full_1p_lamb_phase1.sh deleted file mode 100644 index 6e6b4f2b5067773a2c24d3447ff6f2ec49aeea35..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_full_1p_lamb_phase1.sh +++ /dev/null @@ -1,171 +0,0 @@ -#!/bin/bash - -#当前路径,不需要修改 -cur_path=`pwd` - -#集合通信参数,不需要修改 -export RANK_SIZE=1 -export JOB_ID=99990001 -RANK_ID_START=0 - -# 数据集路径,保持为空,不需要修改 -data_path="" - -#基础参数,需要模型审视修改 -#网络名称,同目录名称 -Network="BertLarge-128_ID3209_for_TensorFlow" -#训练epoch -train_epochs=1 -#训练batch_size -batch_size=24 -#训练step -train_steps=100000 -#学习率 -learning_rate= - -#维测参数,precision_mode需要模型审视修改 
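The loss-extraction change above (grep "tensorflow:loss =" instead of grep "] loss =") keeps the two-stage awk pattern used throughout these scripts: split on "loss = " to drop the log prefix, then split on "," to drop the step counter. A minimal sketch, assuming an estimator log line of the form shown (the format is illustrative, not taken from this patch):

line="INFO:tensorflow:loss = 7.1234, step = 200"                          # assumed log line
echo "$line" | awk -F "loss = " '{print $2}'                              # -> 7.1234, step = 200
echo "$line" | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}'    # -> 7.1234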
-#precision_mode="allow_mix_precision" -#维持参数,以下不需要修改 -over_dump=False -data_dump_flag=False -data_dump_step="10" -profiling=False -autotune=False - -# 帮助信息,不需要修改 -if [[ $1 == --help || $1 == -h ]];then - echo"usage:./train_full_1p.sh " - echo " " - echo "parameter explain: - --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) - --over_dump if or not over detection, default is False - --data_dump_flag data dump flag, default is False - --data_dump_step data dump step, default is 10 - --profiling if or not profiling for performance debug, default is False - --autotune whether to enable autotune, default is False - --data_path source data of training - -h/--help show help message - " - exit 1 -fi - -#参数校验,不需要修改 -for para in $* -do - if [[ $para == --precision_mode* ]];then - precision_mode=`echo ${para#*=}` - elif [[ $para == --over_dump* ]];then - over_dump=`echo ${para#*=}` - over_dump_path=${cur_path}/output/overflow_dump - mkdir -p ${over_dump_path} - elif [[ $para == --data_dump_flag* ]];then - data_dump_flag=`echo ${para#*=}` - data_dump_path=${cur_path}/output/data_dump - mkdir -p ${data_dump_path} - elif [[ $para == --data_dump_step* ]];then - data_dump_step=`echo ${para#*=}` - elif [[ $para == --profiling* ]];then - profiling=`echo ${para#*=}` - profiling_dump_path=${cur_path}/output/profiling - mkdir -p ${profiling_dump_path} - elif [[ $para == --data_path* ]];then - data_path=`echo ${para#*=}` - fi -done - -#校验是否传入data_path,不需要修改 -if [[ $data_path == "" ]];then - echo "[Error] para \"data_path\" must be confing" - exit 1 -fi - -#训练开始时间,不需要修改 -start_time=$(date +%s) -#进入训练脚本目录,需要模型审视修改 -for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); -do - #设置环境变量,不需要修改 - echo "Device ID: $ASCEND_DEVICE_ID" - export RANK_ID=$RANK_ID - - #创建DeviceID输出目录,不需要修改 - if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then - rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} - else - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} - fi - - #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 - #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune - nohup python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_large_config.json \ - --max_seq_length=128 \ - --max_predictions_per_seq=20 \ - --train_batch_size=${batch_size} \ - --learning_rate=1e-4 \ - --num_warmup_steps=10000 \ - --num_train_steps=${train_steps} \ - --optimizer_type=lamb \ - --manual_fp16=True \ - --use_fp16_cls=True \ - --input_files_dir=${data_path}/train_phase1 \ - --eval_files_dir=${data_path}/eval_phase1 \ - --npu_bert_debug=False \ - --npu_bert_use_tdt=True \ - --do_train=True \ - --num_accumulation_steps=1 \ - --npu_bert_job_start_file= \ - --iterations_per_loop=1000 \ - --save_checkpoints_steps=1000 \ - --npu_bert_clip_by_global_norm=False \ - --distributed=False \ - --npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ - --over_dump=${over_dump} \ - --over_dump_path=${over_dump_path} \ - --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & -done -wait - -#训练结束时间,不需要修改 -end_time=$(date +%s) -e2e_time=$(( $end_time - $start_time )) - -#结果打印,不需要修改 -echo "------------------ Final result ------------------" -#输出性能FPS,需要模型审视修改 -ActualFPS=`grep 
Throughput ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk 'END {print $6}'` -TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ActualFPS}'}'` -#打印,不需要修改 -echo "Final Performance images/sec : $ActualFPS" - -#输出训练精度,需要模型审视修改 -TrainAccuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` -#打印,不需要修改 -echo "Final Train Accuracy : ${TrainAccuracy}" -echo "E2E Training Duration sec : $e2e_time" - -#稳定性精度看护结果汇总 -#训练用例信息,不需要修改 -BatchSize=${batch_size} -DeviceType=`uname -m` -CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' - - -#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 -grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt - -#最后一个迭代loss值,不需要修改 -ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` - -#关键信息打印到${CaseName}.log中,不需要修改 -echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainAccuracy = ${TrainAccuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_full_8p_lamb_phase1.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_full_8p_lamb_phase1.sh index e3a80ebb0f2207a3d1a3855af29d4b9d6407b1a1..06442a11f4536e46f56d3bfb85b1033ab1a42614 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_full_8p_lamb_phase1.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_full_8p_lamb_phase1.sh @@ -18,9 +18,11 @@ Network="BertLarge-128_ID3209_for_TensorFlow" #训练epoch train_epochs=1 #训练batch_size -batch_size=24 -#训练step -train_steps=32000 +batch_size=128 +#训练step 1140000 / (128*8/256) +# warmup step 10000 / (128*8/256) +# lr = 1e-4 * (128*8/256) +train_steps=286000 #学习率 learning_rate= @@ -113,26 +115,26 @@ do --max_seq_length=128 \ --max_predictions_per_seq=20 \ --train_batch_size=${batch_size} \ - --learning_rate=1e-4 \ - --num_warmup_steps=1000 \ + --learning_rate=4e-4 \ + --num_warmup_steps=2500 \ --num_train_steps=${train_steps} \ --optimizer_type=lamb \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train_phase1 \ - --eval_files_dir=${data_path}/eval_phase1 \ + --input_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ + --do_eval=True \ --num_accumulation_steps=1 
\ --npu_bert_job_start_file= \ --iterations_per_loop=1000 \ - --save_checkpoints_steps=1000 \ + --save_checkpoints_steps=10000 \ --npu_bert_clip_by_global_norm=False \ --distributed=True \ --npu_bert_tail_optimize=True \ --npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ --over_dump=${over_dump} \ --over_dump_path=${over_dump_path} \ --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & @@ -152,7 +154,7 @@ TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ echo "Final Performance images/sec : $ActualFPS" #输出训练精度,需要模型审视修改 -TrainAccuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` +TrainAccuracy=`grep "tensorflow: masked_lm_accuracy" $cur_path/output/0/train_0.log|awk 'END {print $4}'` #打印,不需要修改 echo "Final Train Accuracy : ${TrainAccuracy}" echo "E2E Training Duration sec : $e2e_time" diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_performance_1p_lamb_phase1.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_performance_1p_lamb_phase1.sh index 5127e43ddcc0b12879d8baf6ea7c7e1efc4a698c..7efe7affe36ca6967bc82a65c1982583904f66a2 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_performance_1p_lamb_phase1.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_performance_1p_lamb_phase1.sh @@ -17,9 +17,9 @@ Network="BertLarge-128_ID3209_for_TensorFlow" #训练epoch train_epochs=1 #训练batch_size -batch_size=24 +batch_size=128 #训练step -train_steps=100 +train_steps=1000 #学习率 learning_rate= @@ -102,25 +102,24 @@ do --max_seq_length=128 \ --max_predictions_per_seq=20 \ --train_batch_size=${batch_size} \ - --learning_rate=1e-4 \ + --learning_rate=5e-5 \ --num_warmup_steps=0 \ --num_train_steps=${train_steps} \ --optimizer_type=lamb \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train_phase1 \ - --eval_files_dir=${data_path}/eval_phase1 \ + --input_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ --num_accumulation_steps=1 \ --npu_bert_job_start_file= \ - --iterations_per_loop=10 \ - --save_checkpoints_steps=100 \ + --iterations_per_loop=100 \ + --save_checkpoints_steps=1000 \ --npu_bert_clip_by_global_norm=False \ --distributed=False \ --npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ --over_dump=${over_dump} \ --over_dump_path=${over_dump_path} \ --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_performance_8p_lamb_phase1.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_performance_8p_lamb_phase1.sh index c0bfb6b7b04686e98cb18c3706406550d79726ee..8b6c7930f03310a420863a3f9307c7b5ab1d0d04 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_performance_8p_lamb_phase1.sh +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3209_BertLarge-128_performance_8p_lamb_phase1.sh @@ -14,11 +14,11 @@ data_path="" #基础参数,需要模型审视修改 
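The schedule comments introduced in these hunks ("1140000 / (128*8/256)", "10000 / (128*8/256)", "lr = 1e-4 * (128*8/256)") apply a linear-scaling rule against a reference configuration at global batch 256: train and warmup steps shrink, and the learning rate grows, by the ratio of the new global batch to 256. A sketch of that arithmetic (illustration only; the scripts hard-code rounded results such as 286000 rather than computing them):

batch_size=128; RANK_SIZE=8
scale=$(awk "BEGIN{print ${batch_size}*${RANK_SIZE}/256}")   # 4
awk "BEGIN{print int(1140000/${scale})}"                     # 285000 train steps (rounded to 286000 above)
awk "BEGIN{print int(10000/${scale})}"                       # 2500 warmup steps
awk "BEGIN{printf \"%.0e\n\", 0.0001*${scale}}"              # 4e-04 learning rate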
#网络名称,同目录名称 -Network="BertLarge-128_ID3067_for_TensorFlow" +Network="BertLarge-128_ID3209_for_TensorFlow" #训练epoch train_epochs=1 #训练batch_size -batch_size=24 +batch_size=128 #训练step train_steps=1000 #学习率 @@ -113,14 +113,14 @@ do --max_seq_length=128 \ --max_predictions_per_seq=20 \ --train_batch_size=${batch_size} \ - --learning_rate=1e-4 \ - --num_warmup_steps=100 \ + --learning_rate=5e-5 \ + --num_warmup_steps=0 \ --num_train_steps=${train_steps} \ --optimizer_type=lamb \ --manual_fp16=True \ --use_fp16_cls=True \ - --input_files_dir=${data_path}/train_phase1 \ - --eval_files_dir=${data_path}/eval_phase1 \ + --input_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/training \ + --eval_files_dir=${data_path}/tfrecord/seq_len_128_max_pred_20/wikicorpus_en/test \ --npu_bert_debug=False \ --npu_bert_use_tdt=True \ --do_train=True \ @@ -132,7 +132,6 @@ do --distributed=True \ --npu_bert_tail_optimize=True \ --npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ --over_dump=${over_dump} \ --over_dump_path=${over_dump_path} \ --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3217_BertBase-Squad1.1_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3217_BertBase-Squad1.1_full_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..98922c52a5f6173a4763268a47581b268ba4be2d --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3217_BertBase-Squad1.1_full_8p.sh @@ -0,0 +1,204 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export JOB_ID=99990001 +export RANK_TABLE_FILE=${cur_path}/../configs/8p.json +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertBase-Squad1.1_ID3217_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练epoch +num_train_epochs=2.0 +#学习率 +learning_rate=5e-6 +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +if [[ $1 == --help || $1 == -h ]];then + echo "usage: ./train_full_8p.sh " + + echo "" + echo "parameter explain: + --task_name finetune dataset + --data_path source data of training + --train_batch_size training batch + --learning_rate learning_rate + --enable_exception_dump enable_exception_dump + --num_train_epochs epochs + --output_dir output dir + -h/--help Show help message + " + exit 1 +fi + +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --squad_version* ]];then + squad_version=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ 
$para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + fi +done + +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be configured" + exit 1 +fi +model_path=${data_path}/model + +#训练开始时间,不需要修改 +start_time=$(date +%s) +#进入训练脚本目录,需要模型审视修改 +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + fi + + nohup python3.7 ${cur_path}/../src/run_squad.py \ + --precision_mode=$precision_mode \ + --vocab_file=${model_path}/vocab.txt \ + --bert_config_file=${model_path}/bert_config.json \ + --init_checkpoint=${model_path}/bert_model.ckpt \ + --do_train=True \ + --train_file=${data_path}/dataset/squad_v1.1_train.tf_record \ + --do_predict=True \ + --predict_file=${data_path}/dataset/dev-v1.1.json \ + --eval_script=${data_path}/dataset/evaluate-v1.1.py \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --save_checkpoints_steps=1000 \ + --distributed=True \ + --npu_bert_tail_optimize=True \ + --npu_bert_loss_scale=0 \ + --output_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############结果处理######################### +#输出性能FPS,需要模型审视修改 +FPS=`grep "tensorflow:examples/sec" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $2}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +train_accuracy=`grep "tensorflow:f1 =" $cur_path/output/0/train_0.log|awk 'END {print $3}'` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" + +##获取性能数据 +#吞吐量 +ActualFPS=$FPS +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${train_batch_size}'*1000/'${FPS}'}'` + +##冒烟看护字段 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F " " '{print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +#最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`) + +#关键信息打印到CaseName.log中 +echo "Network = ${Network}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
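In the SQuAD scripts, the TrainingTime field computed above converts the estimator's examples/sec throughput into milliseconds per training step via train_batch_size * 1000 / FPS; the pretraining scripts use the analogous batch_size * RANK_SIZE / ActualFPS, which yields seconds per global step across all devices. A worked example with illustrative numbers only:

FPS=512; train_batch_size=32
awk "BEGIN{printf \"%.2f\n\", ${train_batch_size}*1000/${FPS}}"   # 62.50 ms per step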
+echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + + + + + + + + + + + + + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3217_BertBase-Squad1.1_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3217_BertBase-Squad1.1_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..98f551f0177d02dcd470d3fc1773ea9abae9b64e --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3217_BertBase-Squad1.1_performance_1p.sh @@ -0,0 +1,200 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=1 +export JOB_ID=10087 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertBase-Squad1.1_ID3217_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=5e-6 +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +if [[ $1 == --help || $1 == -h ]];then + echo "usage: ./train_full_1p.sh " + + echo "" + echo "parameter explain: + --task_name finetune dataset + --data_path source data of training + --train_batch_size training batch + --learning_rate learning_rate + --enable_exception_dump enable_exception_dump + --num_train_epochs epochs + --output_dir output dir + -h/--help Show help message + " + exit 1 +fi + +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --squad_version* ]];then + squad_version=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + fi +done + +if [[ 
$data_path == "" ]];then + echo "[Error] para \"data_path\" must be configured" + exit 1 +fi +model_path=${data_path}/model + +#训练开始时间,不需要修改 +start_time=$(date +%s) +#进入训练脚本目录,需要模型审视修改 +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + fi + + nohup python3.7 ${cur_path}/../src/run_squad.py \ + --precision_mode=$precision_mode \ + --vocab_file=${model_path}/vocab.txt \ + --bert_config_file=${model_path}/bert_config.json \ + --init_checkpoint=${model_path}/bert_model.ckpt \ + --do_train=True \ + --train_file=${data_path}/dataset/squad_v1.1_train.tf_record \ + --do_predict=False \ + --predict_file=${data_path}/dataset/dev-v1.1.json \ + --eval_script=${data_path}/dataset/evaluate-v1.1.py \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --save_checkpoints_steps=1000 \ + --npu_bert_loss_scale=0 \ + --num_train_steps=1000 \ + --output_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############结果处理######################### +#输出性能FPS,需要模型审视修改 +FPS=`grep "tensorflow:examples/sec" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $2}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +#train_accuracy=`grep "f1 =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $3}'` +#打印,不需要修改 +#echo "Final Train Accuracy : ${train_accuracy}" + +##获取性能数据 +#吞吐量 +ActualFPS=$FPS +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${train_batch_size}'*1000/'${FPS}'}'` + +##冒烟看护字段 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F " " '{print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +#最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`) + +#关键信息打印到CaseName.log中 +echo "Network = ${Network}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "TrainAccuracy = ${Accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo 
"ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + + + + + + + + + + + + + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3217_BertBase-Squad1.1_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3217_BertBase-Squad1.1_performance_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..4501084fc3cd0f2b93aaa5d2d52ccb2865562914 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3217_BertBase-Squad1.1_performance_8p.sh @@ -0,0 +1,204 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export JOB_ID=99990001 +export RANK_TABLE_FILE=${cur_path}/../configs/8p.json +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertBase-Squad1.1_ID3217_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=5e-6 +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +if [[ $1 == --help || $1 == -h ]];then + echo "usage: ./train_full_1p.sh " + + echo "" + echo "parameter explain: + --task_name finetune dataset + --data_path source data of training + --train_batch_size training batch + --learning_rate learning_rate + --enable_exception_dump enable_exception_dump + --num_train_epochs epochs + --output_dir output dir + -h/--help Show help message + " + exit 1 +fi + +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --squad_version* ]];then + squad_version=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + fi +done + +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +model_path=${data_path}/model + +#训练开始时间,不需要修改 +start_time=$(date +%s) +#进入训练脚本目录,需要模型审视修改 +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf 
${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + fi + + nohup python3.7 ${cur_path}/../src/run_squad.py \ + --precision_mode=$precision_mode \ + --vocab_file=${model_path}/vocab.txt \ + --bert_config_file=${model_path}/bert_config.json \ + --init_checkpoint=${model_path}/bert_model.ckpt \ + --do_train=True \ + --train_file=${data_path}/dataset/squad_v1.1_train.tf_record \ + --do_predict=False \ + --predict_file=${data_path}/dataset/dev-v1.1.json \ + --eval_script=${data_path}/dataset/evaluate-v1.1.py \ + --train_batch_size=${train_batch_size} \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --save_checkpoints_steps=1000 \ + --distributed=True \ + --npu_bert_tail_optimize=True \ + --npu_bert_loss_scale=0 \ + --output_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############结果处理######################### +#输出性能FPS,需要模型审视修改 +FPS=`grep "tensorflow:examples/sec" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $2}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +#train_accuracy=`grep "f1 =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $3}'` +#打印,不需要修改 +#echo "Final Train Accuracy : ${train_accuracy}" + +##获取性能数据 +#吞吐量 +ActualFPS=$FPS +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${train_batch_size}'*1000/'${FPS}'}'` + +##冒烟看护字段 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F " " '{print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +#最后一个迭代loss值,不需要修改' +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`) + +#关键性息打印到CaseName.log中 +echo "Network = ${Network}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "TrainAccuracy = ${Accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + + + + + + + + + + + + + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3218_BertLarge-Squad1.1_full_8p.sh 
b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3218_BertLarge-Squad1.1_full_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..42d0cc1724ad00cf29859b4388425a016eb8a3f4 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3218_BertLarge-Squad1.1_full_8p.sh @@ -0,0 +1,205 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export JOB_ID=99990001 +export RANK_TABLE_FILE=${cur_path}/../configs/8p.json +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge-Squad1.1_ID3218_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=2.0 +#学习率 +learning_rate=5e-6 +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +if [[ $1 == --help || $1 == -h ]];then + echo "usage: ./train_full_1p.sh " + + echo "" + echo "parameter explain: + --task_name finetune dataset + --data_path source data of training + --train_batch_size training batch + --learning_rate learning_rate + --enable_exception_dump enable_exception_dump + --num_train_epochs epochs + --output_dir output dir + -h/--help Show help message + " + exit 1 +fi + +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --squad_version* ]];then + squad_version=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + fi +done + +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +#model_path=${data_path}/uncased_L-24_H-1024_A-16 +model_path=${data_path}/bert_tf_ckpt_large_pretraining_amp_lamb_19.03.1 + +#训练开始时间,不需要修改 +start_time=$(date +%s) +#进入训练脚本目录,需要模型审视修改 +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + fi + + nohup python3.7 ${cur_path}/../src/run_squad.py \ + 
--precision_mode=$precision_mode \ + --vocab_file=${model_path}/vocab.txt \ + --bert_config_file=${model_path}/bert_config.json \ + --init_checkpoint=${model_path}/model.ckpt \ + --do_train=True \ + --train_file=${data_path}/dataset/squad_v1.1_train.tf_record \ + --do_predict=True \ + --predict_file=${data_path}/dataset/dev-v1.1.json \ + --eval_script=${data_path}/dataset/evaluate-v1.1.py \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --save_checkpoints_steps=1000 \ + --distributed=True \ + --npu_bert_tail_optimize=True \ + --npu_bert_loss_scale=0 \ + --output_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############结果处理######################### +#输出性能FPS,需要模型审视修改 +FPS=`grep "tensorflow:examples/sec" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $2}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +train_accuracy=`grep "tensorflow:f1 =" $cur_path/output/0/train_0.log|awk 'END {print $3}'` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" + +##获取性能数据 +#吞吐量 +ActualFPS=$FPS +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${train_batch_size}'*1000/'${FPS}'}'` + +##冒烟看护字段 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F " " '{print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +#最后一个迭代loss值,不需要修改' +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`) + +#关键性息打印到CaseName.log中 +echo "Network = ${Network}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + + + + + + + + + + + + + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3218_BertLarge-Squad1.1_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3218_BertLarge-Squad1.1_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..35827f4cfd4daeb6d0b3c01c297e7abb140ee0e3 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3218_BertLarge-Squad1.1_performance_1p.sh @@ 
-0,0 +1,201 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=1 +export JOB_ID=10087 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge-Squad1.1_ID3218_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=5e-6 +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +if [[ $1 == --help || $1 == -h ]];then + echo "usage: ./train_full_1p.sh " + + echo "" + echo "parameter explain: + --task_name finetune dataset + --data_path source data of training + --train_batch_size training batch + --learning_rate learning_rate + --enable_exception_dump enable_exception_dump + --num_train_epochs epochs + --output_dir output dir + -h/--help Show help message + " + exit 1 +fi + +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --squad_version* ]];then + squad_version=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + fi +done + +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +#model_path=${data_path}/uncased_L-24_H-1024_A-16 +model_path=${data_path}/bert_tf_ckpt_large_pretraining_amp_lamb_19.03.1 + +#训练开始时间,不需要修改 +start_time=$(date +%s) +#进入训练脚本目录,需要模型审视修改 +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + fi + + nohup python3.7 ${cur_path}/../src/run_squad.py \ + --precision_mode=$precision_mode \ + --vocab_file=${model_path}/vocab.txt \ + --bert_config_file=${model_path}/bert_config.json \ + --init_checkpoint=${model_path}/model.ckpt \ + --do_train=True \ + --train_file=${data_path}/dataset/squad_v1.1_train.tf_record \ + --do_predict=False \ + --predict_file=${data_path}/dataset/dev-v1.1.json \ + --eval_script=${data_path}/dataset/evaluate-v1.1.py \ + --train_batch_size=$train_batch_size \ + 
--learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --save_checkpoints_steps=1000 \
+        --npu_bert_loss_scale=0 \
+        --num_train_steps=1000 \
+        --output_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 &
+done
+wait
+
+# Training end time; no need to modify
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+############# Result processing #########################
+# Output performance (FPS); review and modify per model
+FPS=`grep "tensorflow:examples/sec" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $2}'`
+# Print; no need to modify
+echo "Final Performance images/sec : $FPS"
+
+# Output training accuracy; review and modify per model
+#train_accuracy=`grep "f1 =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $3}'`
+# Print; no need to modify
+#echo "Final Train Accuracy : ${train_accuracy}"
+
+## Collect performance data
+# Throughput
+ActualFPS=$FPS
+# Training time per iteration, in ms
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${train_batch_size}'*1000/'${FPS}'}'`
+
+## Smoke-test guard fields
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+## Collect loss
+# Extract the loss values from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F " " '{print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+# Loss value of the last iteration; no need to modify
+ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`)
+
+# Print the key information to ${CaseName}.log
+echo "Network = ${Network}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "TrainAccuracy = ${train_accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3218_BertLarge-Squad1.1_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3218_BertLarge-Squad1.1_performance_8p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..853d6af544efada28636bad191429f7e56b3f1d7
--- /dev/null
+++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3218_BertLarge-Squad1.1_performance_8p.sh
@@ -0,0 +1,205 @@
+#!/bin/bash
+
+# Current path; no need to modify
+cur_path=`pwd`
+
+# Collective communication parameters; no need to modify
+export RANK_SIZE=8
+export JOB_ID=99990001
+export RANK_TABLE_FILE=${cur_path}/../configs/8p.json
+RANK_ID_START=0
+
+# Dataset path; keep empty, no need to modify
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertLarge-Squad1.1_ID3218_for_TensorFlow"
+# Training batch size
+train_batch_size=32
+# Training epochs
+num_train_epochs=1.0
+# Learning rate
+learning_rate=5e-6
+# Maintainability/test parameter; precision_mode should be reviewed per model
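+# (The help text of the ID3232 script later in this patch lists the accepted
+# values: allow_fp32_to_fp16, force_fp16, must_keep_origin_dtype and
+# allow_mix_precision; allow_mix_precision is understood to let the graph
+# engine cast eligible float32 ops to float16 automatically.)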
+precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +if [[ $1 == --help || $1 == -h ]];then + echo "usage: ./train_full_1p.sh " + + echo "" + echo "parameter explain: + --task_name finetune dataset + --data_path source data of training + --train_batch_size training batch + --learning_rate learning_rate + --enable_exception_dump enable_exception_dump + --num_train_epochs epochs + --output_dir output dir + -h/--help Show help message + " + exit 1 +fi + +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --squad_version* ]];then + squad_version=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + fi +done + +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +#model_path=${data_path}/uncased_L-24_H-1024_A-16 +model_path=${data_path}/bert_tf_ckpt_large_pretraining_amp_lamb_19.03.1 + +#训练开始时间,不需要修改 +start_time=$(date +%s) +#进入训练脚本目录,需要模型审视修改 +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + fi + + nohup python3.7 ${cur_path}/../src/run_squad.py \ + --precision_mode=$precision_mode \ + --vocab_file=${model_path}/vocab.txt \ + --bert_config_file=${model_path}/bert_config.json \ + --init_checkpoint=${model_path}/model.ckpt \ + --do_train=True \ + --train_file=${data_path}/dataset/squad_v1.1_train.tf_record \ + --do_predict=False \ + --predict_file=${data_path}/dataset/dev-v1.1.json \ + --eval_script=${data_path}/dataset/evaluate-v1.1.py \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --save_checkpoints_steps=1000 \ + --distributed=True \ + --npu_bert_tail_optimize=True \ + --npu_bert_loss_scale=0 \ + --output_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag 
\ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############结果处理######################### +#输出性能FPS,需要模型审视修改 +FPS=`grep "tensorflow:examples/sec" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $2}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +#train_accuracy=`grep "f1 =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $3}'` +#打印,不需要修改 +#echo "Final Train Accuracy : ${train_accuracy}" + +##获取性能数据 +#吞吐量 +ActualFPS=$FPS +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${train_batch_size}'*1000/'${FPS}'}'` + +##冒烟看护字段 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F " " '{print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +#最后一个迭代loss值,不需要修改' +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`) + +#关键性息打印到CaseName.log中 +echo "Network = ${Network}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "TrainAccuracy = ${Accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + + + + + + + + + + + + + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3219_BertBase-Squad2.0_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3219_BertBase-Squad2.0_full_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..1a0ad760ad3bb0b246b8d68420297def625bf41c --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3219_BertBase-Squad2.0_full_8p.sh @@ -0,0 +1,206 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export JOB_ID=99990001 +export RANK_TABLE_FILE=${cur_path}/../configs/8p.json +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertBase-Squad2.0_ID3219_for_TensorFlow" +#训练batch_size +train_batch_size=32 + +#训练ephch +num_train_epochs=2.0 +#学习率 +learning_rate=5e-6 +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +if [[ $1 == --help || $1 == -h ]];then + echo "usage: ./train_full_1p.sh " + + echo "" + 
echo "parameter explain: + --task_name finetune dataset + --data_path source data of training + --train_batch_size training batch + --learning_rate learning_rate + --enable_exception_dump enable_exception_dump + --num_train_epochs epochs + --output_dir output dir + -h/--help Show help message + " + exit 1 +fi + +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --squad_version* ]];then + squad_version=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + fi +done + +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +model_path=${data_path}/uncased_L-12_H-768_A-12 + +#训练开始时间,不需要修改 +start_time=$(date +%s) +#进入训练脚本目录,需要模型审视修改 +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + fi + + nohup python3.7 ${cur_path}/../src/run_squad.py \ + --precision_mode=$precision_mode \ + --vocab_file=${model_path}/vocab.txt \ + --bert_config_file=${model_path}/bert_config.json \ + --init_checkpoint=${model_path}/bert_model.ckpt \ + --do_train=True \ + --train_file=${data_path}/dataset/squad_v2.0_train.tf_record \ + --do_predict=True \ + --predict_file=${data_path}/dataset/dev-v2.0.json \ + --eval_script=${data_path}/dataset/evaluate-v2.0.py \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --save_checkpoints_steps=1000 \ + --distributed=True \ + --npu_bert_tail_optimize=True \ + --npu_bert_loss_scale=0 \ + --output_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} \ + --version_2_with_negative=True \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############结果处理######################### +#输出性能FPS,需要模型审视修改 +FPS=`grep 
"tensorflow:examples/sec" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $2}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +train_accuracy=`grep "tensorflow:f1 =" $cur_path/output/0/train_0.log|awk 'END {print $3}'` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" + +##获取性能数据 +#吞吐量 +ActualFPS=$FPS +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${train_batch_size}'*1000/'${FPS}'}'` + +##冒烟看护字段 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F " " '{print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +#最后一个迭代loss值,不需要修改' +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`) + +#关键性息打印到CaseName.log中 +echo "Network = ${Network}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + + + + + + + + + + + + + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3219_BertBase-Squad2.0_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3219_BertBase-Squad2.0_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..0af37e1b1d0cfaec2aaafa066493ab21ec8649fa --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3219_BertBase-Squad2.0_performance_1p.sh @@ -0,0 +1,203 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=1 +export JOB_ID=10087 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertBase-Squad2.0_ID3219_for_TensorFlow" +#训练batch_size +train_batch_size=32 + +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=5e-6 +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +if [[ $1 == --help || $1 == -h ]];then + echo "usage: ./train_full_1p.sh " + + echo "" + echo "parameter explain: + --task_name finetune dataset + --data_path source data of training + --train_batch_size training batch + --learning_rate learning_rate + --enable_exception_dump enable_exception_dump + --num_train_epochs epochs + --output_dir output dir + -h/--help Show help message + " + exit 1 +fi + +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* 
]];then + data_path=`echo ${para#*=}` + elif [[ $para == --squad_version* ]];then + squad_version=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + fi +done + +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +model_path=${data_path}/uncased_L-12_H-768_A-12 + +#训练开始时间,不需要修改 +start_time=$(date +%s) +#进入训练脚本目录,需要模型审视修改 +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + fi + + nohup python3.7 ${cur_path}/../src/run_squad.py \ + --precision_mode=$precision_mode \ + --vocab_file=${model_path}/vocab.txt \ + --bert_config_file=${model_path}/bert_config.json \ + --init_checkpoint=${model_path}/bert_model.ckpt \ + --do_train=True \ + --train_file=${data_path}/dataset/squad_v2.0_train.tf_record \ + --do_predict=False \ + --predict_file=${data_path}/dataset/dev-v2.0.json \ + --eval_script=${data_path}/dataset/evaluate-v2.0.py \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --save_checkpoints_steps=1000 \ + --npu_bert_loss_scale=0 \ + --num_train_steps=1000 \ + --output_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} \ + --version_2_with_negative=True \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############结果处理######################### +#输出性能FPS,需要模型审视修改 +FPS=`grep "tensorflow:examples/sec" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $2}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +#train_accuracy=`grep "f1 =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $3}'` +#打印,不需要修改 +#echo "Final Train Accuracy : ${train_accuracy}" + +##获取性能数据 +#吞吐量 +ActualFPS=$FPS +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${train_batch_size}'*1000/'${FPS}'}'` + +##冒烟看护字段 
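+# These fields identify the run for the smoke-test guard; with the defaults
+# above (Network=BertBase-Squad2.0_ID3219_for_TensorFlow, train_batch_size=32,
+# RANK_SIZE=1), CaseName below should expand to
+# BertBase-Squad2.0_ID3219_for_TensorFlow_bs32_1p_perf.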
+BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F " " '{print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +#最后一个迭代loss值,不需要修改' +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`) + +#关键性息打印到CaseName.log中 +echo "Network = ${Network}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "TrainAccuracy = ${Accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + + + + + + + + + + + + + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3219_BertBase-Squad2.0_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3219_BertBase-Squad2.0_performance_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..4c3f6bd28281dfeb42f76c4bf0b66e0c23674288 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3219_BertBase-Squad2.0_performance_8p.sh @@ -0,0 +1,206 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export JOB_ID=99990001 +export RANK_TABLE_FILE=${cur_path}/../configs/8p.json +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertBase-Squad2.0_ID3219_for_TensorFlow" +#训练batch_size +train_batch_size=32 + +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=5e-6 +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +if [[ $1 == --help || $1 == -h ]];then + echo "usage: ./train_full_1p.sh " + + echo "" + echo "parameter explain: + --task_name finetune dataset + --data_path source data of training + --train_batch_size training batch + --learning_rate learning_rate + --enable_exception_dump enable_exception_dump + --num_train_epochs epochs + --output_dir output dir + -h/--help Show help message + " + exit 1 +fi + +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --squad_version* ]];then + squad_version=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo 
${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + fi +done + +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +model_path=${data_path}/uncased_L-12_H-768_A-12 + +#训练开始时间,不需要修改 +start_time=$(date +%s) +#进入训练脚本目录,需要模型审视修改 +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + fi + + nohup python3.7 ${cur_path}/../src/run_squad.py \ + --precision_mode=$precision_mode \ + --vocab_file=${model_path}/vocab.txt \ + --bert_config_file=${model_path}/bert_config.json \ + --init_checkpoint=${model_path}/bert_model.ckpt \ + --do_train=True \ + --train_file=${data_path}/dataset/squad_v2.0_train.tf_record \ + --do_predict=False \ + --predict_file=${data_path}/dataset/dev-v2.0.json \ + --eval_script=${data_path}/dataset/evaluate-v2.0.py \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --save_checkpoints_steps=1000 \ + --distributed=True \ + --npu_bert_tail_optimize=True \ + --npu_bert_loss_scale=0 \ + --output_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} \ + --version_2_with_negative=True \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############结果处理######################### +#输出性能FPS,需要模型审视修改 +FPS=`grep "tensorflow:examples/sec" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $2}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +#train_accuracy=`grep "f1 =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $3}'` +#打印,不需要修改 +#echo "Final Train Accuracy : ${train_accuracy}" + +##获取性能数据 +#吞吐量 +ActualFPS=$FPS +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${train_batch_size}'*1000/'${FPS}'}'` + +##冒烟看护字段 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F " " '{print $3}' > 
$cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +#最后一个迭代loss值,不需要修改' +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`) + +#关键性息打印到CaseName.log中 +echo "Network = ${Network}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "TrainAccuracy = ${Accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + + + + + + + + + + + + + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3220_BertLarge-Squad2.0_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3220_BertLarge-Squad2.0_full_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..d179d620316be3902847e4f8b4ead3795e7c54c9 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3220_BertLarge-Squad2.0_full_8p.sh @@ -0,0 +1,207 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export JOB_ID=99990001 +export RANK_TABLE_FILE=${cur_path}/../configs/8p.json +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge-Squad2.0_ID3220_for_TensorFlow" +#训练batch_size +train_batch_size=32 + +#训练ephch +num_train_epochs=2.0 +#学习率 +learning_rate=5e-6 +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +if [[ $1 == --help || $1 == -h ]];then + echo "usage: ./train_full_1p.sh " + + echo "" + echo "parameter explain: + --task_name finetune dataset + --data_path source data of training + --train_batch_size training batch + --learning_rate learning_rate + --enable_exception_dump enable_exception_dump + --num_train_epochs epochs + --output_dir output dir + -h/--help Show help message + " + exit 1 +fi + +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --squad_version* ]];then + squad_version=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == 
--data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + fi +done + +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +#model_path=${data_path}/uncased_L-24_H-1024_A-16 +model_path=${data_path}/bert_tf_ckpt_large_pretraining_amp_lamb_19.03.1 + +#训练开始时间,不需要修改 +start_time=$(date +%s) +#进入训练脚本目录,需要模型审视修改 +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + fi + + nohup python3.7 ${cur_path}/../src/run_squad.py \ + --precision_mode=$precision_mode \ + --vocab_file=${model_path}/vocab.txt \ + --bert_config_file=${model_path}/bert_config.json \ + --init_checkpoint=${model_path}/model.ckpt \ + --do_train=True \ + --train_file=${data_path}/dataset/squad_v2.0_train.tf_record \ + --do_predict=True \ + --predict_file=${data_path}/dataset/dev-v2.0.json \ + --eval_script=${data_path}/dataset/evaluate-v2.0.py \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --save_checkpoints_steps=1000 \ + --distributed=True \ + --npu_bert_tail_optimize=True \ + --npu_bert_loss_scale=0 \ + --output_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} \ + --version_2_with_negative=True \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############结果处理######################### +#输出性能FPS,需要模型审视修改 +FPS=`grep "tensorflow:examples/sec" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $2}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +train_accuracy=`grep "tensorflow:f1 =" $cur_path/output/0/train_0.log|awk 'END {print $3}'` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" + +##获取性能数据 +#吞吐量 +ActualFPS=$FPS +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${train_batch_size}'*1000/'${FPS}'}'` + +##冒烟看护字段 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F " " '{print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +#最后一个迭代loss值,不需要修改' +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`) + +#关键性息打印到CaseName.log中 +echo "Network = ${Network}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = 
${RANK_SIZE}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + + + + + + + + + + + + + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3220_BertLarge-Squad2.0_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3220_BertLarge-Squad2.0_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..5a5a0d949a346bef275426d697027ca70de471e8 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3220_BertLarge-Squad2.0_performance_1p.sh @@ -0,0 +1,204 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=1 +export JOB_ID=10087 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge-Squad2.0_ID3220_for_TensorFlow" +#训练batch_size +train_batch_size=32 + +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=5e-6 +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +if [[ $1 == --help || $1 == -h ]];then + echo "usage: ./train_full_1p.sh " + + echo "" + echo "parameter explain: + --task_name finetune dataset + --data_path source data of training + --train_batch_size training batch + --learning_rate learning_rate + --enable_exception_dump enable_exception_dump + --num_train_epochs epochs + --output_dir output dir + -h/--help Show help message + " + exit 1 +fi + +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --squad_version* ]];then + squad_version=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + 
profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + fi +done + +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +#model_path=${data_path}/uncased_L-24_H-1024_A-16 +model_path=${data_path}/bert_tf_ckpt_large_pretraining_amp_lamb_19.03.1 + +#训练开始时间,不需要修改 +start_time=$(date +%s) +#进入训练脚本目录,需要模型审视修改 +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + fi + + nohup python3.7 ${cur_path}/../src/run_squad.py \ + --precision_mode=$precision_mode \ + --vocab_file=${model_path}/vocab.txt \ + --bert_config_file=${model_path}/bert_config.json \ + --init_checkpoint=${model_path}/model.ckpt \ + --do_train=True \ + --train_file=${data_path}/dataset/squad_v2.0_train.tf_record \ + --do_predict=False \ + --predict_file=${data_path}/dataset/dev-v2.0.json \ + --eval_script=${data_path}/dataset/evaluate-v2.0.py \ + --train_batch_size=$train_batch_size \ + --learning_rate=$learning_rate \ + --num_train_epochs=$num_train_epochs \ + --save_checkpoints_steps=1000 \ + --npu_bert_loss_scale=0 \ + --num_train_steps=1000 \ + --output_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} \ + --version_2_with_negative=True \ + --enable_exception_dump=$enable_exception_dump\ + --data_dump_flag=$data_dump_flag \ + --data_dump_step=$data_dump_step\ + --data_dump_path=$data_dump_path\ + --over_dump=$over_dump \ + --over_dump_path=$over_dump_path > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############结果处理######################### +#输出性能FPS,需要模型审视修改 +FPS=`grep "tensorflow:examples/sec" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $2}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +#train_accuracy=`grep "f1 =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $3}'` +#打印,不需要修改 +#echo "Final Train Accuracy : ${train_accuracy}" + +##获取性能数据 +#吞吐量 +ActualFPS=$FPS +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${train_batch_size}'*1000/'${FPS}'}'` + +##冒烟看护字段 +BatchSize=${train_batch_size} +DeviceType=`uname -m` +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F " " '{print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +#最后一个迭代loss值,不需要修改' +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`) + +#关键性息打印到CaseName.log中 +echo "Network = ${Network}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = 
${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "TrainAccuracy = ${Accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + + + + + + + + + + + + + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3220_BertLarge-Squad2.0_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3220_BertLarge-Squad2.0_performance_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..5f4d3604f069f4f0e0c3d307cd9aa019b613553b --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3220_BertLarge-Squad2.0_performance_8p.sh @@ -0,0 +1,207 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export JOB_ID=99990001 +export RANK_TABLE_FILE=${cur_path}/../configs/8p.json +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge-Squad2.0_ID3220_for_TensorFlow" +#训练batch_size +train_batch_size=32 + +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=5e-6 +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +if [[ $1 == --help || $1 == -h ]];then + echo "usage: ./train_full_1p.sh " + + echo "" + echo "parameter explain: + --task_name finetune dataset + --data_path source data of training + --train_batch_size training batch + --learning_rate learning_rate + --enable_exception_dump enable_exception_dump + --num_train_epochs epochs + --output_dir output dir + -h/--help Show help message + " + exit 1 +fi + +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --squad_version* ]];then + squad_version=`echo ${para#*=}` + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + fi +done + +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +#model_path=${data_path}/uncased_L-24_H-1024_A-16 +model_path=${data_path}/bert_tf_ckpt_large_pretraining_amp_lamb_19.03.1 + 
+# Training start time; no modification needed
+start_time=$(date +%s)
+# Enter the training script directory; review and modify per model
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $RANK_ID"
+    export RANK_ID=$RANK_ID
+    export ASCEND_DEVICE_ID=$RANK_ID
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID}
+    fi
+
+    nohup python3.7 ${cur_path}/../src/run_squad.py \
+        --precision_mode=$precision_mode \
+        --vocab_file=${model_path}/vocab.txt \
+        --bert_config_file=${model_path}/bert_config.json \
+        --init_checkpoint=${model_path}/model.ckpt \
+        --do_train=True \
+        --train_file=${data_path}/dataset/squad_v2.0_train.tf_record \
+        --do_predict=False \
+        --predict_file=${data_path}/dataset/dev-v2.0.json \
+        --eval_script=${data_path}/dataset/evaluate-v2.0.py \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --save_checkpoints_steps=1000 \
+        --distributed=True \
+        --npu_bert_tail_optimize=True \
+        --npu_bert_loss_scale=0 \
+        --output_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} \
+        --version_2_with_negative=True \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 &
+done
+wait
+
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+############# Result processing #############
+# Output performance FPS; review and modify per model
+FPS=`grep "tensorflow:examples/sec" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $2}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+
+# Output training accuracy; review and modify per model
+#train_accuracy=`grep "f1 =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $3}'`
+# Print; no modification needed
+#echo "Final Train Accuracy : ${train_accuracy}"
+
+## Collect performance data
+# Throughput
+ActualFPS=$FPS
+# Training time per iteration (ms)
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${train_batch_size}'*1000/'${FPS}'}'`
+
+## Smoke-test monitoring fields
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+## Extract loss
+# Extract loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F " " '{print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+# Loss value of the last iteration; no modification needed
+ActualLoss=`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information to ${CaseName}.log; no modification needed
+echo "Network = ${Network}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "TrainAccuracy = ${Accuracy}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}">>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
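[Editor's note] These test wrappers are driven entirely by the --key=value arguments parsed above. A minimal invocation sketch (the flag names come from the script's own parser; /path/to/bert_data is a placeholder that must contain the dataset/ directory and the bert_tf_ckpt_large_pretraining_amp_lamb_19.03.1 checkpoint referenced above):

    bash test/train_ID3220_BertLarge-Squad2.0_performance_8p.sh --data_path=/path/to/bert_data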
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3232_BertBase-128_full_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3232_BertBase-128_full_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..18e3310f8f2d1d6f89edb1156fbbe7c6ca44cc20 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3232_BertBase-128_full_1p.sh @@ -0,0 +1,240 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=1
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertBase-128_ID3232_for_TensorFlow"
+# Training batch size
+train_batch_size=32
+# Training epochs
+num_train_epochs=3.0
+# Learning rate
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+# Debug parameters; precision_mode needs review per model
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+# Maintenance parameters; no modification needed below
+over_dump=False
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+# Other parameters
+task_name=MRPC
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
+    use_fp16="--noamp"
+fi
+
+if [ "$use_xla" = "true" ] ; then
+    use_xla_tag="--use_xla"
+    echo "XLA activated"
+else
+    use_xla_tag="--nouse_xla"
+fi
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_ID3232_BertBase-128_full_1p.sh"
+    echo " "
+    echo "parameter explain:
+    --precision_mode           precision mode (allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump                whether to enable overflow detection, default is False
+    --data_dump_flag           data dump flag, default is False
+    --data_dump_step           data dump step, default is 0
+    --profiling                whether to enable profiling for performance debugging, default is False
+    --data_path                source data of training
+    -h/--help                  show this help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no modification needed
+for para in $*
+do
+    if [[ $para == --task_name* ]];then
+        task_name=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]];then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --ckpt_path* ]];then
+        ckpt_path=`echo ${para#*=}`
+    elif [[ $para == --train_batch_size* ]];then
+        train_batch_size=`echo ${para#*=}`
+    elif [[ $para == --learning_rate* ]];then
+        learning_rate=`echo ${para#*=}`
+    elif [[ $para == --num_train_epochs* ]];then
+        num_train_epochs=`echo ${para#*=}`
+    elif [[ $para == --output_dir* ]];then
+        output_dir=`echo ${para#*=}`
+    elif [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --optimizer_type* ]];then
+        optimizer_type=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+    elif [[ $para == --data_dump_path* ]];then
+        data_dump_path=`echo ${para#*=}`
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --enable_exception_dump* ]];then
+        enable_exception_dump=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    fi
+done
+
+# Check that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be configured"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-12_H-768_A-12
+# Training start time; no modification needed
+start_time=$(date +%s)
+
+# Enter the training script directory; review and modify per model
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $ASCEND_DEVICE_ID"
+    export RANK_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # Core binding; remove for models that do not need it, review for models that do
+    let a=RANK_ID*12
+    let b=RANK_ID+1
+    let c=b*12-1
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=False \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Print results; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review and modify per model
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+# Output training accuracy; review and modify per model
+train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+# Print; no modification needed
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Performance monitoring result summary
+# Training case information; no modification needed
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'acc'
+
+## Collect performance data; no modification needed
+# Throughput
+ActualFPS=${FPS}
+# Training time per iteration (ms); no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+# Extract loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss value of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information to ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
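[Editor's note] For the classifier scripts, FPS is derived from the Estimator's global_step/sec log line rather than examples/sec: FPS = global_step/sec * train_batch_size * RANK_SIZE. A standalone sketch with made-up numbers (3.5 steps/sec, batch 32, one device), not values from a real run:

    # hypothetical: 3.5 steps/sec * 32 samples/step * 1 device -> 112 samples/sec
    awk 'BEGIN{printf "%d\n", 3.5 * 32 * 1}'   # prints 112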
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3232_BertBase-128_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3232_BertBase-128_full_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..ef99a125fe71f4d58cebc0b2c89337cde4c10d1f --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3232_BertBase-128_full_8p.sh @@ -0,0 +1,251 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=8
+export RANK_TABLE_FILE=$cur_path/../configs/${RANK_SIZE}p.json
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertBase-128_ID3232_for_TensorFlow"
+# Training batch size
+train_batch_size=32
+# Training epochs
+num_train_epochs=3.0
+# Learning rate
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+# Debug parameters; precision_mode needs review per model
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+# Maintenance parameters; no modification needed below
+over_dump=False
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+# Other parameters
+task_name=MRPC
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
+    use_fp16="--noamp"
+fi
+
+if [ "$use_xla" = "true" ] ; then
+    use_xla_tag="--use_xla"
+    echo "XLA activated"
+else
+    use_xla_tag="--nouse_xla"
+fi
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_ID3232_BertBase-128_full_8p.sh"
+    echo " "
+    echo "parameter explain:
+    --precision_mode           precision mode (allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump                whether to enable overflow detection, default is False
+    --data_dump_flag           data dump flag, default is False
+    --data_dump_step           data dump step, default is 0
+    --profiling                whether to enable profiling for performance debugging, default is False
+    --data_path                source data of training
+    -h/--help                  show this help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no modification needed
+for para in $*
+do
+    if [[ $para == --task_name* ]];then
+        task_name=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]];then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --ckpt_path* ]];then
+        ckpt_path=`echo ${para#*=}`
+    elif [[ $para == --train_batch_size* ]];then
+        train_batch_size=`echo ${para#*=}`
+    elif [[ $para == --learning_rate* ]];then
+        learning_rate=`echo ${para#*=}`
+    elif [[ $para == --num_train_epochs* ]];then
+        num_train_epochs=`echo ${para#*=}`
+    elif [[ $para == --output_dir* ]];then
+        output_dir=`echo ${para#*=}`
+    elif [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --optimizer_type* ]];then
+        optimizer_type=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+    elif [[ $para == --data_dump_path* ]];then
+        data_dump_path=`echo ${para#*=}`
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --enable_exception_dump* ]];then
+        enable_exception_dump=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    fi
+done
+
+# Check that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be configured"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-12_H-768_A-12
+# Training start time; no modification needed
+start_time=$(date +%s)
+
+# Enter the training script directory; review and modify per model
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $RANK_ID"
+    export RANK_ID=$RANK_ID
+    export ASCEND_DEVICE_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # Core binding; remove for models that do not need it, review for models that do
+    corenum=`cat /proc/cpuinfo |grep 'processor' | wc -l`
+    let a=RANK_ID*${corenum}/8
+    let b=RANK_ID+1
+    let c=b*${corenum}/8-1
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=True \
+        --npu_bert_tail_optimize=True \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Print results; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review and modify per model
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+# Output training accuracy; review and modify per model
+train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+# Print; no modification needed
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Performance monitoring result summary
+# Training case information; no modification needed
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'acc'
+
+## Collect performance data; no modification needed
+# Throughput
+ActualFPS=${FPS}
+# Training time per iteration (ms); no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+# Extract loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss value of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information to ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3232_BertBase-128_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3232_BertBase-128_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..e31cf52966684bc148c506d0a1f940e7ddb13e80 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3232_BertBase-128_performance_1p.sh @@ -0,0 +1,239 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=1
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertBase-128_ID3232_for_TensorFlow"
+# Training batch size
+train_batch_size=32
+# Training epochs
+num_train_epochs=1.0
+# Learning rate
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+# Debug parameters; precision_mode needs review per model
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+# Maintenance parameters; no modification needed below
+over_dump=False
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+# Other parameters
+task_name=MRPC
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
+    use_fp16="--noamp"
+fi
+
+if [ "$use_xla" = "true" ] ; then
+    use_xla_tag="--use_xla"
+    echo "XLA activated"
+else
+    use_xla_tag="--nouse_xla"
+fi
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_ID3232_BertBase-128_performance_1p.sh"
+    echo " "
+    echo "parameter explain:
+    --precision_mode           precision mode (allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump                whether to enable overflow detection, default is False
+    --data_dump_flag           data dump flag, default is False
+    --data_dump_step           data dump step, default is 0
+    --profiling                whether to enable profiling for performance debugging, default is False
+    --data_path                source data of training
+    -h/--help                  show this help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no modification needed
+for para in $*
+do
+    if [[ $para == --task_name* ]];then
+        task_name=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]];then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --ckpt_path* ]];then
+        ckpt_path=`echo ${para#*=}`
+    elif [[ $para == --train_batch_size* ]];then
+        train_batch_size=`echo ${para#*=}`
+    elif [[ $para == --learning_rate* ]];then
+        learning_rate=`echo ${para#*=}`
+    elif [[ $para == --num_train_epochs* ]];then
+        num_train_epochs=`echo ${para#*=}`
+    elif [[ $para == --output_dir* ]];then
+        output_dir=`echo ${para#*=}`
+    elif [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --optimizer_type* ]];then
+        optimizer_type=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+    elif [[ $para == --data_dump_path* ]];then
+        data_dump_path=`echo ${para#*=}`
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --enable_exception_dump* ]];then
+        enable_exception_dump=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    fi
+done
+
+# Check that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be configured"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-12_H-768_A-12
+# Training start time; no modification needed
+start_time=$(date +%s)
+
+# Enter the training script directory; review and modify per model
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $ASCEND_DEVICE_ID"
+    export RANK_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # Core binding; remove for models that do not need it, review for models that do
+    let a=RANK_ID*12
+    let b=RANK_ID+1
+    let c=b*12-1
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=False \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Print results; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review and modify per model
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+# Output training accuracy; review and modify per model
+train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+# Print; no modification needed
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Performance monitoring result summary
+# Training case information; no modification needed
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+## Collect performance data; no modification needed
+# Throughput
+ActualFPS=${FPS}
+# Training time per iteration (ms); no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+# Extract loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss value of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information to ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3232_BertBase-128_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3232_BertBase-128_performance_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..b0a02b62290916d7d6995c085e443d867c8b7fb0 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3232_BertBase-128_performance_8p.sh @@ -0,0 +1,251 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=8
+export RANK_TABLE_FILE=$cur_path/../configs/${RANK_SIZE}p.json
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertBase-128_ID3232_for_TensorFlow"
+# Training batch size
+train_batch_size=32
+# Training epochs
+num_train_epochs=1.0
+# Learning rate
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+# Debug parameters; precision_mode needs review per model
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+# Maintenance parameters; no modification needed below
+over_dump=False
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+# Other parameters
+task_name=MRPC
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
+    use_fp16="--noamp"
+fi
+
+if [ "$use_xla" = "true" ] ; then
+    use_xla_tag="--use_xla"
+    echo "XLA activated"
+else
+    use_xla_tag="--nouse_xla"
+fi
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_ID3232_BertBase-128_performance_8p.sh"
+    echo " "
+    echo "parameter explain:
+    --precision_mode           precision mode (allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump                whether to enable overflow detection, default is False
+    --data_dump_flag           data dump flag, default is False
+    --data_dump_step           data dump step, default is 0
+    --profiling                whether to enable profiling for performance debugging, default is False
+    --data_path                source data of training
+    -h/--help                  show this help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no modification needed
+for para in $*
+do
+    if [[ $para == --task_name* ]];then
+        task_name=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]];then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --ckpt_path* ]];then
+        ckpt_path=`echo ${para#*=}`
+    elif [[ $para == --train_batch_size* ]];then
+        train_batch_size=`echo ${para#*=}`
+    elif [[ $para == --learning_rate* ]];then
+        learning_rate=`echo ${para#*=}`
+    elif [[ $para == --num_train_epochs* ]];then
+        num_train_epochs=`echo ${para#*=}`
+    elif [[ $para == --output_dir* ]];then
+        output_dir=`echo ${para#*=}`
+    elif [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --optimizer_type* ]];then
+        optimizer_type=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+    elif [[ $para == --data_dump_path* ]];then
+        data_dump_path=`echo ${para#*=}`
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --enable_exception_dump* ]];then
+        enable_exception_dump=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    fi
+done
+
+# Check that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be configured"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-12_H-768_A-12
+# Training start time; no modification needed
+start_time=$(date +%s)
+
+# Enter the training script directory; review and modify per model
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $RANK_ID"
+    export RANK_ID=$RANK_ID
+    export ASCEND_DEVICE_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # Core binding; remove for models that do not need it, review for models that do
+    corenum=`cat /proc/cpuinfo |grep 'processor' | wc -l`
+    let a=RANK_ID*${corenum}/8
+    let b=RANK_ID+1
+    let c=b*${corenum}/8-1
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=True \
+        --npu_bert_tail_optimize=True \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Print results; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review and modify per model
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+# Output training accuracy; review and modify per model
+train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+# Print; no modification needed
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Performance monitoring result summary
+# Training case information; no modification needed
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+## Collect performance data; no modification needed
+# Throughput
+ActualFPS=${FPS}
+# Training time per iteration (ms); no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+# Extract loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss value of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information to ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
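[Editor's note] The core-binding arithmetic above slices the host's cores evenly across the 8 ranks. A worked example with made-up numbers: on a hypothetical 96-core host, rank 3 gets a = 3*96/8 = 36 and c = 4*96/8 - 1 = 47, so the prefix becomes `taskset -c 36-47`. A standalone sketch:

    RANK_ID=3; corenum=96
    let a=RANK_ID*${corenum}/8
    let b=RANK_ID+1
    let c=b*${corenum}/8-1
    echo "taskset -c $a-$c"   # prints: taskset -c 36-47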
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3233_BertBase-128_full_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3233_BertBase-128_full_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..9edfaaa82244864e3c1fcd05b405037e8dfeec5a --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3233_BertBase-128_full_1p.sh @@ -0,0 +1,240 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=1
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertBase-128_ID3233_for_TensorFlow"
+# Training batch size
+train_batch_size=32
+# Training epochs
+num_train_epochs=3.0
+# Learning rate
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+# Debug parameters; precision_mode needs review per model
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+# Maintenance parameters; no modification needed below
+over_dump=False
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+# Other parameters
+task_name=MNLI
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
+    use_fp16="--noamp"
+fi
+
+if [ "$use_xla" = "true" ] ; then
+    use_xla_tag="--use_xla"
+    echo "XLA activated"
+else
+    use_xla_tag="--nouse_xla"
+fi
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_ID3233_BertBase-128_full_1p.sh"
+    echo " "
+    echo "parameter explain:
+    --precision_mode           precision mode (allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump                whether to enable overflow detection, default is False
+    --data_dump_flag           data dump flag, default is False
+    --data_dump_step           data dump step, default is 0
+    --profiling                whether to enable profiling for performance debugging, default is False
+    --data_path                source data of training
+    -h/--help                  show this help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no modification needed
+for para in $*
+do
+    if [[ $para == --task_name* ]];then
+        task_name=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]];then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --ckpt_path* ]];then
+        ckpt_path=`echo ${para#*=}`
+    elif [[ $para == --train_batch_size* ]];then
+        train_batch_size=`echo ${para#*=}`
+    elif [[ $para == --learning_rate* ]];then
+        learning_rate=`echo ${para#*=}`
+    elif [[ $para == --num_train_epochs* ]];then
+        num_train_epochs=`echo ${para#*=}`
+    elif [[ $para == --output_dir* ]];then
+        output_dir=`echo ${para#*=}`
+    elif [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --optimizer_type* ]];then
+        optimizer_type=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+    elif [[ $para == --data_dump_path* ]];then
+        data_dump_path=`echo ${para#*=}`
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --enable_exception_dump* ]];then
+        enable_exception_dump=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    fi
+done
+
+# Check that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be configured"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-12_H-768_A-12
+# Training start time; no modification needed
+start_time=$(date +%s)
+
+# Enter the training script directory; review and modify per model
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $ASCEND_DEVICE_ID"
+    export RANK_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # Core binding; remove for models that do not need it, review for models that do
+    let a=RANK_ID*12
+    let b=RANK_ID+1
+    let c=b*12-1
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=False \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Print results; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review and modify per model
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+# Output training accuracy; review and modify per model
+train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+# Print; no modification needed
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Performance monitoring result summary
+# Training case information; no modification needed
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'acc'
+
+## Collect performance data; no modification needed
+# Throughput
+ActualFPS=${FPS}
+# Training time per iteration (ms); no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+# Extract loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss value of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information to ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3233_BertBase-128_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3233_BertBase-128_full_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..20acc6f76ec50325fc34d7b587b18b4adc81386d --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3233_BertBase-128_full_8p.sh @@ -0,0 +1,250 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=8
+export RANK_TABLE_FILE=$cur_path/../configs/${RANK_SIZE}p.json
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertBase-128_ID3233_for_TensorFlow"
+# Training batch size
+train_batch_size=32
+# Training epochs
+num_train_epochs=3.0
+# Learning rate
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+# Debug parameters; precision_mode needs review per model
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+# Maintenance parameters; no modification needed below
+over_dump=False
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+# Other parameters
+task_name=MNLI
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
+    use_fp16="--noamp"
+fi
+
+if [ "$use_xla" = "true" ] ; then
+    use_xla_tag="--use_xla"
+    echo "XLA activated"
+else
+    use_xla_tag="--nouse_xla"
+fi
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_ID3233_BertBase-128_full_8p.sh"
+    echo " "
+    echo "parameter explain:
+    --precision_mode           precision mode (allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump                whether to enable overflow detection, default is False
+    --data_dump_flag           data dump flag, default is False
+    --data_dump_step           data dump step, default is 0
+    --profiling                whether to enable profiling for performance debugging, default is False
+    --data_path                source data of training
+    -h/--help                  show this help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no modification needed
+for para in $*
+do
+    if [[ $para == --task_name* ]];then
+        task_name=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]];then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --ckpt_path* ]];then
+        ckpt_path=`echo ${para#*=}`
+    elif [[ $para == --train_batch_size* ]];then
+        train_batch_size=`echo ${para#*=}`
+    elif [[ $para == --learning_rate* ]];then
+        learning_rate=`echo ${para#*=}`
+    elif [[ $para == --num_train_epochs* ]];then
+        num_train_epochs=`echo ${para#*=}`
+    elif [[ $para == --output_dir* ]];then
+        output_dir=`echo ${para#*=}`
+    elif [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --optimizer_type* ]];then
+        optimizer_type=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+    elif [[ $para == --data_dump_path* ]];then
+        data_dump_path=`echo ${para#*=}`
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --enable_exception_dump* ]];then
+        enable_exception_dump=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/
+    fi
+done
+
+# Check that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be configured"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-12_H-768_A-12
+# Training start time; no modification needed
+start_time=$(date +%s)
+
+# Enter the training script directory; review and modify per model
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $RANK_ID"
+    export RANK_ID=$RANK_ID
+    export ASCEND_DEVICE_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    # Create the DeviceID output directory; no modification needed
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # Core binding; remove for models that do not need it, review for models that do
+    corenum=`cat /proc/cpuinfo |grep 'processor' | wc -l`
+    let a=RANK_ID*${corenum}/8
+    let b=RANK_ID+1
+    let c=b*${corenum}/8-1
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=True \
+        --npu_bert_tail_optimize=True \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Print results; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review and modify per model
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+# Output training accuracy; review and modify per model
+train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+# Print; no modification needed
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Performance monitoring result summary
+# Training case information; no modification needed
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'acc'
+
+## Collect performance data; no modification needed
+# Throughput
+ActualFPS=${FPS}
+# Training time per iteration (ms); no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+# Extract loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss value of the last iteration; no modification needed
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information to ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
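[Editor's note] The loss-extraction pipeline above assumes the Estimator prints log lines whose third whitespace-separated field is the loss value. A standalone sketch with a hypothetical log line (the exact format depends on the TF version; if the line carries a trailing comma after the number, the extracted field keeps it):

    echo 'INFO:tensorflow:loss = 1.2345' | awk -F ' ' '{print $3}'   # prints 1.2345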
diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3233_BertBase-128_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3233_BertBase-128_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..057096ef566bbd6451bcfd923d3bce2b20ded824 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3233_BertBase-128_performance_1p.sh @@ -0,0 +1,239 @@
+#!/bin/bash
+# Current path; no modification needed
+cur_path=`pwd`
+
+# Collective communication parameters; no modification needed
+export RANK_SIZE=1
+export JOB_ID=10087
+export GE_USE_STATIC_MEMORY=1
+export HCCL_CONNECT_TIMEOUT=600
+RANK_ID_START=0
+
+# Dataset path; keep empty, no modification needed
+data_path=""
+
+# Basic parameters; review and modify per model
+# Network name, same as the directory name
+Network="BertBase-128_ID3233_for_TensorFlow"
+# Training batch size
+train_batch_size=32
+# Training epochs
+num_train_epochs=1.0
+# Learning rate
+learning_rate=1e-6
+warmup_proportion=0.1
+precision="fp32"
+# Debug parameters; precision_mode needs review per model
+precision_mode="allow_mix_precision"
+optimizer_type="adam"
+# Maintenance parameters; no modification needed below
+over_dump=False
+over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump
+data_dump_flag=False
+data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump
+enable_exception_dump=False
+data_dump_step="0"
+profiling=False
+autotune=False
+
+# Other parameters
+task_name=MNLI
+output_dir=ckpt
+type=official
+use_xla=false
+use_fp16=""
+if [ "$precision" = "fp16" ] ; then
+    echo "fp16 activated!"
+    use_fp16="--amp"
+else
+    echo "fp32/tf32 activated!"
+    use_fp16="--noamp"
+fi
+
+if [ "$use_xla" = "true" ] ; then
+    use_xla_tag="--use_xla"
+    echo "XLA activated"
+else
+    use_xla_tag="--nouse_xla"
+fi
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_ID3233_BertBase-128_performance_1p.sh"
+    echo " "
+    echo "parameter explain:
+    --precision_mode           precision mode (allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump                whether to enable overflow detection, default is False
+    --data_dump_flag           data dump flag, default is False
+    --data_dump_step           data dump step, default is 0
+    --profiling                whether to enable profiling for performance debugging, default is False
+    --data_path                source data of training
+    -h/--help                  show this help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no modification needed
+for para in $*
+do
+    if [[ $para == --task_name* ]];then
+        task_name=`echo ${para#*=}`
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]];then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    elif [[ $para == --ckpt_path* ]];then
+        ckpt_path=`echo ${para#*=}`
+    elif [[ $para == --train_batch_size* ]];then
+        train_batch_size=`echo ${para#*=}`
+    elif [[ $para == --learning_rate* ]];then
+        learning_rate=`echo ${para#*=}`
+    elif [[ $para == --num_train_epochs* ]];then
+        num_train_epochs=`echo ${para#*=}`
+    elif [[ $para == --output_dir* ]];then
+        output_dir=`echo ${para#*=}`
+    elif [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --optimizer_type* ]];then
+        optimizer_type=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+    elif [[ $para == --data_dump_path* ]];then
+        data_dump_path=`echo ${para#*=}`
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --enable_exception_dump* ]];then
+        enable_exception_dump=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom_bak ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom_bak ${autotune_dump_path}/RL/
+    fi
+done
+
+#校验是否传入data_path,不需要修改
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-12_H-768_A-12
+#训练开始时间,不需要修改
+start_time=$(date +%s)
+
+#进入训练脚本目录,需要模型审视修改
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    #设置环境变量,不需要修改
+    echo "Device ID: $ASCEND_DEVICE_ID"
+    export RANK_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    #创建DeviceID输出目录,不需要修改
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # 绑核,不需要的绑核的模型删除,需要的模型审视修改
+    let a=RANK_ID*12
+    let b=RANK_ID+1
+    let c=b*12-1
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=False \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+#训练结束时间,不需要修改
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+#结果打印,不需要修改
+echo "------------------ Final result ------------------"
+#输出性能FPS,需要模型审视修改
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+#打印,不需要修改
+echo "Final Performance images/sec : $FPS"
+#输出训练精度,需要模型审视修改
+train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+#打印,不需要修改
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+#性能看护结果汇总
+#训练用例信息,不需要修改
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+##获取性能数据,不需要修改
+#吞吐量
+ActualFPS=${FPS}
+#单迭代训练时长,不需要修改
+TrainingTime=`awk 'BEGIN{printf 
"%.2f\n",'${BatchSize}'*1000/'${FPS}'}'` + +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3233_BertBase-128_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3233_BertBase-128_performance_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..5c8121484497726b78930aa5677c137cf13bbb55 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3233_BertBase-128_performance_8p.sh @@ -0,0 +1,250 @@ +#!/bin/bash +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export RANK_TABLE_FILE=$cur_path/../configs/${RANK_SIZE}p.json +export JOB_ID=10087 +export GE_USE_STATIC_MEMORY=1 +export HCCL_CONNECT_TIMEOUT=600 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertBase-128_ID3233_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=1e-6 +warmup_proportion=0.1 +precision="fp32" +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +optimizer_type="adam" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +#其他参数 +task_name=MNLI +output_dir=ckpt +type=official +use_xla=false +use_fp16="" +if [ "$precision" = "fp16" ] ; then + echo "fp16 activated!" + use_fp16="--amp" +else + echo "fp32/tf32 activated!" 
+ use_fp16="--noamp" +fi + + +if [ "$use_xla" = "true" ] ; then + use_xla_tag="--use_xla" + echo "XLA activated" +else + use_xla_tag="--nouse_xla" +fi + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]];then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --optimizer_type* ]];then + optimizer_type=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --autotune* ]];then + autotune=`echo ${para#*=}` + mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak + mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak + autotune_dump_path=${cur_path}/output/autotune_dump + mkdir -p ${autotune_dump_path}/GA + mkdir -p ${autotune_dump_path}/rl + cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/ + cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/ + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +bertmodelpath=$ckpt_path/uncased_L-12_H-768_A-12 +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 +cd $cur_path/../ +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID} + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + fi + + # 
绑核,不需要的绑核的模型删除,需要的模型审视修改
+    corenum=`cat /proc/cpuinfo |grep 'processor' | wc -l`
+    let a=RANK_ID*${corenum}/8
+    let b=RANK_ID+1
+    let c=b*${corenum}/8-1
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=True \
+        --npu_bert_tail_optimize=True \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+#训练结束时间,不需要修改
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+#结果打印,不需要修改
+echo "------------------ Final result ------------------"
+#输出性能FPS,需要模型审视修改
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+#打印,不需要修改
+echo "Final Performance images/sec : $FPS"
+#输出训练精度,需要模型审视修改
+train_accuracy=`grep -a 'eval_accuracy' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+#打印,不需要修改
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+#性能看护结果汇总
+#训练用例信息,不需要修改
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+##获取性能数据,不需要修改
+#吞吐量
+ActualFPS=${FPS}
+#单迭代训练时长,不需要修改
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+#最后一个迭代loss值,不需要修改
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+#关键信息打印到${CaseName}.log中,不需要修改
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> 
$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3234_BertBase-128_full_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3234_BertBase-128_full_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..4e625a6545358b8c9e43299f0052b5399243d135 --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3234_BertBase-128_full_1p.sh @@ -0,0 +1,240 @@ +#!/bin/bash +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=1 +export JOB_ID=10087 +export GE_USE_STATIC_MEMORY=1 +export HCCL_CONNECT_TIMEOUT=600 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertBase-128_ID3234_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=3.0 +#学习率 +learning_rate=2e-5 +warmup_proportion=0.1 +precision="fp32" +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +optimizer_type="adam" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +#其他参数 +task_name=CoLA +output_dir=ckpt +type=official +use_xla=false +use_fp16="" +if [ "$precision" = "fp16" ] ; then + echo "fp16 activated!" + use_fp16="--amp" +else + echo "fp32/tf32 activated!" + use_fp16="--noamp" +fi + + +if [ "$use_xla" = "true" ] ; then + use_xla_tag="--use_xla" + echo "XLA activated" +else + use_xla_tag="--nouse_xla" +fi + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]];then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --optimizer_type* ]];then + optimizer_type=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + 
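+        # Note: the directory below is created as soon as a --profiling=<value>
+        # argument is seen, whatever the value; $profiling itself is not
+        # consumed elsewhere in this script. A guarded form (sketch) would be:
+        #   [ "$profiling" = "True" ] && mkdir -p "${profiling_dump_path}"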
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom_bak ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom_bak ${autotune_dump_path}/RL/
+    fi
+done
+
+#校验是否传入data_path,不需要修改
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-12_H-768_A-12
+#训练开始时间,不需要修改
+start_time=$(date +%s)
+
+#进入训练脚本目录,需要模型审视修改
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    #设置环境变量,不需要修改
+    echo "Device ID: $ASCEND_DEVICE_ID"
+    export RANK_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    #创建DeviceID输出目录,不需要修改
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # 绑核,不需要的绑核的模型删除,需要的模型审视修改
+    let a=RANK_ID*12
+    let b=RANK_ID+1
+    let c=b*12-1
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=False \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+#训练结束时间,不需要修改
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+#结果打印,不需要修改
+echo "------------------ Final result ------------------"
+#输出性能FPS,需要模型审视修改
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+#打印,不需要修改
+echo "Final Performance images/sec : $FPS"
+#输出训练精度,需要模型审视修改
+train_accuracy=`grep -a 'MCC' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+#打印,不需要修改
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+#性能看护结果汇总
+#训练用例信息,不需要修改
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'acc'
+
+##获取性能数据,不需要修改
+#吞吐量
+ActualFPS=${FPS}
+#单迭代训练时长,不需要修改
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
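+# FPS above is global_step/sec * train_batch_size * RANK_SIZE (samples per
+# second over all ranks), so TrainingTime = BatchSize*1000/FPS is the per-step
+# latency in milliseconds, i.e. 1000/(global_step/sec * RANK_SIZE). Worked
+# example: step_sec=2.5, batch=32, RANK_SIZE=1 -> FPS=80, TrainingTime=400.00.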
+#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3234_BertBase-128_full_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3234_BertBase-128_full_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..1761a0794f3ef2e566ab6f984f28c1cc08941d7f --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3234_BertBase-128_full_8p.sh @@ -0,0 +1,250 @@ +#!/bin/bash +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export RANK_TABLE_FILE=$cur_path/../configs/${RANK_SIZE}p.json +export JOB_ID=10087 +export GE_USE_STATIC_MEMORY=1 +export HCCL_CONNECT_TIMEOUT=600 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertBase-128_ID3234_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=3.0 +#学习率 +learning_rate=2e-5 +warmup_proportion=0.1 +precision="fp32" +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +optimizer_type="adam" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +#其他参数 +task_name=CoLA +output_dir=ckpt +type=official +use_xla=false +use_fp16="" +if [ "$precision" = "fp16" ] ; then + echo "fp16 activated!" + use_fp16="--amp" +else + echo "fp32/tf32 activated!" 
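+    # use_fp16 is spliced into the python command line below as one quoted
+    # argument ("$use_fp16"); both branches assign a non-empty value (--amp or
+    # --noamp), so that expansion never produces an empty argument.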
+ use_fp16="--noamp" +fi + + +if [ "$use_xla" = "true" ] ; then + use_xla_tag="--use_xla" + echo "XLA activated" +else + use_xla_tag="--nouse_xla" +fi + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]];then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --optimizer_type* ]];then + optimizer_type=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --autotune* ]];then + autotune=`echo ${para#*=}` + mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak + mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak + autotune_dump_path=${cur_path}/output/autotune_dump + mkdir -p ${autotune_dump_path}/GA + mkdir -p ${autotune_dump_path}/rl + cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/ + cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/ + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +bertmodelpath=$ckpt_path/uncased_L-12_H-768_A-12 +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 +cd $cur_path/../ +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID} + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + fi + + # 
绑核,不需要的绑核的模型删除,需要的模型审视修改
+    corenum=`cat /proc/cpuinfo |grep 'processor' | wc -l`
+    let a=RANK_ID*${corenum}/8
+    let b=RANK_ID+1
+    let c=b*${corenum}/8-1
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=True \
+        --npu_bert_tail_optimize=True \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+#训练结束时间,不需要修改
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+#结果打印,不需要修改
+echo "------------------ Final result ------------------"
+#输出性能FPS,需要模型审视修改
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+#打印,不需要修改
+echo "Final Performance images/sec : $FPS"
+#输出训练精度,需要模型审视修改
+train_accuracy=`grep -a 'MCC' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+#打印,不需要修改
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+#性能看护结果汇总
+#训练用例信息,不需要修改
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'acc'
+
+##获取性能数据,不需要修改
+#吞吐量
+ActualFPS=${FPS}
+#单迭代训练时长,不需要修改
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+#最后一个迭代loss值,不需要修改
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+#关键信息打印到${CaseName}.log中,不需要修改
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> 
$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3234_BertBase-128_performance_1p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3234_BertBase-128_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..957bd4cafe409be2bd28049edf1a2be60277feda --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3234_BertBase-128_performance_1p.sh @@ -0,0 +1,239 @@ +#!/bin/bash +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=1 +export JOB_ID=10087 +export GE_USE_STATIC_MEMORY=1 +export HCCL_CONNECT_TIMEOUT=600 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertBase-128_ID3234_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=2e-5 +warmup_proportion=0.1 +precision="fp32" +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +optimizer_type="adam" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +#其他参数 +task_name=CoLA +output_dir=ckpt +type=official +use_xla=false +use_fp16="" +if [ "$precision" = "fp16" ] ; then + echo "fp16 activated!" + use_fp16="--amp" +else + echo "fp32/tf32 activated!" + use_fp16="--noamp" +fi + + +if [ "$use_xla" = "true" ] ; then + use_xla_tag="--use_xla" + echo "XLA activated" +else + use_xla_tag="--nouse_xla" +fi + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]];then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --optimizer_type* ]];then + optimizer_type=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + 
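+        # The backtick-echo spawns a subshell for every flag; the direct
+        # expansion profiling=${para#*=} is an equivalent, cheaper form.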
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --autotune* ]];then
+        autotune=`echo ${para#*=}`
+        mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak
+        mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak
+        autotune_dump_path=${cur_path}/output/autotune_dump
+        mkdir -p ${autotune_dump_path}/GA
+        mkdir -p ${autotune_dump_path}/RL
+        cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom_bak ${autotune_dump_path}/GA/
+        cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom_bak ${autotune_dump_path}/RL/
+    fi
+done
+
+#校验是否传入data_path,不需要修改
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+bertmodelpath=$ckpt_path/uncased_L-12_H-768_A-12
+#训练开始时间,不需要修改
+start_time=$(date +%s)
+
+#进入训练脚本目录,需要模型审视修改
+cd $cur_path/../
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    #设置环境变量,不需要修改
+    echo "Device ID: $ASCEND_DEVICE_ID"
+    export RANK_ID=$RANK_ID
+    ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID}
+
+    #创建DeviceID输出目录,不需要修改
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+        mkdir -p ${data_dump_path}
+        mkdir -p ${over_dump_path}
+    fi
+
+    # 绑核,不需要的绑核的模型删除,需要的模型审视修改
+    let a=RANK_ID*12
+    let b=RANK_ID+1
+    let c=b*12-1
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=False \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+#训练结束时间,不需要修改
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+#结果打印,不需要修改
+echo "------------------ Final result ------------------"
+#输出性能FPS,需要模型审视修改
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+#打印,不需要修改
+echo "Final Performance images/sec : $FPS"
+#输出训练精度,需要模型审视修改
+train_accuracy=`grep -a 'MCC' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+#打印,不需要修改
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+#性能看护结果汇总
+#训练用例信息,不需要修改
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+##获取性能数据,不需要修改
+#吞吐量
+ActualFPS=${FPS}
+#单迭代训练时长,不需要修改
+TrainingTime=`awk 'BEGIN{printf 
"%.2f\n",'${BatchSize}'*1000/'${FPS}'}'` + +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 +grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3234_BertBase-128_performance_8p.sh b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3234_BertBase-128_performance_8p.sh new file mode 100644 index 0000000000000000000000000000000000000000..288cb7ac60ac108c8f5a38a90e3c0c571c8ca8df --- /dev/null +++ b/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3234_BertBase-128_performance_8p.sh @@ -0,0 +1,250 @@ +#!/bin/bash +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +export RANK_SIZE=8 +export RANK_TABLE_FILE=$cur_path/../configs/${RANK_SIZE}p.json +export JOB_ID=10087 +export GE_USE_STATIC_MEMORY=1 +export HCCL_CONNECT_TIMEOUT=600 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertBase-128_ID3234_for_TensorFlow" +#训练batch_size +train_batch_size=32 +#训练ephch +num_train_epochs=1.0 +#学习率 +learning_rate=2e-5 +warmup_proportion=0.1 +precision="fp32" +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +optimizer_type="adam" +#维持参数,以下不需要修改 +over_dump=False +over_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/overflow_dump +data_dump_flag=False +data_dump_path=${cur_path}/output/${ASCEND_DEVICE_ID}/data_dump +enable_exception_dump=False +data_dump_step="0" +profiling=False +autotune=False + +#其他参数 +task_name=CoLA +output_dir=ckpt +type=official +use_xla=false +use_fp16="" +if [ "$precision" = "fp16" ] ; then + echo "fp16 activated!" + use_fp16="--amp" +else + echo "fp32/tf32 activated!" 
+ use_fp16="--noamp" +fi + + +if [ "$use_xla" = "true" ] ; then + use_xla_tag="--use_xla" + echo "XLA activated" +else + use_xla_tag="--nouse_xla" +fi + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --task_name* ]];then + task_name=`echo ${para#*=}` + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]];then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --ckpt_path* ]];then + ckpt_path=`echo ${para#*=}` + elif [[ $para == --train_batch_size* ]];then + train_batch_size=`echo ${para#*=}` + elif [[ $para == --learning_rate* ]];then + learning_rate=`echo ${para#*=}` + elif [[ $para == --num_train_epochs* ]];then + num_train_epochs=`echo ${para#*=}` + elif [[ $para == --output_dir* ]];then + output_dir=`echo ${para#*=}` + elif [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --optimizer_type* ]];then + optimizer_type=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + elif [[ $para == --data_dump_path* ]];then + data_dump_path=`echo ${para#*=}` + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --enable_exception_dump* ]];then + enable_exception_dump=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --autotune* ]];then + autotune=`echo ${para#*=}` + mv $install_path/fwkacllib/data/rl/Ascend910/custom $install_path/fwkacllib/data/rl/Ascend910/custom_bak + mv $install_path/fwkacllib/data/tiling/Ascend910/custom $install_path/fwkacllib/data/tiling/Ascend910/custom_bak + autotune_dump_path=${cur_path}/output/autotune_dump + mkdir -p ${autotune_dump_path}/GA + mkdir -p ${autotune_dump_path}/rl + cp -rf $install_path/fwkacllib/data/tiling/Ascend910/custom ${autotune_dump_path}/GA/ + cp -rf $install_path/fwkacllib/data/rl/Ascend910/custom ${autotune_dump_path}/RL/ + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi +bertmodelpath=$ckpt_path/uncased_L-12_H-768_A-12 +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 +cd $cur_path/../ +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + ASCEND_DEVICE_ID=${ASCEND_DEVICE_ID} + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + mkdir -p ${data_dump_path} + mkdir -p ${over_dump_path} + fi + + # 
绑核,不需要的绑核的模型删除,需要的模型审视修改
+    corenum=`cat /proc/cpuinfo |grep 'processor' | wc -l`
+    let a=RANK_ID*${corenum}/8
+    let b=RANK_ID+1
+    let c=b*${corenum}/8-1
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+
+    nohup ${bind_core} python3 ./src/run_classifier.py \
+        --task_name=$task_name \
+        --do_train=true \
+        --do_eval=true \
+        --enable_exception_dump=$enable_exception_dump \
+        --data_dump_flag=$data_dump_flag \
+        --data_dump_step=$data_dump_step \
+        --data_dump_path=$data_dump_path \
+        --over_dump=$over_dump \
+        --over_dump_path=$over_dump_path \
+        --precision_mode=$precision_mode \
+        --data_dir=${data_path}/Glue/${task_name} \
+        --vocab_file=$bertmodelpath/vocab.txt \
+        --bert_config_file=$bertmodelpath/bert_config.json \
+        --init_checkpoint=$bertmodelpath/bert_model.ckpt \
+        --max_seq_length=128 \
+        --train_batch_size=$train_batch_size \
+        --learning_rate=$learning_rate \
+        --num_train_epochs=$num_train_epochs \
+        --output_dir=${cur_path}/${output_dir} \
+        --horovod=false "$use_fp16" \
+        --distributed=True \
+        --npu_bert_tail_optimize=True \
+        --npu_bert_loss_scale=0 \
+        --optimizer_type=$optimizer_type \
+        $use_xla_tag --warmup_proportion=$warmup_proportion > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+#训练结束时间,不需要修改
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+#结果打印,不需要修改
+echo "------------------ Final result ------------------"
+#输出性能FPS,需要模型审视修改
+step_sec=`grep -a 'INFO:tensorflow:global_step/sec: ' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk 'END {print $2}'`
+FPS=`awk 'BEGIN{printf "%d\n",'$step_sec' * '$train_batch_size' * '$RANK_SIZE'}'`
+#打印,不需要修改
+echo "Final Performance images/sec : $FPS"
+#输出训练精度,需要模型审视修改
+train_accuracy=`grep -a 'MCC' ${cur_path}/${output_dir}/eval_results.txt|awk '{print $3}'`
+#打印,不需要修改
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+#性能看护结果汇总
+#训练用例信息,不需要修改
+BatchSize=${train_batch_size}
+DeviceType=`uname -m`
+CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
+
+##获取性能数据,不需要修改
+#吞吐量
+ActualFPS=${FPS}
+#单迭代训练时长,不需要修改
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*1000/'${FPS}'}'`
+
+#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视
+grep 'tensorflow:loss =' $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk -F ' ' '{print $3}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+#最后一个迭代loss值,不需要修改
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+#关键信息打印到${CaseName}.log中,不需要修改
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> 
$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + + + + + diff --git a/TensorFlow/built-in/nlp/GRU4Rec_ID0128_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/built-in/nlp/GRU4Rec_ID0128_for_TensorFlow/test/train_full_1p.sh index c6b78abc7224cdf86e594512cfe915b9937a21bb..1d9183f5326395d4301ff2a8a165b4c47529692c 100644 --- a/TensorFlow/built-in/nlp/GRU4Rec_ID0128_for_TensorFlow/test/train_full_1p.sh +++ b/TensorFlow/built-in/nlp/GRU4Rec_ID0128_for_TensorFlow/test/train_full_1p.sh @@ -7,13 +7,14 @@ export JOB_ID=10087 RANK_ID_START=0 # 数据集路径,保持为空,不需要修改 data_path="" +data_file="/rsc15_train_full.txt" #设置默认日志级别,不需要修改 -export ASCEND_GLOBAL_LOG_LEVEL=3 +export ASCEND_GLOBAL_LOG_LEVEL_ETP=3 #基础参数,需要模型审视修改 #网络名称,同目录名称 Network="GRU4Rec_for_TensorFlow" #训练epoch -train_epochs=1 +train_epochs=10 #TF2.X独有,不需要修改 #export NPU_LOOP_SIZE=${train_steps} #维测参数,precision_mode需要模型审视修改 @@ -30,18 +31,18 @@ if [[ $1 == --help || $1 == -h ]];then echo " " echo "parameter explain: --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) - --over_dump if or not over detection, default is False - --data_dump_flag data dump flag, default is False - --data_dump_step data dump step, default is 10 - --profiling if or not profiling for performance debug, default is False + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False --autotune whether to enable autotune, default is False - --data_path source data of training - -h/--help show help message + --data_path source data of training + -h/--help show help message " exit 1 fi #参数校验,不需要修改 -for para in $* +for para in $* do if [[ $para == --precision_mode* ]];then precision_mode=`echo ${para#*=}` @@ -78,6 +79,7 @@ if [[ $data_path == "" ]];then exit 1 fi BatchSize=50 +`sed -i 's/batch_size = 4096/batch_size = 50/g' ${cur_path}/../gru4rec_BP/main.py` CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' #训练开始时间,不需要修改 start_time=$(date +%s) @@ -87,8 +89,8 @@ for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); do #设置环境变量,不需要修改 echo "Device ID: $ASCEND_DEVICE_ID" - export RANK_ID=$RANK_ID - + export RANK_ID=$RANK_ID + #创建DeviceID输出目录,不需要修改 if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} @@ -101,7 +103,7 @@ do # let a=RANK_ID*12 # let b=RANK_ID+1 # let c=b*12-1 - + #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune nohup python3 main.py \ @@ -109,13 +111,13 @@ do --path_to_test=${data_path} \ --train=1 \ --epoch=${train_epochs} \ - > ${cur_path}/output/${ASCEND_DEVICE_ID}/train.log \ - 2>&1 & + --train_dataset_file=${data_file} \ + > ${cur_path}/output/${ASCEND_DEVICE_ID}/train.log 2>&1 & #python3 main.py --train=1 --epoch=${train_epochs} \ # --over_dump=${over_dump} \ - # --over_dump_path=${over_dump_path} -done + # --over_dump_path=${over_dump_path} +done wait #训练结束时间,不需要修改 @@ -149,7 +151,7 @@ ActualFPS=${FPS} grep Each $cur_path/output/${ASCEND_DEVICE_ID}/train.log|awk '{print $6}'>>$cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_time.txt TrainingTime=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_time.txt` #从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 -grep Epoch 
$cur_path/output/$ASCEND_DEVICE_ID/train.log|awk '{print $8}'>> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +grep Epoch $cur_path/output/$ASCEND_DEVICE_ID/train.log|awk '{print $8}'> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt #最后一个迭代loss值,不需要修改 ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` @@ -166,4 +168,4 @@ echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/built-in/nlp/albert_xlarge_zh_ID2348_for_TensorFlow/albert_config/vocab.txt b/TensorFlow/built-in/nlp/albert_xlarge_zh_ID2348_for_TensorFlow/albert_config/vocab.txt deleted file mode 100644 index ca4f9781030019ab9b253c6dcb8c7878b6dc87a5..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/nlp/albert_xlarge_zh_ID2348_for_TensorFlow/albert_config/vocab.txt +++ /dev/null @@ -1,21128 +0,0 @@ -[PAD] -[unused1] -[unused2] -[unused3] -[unused4] -[unused5] -[unused6] -[unused7] -[unused8] -[unused9] -[unused10] -[unused11] -[unused12] -[unused13] -[unused14] -[unused15] -[unused16] -[unused17] -[unused18] -[unused19] -[unused20] -[unused21] -[unused22] -[unused23] -[unused24] -[unused25] -[unused26] -[unused27] -[unused28] -[unused29] -[unused30] -[unused31] -[unused32] -[unused33] -[unused34] -[unused35] -[unused36] -[unused37] -[unused38] -[unused39] -[unused40] -[unused41] -[unused42] -[unused43] -[unused44] -[unused45] -[unused46] -[unused47] -[unused48] -[unused49] -[unused50] -[unused51] -[unused52] -[unused53] -[unused54] -[unused55] -[unused56] -[unused57] -[unused58] -[unused59] -[unused60] -[unused61] -[unused62] -[unused63] -[unused64] -[unused65] -[unused66] -[unused67] -[unused68] -[unused69] -[unused70] -[unused71] -[unused72] -[unused73] -[unused74] -[unused75] -[unused76] -[unused77] -[unused78] -[unused79] -[unused80] -[unused81] -[unused82] -[unused83] -[unused84] -[unused85] -[unused86] -[unused87] -[unused88] -[unused89] -[unused90] -[unused91] -[unused92] -[unused93] -[unused94] -[unused95] -[unused96] -[unused97] -[unused98] -[unused99] -[UNK] -[CLS] -[SEP] -[MASK] - - -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -: -; -< -= -> -? -@ -[ -\ -] -^ -_ -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -{ -| -} -~ -£ -¤ -¥ -§ -© -« -® -° -± -² -³ -µ -· -¹ -º -» -¼ -× -ß -æ -÷ -ø -đ -ŋ -ɔ -ə -ɡ -ʰ -ˇ -ˈ -ˊ -ˋ -ˍ -ː -˙ -˚ -ˢ -α -β -γ -δ -ε -η -θ -ι -κ -λ -μ -ν -ο -π -ρ -ς -σ -τ -υ -φ -χ -ψ -ω -а -б -в -г -д -е -ж -з -и -к -л -м -н -о -п -р -с -т -у -ф -х -ц -ч -ш -ы -ь -я -і -ا -ب -ة -ت -د -ر -س -ع -ل -م -ن -ه -و -ي -۩ -ก -ง -น -ม -ย -ร -อ -า -เ -๑ -་ -ღ -ᄀ -ᄁ -ᄂ -ᄃ -ᄅ -ᄆ -ᄇ -ᄈ -ᄉ -ᄋ -ᄌ -ᄎ -ᄏ -ᄐ -ᄑ -ᄒ -ᅡ -ᅢ -ᅣ -ᅥ -ᅦ -ᅧ -ᅨ -ᅩ -ᅪ -ᅬ -ᅭ -ᅮ -ᅯ -ᅲ -ᅳ -ᅴ -ᅵ -ᆨ -ᆫ -ᆯ -ᆷ -ᆸ -ᆺ -ᆻ -ᆼ -ᗜ -ᵃ -ᵉ -ᵍ -ᵏ -ᵐ -ᵒ -ᵘ -‖ -„ -† -• -‥ -‧ -
 -‰ -′ -″ -‹ -› -※ -‿ -⁄ -ⁱ -⁺ -ⁿ -₁ -₂ -₃ -₄ -€ -℃ -№ -™ -ⅰ -ⅱ -ⅲ -ⅳ -ⅴ -← -↑ -→ -↓ -↔ -↗ -↘ -⇒ -∀ -− -∕ -∙ -√ -∞ -∟ -∠ -∣ -∥ -∩ -∮ -∶ -∼ -∽ -≈ -≒ -≡ -≤ -≥ -≦ -≧ -≪ -≫ -⊙ -⋅ -⋈ -⋯ -⌒ -① -② -③ -④ -⑤ -⑥ -⑦ -⑧ -⑨ -⑩ -⑴ -⑵ -⑶ -⑷ -⑸ -⒈ -⒉ -⒊ -⒋ -ⓒ -ⓔ -ⓘ -─ -━ -│ -┃ -┅ -┆ -┊ -┌ -└ -├ -┣ -═ -║ -╚ -╞ -╠ -╭ -╮ -╯ -╰ -╱ -╳ -▂ -▃ -▅ -▇ -█ -▉ -▋ -▌ -▍ -▎ -■ -□ -▪ -▫ -▬ -▲ -△ -▶ -► -▼ -▽ -◆ -◇ -○ -◎ -● -◕ -◠ -◢ -◤ -☀ -★ -☆ -☕ -☞ -☺ -☼ -♀ -♂ -♠ -♡ -♣ -♥ -♦ -♪ -♫ -♬ -✈ -✔ -✕ -✖ -✦ -✨ -✪ -✰ -✿ -❀ -❤ -➜ -➤ -⦿ -、 -。 -〃 -々 -〇 -〈 -〉 -《 -》 -「 -」 -『 -』 -【 -】 -〓 -〔 -〕 -〖 -〗 -〜 -〝 -〞 -ぁ -あ -ぃ -い -う -ぇ -え -お -か -き -く -け -こ -さ -し -す -せ -そ -た -ち -っ -つ -て -と -な -に -ぬ -ね -の -は -ひ -ふ -へ -ほ -ま -み -む -め -も -ゃ -や -ゅ -ゆ -ょ -よ -ら -り -る -れ -ろ -わ -を -ん -゜ -ゝ -ァ -ア -ィ -イ -ゥ -ウ -ェ -エ -ォ -オ -カ -キ -ク -ケ -コ -サ -シ -ス -セ -ソ -タ -チ -ッ -ツ -テ -ト -ナ -ニ -ヌ -ネ -ノ -ハ -ヒ -フ -ヘ -ホ -マ -ミ -ム -メ -モ -ャ -ヤ -ュ -ユ -ョ -ヨ -ラ -リ -ル -レ -ロ -ワ -ヲ -ン -ヶ -・ -ー -ヽ -ㄅ -ㄆ -ㄇ -ㄉ -ㄋ -ㄌ -ㄍ -ㄎ -ㄏ -ㄒ -ㄚ -ㄛ -ㄞ -ㄟ -ㄢ -ㄤ -ㄥ -ㄧ -ㄨ -ㆍ -㈦ -㊣ -㎡ -㗎 -一 -丁 -七 -万 -丈 -三 -上 -下 -不 -与 -丐 -丑 -专 -且 -丕 -世 -丘 -丙 -业 -丛 -东 -丝 -丞 -丟 -両 -丢 -两 -严 -並 -丧 -丨 -个 -丫 -中 -丰 -串 -临 -丶 -丸 -丹 -为 -主 -丼 -丽 -举 -丿 -乂 -乃 -久 -么 -义 -之 -乌 -乍 -乎 -乏 -乐 -乒 -乓 -乔 -乖 -乗 -乘 -乙 -乜 -九 -乞 -也 -习 -乡 -书 -乩 -买 -乱 -乳 -乾 -亀 -亂 -了 -予 -争 -事 -二 -于 -亏 -云 -互 -五 -井 -亘 -亙 -亚 -些 -亜 -亞 -亟 -亡 -亢 -交 -亥 -亦 -产 -亨 -亩 -享 -京 -亭 -亮 -亲 -亳 -亵 -人 -亿 -什 -仁 -仃 -仄 -仅 -仆 -仇 -今 -介 -仍 -从 -仏 -仑 -仓 -仔 -仕 -他 -仗 -付 -仙 -仝 -仞 -仟 -代 -令 -以 -仨 -仪 -们 -仮 -仰 -仲 -件 -价 -任 -份 -仿 -企 -伉 -伊 -伍 -伎 -伏 -伐 -休 -伕 -众 -优 -伙 -会 -伝 -伞 -伟 -传 -伢 -伤 -伦 -伪 -伫 -伯 -估 -伴 -伶 -伸 -伺 -似 -伽 -佃 -但 -佇 -佈 -位 -低 -住 -佐 -佑 -体 -佔 -何 -佗 -佘 -余 -佚 -佛 -作 -佝 -佞 -佟 -你 -佢 -佣 -佤 -佥 -佩 -佬 -佯 -佰 -佳 -併 -佶 -佻 -佼 -使 -侃 -侄 -來 -侈 -例 -侍 -侏 -侑 -侖 -侗 -供 -依 -侠 -価 -侣 -侥 -侦 -侧 -侨 -侬 -侮 -侯 -侵 -侶 -侷 -便 -係 -促 -俄 -俊 -俎 -俏 -俐 -俑 -俗 -俘 -俚 -保 -俞 -俟 -俠 -信 -俨 -俩 -俪 -俬 -俭 -修 -俯 -俱 -俳 -俸 -俺 -俾 -倆 -倉 -個 -倌 -倍 -倏 -們 -倒 -倔 -倖 -倘 -候 -倚 -倜 -借 -倡 -値 -倦 -倩 -倪 -倫 -倬 -倭 -倶 -债 -值 -倾 -偃 -假 -偈 -偉 -偌 -偎 -偏 -偕 -做 -停 -健 -側 -偵 -偶 -偷 -偻 -偽 -偿 -傀 -傅 -傍 -傑 -傘 -備 -傚 -傢 -傣 -傥 -储 -傩 -催 -傭 -傲 -傳 -債 -傷 -傻 -傾 -僅 -働 -像 -僑 -僕 -僖 -僚 -僥 -僧 -僭 -僮 -僱 -僵 -價 -僻 -儀 -儂 -億 -儆 -儉 -儋 -儒 -儕 -儘 -償 -儡 -優 -儲 -儷 -儼 -儿 -兀 -允 -元 -兄 -充 -兆 -兇 -先 -光 -克 -兌 -免 -児 -兑 -兒 -兔 -兖 -党 -兜 -兢 -入 -內 -全 -兩 -八 -公 -六 -兮 -兰 -共 -兲 -关 -兴 -兵 -其 -具 -典 -兹 -养 -兼 -兽 -冀 -内 -円 -冇 -冈 -冉 -冊 -册 -再 -冏 -冒 -冕 -冗 -写 -军 -农 -冠 -冢 -冤 -冥 -冨 -冪 -冬 -冯 -冰 -冲 -决 -况 -冶 -冷 -冻 -冼 -冽 -冾 -净 -凄 -准 -凇 -凈 -凉 -凋 -凌 -凍 -减 -凑 -凛 -凜 -凝 -几 -凡 -凤 -処 -凪 -凭 -凯 -凰 -凱 -凳 -凶 -凸 -凹 -出 -击 -函 -凿 -刀 -刁 -刃 -分 -切 -刈 -刊 -刍 -刎 -刑 -划 -列 -刘 -则 -刚 -创 -初 -删 -判 -別 -刨 -利 -刪 -别 -刮 -到 -制 -刷 -券 -刹 -刺 -刻 -刽 -剁 -剂 -剃 -則 -剉 -削 -剋 -剌 -前 -剎 -剐 -剑 -剔 -剖 -剛 -剜 -剝 -剣 -剤 -剥 -剧 -剩 -剪 -副 -割 -創 -剷 -剽 -剿 -劃 -劇 -劈 -劉 -劊 -劍 -劏 -劑 -力 -劝 -办 -功 -加 -务 -劣 -动 -助 -努 -劫 -劭 -励 -劲 -劳 -労 -劵 -効 -劾 -势 -勁 -勃 -勇 -勉 -勋 -勐 -勒 -動 -勖 -勘 -務 -勛 -勝 -勞 -募 -勢 -勤 -勧 -勳 -勵 -勸 -勺 -勻 -勾 -勿 -匀 -包 -匆 -匈 -匍 -匐 -匕 -化 -北 -匙 -匝 -匠 -匡 -匣 -匪 -匮 -匯 -匱 -匹 -区 -医 -匾 -匿 -區 -十 -千 -卅 -升 -午 -卉 -半 -卍 -华 -协 -卑 -卒 -卓 -協 -单 -卖 -南 -単 -博 -卜 -卞 -卟 -占 -卡 -卢 -卤 -卦 -卧 -卫 -卮 -卯 -印 -危 -即 -却 -卵 -卷 -卸 -卻 -卿 -厂 -厄 -厅 -历 -厉 -压 -厌 -厕 -厘 -厚 -厝 -原 -厢 -厥 -厦 -厨 -厩 -厭 -厮 -厲 -厳 -去 -县 -叁 -参 -參 -又 -叉 -及 -友 -双 -反 -収 -发 -叔 -取 -受 -变 -叙 -叛 -叟 -叠 -叡 -叢 -口 -古 -句 -另 -叨 -叩 -只 -叫 -召 -叭 -叮 -可 -台 -叱 -史 -右 -叵 -叶 -号 -司 -叹 -叻 -叼 -叽 -吁 -吃 -各 -吆 -合 -吉 -吊 -吋 -同 -名 -后 -吏 -吐 -向 -吒 -吓 -吕 -吖 -吗 -君 -吝 -吞 -吟 -吠 -吡 -否 -吧 -吨 -吩 -含 -听 -吭 -吮 -启 -吱 -吳 -吴 -吵 -吶 -吸 -吹 -吻 -吼 -吽 -吾 -呀 -呂 -呃 -呆 -呈 -告 -呋 -呎 -呐 -呓 -呕 -呗 -员 -呛 -呜 -呢 -呤 -呦 -周 -呱 -呲 -味 -呵 -呷 -呸 -呻 -呼 -命 -咀 -咁 -咂 -咄 -咆 -咋 -和 -咎 -咏 -咐 -咒 -咔 -咕 -咖 -咗 -咘 -咙 -咚 -咛 -咣 -咤 -咦 -咧 -咨 -咩 -咪 -咫 -咬 -咭 -咯 -咱 -咲 -咳 -咸 -咻 -咽 -咿 -哀 -品 -哂 -哄 -哆 -哇 -哈 -哉 -哋 -哌 -响 -哎 -哏 -哐 -哑 -哒 -哔 -哗 -哟 -員 -哥 -哦 -哧 -哨 -哩 -哪 -哭 -哮 
-哲 -哺 -哼 -哽 -唁 -唄 -唆 -唇 -唉 -唏 -唐 -唑 -唔 -唠 -唤 -唧 -唬 -售 -唯 -唰 -唱 -唳 -唷 -唸 -唾 -啃 -啄 -商 -啉 -啊 -問 -啓 -啕 -啖 -啜 -啞 -啟 -啡 -啤 -啥 -啦 -啧 -啪 -啫 -啬 -啮 -啰 -啱 -啲 -啵 -啶 -啷 -啸 -啻 -啼 -啾 -喀 -喂 -喃 -善 -喆 -喇 -喉 -喊 -喋 -喎 -喏 -喔 -喘 -喙 -喚 -喜 -喝 -喟 -喧 -喪 -喫 -喬 -單 -喰 -喱 -喲 -喳 -喵 -営 -喷 -喹 -喺 -喻 -喽 -嗅 -嗆 -嗇 -嗎 -嗑 -嗒 -嗓 -嗔 -嗖 -嗚 -嗜 -嗝 -嗟 -嗡 -嗣 -嗤 -嗦 -嗨 -嗪 -嗬 -嗯 -嗰 -嗲 -嗳 -嗶 -嗷 -嗽 -嘀 -嘅 -嘆 -嘈 -嘉 -嘌 -嘍 -嘎 -嘔 -嘖 -嘗 -嘘 -嘚 -嘛 -嘜 -嘞 -嘟 -嘢 -嘣 -嘤 -嘧 -嘩 -嘭 -嘮 -嘯 -嘰 -嘱 -嘲 -嘴 -嘶 -嘸 -嘹 -嘻 -嘿 -噁 -噌 -噎 -噓 -噔 -噗 -噙 -噜 -噠 -噢 -噤 -器 -噩 -噪 -噬 -噱 -噴 -噶 -噸 -噹 -噻 -噼 -嚀 -嚇 -嚎 -嚏 -嚐 -嚓 -嚕 -嚟 -嚣 -嚥 -嚨 -嚮 -嚴 -嚷 -嚼 -囂 -囉 -囊 -囍 -囑 -囔 -囗 -囚 -四 -囝 -回 -囟 -因 -囡 -团 -団 -囤 -囧 -囪 -囫 -园 -困 -囱 -囲 -図 -围 -囹 -固 -国 -图 -囿 -圃 -圄 -圆 -圈 -國 -圍 -圏 -園 -圓 -圖 -團 -圜 -土 -圣 -圧 -在 -圩 -圭 -地 -圳 -场 -圻 -圾 -址 -坂 -均 -坊 -坍 -坎 -坏 -坐 -坑 -块 -坚 -坛 -坝 -坞 -坟 -坠 -坡 -坤 -坦 -坨 -坪 -坯 -坳 -坵 -坷 -垂 -垃 -垄 -型 -垒 -垚 -垛 -垠 -垢 -垣 -垦 -垩 -垫 -垭 -垮 -垵 -埂 -埃 -埋 -城 -埔 -埕 -埗 -域 -埠 -埤 -埵 -執 -埸 -培 -基 -埼 -堀 -堂 -堃 -堅 -堆 -堇 -堑 -堕 -堙 -堡 -堤 -堪 -堯 -堰 -報 -場 -堵 -堺 -堿 -塊 -塌 -塑 -塔 -塗 -塘 -塚 -塞 -塢 -塩 -填 -塬 -塭 -塵 -塾 -墀 -境 -墅 -墉 -墊 -墒 -墓 -増 -墘 -墙 -墜 -增 -墟 -墨 -墩 -墮 -墳 -墻 -墾 -壁 -壅 -壆 -壇 -壊 -壑 -壓 -壕 -壘 -壞 -壟 -壢 -壤 -壩 -士 -壬 -壮 -壯 -声 -売 -壳 -壶 -壹 -壺 -壽 -处 -备 -変 -复 -夏 -夔 -夕 -外 -夙 -多 -夜 -够 -夠 -夢 -夥 -大 -天 -太 -夫 -夭 -央 -夯 -失 -头 -夷 -夸 -夹 -夺 -夾 -奂 -奄 -奇 -奈 -奉 -奋 -奎 -奏 -奐 -契 -奔 -奕 -奖 -套 -奘 -奚 -奠 -奢 -奥 -奧 -奪 -奬 -奮 -女 -奴 -奶 -奸 -她 -好 -如 -妃 -妄 -妆 -妇 -妈 -妊 -妍 -妒 -妓 -妖 -妘 -妙 -妝 -妞 -妣 -妤 -妥 -妨 -妩 -妪 -妮 -妲 -妳 -妹 -妻 -妾 -姆 -姉 -姊 -始 -姍 -姐 -姑 -姒 -姓 -委 -姗 -姚 -姜 -姝 -姣 -姥 -姦 -姨 -姪 -姫 -姬 -姹 -姻 -姿 -威 -娃 -娄 -娅 -娆 -娇 -娉 -娑 -娓 -娘 -娛 -娜 -娟 -娠 -娣 -娥 -娩 -娱 -娲 -娴 -娶 -娼 -婀 -婁 -婆 -婉 -婊 -婕 -婚 -婢 -婦 -婧 -婪 -婭 -婴 -婵 -婶 -婷 -婺 -婿 -媒 -媚 -媛 -媞 -媧 -媲 -媳 -媽 -媾 -嫁 -嫂 -嫉 -嫌 -嫑 -嫔 -嫖 -嫘 -嫚 -嫡 -嫣 -嫦 -嫩 -嫲 -嫵 -嫻 -嬅 -嬉 -嬌 -嬗 -嬛 -嬢 -嬤 -嬪 -嬰 -嬴 -嬷 -嬸 -嬿 -孀 -孃 -子 -孑 -孔 -孕 -孖 -字 -存 -孙 -孚 -孛 -孜 -孝 -孟 -孢 -季 -孤 -学 -孩 -孪 -孫 -孬 -孰 -孱 -孳 -孵 -學 -孺 -孽 -孿 -宁 -它 -宅 -宇 -守 -安 -宋 -完 -宏 -宓 -宕 -宗 -官 -宙 -定 -宛 -宜 -宝 -实 -実 -宠 -审 -客 -宣 -室 -宥 -宦 -宪 -宫 -宮 -宰 -害 -宴 -宵 -家 -宸 -容 -宽 -宾 -宿 -寂 -寄 -寅 -密 -寇 -富 -寐 -寒 -寓 -寛 -寝 -寞 -察 -寡 -寢 -寥 -實 -寧 -寨 -審 -寫 -寬 -寮 -寰 -寵 -寶 -寸 -对 -寺 -寻 -导 -対 -寿 -封 -専 -射 -将 -將 -專 -尉 -尊 -尋 -對 -導 -小 -少 -尔 -尕 -尖 -尘 -尚 -尝 -尤 -尧 -尬 -就 -尴 -尷 -尸 -尹 -尺 -尻 -尼 -尽 -尾 -尿 -局 -屁 -层 -屄 -居 -屆 -屈 -屉 -届 -屋 -屌 -屍 -屎 -屏 -屐 -屑 -展 -屜 -属 -屠 -屡 -屢 -層 -履 -屬 -屯 -山 -屹 -屿 -岀 -岁 -岂 -岌 -岐 -岑 -岔 -岖 -岗 -岘 -岙 -岚 -岛 -岡 -岩 -岫 -岬 -岭 -岱 -岳 -岷 -岸 -峇 -峋 -峒 -峙 -峡 -峤 -峥 -峦 -峨 -峪 -峭 -峯 -峰 -峴 -島 -峻 -峽 -崁 -崂 -崆 -崇 -崎 -崑 -崔 -崖 -崗 -崙 -崛 -崧 -崩 -崭 -崴 -崽 -嵇 -嵊 -嵋 -嵌 -嵐 -嵘 -嵩 -嵬 -嵯 -嶂 -嶄 -嶇 -嶋 -嶙 -嶺 -嶼 -嶽 -巅 -巍 -巒 -巔 -巖 -川 -州 -巡 -巢 -工 -左 -巧 -巨 -巩 -巫 -差 -己 -已 -巳 -巴 -巷 -巻 -巽 -巾 -巿 -币 -市 -布 -帅 -帆 -师 -希 -帐 -帑 -帕 -帖 -帘 -帚 -帛 -帜 -帝 -帥 -带 -帧 -師 -席 -帮 -帯 -帰 -帳 -帶 -帷 -常 -帼 -帽 -幀 -幂 -幄 -幅 -幌 -幔 -幕 -幟 -幡 -幢 -幣 -幫 -干 -平 -年 -并 -幸 -幹 -幺 -幻 -幼 -幽 -幾 -广 -庁 -広 -庄 -庆 -庇 -床 -序 -庐 -库 -应 -底 -庖 -店 -庙 -庚 -府 -庞 -废 -庠 -度 -座 -庫 -庭 -庵 -庶 -康 -庸 -庹 -庾 -廁 -廂 -廃 -廈 -廉 -廊 -廓 -廖 -廚 -廝 -廟 -廠 -廢 -廣 -廬 -廳 -延 -廷 -建 -廿 -开 -弁 -异 -弃 -弄 -弈 -弊 -弋 -式 -弑 -弒 -弓 -弔 -引 -弗 -弘 -弛 -弟 -张 -弥 -弦 -弧 -弩 -弭 -弯 -弱 -張 -強 -弹 -强 -弼 -弾 -彅 -彆 -彈 -彌 -彎 -归 -当 -录 -彗 -彙 -彝 -形 -彤 -彥 -彦 -彧 -彩 -彪 -彫 -彬 -彭 -彰 -影 -彷 -役 -彻 -彼 -彿 -往 -征 -径 -待 -徇 -很 -徉 -徊 -律 -後 -徐 -徑 -徒 -従 -徕 -得 -徘 -徙 -徜 -從 -徠 -御 -徨 -復 -循 -徬 -微 -徳 -徴 -徵 -德 -徹 -徼 -徽 -心 -必 -忆 -忌 -忍 -忏 -忐 -忑 -忒 -忖 -志 -忘 -忙 -応 -忠 -忡 -忤 -忧 -忪 -快 -忱 -念 -忻 -忽 -忿 -怀 -态 -怂 -怅 -怆 -怎 -怏 -怒 -怔 -怕 -怖 -怙 -怜 -思 -怠 -怡 -急 -怦 -性 -怨 -怪 -怯 -怵 -总 -怼 -恁 -恃 -恆 -恋 -恍 -恐 -恒 -恕 -恙 -恚 -恢 -恣 -恤 -恥 -恨 -恩 -恪 -恫 -恬 -恭 -息 -恰 -恳 -恵 -恶 -恸 -恺 -恻 -恼 -恿 -悄 -悅 -悉 -悌 -悍 -悔 -悖 -悚 -悟 -悠 -患 -悦 -您 -悩 -悪 -悬 -悯 -悱 -悲 -悴 -悵 -悶 -悸 -悻 -悼 -悽 -情 -惆 -惇 -惊 -惋 -惑 -惕 -惘 -惚 -惜 -惟 -惠 -惡 -惦 -惧 -惨 -惩 -惫 -惬 -惭 -惮 -惯 -惰 -惱 -想 -惴 -惶 -惹 -惺 -愁 -愆 -愈 -愉 -愍 -意 -愕 -愚 -愛 -愜 -感 -愣 -愤 -愧 -愫 -愷 -愿 -慄 -慈 -態 -慌 -慎 -慑 -慕 -慘 -慚 
-慟 -慢 -慣 -慧 -慨 -慫 -慮 -慰 -慳 -慵 -慶 -慷 -慾 -憂 -憊 -憋 -憎 -憐 -憑 -憔 -憚 -憤 -憧 -憨 -憩 -憫 -憬 -憲 -憶 -憾 -懂 -懇 -懈 -應 -懊 -懋 -懑 -懒 -懦 -懲 -懵 -懶 -懷 -懸 -懺 -懼 -懾 -懿 -戀 -戈 -戊 -戌 -戍 -戎 -戏 -成 -我 -戒 -戕 -或 -战 -戚 -戛 -戟 -戡 -戦 -截 -戬 -戮 -戰 -戲 -戳 -戴 -戶 -户 -戸 -戻 -戾 -房 -所 -扁 -扇 -扈 -扉 -手 -才 -扎 -扑 -扒 -打 -扔 -払 -托 -扛 -扣 -扦 -执 -扩 -扪 -扫 -扬 -扭 -扮 -扯 -扰 -扱 -扳 -扶 -批 -扼 -找 -承 -技 -抄 -抉 -把 -抑 -抒 -抓 -投 -抖 -抗 -折 -抚 -抛 -抜 -択 -抟 -抠 -抡 -抢 -护 -报 -抨 -披 -抬 -抱 -抵 -抹 -押 -抽 -抿 -拂 -拄 -担 -拆 -拇 -拈 -拉 -拋 -拌 -拍 -拎 -拐 -拒 -拓 -拔 -拖 -拗 -拘 -拙 -拚 -招 -拜 -拟 -拡 -拢 -拣 -拥 -拦 -拧 -拨 -择 -括 -拭 -拮 -拯 -拱 -拳 -拴 -拷 -拼 -拽 -拾 -拿 -持 -挂 -指 -挈 -按 -挎 -挑 -挖 -挙 -挚 -挛 -挝 -挞 -挟 -挠 -挡 -挣 -挤 -挥 -挨 -挪 -挫 -振 -挲 -挹 -挺 -挽 -挾 -捂 -捅 -捆 -捉 -捋 -捌 -捍 -捎 -捏 -捐 -捕 -捞 -损 -捡 -换 -捣 -捧 -捨 -捩 -据 -捱 -捲 -捶 -捷 -捺 -捻 -掀 -掂 -掃 -掇 -授 -掉 -掌 -掏 -掐 -排 -掖 -掘 -掙 -掛 -掠 -採 -探 -掣 -接 -控 -推 -掩 -措 -掬 -掰 -掲 -掳 -掴 -掷 -掸 -掺 -揀 -揃 -揄 -揆 -揉 -揍 -描 -提 -插 -揖 -揚 -換 -握 -揣 -揩 -揪 -揭 -揮 -援 -揶 -揸 -揹 -揽 -搀 -搁 -搂 -搅 -損 -搏 -搐 -搓 -搔 -搖 -搗 -搜 -搞 -搡 -搪 -搬 -搭 -搵 -搶 -携 -搽 -摀 -摁 -摄 -摆 -摇 -摈 -摊 -摒 -摔 -摘 -摞 -摟 -摧 -摩 -摯 -摳 -摸 -摹 -摺 -摻 -撂 -撃 -撅 -撇 -撈 -撐 -撑 -撒 -撓 -撕 -撚 -撞 -撤 -撥 -撩 -撫 -撬 -播 -撮 -撰 -撲 -撵 -撷 -撸 -撻 -撼 -撿 -擀 -擁 -擂 -擄 -擅 -擇 -擊 -擋 -操 -擎 -擒 -擔 -擘 -據 -擞 -擠 -擡 -擢 -擦 -擬 -擰 -擱 -擲 -擴 -擷 -擺 -擼 -擾 -攀 -攏 -攒 -攔 -攘 -攙 -攜 -攝 -攞 -攢 -攣 -攤 -攥 -攪 -攫 -攬 -支 -收 -攸 -改 -攻 -放 -政 -故 -效 -敌 -敍 -敎 -敏 -救 -敕 -敖 -敗 -敘 -教 -敛 -敝 -敞 -敢 -散 -敦 -敬 -数 -敲 -整 -敵 -敷 -數 -斂 -斃 -文 -斋 -斌 -斎 -斐 -斑 -斓 -斗 -料 -斛 -斜 -斟 -斡 -斤 -斥 -斧 -斩 -斫 -斬 -断 -斯 -新 -斷 -方 -於 -施 -旁 -旃 -旅 -旋 -旌 -旎 -族 -旖 -旗 -无 -既 -日 -旦 -旧 -旨 -早 -旬 -旭 -旮 -旱 -时 -旷 -旺 -旻 -昀 -昂 -昆 -昇 -昉 -昊 -昌 -明 -昏 -易 -昔 -昕 -昙 -星 -映 -春 -昧 -昨 -昭 -是 -昱 -昴 -昵 -昶 -昼 -显 -晁 -時 -晃 -晉 -晋 -晌 -晏 -晒 -晓 -晔 -晕 -晖 -晗 -晚 -晝 -晞 -晟 -晤 -晦 -晨 -晩 -普 -景 -晰 -晴 -晶 -晷 -智 -晾 -暂 -暄 -暇 -暈 -暉 -暌 -暐 -暑 -暖 -暗 -暝 -暢 -暧 -暨 -暫 -暮 -暱 -暴 -暸 -暹 -曄 -曆 -曇 -曉 -曖 -曙 -曜 -曝 -曠 -曦 -曬 -曰 -曲 -曳 -更 -書 -曹 -曼 -曾 -替 -最 -會 -月 -有 -朋 -服 -朐 -朔 -朕 -朗 -望 -朝 -期 -朦 -朧 -木 -未 -末 -本 -札 -朮 -术 -朱 -朴 -朵 -机 -朽 -杀 -杂 -权 -杆 -杈 -杉 -李 -杏 -材 -村 -杓 -杖 -杜 -杞 -束 -杠 -条 -来 -杨 -杭 -杯 -杰 -東 -杳 -杵 -杷 -杼 -松 -板 -极 -构 -枇 -枉 -枋 -析 -枕 -林 -枚 -果 -枝 -枢 -枣 -枪 -枫 -枭 -枯 -枰 -枱 -枳 -架 -枷 -枸 -柄 -柏 -某 -柑 -柒 -染 -柔 -柘 -柚 -柜 -柞 -柠 -柢 -查 -柩 -柬 -柯 -柱 -柳 -柴 -柵 -査 -柿 -栀 -栃 -栄 -栅 -标 -栈 -栉 -栋 -栎 -栏 -树 -栓 -栖 -栗 -校 -栩 -株 -样 -核 -根 -格 -栽 -栾 -桀 -桁 -桂 -桃 -桅 -框 -案 -桉 -桌 -桎 -桐 -桑 -桓 -桔 -桜 -桠 -桡 -桢 -档 -桥 -桦 -桧 -桨 -桩 -桶 -桿 -梁 -梅 -梆 -梏 -梓 -梗 -條 -梟 -梢 -梦 -梧 -梨 -梭 -梯 -械 -梳 -梵 -梶 -检 -棂 -棄 -棉 -棋 -棍 -棒 -棕 -棗 -棘 -棚 -棟 -棠 -棣 -棧 -森 -棱 -棲 -棵 -棹 -棺 -椁 -椅 -椋 -植 -椎 -椒 -検 -椪 -椭 -椰 -椹 -椽 -椿 -楂 -楊 -楓 -楔 -楚 -楝 -楞 -楠 -楣 -楨 -楫 -業 -楮 -極 -楷 -楸 -楹 -楼 -楽 -概 -榄 -榆 -榈 -榉 -榔 -榕 -榖 -榛 -榜 -榨 -榫 -榭 -榮 -榱 -榴 -榷 -榻 -槁 -槃 -構 -槌 -槍 -槎 -槐 -槓 -様 -槛 -槟 -槤 -槭 -槲 -槳 -槻 -槽 -槿 -樁 -樂 -樊 -樑 -樓 -標 -樞 -樟 -模 -樣 -権 -横 -樫 -樯 -樱 -樵 -樸 -樹 -樺 -樽 -樾 -橄 -橇 -橋 -橐 -橘 -橙 -機 -橡 -橢 -橫 -橱 -橹 -橼 -檀 -檄 -檎 -檐 -檔 -檗 -檜 -檢 -檬 -檯 -檳 -檸 -檻 -櫃 -櫚 -櫛 -櫥 -櫸 -櫻 -欄 -權 -欒 -欖 -欠 -次 -欢 -欣 -欧 -欲 -欸 -欺 -欽 -款 -歆 -歇 -歉 -歌 -歎 -歐 -歓 -歙 -歛 -歡 -止 -正 -此 -步 -武 -歧 -歩 -歪 -歯 -歲 -歳 -歴 -歷 -歸 -歹 -死 -歼 -殁 -殃 -殆 -殇 -殉 -殊 -残 -殒 -殓 -殖 -殘 -殞 -殡 -殤 -殭 -殯 -殲 -殴 -段 -殷 -殺 -殼 -殿 -毀 -毁 -毂 -毅 -毆 -毋 -母 -毎 -每 -毒 -毓 -比 -毕 -毗 -毘 -毙 -毛 -毡 -毫 -毯 -毽 -氈 -氏 -氐 -民 -氓 -气 -氖 -気 -氙 -氛 -氟 -氡 -氢 -氣 -氤 -氦 -氧 -氨 -氪 -氫 -氮 -氯 -氰 -氲 -水 -氷 -永 -氹 -氾 -汀 -汁 -求 -汆 -汇 -汉 -汎 -汐 -汕 -汗 -汙 -汛 -汝 -汞 -江 -池 -污 -汤 -汨 -汩 -汪 -汰 -汲 -汴 -汶 -汹 -決 -汽 -汾 -沁 -沂 -沃 -沅 -沈 -沉 -沌 -沏 -沐 -沒 -沓 -沖 -沙 -沛 -沟 -没 -沢 -沣 -沥 -沦 -沧 -沪 -沫 -沭 -沮 -沱 -河 -沸 -油 -治 -沼 -沽 -沾 -沿 -況 -泄 -泉 -泊 -泌 -泓 -法 -泗 -泛 -泞 -泠 -泡 -波 -泣 -泥 -注 -泪 -泫 -泮 -泯 -泰 -泱 -泳 -泵 -泷 -泸 -泻 -泼 -泽 -泾 -洁 -洄 -洋 -洒 -洗 -洙 -洛 -洞 -津 -洩 -洪 -洮 -洱 -洲 -洵 -洶 -洸 -洹 -活 -洼 -洽 -派 -流 -浃 -浄 -浅 -浆 -浇 -浊 -测 -济 -浏 -浑 -浒 -浓 -浔 -浙 -浚 -浜 -浣 -浦 -浩 -浪 -浬 -浮 -浯 -浴 -海 -浸 -涂 -涅 -涇 -消 -涉 -涌 -涎 -涓 -涔 -涕 -涙 -涛 -涝 -涞 -涟 -涠 -涡 -涣 -涤 -润 -涧 -涨 -涩 -涪 -涮 -涯 -液 -涵 -涸 -涼 -涿 -淀 -淄 -淅 
-淆 -淇 -淋 -淌 -淑 -淒 -淖 -淘 -淙 -淚 -淞 -淡 -淤 -淦 -淨 -淩 -淪 -淫 -淬 -淮 -深 -淳 -淵 -混 -淹 -淺 -添 -淼 -清 -済 -渉 -渊 -渋 -渍 -渎 -渐 -渔 -渗 -渙 -渚 -減 -渝 -渠 -渡 -渣 -渤 -渥 -渦 -温 -測 -渭 -港 -渲 -渴 -游 -渺 -渾 -湃 -湄 -湊 -湍 -湖 -湘 -湛 -湟 -湧 -湫 -湮 -湯 -湳 -湾 -湿 -満 -溃 -溅 -溉 -溏 -源 -準 -溜 -溝 -溟 -溢 -溥 -溧 -溪 -溫 -溯 -溱 -溴 -溶 -溺 -溼 -滁 -滂 -滄 -滅 -滇 -滋 -滌 -滑 -滓 -滔 -滕 -滙 -滚 -滝 -滞 -滟 -满 -滢 -滤 -滥 -滦 -滨 -滩 -滬 -滯 -滲 -滴 -滷 -滸 -滾 -滿 -漁 -漂 -漆 -漉 -漏 -漓 -演 -漕 -漠 -漢 -漣 -漩 -漪 -漫 -漬 -漯 -漱 -漲 -漳 -漸 -漾 -漿 -潆 -潇 -潋 -潍 -潑 -潔 -潘 -潛 -潜 -潞 -潟 -潢 -潤 -潦 -潧 -潭 -潮 -潰 -潴 -潸 -潺 -潼 -澀 -澄 -澆 -澈 -澍 -澎 -澗 -澜 -澡 -澤 -澧 -澱 -澳 -澹 -激 -濁 -濂 -濃 -濑 -濒 -濕 -濘 -濛 -濟 -濠 -濡 -濤 -濫 -濬 -濮 -濯 -濱 -濺 -濾 -瀅 -瀆 -瀉 -瀋 -瀏 -瀑 -瀕 -瀘 -瀚 -瀛 -瀝 -瀞 -瀟 -瀧 -瀨 -瀬 -瀰 -瀾 -灌 -灏 -灑 -灘 -灝 -灞 -灣 -火 -灬 -灭 -灯 -灰 -灵 -灶 -灸 -灼 -災 -灾 -灿 -炀 -炁 -炅 -炉 -炊 -炎 -炒 -炔 -炕 -炖 -炙 -炜 -炫 -炬 -炭 -炮 -炯 -炳 -炷 -炸 -点 -為 -炼 -炽 -烁 -烂 -烃 -烈 -烊 -烏 -烘 -烙 -烛 -烟 -烤 -烦 -烧 -烨 -烩 -烫 -烬 -热 -烯 -烷 -烹 -烽 -焉 -焊 -焕 -焖 -焗 -焘 -焙 -焚 -焜 -無 -焦 -焯 -焰 -焱 -然 -焼 -煅 -煉 -煊 -煌 -煎 -煒 -煖 -煙 -煜 -煞 -煤 -煥 -煦 -照 -煨 -煩 -煮 -煲 -煸 -煽 -熄 -熊 -熏 -熒 -熔 -熙 -熟 -熠 -熨 -熬 -熱 -熵 -熹 -熾 -燁 -燃 -燄 -燈 -燉 -燊 -燎 -燒 -燔 -燕 -燙 -燜 -營 -燥 -燦 -燧 -燭 -燮 -燴 -燻 -燼 -燿 -爆 -爍 -爐 -爛 -爪 -爬 -爭 -爰 -爱 -爲 -爵 -父 -爷 -爸 -爹 -爺 -爻 -爽 -爾 -牆 -片 -版 -牌 -牍 -牒 -牙 -牛 -牝 -牟 -牠 -牡 -牢 -牦 -牧 -物 -牯 -牲 -牴 -牵 -特 -牺 -牽 -犀 -犁 -犄 -犊 -犍 -犒 -犢 -犧 -犬 -犯 -状 -犷 -犸 -犹 -狀 -狂 -狄 -狈 -狎 -狐 -狒 -狗 -狙 -狞 -狠 -狡 -狩 -独 -狭 -狮 -狰 -狱 -狸 -狹 -狼 -狽 -猎 -猕 -猖 -猗 -猙 -猛 -猜 -猝 -猥 -猩 -猪 -猫 -猬 -献 -猴 -猶 -猷 -猾 -猿 -獄 -獅 -獎 -獐 -獒 -獗 -獠 -獣 -獨 -獭 -獰 -獲 -獵 -獷 -獸 -獺 -獻 -獼 -獾 -玄 -率 -玉 -王 -玑 -玖 -玛 -玟 -玠 -玥 -玩 -玫 -玮 -环 -现 -玲 -玳 -玷 -玺 -玻 -珀 -珂 -珅 -珈 -珉 -珊 -珍 -珏 -珐 -珑 -珙 -珞 -珠 -珣 -珥 -珩 -珪 -班 -珮 -珲 -珺 -現 -球 -琅 -理 -琇 -琉 -琊 -琍 -琏 -琐 -琛 -琢 -琥 -琦 -琨 -琪 -琬 -琮 -琰 -琲 -琳 -琴 -琵 -琶 -琺 -琼 -瑀 -瑁 -瑄 -瑋 -瑕 -瑗 -瑙 -瑚 -瑛 -瑜 -瑞 -瑟 -瑠 -瑣 -瑤 -瑩 -瑪 -瑯 -瑰 -瑶 -瑾 -璀 -璁 -璃 -璇 -璉 -璋 -璎 -璐 -璜 -璞 -璟 -璧 -璨 -環 -璽 -璿 -瓊 -瓏 -瓒 -瓜 -瓢 -瓣 -瓤 -瓦 -瓮 -瓯 -瓴 -瓶 -瓷 -甄 -甌 -甕 -甘 -甙 -甚 -甜 -生 -產 -産 -甥 -甦 -用 -甩 -甫 -甬 -甭 -甯 -田 -由 -甲 -申 -电 -男 -甸 -町 -画 -甾 -畀 -畅 -界 -畏 -畑 -畔 -留 -畜 -畝 -畢 -略 -畦 -番 -畫 -異 -畲 -畳 -畴 -當 -畸 -畹 -畿 -疆 -疇 -疊 -疏 -疑 -疔 -疖 -疗 -疙 -疚 -疝 -疟 -疡 -疣 -疤 -疥 -疫 -疮 -疯 -疱 -疲 -疳 -疵 -疸 -疹 -疼 -疽 -疾 -痂 -病 -症 -痈 -痉 -痊 -痍 -痒 -痔 -痕 -痘 -痙 -痛 -痞 -痠 -痢 -痣 -痤 -痧 -痨 -痪 -痫 -痰 -痱 -痴 -痹 -痺 -痼 -痿 -瘀 -瘁 -瘋 -瘍 -瘓 -瘘 -瘙 -瘟 -瘠 -瘡 -瘢 -瘤 -瘦 -瘧 -瘩 -瘪 -瘫 -瘴 -瘸 -瘾 -療 -癇 -癌 -癒 -癖 -癜 -癞 -癡 -癢 -癣 -癥 -癫 -癬 -癮 -癱 -癲 -癸 -発 -登 -發 -白 -百 -皂 -的 -皆 -皇 -皈 -皋 -皎 -皑 -皓 -皖 -皙 -皚 -皮 -皰 -皱 -皴 -皺 -皿 -盂 -盃 -盅 -盆 -盈 -益 -盎 -盏 -盐 -监 -盒 -盔 -盖 -盗 -盘 -盛 -盜 -盞 -盟 -盡 -監 -盤 -盥 -盧 -盪 -目 -盯 -盱 -盲 -直 -相 -盹 -盼 -盾 -省 -眈 -眉 -看 -県 -眙 -眞 -真 -眠 -眦 -眨 -眩 -眯 -眶 -眷 -眸 -眺 -眼 -眾 -着 -睁 -睇 -睏 -睐 -睑 -睛 -睜 -睞 -睡 -睢 -督 -睥 -睦 -睨 -睪 -睫 -睬 -睹 -睽 -睾 -睿 -瞄 -瞅 -瞇 -瞋 -瞌 -瞎 -瞑 -瞒 -瞓 -瞞 -瞟 -瞠 -瞥 -瞧 -瞩 -瞪 -瞬 -瞭 -瞰 -瞳 -瞻 -瞼 -瞿 -矇 -矍 -矗 -矚 -矛 -矜 -矢 -矣 -知 -矩 -矫 -短 -矮 -矯 -石 -矶 -矽 -矾 -矿 -码 -砂 -砌 -砍 -砒 -研 -砖 -砗 -砚 -砝 -砣 -砥 -砧 -砭 -砰 -砲 -破 -砷 -砸 -砺 -砼 -砾 -础 -硅 -硐 -硒 -硕 -硝 -硫 -硬 -确 -硯 -硼 -碁 -碇 -碉 -碌 -碍 -碎 -碑 -碓 -碗 -碘 -碚 -碛 -碟 -碣 -碧 -碩 -碰 -碱 -碳 -碴 -確 -碼 -碾 -磁 -磅 -磊 -磋 -磐 -磕 -磚 -磡 -磨 -磬 -磯 -磲 -磷 -磺 -礁 -礎 -礙 -礡 -礦 -礪 -礫 -礴 -示 -礼 -社 -祀 -祁 -祂 -祇 -祈 -祉 -祎 -祐 -祕 -祖 -祗 -祚 -祛 -祜 -祝 -神 -祟 -祠 -祢 -祥 -票 -祭 -祯 -祷 -祸 -祺 -祿 -禀 -禁 -禄 -禅 -禍 -禎 -福 -禛 -禦 -禧 -禪 -禮 -禱 -禹 -禺 -离 -禽 -禾 -禿 -秀 -私 -秃 -秆 -秉 -秋 -种 -科 -秒 -秘 -租 -秣 -秤 -秦 -秧 -秩 -秭 -积 -称 -秸 -移 -秽 -稀 -稅 -程 -稍 -税 -稔 -稗 -稚 -稜 -稞 -稟 -稠 -稣 -種 -稱 -稲 -稳 -稷 -稹 -稻 -稼 -稽 -稿 -穀 -穂 -穆 -穌 -積 -穎 -穗 -穢 -穩 -穫 -穴 -究 -穷 -穹 -空 -穿 -突 -窃 -窄 -窈 -窍 -窑 -窒 -窓 -窕 -窖 -窗 -窘 -窜 -窝 -窟 -窠 -窥 -窦 -窨 -窩 -窪 -窮 -窯 -窺 -窿 -竄 -竅 -竇 -竊 -立 -竖 -站 -竜 -竞 -竟 -章 -竣 -童 -竭 -端 -競 -竹 -竺 -竽 -竿 -笃 -笆 -笈 -笋 -笏 -笑 -笔 -笙 -笛 -笞 -笠 -符 -笨 -第 -笹 -笺 -笼 -筆 -等 -筊 -筋 -筍 -筏 -筐 -筑 -筒 -答 -策 -筛 -筝 -筠 -筱 -筲 -筵 -筷 -筹 -签 -简 -箇 -箋 -箍 -箏 -箐 -箔 -箕 -算 -箝 -管 -箩 -箫 -箭 -箱 -箴 -箸 -節 -篁 -範 -篆 -篇 -築 -篑 -篓 -篙 -篝 -篠 -篡 -篤 -篩 -篪 -篮 -篱 -篷 -簇 -簌 -簍 -簡 -簦 -簧 
-簪 -簫 -簷 -簸 -簽 -簾 -簿 -籁 -籃 -籌 -籍 -籐 -籟 -籠 -籤 -籬 -籮 -籲 -米 -类 -籼 -籽 -粄 -粉 -粑 -粒 -粕 -粗 -粘 -粟 -粤 -粥 -粧 -粪 -粮 -粱 -粲 -粳 -粵 -粹 -粼 -粽 -精 -粿 -糅 -糊 -糍 -糕 -糖 -糗 -糙 -糜 -糞 -糟 -糠 -糧 -糬 -糯 -糰 -糸 -系 -糾 -紀 -紂 -約 -紅 -紉 -紊 -紋 -納 -紐 -紓 -純 -紗 -紘 -紙 -級 -紛 -紜 -素 -紡 -索 -紧 -紫 -紮 -累 -細 -紳 -紹 -紺 -終 -絃 -組 -絆 -経 -結 -絕 -絞 -絡 -絢 -給 -絨 -絮 -統 -絲 -絳 -絵 -絶 -絹 -綁 -綏 -綑 -經 -継 -続 -綜 -綠 -綢 -綦 -綫 -綬 -維 -綱 -網 -綴 -綵 -綸 -綺 -綻 -綽 -綾 -綿 -緊 -緋 -総 -緑 -緒 -緘 -線 -緝 -緞 -締 -緣 -編 -緩 -緬 -緯 -練 -緹 -緻 -縁 -縄 -縈 -縛 -縝 -縣 -縫 -縮 -縱 -縴 -縷 -總 -績 -繁 -繃 -繆 -繇 -繋 -織 -繕 -繚 -繞 -繡 -繩 -繪 -繫 -繭 -繳 -繹 -繼 -繽 -纂 -續 -纍 -纏 -纓 -纔 -纖 -纜 -纠 -红 -纣 -纤 -约 -级 -纨 -纪 -纫 -纬 -纭 -纯 -纰 -纱 -纲 -纳 -纵 -纶 -纷 -纸 -纹 -纺 -纽 -纾 -线 -绀 -练 -组 -绅 -细 -织 -终 -绊 -绍 -绎 -经 -绑 -绒 -结 -绔 -绕 -绘 -给 -绚 -绛 -络 -绝 -绞 -统 -绡 -绢 -绣 -绥 -绦 -继 -绩 -绪 -绫 -续 -绮 -绯 -绰 -绳 -维 -绵 -绶 -绷 -绸 -绻 -综 -绽 -绾 -绿 -缀 -缄 -缅 -缆 -缇 -缈 -缉 -缎 -缓 -缔 -缕 -编 -缘 -缙 -缚 -缜 -缝 -缠 -缢 -缤 -缥 -缨 -缩 -缪 -缭 -缮 -缰 -缱 -缴 -缸 -缺 -缽 -罂 -罄 -罌 -罐 -网 -罔 -罕 -罗 -罚 -罡 -罢 -罩 -罪 -置 -罰 -署 -罵 -罷 -罹 -羁 -羅 -羈 -羊 -羌 -美 -羔 -羚 -羞 -羟 -羡 -羣 -群 -羥 -羧 -羨 -義 -羯 -羲 -羸 -羹 -羽 -羿 -翁 -翅 -翊 -翌 -翎 -習 -翔 -翘 -翟 -翠 -翡 -翦 -翩 -翰 -翱 -翳 -翹 -翻 -翼 -耀 -老 -考 -耄 -者 -耆 -耋 -而 -耍 -耐 -耒 -耕 -耗 -耘 -耙 -耦 -耨 -耳 -耶 -耷 -耸 -耻 -耽 -耿 -聂 -聆 -聊 -聋 -职 -聒 -联 -聖 -聘 -聚 -聞 -聪 -聯 -聰 -聲 -聳 -聴 -聶 -職 -聽 -聾 -聿 -肃 -肄 -肅 -肆 -肇 -肉 -肋 -肌 -肏 -肓 -肖 -肘 -肚 -肛 -肝 -肠 -股 -肢 -肤 -肥 -肩 -肪 -肮 -肯 -肱 -育 -肴 -肺 -肽 -肾 -肿 -胀 -胁 -胃 -胄 -胆 -背 -胍 -胎 -胖 -胚 -胛 -胜 -胝 -胞 -胡 -胤 -胥 -胧 -胫 -胭 -胯 -胰 -胱 -胳 -胴 -胶 -胸 -胺 -能 -脂 -脅 -脆 -脇 -脈 -脉 -脊 -脍 -脏 -脐 -脑 -脓 -脖 -脘 -脚 -脛 -脣 -脩 -脫 -脯 -脱 -脲 -脳 -脸 -脹 -脾 -腆 -腈 -腊 -腋 -腌 -腎 -腐 -腑 -腓 -腔 -腕 -腥 -腦 -腩 -腫 -腭 -腮 -腰 -腱 -腳 -腴 -腸 -腹 -腺 -腻 -腼 -腾 -腿 -膀 -膈 -膊 -膏 -膑 -膘 -膚 -膛 -膜 -膝 -膠 -膦 -膨 -膩 -膳 -膺 -膻 -膽 -膾 -膿 -臀 -臂 -臃 -臆 -臉 -臊 -臍 -臓 -臘 -臟 -臣 -臥 -臧 -臨 -自 -臬 -臭 -至 -致 -臺 -臻 -臼 -臾 -舀 -舂 -舅 -舆 -與 -興 -舉 -舊 -舌 -舍 -舎 -舐 -舒 -舔 -舖 -舗 -舛 -舜 -舞 -舟 -航 -舫 -般 -舰 -舱 -舵 -舶 -舷 -舸 -船 -舺 -舾 -艇 -艋 -艘 -艙 -艦 -艮 -良 -艰 -艱 -色 -艳 -艷 -艹 -艺 -艾 -节 -芃 -芈 -芊 -芋 -芍 -芎 -芒 -芙 -芜 -芝 -芡 -芥 -芦 -芩 -芪 -芫 -芬 -芭 -芮 -芯 -花 -芳 -芷 -芸 -芹 -芻 -芽 -芾 -苁 -苄 -苇 -苋 -苍 -苏 -苑 -苒 -苓 -苔 -苕 -苗 -苛 -苜 -苞 -苟 -苡 -苣 -若 -苦 -苫 -苯 -英 -苷 -苹 -苻 -茁 -茂 -范 -茄 -茅 -茉 -茎 -茏 -茗 -茜 -茧 -茨 -茫 -茬 -茭 -茯 -茱 -茲 -茴 -茵 -茶 -茸 -茹 -茼 -荀 -荃 -荆 -草 -荊 -荏 -荐 -荒 -荔 -荖 -荘 -荚 -荞 -荟 -荠 -荡 -荣 -荤 -荥 -荧 -荨 -荪 -荫 -药 -荳 -荷 -荸 -荻 -荼 -荽 -莅 -莆 -莉 -莊 -莎 -莒 -莓 -莖 -莘 -莞 -莠 -莢 -莧 -莪 -莫 -莱 -莲 -莴 -获 -莹 -莺 -莽 -莿 -菀 -菁 -菅 -菇 -菈 -菊 -菌 -菏 -菓 -菖 -菘 -菜 -菟 -菠 -菡 -菩 -華 -菱 -菲 -菸 -菽 -萁 -萃 -萄 -萊 -萋 -萌 -萍 -萎 -萘 -萝 -萤 -营 -萦 -萧 -萨 -萩 -萬 -萱 -萵 -萸 -萼 -落 -葆 -葉 -著 -葚 -葛 -葡 -董 -葦 -葩 -葫 -葬 -葭 -葯 -葱 -葳 -葵 -葷 -葺 -蒂 -蒋 -蒐 -蒔 -蒙 -蒜 -蒞 -蒟 -蒡 -蒨 -蒲 -蒸 -蒹 -蒻 -蒼 -蒿 -蓁 -蓄 -蓆 -蓉 -蓋 -蓑 -蓓 -蓖 -蓝 -蓟 -蓦 -蓬 -蓮 -蓼 -蓿 -蔑 -蔓 -蔔 -蔗 -蔘 -蔚 -蔡 -蔣 -蔥 -蔫 -蔬 -蔭 -蔵 -蔷 -蔺 -蔻 -蔼 -蔽 -蕁 -蕃 -蕈 -蕉 -蕊 -蕎 -蕙 -蕤 -蕨 -蕩 -蕪 -蕭 -蕲 -蕴 -蕻 -蕾 -薄 -薅 -薇 -薈 -薊 -薏 -薑 -薔 -薙 -薛 -薦 -薨 -薩 -薪 -薬 -薯 -薰 -薹 -藉 -藍 -藏 -藐 -藓 -藕 -藜 -藝 -藤 -藥 -藩 -藹 -藻 -藿 -蘆 -蘇 -蘊 -蘋 -蘑 -蘚 -蘭 -蘸 -蘼 -蘿 -虎 -虏 -虐 -虑 -虔 -處 -虚 -虛 -虜 -虞 -號 -虢 -虧 -虫 -虬 -虱 -虹 -虻 -虽 -虾 -蚀 -蚁 -蚂 -蚊 -蚌 -蚓 -蚕 -蚜 -蚝 -蚣 -蚤 -蚩 -蚪 -蚯 -蚱 -蚵 -蛀 -蛆 -蛇 -蛊 -蛋 -蛎 -蛐 -蛔 -蛙 -蛛 -蛟 -蛤 -蛭 -蛮 -蛰 -蛳 -蛹 -蛻 -蛾 -蜀 -蜂 -蜃 -蜆 -蜇 -蜈 -蜊 -蜍 -蜒 -蜓 -蜕 -蜗 -蜘 -蜚 -蜜 -蜡 -蜢 -蜥 -蜱 -蜴 -蜷 -蜻 -蜿 -蝇 -蝈 -蝉 -蝌 -蝎 -蝕 -蝗 -蝙 -蝟 -蝠 -蝦 -蝨 -蝴 -蝶 -蝸 -蝼 -螂 -螃 -融 -螞 -螢 -螨 -螯 -螳 -螺 -蟀 -蟄 -蟆 -蟋 -蟎 -蟑 -蟒 -蟠 -蟬 -蟲 -蟹 -蟻 -蟾 -蠅 -蠍 -蠔 -蠕 -蠛 -蠟 -蠡 -蠢 -蠣 -蠱 -蠶 -蠹 -蠻 -血 -衄 -衅 -衆 -行 -衍 -術 -衔 -街 -衙 -衛 -衝 -衞 -衡 -衢 -衣 -补 -表 -衩 -衫 -衬 -衮 -衰 -衲 -衷 -衹 -衾 -衿 -袁 -袂 -袄 -袅 -袈 -袋 -袍 -袒 -袖 -袜 -袞 -袤 -袪 -被 -袭 -袱 -裁 -裂 -装 -裆 -裊 -裏 -裔 -裕 -裘 -裙 -補 -裝 -裟 -裡 -裤 -裨 -裱 -裳 -裴 -裸 -裹 -製 -裾 -褂 -複 -褐 -褒 -褓 -褔 -褚 -褥 -褪 -褫 -褲 -褶 -褻 -襁 -襄 -襟 -襠 -襪 -襬 -襯 -襲 -西 -要 -覃 -覆 -覇 -見 -規 -覓 -視 -覚 -覦 -覧 -親 -覬 -観 -覷 -覺 -覽 -觀 -见 -观 -规 -觅 -视 -览 -觉 -觊 -觎 -觐 -觑 -角 -觞 -解 -觥 -触 -觸 -言 -訂 -計 -訊 -討 -訓 -訕 -訖 -託 -記 -訛 -訝 -訟 -訣 -訥 -訪 -設 -許 -訳 -訴 -訶 -診 -註 -証 -詆 -詐 -詔 
-評 -詛 -詞 -詠 -詡 -詢 -詣 -試 -詩 -詫 -詬 -詭 -詮 -詰 -話 -該 -詳 -詹 -詼 -誅 -誇 -誉 -誌 -認 -誓 -誕 -誘 -語 -誠 -誡 -誣 -誤 -誥 -誦 -誨 -說 -説 -読 -誰 -課 -誹 -誼 -調 -諄 -談 -請 -諏 -諒 -論 -諗 -諜 -諡 -諦 -諧 -諫 -諭 -諮 -諱 -諳 -諷 -諸 -諺 -諾 -謀 -謁 -謂 -謄 -謊 -謎 -謐 -謔 -謗 -謙 -講 -謝 -謠 -謨 -謬 -謹 -謾 -譁 -證 -譎 -譏 -識 -譙 -譚 -譜 -警 -譬 -譯 -議 -譲 -譴 -護 -譽 -讀 -變 -讓 -讚 -讞 -计 -订 -认 -讥 -讧 -讨 -让 -讪 -讫 -训 -议 -讯 -记 -讲 -讳 -讴 -讶 -讷 -许 -讹 -论 -讼 -讽 -设 -访 -诀 -证 -诃 -评 -诅 -识 -诈 -诉 -诊 -诋 -词 -诏 -译 -试 -诗 -诘 -诙 -诚 -诛 -话 -诞 -诟 -诠 -诡 -询 -诣 -诤 -该 -详 -诧 -诩 -诫 -诬 -语 -误 -诰 -诱 -诲 -说 -诵 -诶 -请 -诸 -诺 -读 -诽 -课 -诿 -谀 -谁 -调 -谄 -谅 -谆 -谈 -谊 -谋 -谌 -谍 -谎 -谏 -谐 -谑 -谒 -谓 -谔 -谕 -谗 -谘 -谙 -谚 -谛 -谜 -谟 -谢 -谣 -谤 -谥 -谦 -谧 -谨 -谩 -谪 -谬 -谭 -谯 -谱 -谲 -谴 -谶 -谷 -豁 -豆 -豇 -豈 -豉 -豊 -豌 -豎 -豐 -豔 -豚 -象 -豢 -豪 -豫 -豬 -豹 -豺 -貂 -貅 -貌 -貓 -貔 -貘 -貝 -貞 -負 -財 -貢 -貧 -貨 -販 -貪 -貫 -責 -貯 -貰 -貳 -貴 -貶 -買 -貸 -費 -貼 -貽 -貿 -賀 -賁 -賂 -賃 -賄 -資 -賈 -賊 -賑 -賓 -賜 -賞 -賠 -賡 -賢 -賣 -賤 -賦 -質 -賬 -賭 -賴 -賺 -購 -賽 -贅 -贈 -贊 -贍 -贏 -贓 -贖 -贛 -贝 -贞 -负 -贡 -财 -责 -贤 -败 -账 -货 -质 -贩 -贪 -贫 -贬 -购 -贮 -贯 -贰 -贱 -贲 -贴 -贵 -贷 -贸 -费 -贺 -贻 -贼 -贾 -贿 -赁 -赂 -赃 -资 -赅 -赈 -赊 -赋 -赌 -赎 -赏 -赐 -赓 -赔 -赖 -赘 -赚 -赛 -赝 -赞 -赠 -赡 -赢 -赣 -赤 -赦 -赧 -赫 -赭 -走 -赳 -赴 -赵 -赶 -起 -趁 -超 -越 -趋 -趕 -趙 -趟 -趣 -趨 -足 -趴 -趵 -趸 -趺 -趾 -跃 -跄 -跆 -跋 -跌 -跎 -跑 -跖 -跚 -跛 -距 -跟 -跡 -跤 -跨 -跩 -跪 -路 -跳 -践 -跷 -跹 -跺 -跻 -踉 -踊 -踌 -踏 -踐 -踝 -踞 -踟 -踢 -踩 -踪 -踮 -踱 -踴 -踵 -踹 -蹂 -蹄 -蹇 -蹈 -蹉 -蹊 -蹋 -蹑 -蹒 -蹙 -蹟 -蹣 -蹤 -蹦 -蹩 -蹬 -蹭 -蹲 -蹴 -蹶 -蹺 -蹼 -蹿 -躁 -躇 -躉 -躊 -躋 -躍 -躏 -躪 -身 -躬 -躯 -躲 -躺 -軀 -車 -軋 -軌 -軍 -軒 -軟 -転 -軸 -軼 -軽 -軾 -較 -載 -輒 -輓 -輔 -輕 -輛 -輝 -輟 -輩 -輪 -輯 -輸 -輻 -輾 -輿 -轄 -轅 -轆 -轉 -轍 -轎 -轟 -车 -轧 -轨 -轩 -转 -轭 -轮 -软 -轰 -轲 -轴 -轶 -轻 -轼 -载 -轿 -较 -辄 -辅 -辆 -辇 -辈 -辉 -辊 -辍 -辐 -辑 -输 -辕 -辖 -辗 -辘 -辙 -辛 -辜 -辞 -辟 -辣 -辦 -辨 -辩 -辫 -辭 -辮 -辯 -辰 -辱 -農 -边 -辺 -辻 -込 -辽 -达 -迁 -迂 -迄 -迅 -过 -迈 -迎 -运 -近 -返 -还 -这 -进 -远 -违 -连 -迟 -迢 -迤 -迥 -迦 -迩 -迪 -迫 -迭 -述 -迴 -迷 -迸 -迹 -迺 -追 -退 -送 -适 -逃 -逅 -逆 -选 -逊 -逍 -透 -逐 -递 -途 -逕 -逗 -這 -通 -逛 -逝 -逞 -速 -造 -逢 -連 -逮 -週 -進 -逵 -逶 -逸 -逻 -逼 -逾 -遁 -遂 -遅 -遇 -遊 -運 -遍 -過 -遏 -遐 -遑 -遒 -道 -達 -違 -遗 -遙 -遛 -遜 -遞 -遠 -遢 -遣 -遥 -遨 -適 -遭 -遮 -遲 -遴 -遵 -遶 -遷 -選 -遺 -遼 -遽 -避 -邀 -邁 -邂 -邃 -還 -邇 -邈 -邊 -邋 -邏 -邑 -邓 -邕 -邛 -邝 -邢 -那 -邦 -邨 -邪 -邬 -邮 -邯 -邰 -邱 -邳 -邵 -邸 -邹 -邺 -邻 -郁 -郅 -郊 -郎 -郑 -郜 -郝 -郡 -郢 -郤 -郦 -郧 -部 -郫 -郭 -郴 -郵 -郷 -郸 -都 -鄂 -鄉 -鄒 -鄔 -鄙 -鄞 -鄢 -鄧 -鄭 -鄰 -鄱 -鄲 -鄺 -酉 -酊 -酋 -酌 -配 -酐 -酒 -酗 -酚 -酝 -酢 -酣 -酥 -酩 -酪 -酬 -酮 -酯 -酰 -酱 -酵 -酶 -酷 -酸 -酿 -醃 -醇 -醉 -醋 -醍 -醐 -醒 -醚 -醛 -醜 -醞 -醣 -醪 -醫 -醬 -醮 -醯 -醴 -醺 -釀 -釁 -采 -釉 -释 -釋 -里 -重 -野 -量 -釐 -金 -釗 -釘 -釜 -針 -釣 -釦 -釧 -釵 -鈀 -鈉 -鈍 -鈎 -鈔 -鈕 -鈞 -鈣 -鈦 -鈪 -鈴 -鈺 -鈾 -鉀 -鉄 -鉅 -鉉 -鉑 -鉗 -鉚 -鉛 -鉤 -鉴 -鉻 -銀 -銃 -銅 -銑 -銓 -銖 -銘 -銜 -銬 -銭 -銮 -銳 -銷 -銹 -鋁 -鋅 -鋒 -鋤 -鋪 -鋰 -鋸 -鋼 -錄 -錐 -錘 -錚 -錠 -錢 -錦 -錨 -錫 -錮 -錯 -録 -錳 -錶 -鍊 -鍋 -鍍 -鍛 -鍥 -鍰 -鍵 -鍺 -鍾 -鎂 -鎊 -鎌 -鎏 -鎔 -鎖 -鎗 -鎚 -鎧 -鎬 -鎮 -鎳 -鏈 -鏖 -鏗 -鏘 -鏞 -鏟 -鏡 -鏢 -鏤 -鏽 -鐘 -鐮 -鐲 -鐳 -鐵 -鐸 -鐺 -鑄 -鑊 -鑑 -鑒 -鑣 -鑫 -鑰 -鑲 -鑼 -鑽 -鑾 -鑿 -针 -钉 -钊 -钎 -钏 -钒 -钓 -钗 -钙 -钛 -钜 -钝 -钞 -钟 -钠 -钡 -钢 -钣 -钤 -钥 -钦 -钧 -钨 -钩 -钮 -钯 -钰 -钱 -钳 -钴 -钵 -钺 -钻 -钼 -钾 -钿 -铀 -铁 -铂 -铃 -铄 -铅 -铆 -铉 -铎 -铐 -铛 -铜 -铝 -铠 -铡 -铢 -铣 -铤 -铨 -铩 -铬 -铭 -铮 -铰 -铲 -铵 -银 -铸 -铺 -链 -铿 -销 -锁 -锂 -锄 -锅 -锆 -锈 -锉 -锋 -锌 -锏 -锐 -锑 -错 -锚 -锟 -锡 -锢 -锣 -锤 -锥 -锦 -锭 -键 -锯 -锰 -锲 -锵 -锹 -锺 -锻 -镀 -镁 -镂 -镇 -镉 -镌 -镍 -镐 -镑 -镕 -镖 -镗 -镛 -镜 -镣 -镭 -镯 -镰 -镳 -镶 -長 -长 -門 -閃 -閉 -開 -閎 -閏 -閑 -閒 -間 -閔 -閘 -閡 -関 -閣 -閥 -閨 -閩 -閱 -閲 -閹 -閻 -閾 -闆 -闇 -闊 -闌 -闍 -闔 -闕 -闖 -闘 -關 -闡 -闢 -门 -闪 -闫 -闭 -问 -闯 -闰 -闲 -间 -闵 -闷 -闸 -闹 -闺 -闻 -闽 -闾 -阀 -阁 -阂 -阅 -阆 -阇 -阈 -阉 -阎 -阐 -阑 -阔 -阕 -阖 -阙 -阚 -阜 -队 -阡 -阪 -阮 -阱 -防 -阳 -阴 -阵 -阶 -阻 -阿 -陀 -陂 -附 -际 -陆 -陇 -陈 -陋 -陌 -降 -限 -陕 -陛 -陝 -陞 -陟 -陡 -院 -陣 -除 -陨 -险 -陪 -陰 -陲 -陳 -陵 -陶 -陷 -陸 -険 -陽 -隅 -隆 -隈 -隊 -隋 -隍 -階 -随 -隐 -隔 -隕 -隘 -隙 -際 -障 -隠 -隣 -隧 -隨 -險 -隱 -隴 -隶 -隸 -隻 -隼 -隽 -难 -雀 -雁 -雄 -雅 -集 -雇 -雉 -雋 -雌 -雍 -雎 -雏 -雑 -雒 -雕 -雖 -雙 -雛 -雜 -雞 -離 -難 -雨 -雪 -雯 -雰 -雲 -雳 -零 -雷 -雹 -電 -雾 -需 -霁 -霄 -霆 -震 -霈 -霉 -霊 -霍 
-霎 -霏 -霑 -霓 -霖 -霜 -霞 -霧 -霭 -霰 -露 -霸 -霹 -霽 -霾 -靂 -靄 -靈 -青 -靓 -靖 -静 -靚 -靛 -靜 -非 -靠 -靡 -面 -靥 -靦 -革 -靳 -靴 -靶 -靼 -鞅 -鞋 -鞍 -鞏 -鞑 -鞘 -鞠 -鞣 -鞦 -鞭 -韆 -韋 -韌 -韓 -韜 -韦 -韧 -韩 -韬 -韭 -音 -韵 -韶 -韻 -響 -頁 -頂 -頃 -項 -順 -須 -頌 -預 -頑 -頒 -頓 -頗 -領 -頜 -頡 -頤 -頫 -頭 -頰 -頷 -頸 -頹 -頻 -頼 -顆 -題 -額 -顎 -顏 -顔 -願 -顛 -類 -顧 -顫 -顯 -顱 -顴 -页 -顶 -顷 -项 -顺 -须 -顼 -顽 -顾 -顿 -颁 -颂 -预 -颅 -领 -颇 -颈 -颉 -颊 -颌 -颍 -颐 -频 -颓 -颔 -颖 -颗 -题 -颚 -颛 -颜 -额 -颞 -颠 -颡 -颢 -颤 -颦 -颧 -風 -颯 -颱 -颳 -颶 -颼 -飄 -飆 -风 -飒 -飓 -飕 -飘 -飙 -飚 -飛 -飞 -食 -飢 -飨 -飩 -飪 -飯 -飲 -飼 -飽 -飾 -餃 -餅 -餉 -養 -餌 -餐 -餒 -餓 -餘 -餚 -餛 -餞 -餡 -館 -餮 -餵 -餾 -饅 -饈 -饋 -饌 -饍 -饑 -饒 -饕 -饗 -饞 -饥 -饨 -饪 -饬 -饭 -饮 -饯 -饰 -饱 -饲 -饴 -饵 -饶 -饷 -饺 -饼 -饽 -饿 -馀 -馁 -馄 -馅 -馆 -馈 -馋 -馍 -馏 -馒 -馔 -首 -馗 -香 -馥 -馨 -馬 -馭 -馮 -馳 -馴 -駁 -駄 -駅 -駆 -駐 -駒 -駕 -駛 -駝 -駭 -駱 -駿 -騁 -騎 -騏 -験 -騙 -騨 -騰 -騷 -驀 -驅 -驊 -驍 -驒 -驕 -驗 -驚 -驛 -驟 -驢 -驥 -马 -驭 -驮 -驯 -驰 -驱 -驳 -驴 -驶 -驷 -驸 -驹 -驻 -驼 -驾 -驿 -骁 -骂 -骄 -骅 -骆 -骇 -骈 -骊 -骋 -验 -骏 -骐 -骑 -骗 -骚 -骛 -骜 -骞 -骠 -骡 -骤 -骥 -骧 -骨 -骯 -骰 -骶 -骷 -骸 -骼 -髂 -髅 -髋 -髏 -髒 -髓 -體 -髖 -高 -髦 -髪 -髮 -髯 -髻 -鬃 -鬆 -鬍 -鬓 -鬚 -鬟 -鬢 -鬣 -鬥 -鬧 -鬱 -鬼 -魁 -魂 -魄 -魅 -魇 -魍 -魏 -魔 -魘 -魚 -魯 -魷 -鮑 -鮨 -鮪 -鮭 -鮮 -鯉 -鯊 -鯖 -鯛 -鯨 -鯰 -鯽 -鰍 -鰓 -鰭 -鰲 -鰻 -鰾 -鱈 -鱉 -鱔 -鱗 -鱷 -鱸 -鱼 -鱿 -鲁 -鲈 -鲍 -鲑 -鲛 -鲜 -鲟 -鲢 -鲤 -鲨 -鲫 -鲱 -鲲 -鲶 -鲷 -鲸 -鳃 -鳄 -鳅 -鳌 -鳍 -鳕 -鳖 -鳗 -鳝 -鳞 -鳥 -鳩 -鳳 -鳴 -鳶 -鴉 -鴕 -鴛 -鴦 -鴨 -鴻 -鴿 -鵑 -鵜 -鵝 -鵡 -鵬 -鵰 -鵲 -鶘 -鶩 -鶯 -鶴 -鷗 -鷲 -鷹 -鷺 -鸚 -鸞 -鸟 -鸠 -鸡 -鸢 -鸣 -鸥 -鸦 -鸨 -鸪 -鸭 -鸯 -鸳 -鸵 -鸽 -鸾 -鸿 -鹂 -鹃 -鹄 -鹅 -鹈 -鹉 -鹊 -鹌 -鹏 -鹑 -鹕 -鹘 -鹜 -鹞 -鹤 -鹦 -鹧 -鹫 -鹭 -鹰 -鹳 -鹵 -鹹 -鹼 -鹽 -鹿 -麂 -麋 -麒 -麓 -麗 -麝 -麟 -麥 -麦 -麩 -麴 -麵 -麸 -麺 -麻 -麼 -麽 -麾 -黃 -黄 -黍 -黎 -黏 -黑 -黒 -黔 -默 -黛 -黜 -黝 -點 -黠 -黨 -黯 -黴 -鼋 -鼎 -鼐 -鼓 -鼠 -鼬 -鼹 -鼻 -鼾 -齁 -齊 -齋 -齐 -齒 -齡 -齢 -齣 -齦 -齿 -龄 -龅 -龈 -龊 -龋 -龌 -龍 -龐 -龔 -龕 -龙 -龚 -龛 -龜 -龟 -︰ -︱ -︶ -︿ -﹁ -﹂ -﹍ -﹏ -﹐ -﹑ -﹒ -﹔ -﹕ -﹖ -﹗ -﹙ -﹚ -﹝ -﹞ -﹡ -﹣ -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -: -; -< -= -> -? -@ -[ -\ -] -^ -_ -` -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -{ -| -} -~ -。 -「 -」 -、 -・ -ッ -ー -イ -ク -シ -ス -ト -ノ -フ -ラ -ル -ン -゙ -゚ - ̄ -¥ -👍 -🔥 -😂 -😎 -... 
-yam -10 -2017 -12 -11 -2016 -20 -30 -15 -06 -lofter -##s -2015 -by -16 -14 -18 -13 -24 -17 -2014 -21 -##0 -22 -19 -25 -23 -com -100 -00 -05 -2013 -##a -03 -09 -08 -28 -##2 -50 -01 -04 -##1 -27 -02 -2012 -##3 -26 -##e -07 -##8 -##5 -##6 -##4 -##9 -##7 -29 -2011 -40 -##t -2010 -##o -##d -##i -2009 -##n -app -www -the -##m -31 -##c -##l -##y -##r -##g -2008 -60 -http -200 -qq -##p -80 -##f -google -pixnet -90 -cookies -tripadvisor -500 -##er -##k -35 -##h -facebook -2007 -2000 -70 -##b -of -##x -##u -45 -300 -iphone -32 -1000 -2006 -48 -ip -36 -in -38 -3d -##w -##ing -55 -ctrip -##on -##v -33 -##の -to -34 -400 -id -2005 -it -37 -windows -llc -top -99 -42 -39 -000 -led -at -##an -41 -51 -52 -46 -49 -43 -53 -44 -##z -android -58 -and -59 -2004 -56 -vr -##か -5000 -2003 -47 -blogthis -twitter -54 -##le -150 -ok -2018 -57 -75 -cn -no -ios -##in -##mm -##00 -800 -on -te -3000 -65 -2001 -360 -95 -ig -lv -120 -##ng -##を -##us -##に -pc -てす -── -600 -##te -85 -2002 -88 -##ed -html -ncc -wifi -email -64 -blog -is -##10 -##て -mail -online -##al -dvd -##ic -studio -##は -##℃ -##ia -##と -line -vip -72 -##q -98 -##ce -##en -for -##is -##ra -##es -##j -usb -net -cp -1999 -asia -4g -##cm -diy -new -3c -##お -ta -66 -language -vs -apple -tw -86 -web -##ne -ipad -62 -you -##re -101 -68 -##tion -ps -de -bt -pony -atm -##2017 -1998 -67 -##ch -ceo -##or -go -##na -av -pro -cafe -96 -pinterest -97 -63 -pixstyleme3c -##ta -more -said -##2016 -1997 -mp3 -700 -##ll -nba -jun -##20 -92 -tv -1995 -pm -61 -76 -nbsp -250 -##ie -linux -##ma -cd -110 -hd -##17 -78 -##ion -77 -6000 -am -##th -##st -94 -##se -##et -69 -180 -gdp -my -105 -81 -abc -89 -flash -79 -one -93 -1990 -1996 -##ck -gps -##も -##ly -web885 -106 -2020 -91 -##ge -4000 -1500 -xd -boss -isbn -1994 -org -##ry -me -love -##11 -0fork -73 -##12 -3g -##ter -##ar -71 -82 -##la -hotel -130 -1970 -pk -83 -87 -140 -ie -##os -##30 -##el -74 -##50 -seo -cpu -##ml -p2p -84 -may -##る -sun -tue -internet -cc -posted -youtube -##at -##ン -##man -ii -##ル -##15 -abs -nt -pdf -yahoo -ago -1980 -##it -news -mac -104 -##てす -##me -##り -java -1992 -spa -##de -##nt -hk -all -plus -la -1993 -##mb -##16 -##ve -west -##da -160 -air -##い -##ps -から -##to -1989 -logo -htc -php -https -fi -momo -##son -sat -##ke -##80 -ebd -suv -wi -day -apk -##88 -##um -mv -galaxy -wiki -or -brake -##ス -1200 -する -this -1991 -mon -##こ -❤2017 -po -##ない -javascript -life -home -june -##ss -system -900 -##ー -##0 -pp -1988 -world -fb -4k -br -##as -ic -ai -leonardo -safari -##60 -live -free -xx -wed -win7 -kiehl -##co -lg -o2o -##go -us -235 -1949 -mm -しい -vfm -kanye -##90 -##2015 -##id -jr -##ey -123 -rss -##sa -##ro -##am -##no -thu -fri -350 -##sh -##ki -103 -comments -name -##のて -##pe -##ine -max -1987 -8000 -uber -##mi -##ton -wordpress -office -1986 -1985 -##ment -107 -bd -win10 -##ld -##li -gmail -bb -dior -##rs -##ri -##rd -##ます -up -cad -##® -dr -して -read -##21 -をお -##io -##99 -url -1984 -pvc -paypal -show -policy -##40 -##ty -##18 -with -##★ -##01 -txt -102 -##ba -dna -from -post -mini -ar -taiwan -john -##ga -privacy -agoda -##13 -##ny -word -##24 -##22 -##by -##ur -##hz -1982 -##ang -265 -cookie -netscape -108 -##ka -##~ -##ad -house -share -note -ibm -code -hello -nike -sim -survey -##016 -1979 -1950 -wikia -##32 -##017 -5g -cbc -##tor -##kg -1983 -##rt -##14 -campaign -store -2500 -os -##ct -##ts -##° -170 -api -##ns -365 -excel -##な -##ao -##ら -##し -~~ -##nd -university -163 -には -518 -##70 -##ya -##il -##25 -pierre -ipo -0020 -897 -##23 -hotels -##ian -のお -125 -years -6606 -##ers -##26 -high 
-##day -time -##ay -bug -##line -##く -##す -##be -xp -talk2yam -yamservice -10000 -coco -##dy -sony -##ies -1978 -microsoft -david -people -##ha -1960 -instagram -intel -その -##ot -iso -1981 -##va -115 -##mo -##land -xxx -man -co -ltxsw -##ation -baby -220 -##pa -##ol -1945 -7000 -tag -450 -##ue -msn -##31 -oppo -##ト -##ca -control -##om -st -chrome -##ure -##ん -be -##き -lol -##19 -した -##bo -240 -lady -##100 -##way -##から -4600 -##ko -##do -##un -4s -corporation -168 -##ni -herme -##28 -cp -978 -##up -##06 -ui -##ds -ppt -admin -three -します -bbc -re -128 -##48 -ca -##015 -##35 -hp -##ee -tpp -##た -##ive -×× -root -##cc -##ました -##ble -##ity -adobe -park -114 -et -oled -city -##ex -##ler -##ap -china -##book -20000 -view -##ice -global -##km -your -hong -##mg -out -##ms -ng -ebay -##29 -menu -ubuntu -##cy -rom -##view -open -ktv -do -server -##lo -if -english -##ね -##5 -##oo -1600 -##02 -step1 -kong -club -135 -july -inc -1976 -mr -hi -##net -touch -##ls -##ii -michael -lcd -##05 -##33 -phone -james -step2 -1300 -ios9 -##box -dc -##2 -##ley -samsung -111 -280 -pokemon -css -##ent -##les -いいえ -##1 -s8 -atom -play -bmw -##said -sa -etf -ctrl -♥yoyo♥ -##55 -2025 -##2014 -##66 -adidas -amazon -1958 -##ber -##ner -visa -##77 -##der -1800 -connectivity -##hi -firefox -109 -118 -hr -so -style -mark -pop -ol -skip -1975 -as -##27 -##ir -##61 -190 -mba -##う -##ai -le -##ver -1900 -cafe2017 -lte -super -113 -129 -##ron -amd -like -##☆ -are -##ster -we -##sk -paul -data -international -##ft -longchamp -ssd -good -##ート -##ti -reply -##my -↓↓↓ -apr -star -##ker -source -136 -js -112 -get -force -photo -##one -126 -##2013 -##ow -link -bbs -1972 -goods -##lin -python -119 -##ip -game -##ics -##ません -blue -##● -520 -##45 -page -itunes -##03 -1955 -260 -1968 -gt -gif -618 -##ff -##47 -group -くたさい -about -bar -ganji -##nce -music -lee -not -1977 -1971 -1973 -##per -an -faq -comment -##って -days -##ock -116 -##bs -1974 -1969 -v1 -player -1956 -xbox -sql -fm -f1 -139 -##ah -210 -##lv -##mp -##000 -melody -1957 -##3 -550 -17life -199 -1966 -xml -market -##au -##71 -999 -##04 -what -gl -##95 -##age -tips -##68 -book -##ting -mysql -can -1959 -230 -##ung -wonderland -watch -10℃ -##ction -9000 -mar -mobile -1946 -1962 -article -##db -part -▲top -party -って -1967 -1964 -1948 -##07 -##ore -##op -この -dj -##78 -##38 -010 -main -225 -1965 -##ong -art -320 -ad -134 -020 -##73 -117 -pm2 -japan -228 -##08 -ts -1963 -##ica -der -sm -##36 -2019 -##wa -ct -##7 -##や -##64 -1937 -homemesh -search -##85 -##れは -##tv -##di -macbook -##9 -##くたさい -service -##♥ -type -った -750 -##ier -##si -##75 -##います -##ok -best -##ット -goris -lock -##った -cf -3m -big -##ut -ftp -carol -##vi -10 -1961 -happy -sd -##ac -122 -anti -pe -cnn -iii -1920 -138 -##ラ -1940 -esp -jan -tags -##98 -##51 -august -vol -##86 -154 -##™ -##fs -##れ -##sion -design -ac -##ム -press -jordan -ppp -that -key -check -##6 -##tt -##㎡ -1080p -##lt -power -##42 -1952 -##bc -vivi -##ック -he -133 -121 -jpg -##rry -201 -175 -3500 -1947 -nb -##ted -##rn -しています -1954 -usd -##t00 -master -##ンク -001 -model -##58 -al -##09 -1953 -##34 -ram -goo -ても -##ui -127 -1930 -red -##ary -rpg -item -##pm -##41 -270 -##za -project -##2012 -hot -td -blogabstract -##ger -##62 -650 -##44 -gr2 -##します -##m -black -electronic -nfc -year -asus -また -html5 -cindy -##hd -m3 -132 -esc -##od -booking -##53 -fed -tvb -##81 -##ina -mit -165 -##いる -chan -192 -distribution -next -になる -peter -bios -steam -cm -1941 -にも -pk10 -##ix -##65 -##91 -dec -nasa -##ana -icecat -00z -b1 -will -##46 -li -se -##ji -##み -##ard -oct 
-##ain -jp -##ze -##bi -cio -##56 -smart -h5 -##39 -##port -curve -vpn -##nm -##dia -utc -##あり -12345678910 -##52 -rmvb -chanel -a4 -miss -##and -##im -media -who -##63 -she -girl -5s -124 -vera -##して -class -vivo -king -##フ -##ei -national -ab -1951 -5cm -888 -145 -ipod -ap -1100 -5mm -211 -ms -2756 -##69 -mp4 -msci -##po -##89 -131 -mg -index -380 -##bit -##out -##zz -##97 -##67 -158 -apec -##8 -photoshop -opec -¥799 -ては -##96 -##tes -##ast -2g -○○ -##ール -¥2899 -##ling -##よ -##ory -1938 -##ical -kitty -content -##43 -step3 -##cn -win8 -155 -vc -1400 -iphone7 -robert -##した -tcl -137 -beauty -##87 -en -dollars -##ys -##oc -step -pay -yy -a1 -##2011 -##lly -##ks -##♪ -1939 -188 -download -1944 -sep -exe -ph -います -school -gb -center -pr -street -##board -uv -##37 -##lan -winrar -##que -##ua -##com -1942 -1936 -480 -gpu -##4 -ettoday -fu -tom -##54 -##ren -##via -149 -##72 -b2b -144 -##79 -##tch -rose -arm -mb -##49 -##ial -##nn -nvidia -step4 -mvp -00㎡ -york -156 -##イ -how -cpi -591 -2765 -gov -kg -joe -##xx -mandy -pa -##ser -copyright -fashion -1935 -don -##け -ecu -##ist -##art -erp -wap -have -##lm -talk -##ek -##ning -##if -ch -##ite -video -1943 -cs -san -iot -look -##84 -##2010 -##ku -october -##ux -trump -##hs -##ide -box -141 -first -##ins -april -##ight -##83 -185 -angel -protected -aa -151 -162 -x1 -m2 -##fe -##× -##ho -size -143 -min -ofo -fun -gomaji -ex -hdmi -food -dns -march -chris -kevin -##のか -##lla -##pp -##ec -ag -ems -6s -720p -##rm -##ham -off -##92 -asp -team -fandom -ed -299 -▌♥ -##ell -info -されています -##82 -sina -4066 -161 -##able -##ctor -330 -399 -315 -dll -rights -ltd -idc -jul -3kg -1927 -142 -ma -surface -##76 -##ク -~~~ -304 -mall -eps -146 -green -##59 -map -space -donald -v2 -sodu -##light -1931 -148 -1700 -まて -310 -reserved -htm -##han -##57 -2d -178 -mod -##ise -##tions -152 -ti -##shi -doc -1933 -icp -055 -wang -##ram -shopping -aug -##pi -##well -now -wam -b2 -からお -##hu -236 -1928 -##gb -266 -f2 -##93 -153 -mix -##ef -##uan -bwl -##plus -##res -core -##ess -tea -5℃ -hktvmall -nhk -##ate -list -##ese -301 -feb -4m -inn -ての -nov -159 -12345 -daniel -##ci -pass -##bet -##nk -coffee -202 -ssl -airbnb -##ute -fbi -woshipm -skype -ea -cg -sp -##fc -##www -yes -edge -alt -007 -##94 -fpga -##ght -##gs -iso9001 -さい -##ile -##wood -##uo -image -lin -icon -american -##em -1932 -set -says -##king -##tive -blogger -##74 -なと -256 -147 -##ox -##zy -##red -##ium -##lf -nokia -claire -##リ -##ding -november -lohas -##500 -##tic -##マ -##cs -##ある -##che -##ire -##gy -##ult -db -january -win -##カ -166 -road -ptt -##ま -##つ -198 -##fa -##mer -anna -pchome -はい -udn -ef -420 -##time -##tte -2030 -##ア -g20 -white -かかります -1929 -308 -garden -eleven -di -##おります -chen -309b -777 -172 -young -cosplay -ちてない -4500 -bat -##123 -##tra -##ては -kindle -npc -steve -etc -##ern -##| -call -xperia -ces -travel -sk -s7 -##ous -1934 -##int -みいたたけます -183 -edu -file -cho -qr -##car -##our -186 -##ant -##d -eric -1914 -rends -##jo -##する -mastercard -##2000 -kb -##min -290 -##ino -vista -##ris -##ud -jack -2400 -##set -169 -pos -1912 -##her -##ou -taipei -しく -205 -beta -##ませんか -232 -##fi -express -255 -body -##ill -aphojoy -user -december -meiki -##ick -tweet -richard -##av -##ᆫ -iphone6 -##dd -ちてすか -views -##mark -321 -pd -##00 -times -##▲ -level -##ash -10g -point -5l -##ome -208 -koreanmall -##ak -george -q2 -206 -wma -tcp -##200 -スタッフ -full -mlb -##lle -##watch -tm -run -179 -911 -smith -business -##und -1919 -color -##tal -222 -171 -##less -moon -4399 -##rl -update -pcb -shop -499 -157 -little -なし 
-end -##mhz -van -dsp -easy -660 -##house -##key -history -##o -oh -##001 -##hy -##web -oem -let -was -##2009 -##gg -review -##wan -182 -##°c -203 -uc -title -##val -united -233 -2021 -##ons -doi -trivago -overdope -sbs -##ance -##ち -grand -special -573032185 -imf -216 -wx17house -##so -##ーム -audi -##he -london -william -##rp -##ake -science -beach -cfa -amp -ps4 -880 -##800 -##link -##hp -crm -ferragamo -bell -make -##eng -195 -under -zh -photos -2300 -##style -##ント -via -176 -da -##gi -company -i7 -##ray -thomas -370 -ufo -i5 -##max -plc -ben -back -research -8g -173 -mike -##pc -##ッフ -september -189 -##ace -vps -february -167 -pantos -wp -lisa -1921 -★★ -jquery -night -long -offer -##berg -##news -1911 -##いて -ray -fks -wto -せます -over -164 -340 -##all -##rus -1924 -##888 -##works -blogtitle -loftpermalink -##→ -187 -martin -test -ling -km -##め -15000 -fda -v3 -##ja -##ロ -wedding -かある -outlet -family -##ea -をこ -##top -story -##ness -salvatore -##lu -204 -swift -215 -room -している -oracle -##ul -1925 -sam -b2c -week -pi -rock -##のは -##a -##けと -##ean -##300 -##gle -cctv -after -chinese -##back -powered -x2 -##tan -1918 -##nes -##イン -canon -only -181 -##zi -##las -say -##oe -184 -##sd -221 -##bot -##world -##zo -sky -made -top100 -just -1926 -pmi -802 -234 -gap -##vr -177 -les -174 -▲topoct -ball -vogue -vi -ing -ofweek -cos -##list -##ort -▲topmay -##なら -##lon -として -last -##tc -##of -##bus -##gen -real -eva -##コ -a3 -nas -##lie -##ria -##coin -##bt -▲topapr -his -212 -cat -nata -vive -health -⋯⋯ -drive -sir -▲topmar -du -cup -##カー -##ook -##よう -##sy -alex -msg -tour -しました -3ce -##word -193 -ebooks -r8 -block -318 -##より -2200 -nice -pvp -207 -months -1905 -rewards -##ther -1917 -0800 -##xi -##チ -##sc -micro -850 -gg -blogfp -op -1922 -daily -m1 -264 -true -##bb -ml -##tar -##のお -##ky -anthony -196 -253 -##yo -state -218 -##ara -##aa -##rc -##tz -##ston -より -gear -##eo -##ade -ge -see -1923 -##win -##ura -ss -heart -##den -##ita -down -##sm -el -png -2100 -610 -rakuten -whatsapp -bay -dream -add -##use -680 -311 -pad -gucci -mpv -##ode -##fo -island -▲topjun -##▼ -223 -jason -214 -chicago -##❤ -しの -##hone -io -##れる -##ことか -sogo -be2 -##ology -990 -cloud -vcd -##con -2~3 -##ford -##joy -##kb -##こさいます -##rade -but -##ach -docker -##ful -rfid -ul -##ase -hit -ford -##star -580 -##○ -11 -a2 -sdk -reading -edited -##are -cmos -##mc -238 -siri -light -##ella -##ため -bloomberg -##read -pizza -##ison -jimmy -##vm -college -node -journal -ba -18k -##play -245 -##cer -20 -magic -##yu -191 -jump -288 -tt -##ings -asr -##lia -3200 -step5 -network -##cd -mc -いします -1234 -pixstyleme -273 -##600 -2800 -money -★★★★★ -1280 -12 -430 -bl -みの -act -##tus -tokyo -##rial -##life -emba -##ae -saas -tcs -##rk -##wang -summer -##sp -ko -##ving -390 -premium -##その -netflix -##ヒ -uk -mt -##lton -right -frank -two -209 -える -##ple -##cal -021 -##んな -##sen -##ville -hold -nexus -dd -##ius -てお -##mah -##なく -tila -zero -820 -ce -##tin -resort -##ws -charles -old -p10 -5d -report -##360 -##ru -##には -bus -vans -lt -##est -pv -##レ -links -rebecca -##ツ -##dm -azure -##365 -きな -limited -bit -4gb -##mon -1910 -moto -##eam -213 -1913 -var -eos -なとの -226 -blogspot -された -699 -e3 -dos -dm -fc -##ments -##ik -##kw -boy -##bin -##ata -960 -er -##せ -219 -##vin -##tu -##ula -194 -##∥ -station -##ろ -##ature -835 -files -zara -hdr -top10 -nature -950 -magazine -s6 -marriott -##シ -avira -case -##っと -tab -##ran -tony -##home -oculus -im -##ral -jean -saint -cry -307 -rosie -##force -##ini -ice -##bert -のある -##nder -##mber -pet -2600 -##◆ -plurk 
-▲topdec -##sis -00kg -▲topnov -720 -##ence -tim -##ω -##nc -##ても -##name -log -ips -great -ikea -malaysia -unix -##イト -3600 -##ncy -##nie -12000 -akb48 -##ye -##oid -404 -##chi -##いた -oa -xuehai -##1000 -##orm -##rf -275 -さん -##ware -##リー -980 -ho -##pro -text -##era -560 -bob -227 -##ub -##2008 -8891 -scp -avi -##zen -2022 -mi -wu -museum -qvod -apache -lake -jcb -▲topaug -★★★ -ni -##hr -hill -302 -ne -weibo -490 -ruby -##ーシ -##ヶ -##row -4d -▲topjul -iv -##ish -github -306 -mate -312 -##スト -##lot -##ane -andrew -のハイト -##tina -t1 -rf -ed2k -##vel -##900 -way -final -りの -ns -5a -705 -197 -##メ -sweet -bytes -##ene -▲topjan -231 -##cker -##2007 -##px -100g -topapp -229 -helpapp -rs -low -14k -g4g -care -630 -ldquo -あり -##fork -leave -rm -edition -##gan -##zon -##qq -▲topsep -##google -##ism -gold -224 -explorer -##zer -toyota -category -select -visual -##labels -restaurant -##md -posts -s1 -##ico -もっと -angelababy -123456 -217 -sports -s3 -mbc -1915 -してくたさい -shell -x86 -candy -##new -kbs -face -xl -470 -##here -4a -swissinfo -v8 -▲topfeb -dram -##ual -##vice -3a -##wer -sport -q1 -ios10 -public -int -card -##c -ep -au -rt -##れた -1080 -bill -##mll -kim -30 -460 -wan -##uk -##ミ -x3 -298 -0t -scott -##ming -239 -e5 -##3d -h7n9 -worldcat -brown -##あります -##vo -##led -##580 -##ax -249 -410 -##ert -paris -##~6 -polo -925 -##lr -599 -##ナ -capital -##hing -bank -cv -1g -##chat -##s -##たい -adc -##ule -2m -##e -digital -hotmail -268 -##pad -870 -bbq -quot -##ring -before -wali -##まて -mcu -2k -2b -という -costco -316 -north -333 -switch -##city -##p -philips -##mann -management -panasonic -##cl -##vd -##ping -##rge -alice -##lk -##ましょう -css3 -##ney -vision -alpha -##ular -##400 -##tter -lz -にお -##ありません -mode -gre -1916 -pci -##tm -237 -1~2 -##yan -##そ -について -##let -##キ -work -war -coach -ah -mary -##ᅵ -huang -##pt -a8 -pt -follow -##berry -1895 -##ew -a5 -ghost -##ション -##wn -##og -south -##code -girls -##rid -action -villa -git -r11 -table -games -##cket -error -##anonymoussaid -##ag -here -##ame -##gc -qa -##■ -##lis -gmp -##gin -vmalife -##cher -yu -wedding -##tis -demo -dragon -530 -soho -social -bye -##rant -river -orz -acer -325 -##↑ -##ース -##ats -261 -del -##ven -440 -ups -##ように -##ター -305 -value -macd -yougou -##dn -661 -##ano -ll -##urt -##rent -continue -script -##wen -##ect -paper -263 -319 -shift -##chel -##フト -##cat -258 -x5 -fox -243 -##さん -car -aaa -##blog -loading -##yn -##tp -kuso -799 -si -sns -イカせるテンマ -ヒンクテンマ3 -rmb -vdc -forest -central -prime -help -ultra -##rmb -##ような -241 -square -688 -##しい -のないフロクに -##field -##reen -##ors -##ju -c1 -start -510 -##air -##map -cdn -##wo -cba -stephen -m8 -100km -##get -opera -##base -##ood -vsa -com™ -##aw -##ail -251 -なのて -count -t2 -##ᅡ -##een -2700 -hop -##gp -vsc -tree -##eg -##ose -816 -285 -##ories -##shop -alphago -v4 -1909 -simon -##ᆼ -fluke62max -zip -スホンサー -##sta -louis -cr -bas -##~10 -bc -##yer -hadoop -##ube -##wi -1906 -0755 -hola -##low -place -centre -5v -d3 -##fer -252 -##750 -##media -281 -540 -0l -exchange -262 -series -##ハー -##san -eb -##bank -##k -q3 -##nge -##mail -take -##lp -259 -1888 -client -east -cache -event -vincent -##ールを -きを -##nse -sui -855 -adchoice -##и -##stry -##なたの -246 -##zone -ga -apps -sea -##ab -248 -cisco -##タ -##rner -kymco -##care -dha -##pu -##yi -minkoff -royal -p1 -への -annie -269 -collection -kpi -playstation -257 -になります -866 -bh -##bar -queen -505 -radio -1904 -andy -armani -##xy -manager -iherb -##ery -##share -spring -raid -johnson -1908 -##ob -volvo -hall -##ball -v6 -our -taylor -##hk -bi -242 -##cp 
-kate -bo -water -technology -##rie -サイトは -277 -##ona -##sl -hpv -303 -gtx -hip -rdquo -jayz -stone -##lex -##rum -namespace -##やり -620 -##ale -##atic -des -##erson -##ql -##ves -##type -enter -##この -##てきます -d2 -##168 -##mix -##bian -との -a9 -jj -ky -##lc -access -movie -##hc -リストに -tower -##ration -##mit -ます -##nch -ua -tel -prefix -##o2 -1907 -##point -1901 -ott -~10 -##http -##ury -baidu -##ink -member -##logy -bigbang -nownews -##js -##shot -##tb -##こと -247 -eba -##tics -##lus -ける -v5 -spark -##ama -there -##ions -god -##lls -##down -hiv -##ress -burberry -day2 -##kv -◆◆ -jeff -related -film -edit -joseph -283 -##ark -cx -32gb -order -g9 -30000 -##ans -##tty -s5 -##bee -かあります -thread -xr -buy -sh -005 -land -spotify -mx -##ari -276 -##verse -×email -sf -why -##ことて -244 -7headlines -nego -sunny -dom -exo -401 -666 -positioning -fit -rgb -##tton -278 -kiss -alexa -adam -lp -みリストを -##g -mp -##ties -##llow -amy -##du -np -002 -institute -271 -##rth -##lar -2345 -590 -##des -sidebar -15 -imax -site -##cky -##kit -##ime -##009 -season -323 -##fun -##ンター -##ひ -gogoro -a7 -pu -lily -fire -twd600 -##ッセーシを -いて -##vis -30ml -##cture -##をお -information -##オ -close -friday -##くれる -yi -nick -てすか -##tta -##tel -6500 -##lock -cbd -economy -254 -かお -267 -tinker -double -375 -8gb -voice -##app -oops -channel -today -985 -##right -raw -xyz -##+ -jim -edm -##cent -7500 -supreme -814 -ds -##its -##asia -dropbox -##てすか -##tti -books -272 -100ml -##tle -##ller -##ken -##more -##boy -sex -309 -##dom -t3 -##ider -##なります -##unch -1903 -810 -feel -5500 -##かった -##put -により -s2 -mo -##gh -men -ka -amoled -div -##tr -##n1 -port -howard -##tags -ken -dnf -##nus -adsense -##а -ide -##へ -buff -thunder -##town -##ique -has -##body -auto -pin -##erry -tee -てした -295 -number -##the -##013 -object -psp -cool -udnbkk -16gb -##mic -miui -##tro -most -r2 -##alk -##nity -1880 -±0 -##いました -428 -s4 -law -version -##oa -n1 -sgs -docomo -##tf -##ack -henry -fc2 -##ded -##sco -##014 -##rite -286 -0mm -linkedin -##ada -##now -wii -##ndy -ucbug -##◎ -sputniknews -legalminer -##ika -##xp -2gb -##bu -q10 -oo -b6 -come -##rman -cheese -ming -maker -##gm -nikon -##fig -ppi -kelly -##ります -jchere -てきます -ted -md -003 -fgo -tech -##tto -dan -soc -##gl -##len -hair -earth -640 -521 -img -##pper -##a1 -##てきる -##ロク -acca -##ition -##ference -suite -##ig -outlook -##mond -##cation -398 -##pr -279 -101vip -358 -##999 -282 -64gb -3800 -345 -airport -##over -284 -##おり -jones -##ith -lab -##su -##いるのて -co2 -town -piece -##llo -no1 -vmware -24h -##qi -focus -reader -##admin -##ora -tb -false -##log -1898 -know -lan -838 -##ces -f4 -##ume -motel -stop -##oper -na -flickr -netcomponents -##af -##─ -pose -williams -local -##ound -##cg -##site -##iko -いお -274 -5m -gsm -con -##ath -1902 -friends -##hip -cell -317 -##rey -780 -cream -##cks -012 -##dp -facebooktwitterpinterestgoogle -sso -324 -shtml -song -swiss -##mw -##キンク -lumia -xdd -string -tiffany -522 -marc -られた -insee -russell -sc -dell -##ations -ok -camera -289 -##vs -##flow -##late -classic -287 -##nter -stay -g1 -mtv -512 -##ever -##lab -##nger -qe -sata -ryan -d1 -50ml -cms -##cing -su -292 -3300 -editor -296 -##nap -security -sunday -association -##ens -##700 -##bra -acg -##かり -sofascore -とは -mkv -##ign -jonathan -gary -build -labels -##oto -tesla -moba -qi -gohappy -general -ajax -1024 -##かる -サイト -society -##test -##urs -wps -fedora -##ich -mozilla -328 -##480 -##dr -usa -urn -##lina -##r -grace -##die -##try -##ader -1250 -##なり -elle -570 -##chen -##ᆯ -price -##ten -uhz -##ough -eq -##hen 
-states -push -session -balance -wow -506 -##cus -##py -when -##ward -##ep -34e -wong -library -prada -##サイト -##cle -running -##ree -313 -ck -date -q4 -##ctive -##ool -##> -mk -##ira -##163 -388 -die -secret -rq -dota -buffet -は1ヶ -e6 -##ez -pan -368 -ha -##card -##cha -2a -##さ -alan -day3 -eye -f3 -##end -france -keep -adi -rna -tvbs -##ala -solo -nova -##え -##tail -##ょう -support -##ries -##なる -##ved -base -copy -iis -fps -##ways -hero -hgih -profile -fish -mu -ssh -entertainment -chang -##wd -click -cake -##ond -pre -##tom -kic -pixel -##ov -##fl -product -6a -##pd -dear -##gate -es -yumi -audio -##² -##sky -echo -bin -where -##ture -329 -##ape -find -sap -isis -##なと -nand -##101 -##load -##ream -band -a6 -525 -never -##post -festival -50cm -##we -555 -guide -314 -zenfone -##ike -335 -gd -forum -jessica -strong -alexander -##ould -software -allen -##ious -program -360° -else -lohasthree -##gar -することかてきます -please -##れます -rc -##ggle -##ric -bim -50000 -##own -eclipse -355 -brian -3ds -##side -061 -361 -##other -##ける -##tech -##ator -485 -engine -##ged -##t -plaza -##fit -cia -ngo -westbrook -shi -tbs -50mm -##みませんか -sci -291 -reuters -##ily -contextlink -##hn -af -##cil -bridge -very -##cel -1890 -cambridge -##ize -15g -##aid -##data -790 -frm -##head -award -butler -##sun -meta -##mar -america -ps3 -puma -pmid -##すか -lc -670 -kitchen -##lic -オーフン5 -きなしソフトサーヒス -そして -day1 -future -★★★★ -##text -##page -##rris -pm1 -##ket -fans -##っています -1001 -christian -bot -kids -trackback -##hai -c3 -display -##hl -n2 -1896 -idea -さんも -##sent -airmail -##ug -##men -pwm -けます -028 -##lution -369 -852 -awards -schemas -354 -asics -wikipedia -font -##tional -##vy -c2 -293 -##れている -##dget -##ein -っている -contact -pepper -スキル -339 -##~5 -294 -##uel -##ument -730 -##hang -みてす -q5 -##sue -rain -##ndi -wei -swatch -##cept -わせ -331 -popular -##ste -##tag -p2 -501 -trc -1899 -##west -##live -justin -honda -ping -messenger -##rap -v9 -543 -##とは -unity -appqq -はすへて -025 -leo -##tone -##テ -##ass -uniqlo -##010 -502 -her -jane -memory -moneydj -##tical -human -12306 -していると -##m2 -coc -miacare -##mn -tmt -##core -vim -kk -##may -fan -target -use -too -338 -435 -2050 -867 -737 -fast -##2c -services -##ope -omega -energy -##わ -pinkoi -1a -##なから -##rain -jackson -##ement -##シャンルの -374 -366 -そんな -p9 -rd -##ᆨ -1111 -##tier -##vic -zone -##│ -385 -690 -dl -isofix -cpa -m4 -322 -kimi -めて -davis -##lay -lulu -##uck -050 -weeks -qs -##hop -920 -##n -ae -##ear -~5 -eia -405 -##fly -korea -jpeg -boost -##ship -small -##リア -1860 -eur -297 -425 -valley -##iel -simple -##ude -rn -k2 -##ena -されます -non -patrick -しているから -##ナー -feed -5757 -30g -process -well -qqmei -##thing -they -aws -lu -pink -##ters -##kin -または -board -##vertisement -wine -##ien -unicode -##dge -r1 -359 -##tant -いを -##twitter -##3c -cool1 -される -##れて -##l -isp -##012 -standard -45㎡2 -402 -##150 -matt -##fu -326 -##iner -googlemsn -pixnetfacebookyahoo -##ラン -x7 -886 -##uce -メーカー -sao -##ev -##きました -##file -9678 -403 -xddd -shirt -6l -##rio -##hat -3mm -givenchy -ya -bang -##lio -monday -crystal -ロクイン -##abc -336 -head -890 -ubuntuforumwikilinuxpastechat -##vc -##~20 -##rity -cnc -7866 -ipv6 -null -1897 -##ost -yang -imsean -tiger -##fet -##ンス -352 -##= -dji -327 -ji -maria -##come -##んて -foundation -3100 -##beth -##なった -1m -601 -active -##aft -##don -3p -sr -349 -emma -##khz -living -415 -353 -1889 -341 -709 -457 -sas -x6 -##face -pptv -x4 -##mate -han -sophie -##jing -337 -fifa -##mand -other -sale -inwedding -##gn -てきちゃいます -##mmy -##pmlast -bad -nana -nbc -してみてくたさいね 
-なとはお -##wu -##かあります -##あ -note7 -single -##340 -せからこ -してくたさい♪この -しにはとんとんワークケートを -するとあなたにもっとマッチした -ならワークケートへ -もみつかっちゃうかも -ワークケートの -##bel -window -##dio -##ht -union -age -382 -14 -##ivity -##y -コメント -domain -neo -##isa -##lter -5k -f5 -steven -##cts -powerpoint -tft -self -g2 -ft -##テル -zol -##act -mwc -381 -343 -もう -nbapop -408 -てある -eds -ace -##room -previous -author -tomtom -il -##ets -hu -financial -☆☆☆ -っています -bp -5t -chi -1gb -##hg -fairmont -cross -008 -gay -h2 -function -##けて -356 -also -1b -625 -##ータ -##raph -1894 -3~5 -##ils -i3 -334 -avenue -##host -による -##bon -##tsu -message -navigation -50g -fintech -h6 -##ことを -8cm -##ject -##vas -##firm -credit -##wf -xxxx -form -##nor -##space -huawei -plan -json -sbl -##dc -machine -921 -392 -wish -##120 -##sol -windows7 -edward -##ために -development -washington -##nsis -lo -818 -##sio -##ym -##bor -planet -##~8 -##wt -ieee -gpa -##めて -camp -ann -gm -##tw -##oka -connect -##rss -##work -##atus -wall -chicken -soul -2mm -##times -fa -##ather -##cord -009 -##eep -hitachi -gui -harry -##pan -e1 -disney -##press -##ーション -wind -386 -frigidaire -##tl -liu -hsu -332 -basic -von -ev -いた -てきる -スホンサーサイト -learning -##ull -expedia -archives -change -##wei -santa -cut -ins -6gb -turbo -brand -cf1 -508 -004 -return -747 -##rip -h1 -##nis -##をこ -128gb -##にお -3t -application -しており -emc -rx -##oon -384 -quick -412 -15058 -wilson -wing -chapter -##bug -beyond -##cms -##dar -##oh -zoom -e2 -trip -sb -##nba -rcep -342 -aspx -ci -080 -gc -gnu -める -##count -advanced -dance -dv -##url -##ging -367 -8591 -am09 -shadow -battle -346 -##i -##cia -##という -emily -##のてす -##tation -host -ff -techorz -sars -##mini -##mporary -##ering -nc -4200 -798 -##next -cma -##mbps -##gas -##ift -##dot -##ィ -455 -##~17 -amana -##りの -426 -##ros -ir -00㎡1 -##eet -##ible -##↓ -710 -ˋ▽ˊ -##aka -dcs -iq -##v -l1 -##lor -maggie -##011 -##iu -588 -##~1 -830 -##gt -1tb -articles -create -##burg -##iki -database -fantasy -##rex -##cam -dlc -dean -##you -hard -path -gaming -victoria -maps -cb -##lee -##itor -overchicstoretvhome -systems -##xt -416 -p3 -sarah -760 -##nan -407 -486 -x9 -install -second -626 -##ann -##ph -##rcle -##nic -860 -##nar -ec -##とう -768 -metro -chocolate -##rian -~4 -##table -##しています -skin -##sn -395 -mountain -##0mm -inparadise -6m -7x24 -ib -4800 -##jia -eeworld -creative -g5 -g3 -357 -parker -ecfa -village -からの -18000 -sylvia -サーヒス -hbl -##ques -##onsored -##x2 -##きます -##v4 -##tein -ie6 -383 -##stack -389 -ver -##ads -##baby -sound -bbe -##110 -##lone -##uid -ads -022 -gundam -351 -thinkpad -006 -scrum -match -##ave -mems -##470 -##oy -##なりました -##talk -glass -lamigo -span -##eme -job -##a5 -jay -wade -kde -498 -##lace -ocean -tvg -##covery -##r3 -##ners -##rea -junior -think -##aine -cover -##ision -##sia -↓↓ -##bow -msi -413 -458 -406 -##love -711 -801 -soft -z2 -##pl -456 -1840 -mobil -mind -##uy -427 -nginx -##oi -めた -##rr -6221 -##mple -##sson -##ーシてす -371 -##nts -91tv -comhd -crv3000 -##uard -1868 -397 -deep -lost -field -gallery -##bia -rate -spf -redis -traction -930 -icloud -011 -なら -fe -jose -372 -##tory -into -sohu -fx -899 -379 -kicstart2 -##hia -すく -##~3 -##sit -ra -24 -##walk -##xure -500g -##pact -pacific -xa -natural -carlo -##250 -##walker -1850 -##can -cto -gigi -516 -##サー -pen -##hoo -ob -matlab -##b -##yy -13913459 -##iti -mango -##bbs -sense -c5 -oxford -##ニア -walker -jennifer -##ola -course -##bre -701 -##pus -##rder -lucky -075 -##ぁ -ivy -なお -##nia -sotheby -side -##ugh -joy -##orage -##ush -##bat -##dt -364 -r9 -##2d -##gio -511 -country -wear 
-##lax -##~7 -##moon -393 -seven -study -411 -348 -lonzo -8k -##ェ -evolution -##イフ -##kk -gs -kd -##レス -arduino -344 -b12 -##lux -arpg -##rdon -cook -##x5 -dark -five -##als -##ida -とても -sign -362 -##ちの -something -20mm -##nda -387 -##posted -fresh -tf -1870 -422 -cam -##mine -##skip -##form -##ssion -education -394 -##tee -dyson -stage -##jie -want -##night -epson -pack -あります -##ppy -テリヘル -##█ -wd -##eh -##rence -left -##lvin -golden -mhz -discovery -##trix -##n2 -loft -##uch -##dra -##sse -speed -~1 -1mdb -sorry -welcome -##urn -wave -gaga -##lmer -teddy -##160 -トラックハック -せよ -611 -##f2016 -378 -rp -##sha -rar -##あなたに -##きた -840 -holiday -##ュー -373 -074 -##vg -##nos -##rail -gartner -gi -6p -##dium -kit -488 -b3 -eco -##ろう -20g -sean -##stone -autocad -nu -##np -f16 -write -029 -m5 -##ias -images -atp -##dk -fsm -504 -1350 -ve -52kb -##xxx -##のに -##cake -414 -unit -lim -ru -1v -##ification -published -angela -16g -analytics -ak -##q -##nel -gmt -##icon -again -##₂ -##bby -ios11 -445 -かこさいます -waze -いてす -##ハ -9985 -##ust -##ティー -framework -##007 -iptv -delete -52sykb -cl -wwdc -027 -30cm -##fw -##ての -1389 -##xon -brandt -##ses -##dragon -tc -vetements -anne -monte -modern -official -##へて -##ere -##nne -##oud -もちろん -50 -etnews -##a2 -##graphy -421 -863 -##ちゃん -444 -##rtex -##てお -l2 -##gma -mount -ccd -たと -archive -morning -tan -ddos -e7 -##ホ -day4 -##ウ -gis -453 -its -495 -factory -bruce -pg -##ito -ってくたさい -guest -cdma -##lling -536 -n3 -しかし -3~4 -mega -eyes -ro -13 -women -dac -church -##jun -singapore -##facebook -6991 -starbucks -##tos -##stin -##shine -zen -##mu -tina -20℃ -1893 -##たけて -503 -465 -request -##gence -qt -##っ -1886 -347 -363 -q7 -##zzi -diary -##tore -409 -##ead -468 -cst -##osa -canada -agent -va -##jiang -##ちは -##ーク -##lam -sg -##nix -##sday -##よって -g6 -##master -bing -##zl -charlie -16 -8mm -nb40 -##ーン -thai -##ルフ -ln284ct -##itz -##2f -bonnie -##food -##lent -originals -##stro -##lts -418 -∟∣ -##bscribe -children -ntd -yesstyle -##かも -hmv -##tment -d5 -2cm -arts -sms -##pn -##я -##いい -topios9 -539 -lifestyle -virtual -##ague -xz -##deo -muji -024 -unt -##nnis -##ᅩ -faq1 -1884 -396 -##ette -fly -64㎡ -はしめまして -441 -curry -##pop -のこ -release -##← -##◆◆ -##cast -073 -ありな -500ml -##ews -5c -##stle -ios7 -##ima -787 -dog -lenovo -##r4 -roger -013 -cbs -vornado -100m -417 -##desk -##クok -##ald -1867 -9595 -2900 -##van -oil -##x -some -break -common -##jy -##lines -g7 -twice -419 -ella -nano -belle -にこ -##mes -##self -##note -jb -##ことかてきます -benz -##との -##ova -451 -save -##wing -##ますのて -kai -りは -##hua -##rect -rainer -##unge -448 -##0m -adsl -##かな -guestname -##uma -##kins -##zu -tokichoi -##price -county -##med -##mus -rmk -391 -address -vm -えて -openload -##group -##hin -##iginal -amg -urban -##oz -jobs -emi -##public -beautiful -##sch -album -##dden -##bell -jerry -works -hostel -miller -##drive -##rmin -##10 -376 -boot -828 -##370 -##fx -##cm~ -1885 -##nome -##ctionary -##oman -##lish -##cr -##hm -433 -##how -432 -francis -xi -c919 -b5 -evernote -##uc -vga -##3000 -coupe -##urg -##cca -##uality -019 -6g -れる -multi -##また -##ett -em -hey -##ani -##tax -##rma -inside -than -740 -leonnhurt -##jin -ict -れた -bird -notes -200mm -くの -##dical -##lli -result -442 -iu -ee -438 -smap -gopro -##last -yin -pure -998 -32g -けた -5kg -##dan -##rame -mama -##oot -bean -marketing -##hur -2l -bella -sync -xuite -##ground -515 -discuz -##getrelax -##ince -##bay -##5s -cj -##イス -gmat -apt -##pass -jing -##rix -c4 -rich -##とても -niusnews -##ello -bag -770 -##eting -##mobile -18 -culture -015 -##のてすか 
-377 -1020 -area -##ience -616 -details -gp -universal -silver -dit -はお -private -ddd -u11 -kanshu -##ified -fung -##nny -dx -##520 -tai -475 -023 -##fr -##lean -3s -##pin -429 -##rin -25000 -ly -rick -##bility -usb3 -banner -##baru -##gion -metal -dt -vdf -1871 -karl -qualcomm -bear -1010 -oldid -ian -jo -##tors -population -##ernel -1882 -mmorpg -##mv -##bike -603 -##© -ww -friend -##ager -exhibition -##del -##pods -fpx -structure -##free -##tings -kl -##rley -##copyright -##mma -california -3400 -orange -yoga -4l -canmake -honey -##anda -##コメント -595 -nikkie -##ルハイト -dhl -publishing -##mall -##gnet -20cm -513 -##クセス -##┅ -e88 -970 -##dog -fishbase -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##+ -##, -##- -##. -##/ -##: -##; -##< -##= -##> -##? -##@ -##[ -##\ -##] -##^ -##_ -##{ -##| -##} -##~ -##£ -##¤ -##¥ -##§ -##« -##± -##³ -##µ -##· -##¹ -##º -##» -##¼ -##ß -##æ -##÷ -##ø -##đ -##ŋ -##ɔ -##ə -##ɡ -##ʰ -##ˇ -##ˈ -##ˊ -##ˋ -##ˍ -##ː -##˙ -##˚ -##ˢ -##α -##β -##γ -##δ -##ε -##η -##θ -##ι -##κ -##λ -##μ -##ν -##ο -##π -##ρ -##ς -##σ -##τ -##υ -##φ -##χ -##ψ -##б -##в -##г -##д -##е -##ж -##з -##к -##л -##м -##н -##о -##п -##р -##с -##т -##у -##ф -##х -##ц -##ч -##ш -##ы -##ь -##і -##ا -##ب -##ة -##ت -##د -##ر -##س -##ع -##ل -##م -##ن -##ه -##و -##ي -##۩ -##ก -##ง -##น -##ม -##ย -##ร -##อ -##า -##เ -##๑ -##་ -##ღ -##ᄀ -##ᄁ -##ᄂ -##ᄃ -##ᄅ -##ᄆ -##ᄇ -##ᄈ -##ᄉ -##ᄋ -##ᄌ -##ᄎ -##ᄏ -##ᄐ -##ᄑ -##ᄒ -##ᅢ -##ᅣ -##ᅥ -##ᅦ -##ᅧ -##ᅨ -##ᅪ -##ᅬ -##ᅭ -##ᅮ -##ᅯ -##ᅲ -##ᅳ -##ᅴ -##ᆷ -##ᆸ -##ᆺ -##ᆻ -##ᗜ -##ᵃ -##ᵉ -##ᵍ -##ᵏ -##ᵐ -##ᵒ -##ᵘ -##‖ -##„ -##† -##• -##‥ -##‧ -##
 -##‰ -##′ -##″ -##‹ -##› -##※ -##‿ -##⁄ -##ⁱ -##⁺ -##ⁿ -##₁ -##₃ -##₄ -##€ -##№ -##ⅰ -##ⅱ -##ⅲ -##ⅳ -##ⅴ -##↔ -##↗ -##↘ -##⇒ -##∀ -##− -##∕ -##∙ -##√ -##∞ -##∟ -##∠ -##∣ -##∩ -##∮ -##∶ -##∼ -##∽ -##≈ -##≒ -##≡ -##≤ -##≥ -##≦ -##≧ -##≪ -##≫ -##⊙ -##⋅ -##⋈ -##⋯ -##⌒ -##① -##② -##③ -##④ -##⑤ -##⑥ -##⑦ -##⑧ -##⑨ -##⑩ -##⑴ -##⑵ -##⑶ -##⑷ -##⑸ -##⒈ -##⒉ -##⒊ -##⒋ -##ⓒ -##ⓔ -##ⓘ -##━ -##┃ -##┆ -##┊ -##┌ -##└ -##├ -##┣ -##═ -##║ -##╚ -##╞ -##╠ -##╭ -##╮ -##╯ -##╰ -##╱ -##╳ -##▂ -##▃ -##▅ -##▇ -##▉ -##▋ -##▌ -##▍ -##▎ -##□ -##▪ -##▫ -##▬ -##△ -##▶ -##► -##▽ -##◇ -##◕ -##◠ -##◢ -##◤ -##☀ -##☕ -##☞ -##☺ -##☼ -##♀ -##♂ -##♠ -##♡ -##♣ -##♦ -##♫ -##♬ -##✈ -##✔ -##✕ -##✖ -##✦ -##✨ -##✪ -##✰ -##✿ -##❀ -##➜ -##➤ -##⦿ -##、 -##。 -##〃 -##々 -##〇 -##〈 -##〉 -##《 -##》 -##「 -##」 -##『 -##』 -##【 -##】 -##〓 -##〔 -##〕 -##〖 -##〗 -##〜 -##〝 -##〞 -##ぃ -##ぇ -##ぬ -##ふ -##ほ -##む -##ゃ -##ゅ -##ゆ -##ょ -##゜ -##ゝ -##ァ -##ゥ -##エ -##ォ -##ケ -##サ -##セ -##ソ -##ッ -##ニ -##ヌ -##ネ -##ノ -##ヘ -##モ -##ャ -##ヤ -##ュ -##ユ -##ョ -##ヨ -##ワ -##ヲ -##・ -##ヽ -##ㄅ -##ㄆ -##ㄇ -##ㄉ -##ㄋ -##ㄌ -##ㄍ -##ㄎ -##ㄏ -##ㄒ -##ㄚ -##ㄛ -##ㄞ -##ㄟ -##ㄢ -##ㄤ -##ㄥ -##ㄧ -##ㄨ -##ㆍ -##㈦ -##㊣ -##㗎 -##一 -##丁 -##七 -##万 -##丈 -##三 -##上 -##下 -##不 -##与 -##丐 -##丑 -##专 -##且 -##丕 -##世 -##丘 -##丙 -##业 -##丛 -##东 -##丝 -##丞 -##丟 -##両 -##丢 -##两 -##严 -##並 -##丧 -##丨 -##个 -##丫 -##中 -##丰 -##串 -##临 -##丶 -##丸 -##丹 -##为 -##主 -##丼 -##丽 -##举 -##丿 -##乂 -##乃 -##久 -##么 -##义 -##之 -##乌 -##乍 -##乎 -##乏 -##乐 -##乒 -##乓 -##乔 -##乖 -##乗 -##乘 -##乙 -##乜 -##九 -##乞 -##也 -##习 -##乡 -##书 -##乩 -##买 -##乱 -##乳 -##乾 -##亀 -##亂 -##了 -##予 -##争 -##事 -##二 -##于 -##亏 -##云 -##互 -##五 -##井 -##亘 -##亙 -##亚 -##些 -##亜 -##亞 -##亟 -##亡 -##亢 -##交 -##亥 -##亦 -##产 -##亨 -##亩 -##享 -##京 -##亭 -##亮 -##亲 -##亳 -##亵 -##人 -##亿 -##什 -##仁 -##仃 -##仄 -##仅 -##仆 -##仇 -##今 -##介 -##仍 -##从 -##仏 -##仑 -##仓 -##仔 -##仕 -##他 -##仗 -##付 -##仙 -##仝 -##仞 -##仟 -##代 -##令 -##以 -##仨 -##仪 -##们 -##仮 -##仰 -##仲 -##件 -##价 -##任 -##份 -##仿 -##企 -##伉 -##伊 -##伍 -##伎 -##伏 -##伐 -##休 -##伕 -##众 -##优 -##伙 -##会 -##伝 -##伞 -##伟 -##传 -##伢 -##伤 -##伦 -##伪 -##伫 -##伯 -##估 -##伴 -##伶 -##伸 -##伺 -##似 -##伽 -##佃 -##但 -##佇 -##佈 -##位 -##低 -##住 -##佐 -##佑 -##体 -##佔 -##何 -##佗 -##佘 -##余 -##佚 -##佛 -##作 -##佝 -##佞 -##佟 -##你 -##佢 -##佣 -##佤 -##佥 -##佩 -##佬 -##佯 -##佰 -##佳 -##併 -##佶 -##佻 -##佼 -##使 -##侃 -##侄 -##來 -##侈 -##例 -##侍 -##侏 -##侑 -##侖 -##侗 -##供 -##依 -##侠 -##価 -##侣 -##侥 -##侦 -##侧 -##侨 -##侬 -##侮 -##侯 -##侵 -##侶 -##侷 -##便 -##係 -##促 -##俄 -##俊 -##俎 -##俏 -##俐 -##俑 -##俗 -##俘 -##俚 -##保 -##俞 -##俟 -##俠 -##信 -##俨 -##俩 -##俪 -##俬 -##俭 -##修 -##俯 -##俱 -##俳 -##俸 -##俺 -##俾 -##倆 -##倉 -##個 -##倌 -##倍 -##倏 -##們 -##倒 -##倔 -##倖 -##倘 -##候 -##倚 -##倜 -##借 -##倡 -##値 -##倦 -##倩 -##倪 -##倫 -##倬 -##倭 -##倶 -##债 -##值 -##倾 -##偃 -##假 -##偈 -##偉 -##偌 -##偎 -##偏 -##偕 -##做 -##停 -##健 -##側 -##偵 -##偶 -##偷 -##偻 -##偽 -##偿 -##傀 -##傅 -##傍 -##傑 -##傘 -##備 -##傚 -##傢 -##傣 -##傥 -##储 -##傩 -##催 -##傭 -##傲 -##傳 -##債 -##傷 -##傻 -##傾 -##僅 -##働 -##像 -##僑 -##僕 -##僖 -##僚 -##僥 -##僧 -##僭 -##僮 -##僱 -##僵 -##價 -##僻 -##儀 -##儂 -##億 -##儆 -##儉 -##儋 -##儒 -##儕 -##儘 -##償 -##儡 -##優 -##儲 -##儷 -##儼 -##儿 -##兀 -##允 -##元 -##兄 -##充 -##兆 -##兇 -##先 -##光 -##克 -##兌 -##免 -##児 -##兑 -##兒 -##兔 -##兖 -##党 -##兜 -##兢 -##入 -##內 -##全 -##兩 -##八 -##公 -##六 -##兮 -##兰 -##共 -##兲 -##关 -##兴 -##兵 -##其 -##具 -##典 -##兹 -##养 -##兼 -##兽 -##冀 -##内 -##円 -##冇 -##冈 -##冉 -##冊 -##册 -##再 -##冏 -##冒 -##冕 -##冗 -##写 -##军 -##农 -##冠 -##冢 -##冤 -##冥 -##冨 -##冪 -##冬 -##冯 -##冰 -##冲 -##决 -##况 -##冶 -##冷 -##冻 -##冼 -##冽 -##冾 -##净 -##凄 -##准 -##凇 -##凈 -##凉 -##凋 -##凌 -##凍 -##减 -##凑 -##凛 -##凜 -##凝 -##几 -##凡 -##凤 -##処 -##凪 -##凭 -##凯 -##凰 -##凱 -##凳 -##凶 -##凸 -##凹 -##出 -##击 -##函 -##凿 -##刀 -##刁 -##刃 -##分 -##切 -##刈 -##刊 -##刍 -##刎 -##刑 -##划 -##列 -##刘 
-##则 -##刚 -##创 -##初 -##删 -##判 -##別 -##刨 -##利 -##刪 -##别 -##刮 -##到 -##制 -##刷 -##券 -##刹 -##刺 -##刻 -##刽 -##剁 -##剂 -##剃 -##則 -##剉 -##削 -##剋 -##剌 -##前 -##剎 -##剐 -##剑 -##剔 -##剖 -##剛 -##剜 -##剝 -##剣 -##剤 -##剥 -##剧 -##剩 -##剪 -##副 -##割 -##創 -##剷 -##剽 -##剿 -##劃 -##劇 -##劈 -##劉 -##劊 -##劍 -##劏 -##劑 -##力 -##劝 -##办 -##功 -##加 -##务 -##劣 -##动 -##助 -##努 -##劫 -##劭 -##励 -##劲 -##劳 -##労 -##劵 -##効 -##劾 -##势 -##勁 -##勃 -##勇 -##勉 -##勋 -##勐 -##勒 -##動 -##勖 -##勘 -##務 -##勛 -##勝 -##勞 -##募 -##勢 -##勤 -##勧 -##勳 -##勵 -##勸 -##勺 -##勻 -##勾 -##勿 -##匀 -##包 -##匆 -##匈 -##匍 -##匐 -##匕 -##化 -##北 -##匙 -##匝 -##匠 -##匡 -##匣 -##匪 -##匮 -##匯 -##匱 -##匹 -##区 -##医 -##匾 -##匿 -##區 -##十 -##千 -##卅 -##升 -##午 -##卉 -##半 -##卍 -##华 -##协 -##卑 -##卒 -##卓 -##協 -##单 -##卖 -##南 -##単 -##博 -##卜 -##卞 -##卟 -##占 -##卡 -##卢 -##卤 -##卦 -##卧 -##卫 -##卮 -##卯 -##印 -##危 -##即 -##却 -##卵 -##卷 -##卸 -##卻 -##卿 -##厂 -##厄 -##厅 -##历 -##厉 -##压 -##厌 -##厕 -##厘 -##厚 -##厝 -##原 -##厢 -##厥 -##厦 -##厨 -##厩 -##厭 -##厮 -##厲 -##厳 -##去 -##县 -##叁 -##参 -##參 -##又 -##叉 -##及 -##友 -##双 -##反 -##収 -##发 -##叔 -##取 -##受 -##变 -##叙 -##叛 -##叟 -##叠 -##叡 -##叢 -##口 -##古 -##句 -##另 -##叨 -##叩 -##只 -##叫 -##召 -##叭 -##叮 -##可 -##台 -##叱 -##史 -##右 -##叵 -##叶 -##号 -##司 -##叹 -##叻 -##叼 -##叽 -##吁 -##吃 -##各 -##吆 -##合 -##吉 -##吊 -##吋 -##同 -##名 -##后 -##吏 -##吐 -##向 -##吒 -##吓 -##吕 -##吖 -##吗 -##君 -##吝 -##吞 -##吟 -##吠 -##吡 -##否 -##吧 -##吨 -##吩 -##含 -##听 -##吭 -##吮 -##启 -##吱 -##吳 -##吴 -##吵 -##吶 -##吸 -##吹 -##吻 -##吼 -##吽 -##吾 -##呀 -##呂 -##呃 -##呆 -##呈 -##告 -##呋 -##呎 -##呐 -##呓 -##呕 -##呗 -##员 -##呛 -##呜 -##呢 -##呤 -##呦 -##周 -##呱 -##呲 -##味 -##呵 -##呷 -##呸 -##呻 -##呼 -##命 -##咀 -##咁 -##咂 -##咄 -##咆 -##咋 -##和 -##咎 -##咏 -##咐 -##咒 -##咔 -##咕 -##咖 -##咗 -##咘 -##咙 -##咚 -##咛 -##咣 -##咤 -##咦 -##咧 -##咨 -##咩 -##咪 -##咫 -##咬 -##咭 -##咯 -##咱 -##咲 -##咳 -##咸 -##咻 -##咽 -##咿 -##哀 -##品 -##哂 -##哄 -##哆 -##哇 -##哈 -##哉 -##哋 -##哌 -##响 -##哎 -##哏 -##哐 -##哑 -##哒 -##哔 -##哗 -##哟 -##員 -##哥 -##哦 -##哧 -##哨 -##哩 -##哪 -##哭 -##哮 -##哲 -##哺 -##哼 -##哽 -##唁 -##唄 -##唆 -##唇 -##唉 -##唏 -##唐 -##唑 -##唔 -##唠 -##唤 -##唧 -##唬 -##售 -##唯 -##唰 -##唱 -##唳 -##唷 -##唸 -##唾 -##啃 -##啄 -##商 -##啉 -##啊 -##問 -##啓 -##啕 -##啖 -##啜 -##啞 -##啟 -##啡 -##啤 -##啥 -##啦 -##啧 -##啪 -##啫 -##啬 -##啮 -##啰 -##啱 -##啲 -##啵 -##啶 -##啷 -##啸 -##啻 -##啼 -##啾 -##喀 -##喂 -##喃 -##善 -##喆 -##喇 -##喉 -##喊 -##喋 -##喎 -##喏 -##喔 -##喘 -##喙 -##喚 -##喜 -##喝 -##喟 -##喧 -##喪 -##喫 -##喬 -##單 -##喰 -##喱 -##喲 -##喳 -##喵 -##営 -##喷 -##喹 -##喺 -##喻 -##喽 -##嗅 -##嗆 -##嗇 -##嗎 -##嗑 -##嗒 -##嗓 -##嗔 -##嗖 -##嗚 -##嗜 -##嗝 -##嗟 -##嗡 -##嗣 -##嗤 -##嗦 -##嗨 -##嗪 -##嗬 -##嗯 -##嗰 -##嗲 -##嗳 -##嗶 -##嗷 -##嗽 -##嘀 -##嘅 -##嘆 -##嘈 -##嘉 -##嘌 -##嘍 -##嘎 -##嘔 -##嘖 -##嘗 -##嘘 -##嘚 -##嘛 -##嘜 -##嘞 -##嘟 -##嘢 -##嘣 -##嘤 -##嘧 -##嘩 -##嘭 -##嘮 -##嘯 -##嘰 -##嘱 -##嘲 -##嘴 -##嘶 -##嘸 -##嘹 -##嘻 -##嘿 -##噁 -##噌 -##噎 -##噓 -##噔 -##噗 -##噙 -##噜 -##噠 -##噢 -##噤 -##器 -##噩 -##噪 -##噬 -##噱 -##噴 -##噶 -##噸 -##噹 -##噻 -##噼 -##嚀 -##嚇 -##嚎 -##嚏 -##嚐 -##嚓 -##嚕 -##嚟 -##嚣 -##嚥 -##嚨 -##嚮 -##嚴 -##嚷 -##嚼 -##囂 -##囉 -##囊 -##囍 -##囑 -##囔 -##囗 -##囚 -##四 -##囝 -##回 -##囟 -##因 -##囡 -##团 -##団 -##囤 -##囧 -##囪 -##囫 -##园 -##困 -##囱 -##囲 -##図 -##围 -##囹 -##固 -##国 -##图 -##囿 -##圃 -##圄 -##圆 -##圈 -##國 -##圍 -##圏 -##園 -##圓 -##圖 -##團 -##圜 -##土 -##圣 -##圧 -##在 -##圩 -##圭 -##地 -##圳 -##场 -##圻 -##圾 -##址 -##坂 -##均 -##坊 -##坍 -##坎 -##坏 -##坐 -##坑 -##块 -##坚 -##坛 -##坝 -##坞 -##坟 -##坠 -##坡 -##坤 -##坦 -##坨 -##坪 -##坯 -##坳 -##坵 -##坷 -##垂 -##垃 -##垄 -##型 -##垒 -##垚 -##垛 -##垠 -##垢 -##垣 -##垦 -##垩 -##垫 -##垭 -##垮 -##垵 -##埂 -##埃 -##埋 -##城 -##埔 -##埕 -##埗 -##域 -##埠 -##埤 -##埵 -##執 -##埸 -##培 -##基 -##埼 -##堀 -##堂 -##堃 -##堅 -##堆 -##堇 -##堑 -##堕 -##堙 -##堡 -##堤 -##堪 -##堯 -##堰 -##報 -##場 -##堵 -##堺 -##堿 -##塊 -##塌 -##塑 -##塔 -##塗 -##塘 -##塚 -##塞 -##塢 -##塩 -##填 -##塬 -##塭 -##塵 -##塾 -##墀 -##境 -##墅 -##墉 -##墊 -##墒 
-##墓 -##増 -##墘 -##墙 -##墜 -##增 -##墟 -##墨 -##墩 -##墮 -##墳 -##墻 -##墾 -##壁 -##壅 -##壆 -##壇 -##壊 -##壑 -##壓 -##壕 -##壘 -##壞 -##壟 -##壢 -##壤 -##壩 -##士 -##壬 -##壮 -##壯 -##声 -##売 -##壳 -##壶 -##壹 -##壺 -##壽 -##处 -##备 -##変 -##复 -##夏 -##夔 -##夕 -##外 -##夙 -##多 -##夜 -##够 -##夠 -##夢 -##夥 -##大 -##天 -##太 -##夫 -##夭 -##央 -##夯 -##失 -##头 -##夷 -##夸 -##夹 -##夺 -##夾 -##奂 -##奄 -##奇 -##奈 -##奉 -##奋 -##奎 -##奏 -##奐 -##契 -##奔 -##奕 -##奖 -##套 -##奘 -##奚 -##奠 -##奢 -##奥 -##奧 -##奪 -##奬 -##奮 -##女 -##奴 -##奶 -##奸 -##她 -##好 -##如 -##妃 -##妄 -##妆 -##妇 -##妈 -##妊 -##妍 -##妒 -##妓 -##妖 -##妘 -##妙 -##妝 -##妞 -##妣 -##妤 -##妥 -##妨 -##妩 -##妪 -##妮 -##妲 -##妳 -##妹 -##妻 -##妾 -##姆 -##姉 -##姊 -##始 -##姍 -##姐 -##姑 -##姒 -##姓 -##委 -##姗 -##姚 -##姜 -##姝 -##姣 -##姥 -##姦 -##姨 -##姪 -##姫 -##姬 -##姹 -##姻 -##姿 -##威 -##娃 -##娄 -##娅 -##娆 -##娇 -##娉 -##娑 -##娓 -##娘 -##娛 -##娜 -##娟 -##娠 -##娣 -##娥 -##娩 -##娱 -##娲 -##娴 -##娶 -##娼 -##婀 -##婁 -##婆 -##婉 -##婊 -##婕 -##婚 -##婢 -##婦 -##婧 -##婪 -##婭 -##婴 -##婵 -##婶 -##婷 -##婺 -##婿 -##媒 -##媚 -##媛 -##媞 -##媧 -##媲 -##媳 -##媽 -##媾 -##嫁 -##嫂 -##嫉 -##嫌 -##嫑 -##嫔 -##嫖 -##嫘 -##嫚 -##嫡 -##嫣 -##嫦 -##嫩 -##嫲 -##嫵 -##嫻 -##嬅 -##嬉 -##嬌 -##嬗 -##嬛 -##嬢 -##嬤 -##嬪 -##嬰 -##嬴 -##嬷 -##嬸 -##嬿 -##孀 -##孃 -##子 -##孑 -##孔 -##孕 -##孖 -##字 -##存 -##孙 -##孚 -##孛 -##孜 -##孝 -##孟 -##孢 -##季 -##孤 -##学 -##孩 -##孪 -##孫 -##孬 -##孰 -##孱 -##孳 -##孵 -##學 -##孺 -##孽 -##孿 -##宁 -##它 -##宅 -##宇 -##守 -##安 -##宋 -##完 -##宏 -##宓 -##宕 -##宗 -##官 -##宙 -##定 -##宛 -##宜 -##宝 -##实 -##実 -##宠 -##审 -##客 -##宣 -##室 -##宥 -##宦 -##宪 -##宫 -##宮 -##宰 -##害 -##宴 -##宵 -##家 -##宸 -##容 -##宽 -##宾 -##宿 -##寂 -##寄 -##寅 -##密 -##寇 -##富 -##寐 -##寒 -##寓 -##寛 -##寝 -##寞 -##察 -##寡 -##寢 -##寥 -##實 -##寧 -##寨 -##審 -##寫 -##寬 -##寮 -##寰 -##寵 -##寶 -##寸 -##对 -##寺 -##寻 -##导 -##対 -##寿 -##封 -##専 -##射 -##将 -##將 -##專 -##尉 -##尊 -##尋 -##對 -##導 -##小 -##少 -##尔 -##尕 -##尖 -##尘 -##尚 -##尝 -##尤 -##尧 -##尬 -##就 -##尴 -##尷 -##尸 -##尹 -##尺 -##尻 -##尼 -##尽 -##尾 -##尿 -##局 -##屁 -##层 -##屄 -##居 -##屆 -##屈 -##屉 -##届 -##屋 -##屌 -##屍 -##屎 -##屏 -##屐 -##屑 -##展 -##屜 -##属 -##屠 -##屡 -##屢 -##層 -##履 -##屬 -##屯 -##山 -##屹 -##屿 -##岀 -##岁 -##岂 -##岌 -##岐 -##岑 -##岔 -##岖 -##岗 -##岘 -##岙 -##岚 -##岛 -##岡 -##岩 -##岫 -##岬 -##岭 -##岱 -##岳 -##岷 -##岸 -##峇 -##峋 -##峒 -##峙 -##峡 -##峤 -##峥 -##峦 -##峨 -##峪 -##峭 -##峯 -##峰 -##峴 -##島 -##峻 -##峽 -##崁 -##崂 -##崆 -##崇 -##崎 -##崑 -##崔 -##崖 -##崗 -##崙 -##崛 -##崧 -##崩 -##崭 -##崴 -##崽 -##嵇 -##嵊 -##嵋 -##嵌 -##嵐 -##嵘 -##嵩 -##嵬 -##嵯 -##嶂 -##嶄 -##嶇 -##嶋 -##嶙 -##嶺 -##嶼 -##嶽 -##巅 -##巍 -##巒 -##巔 -##巖 -##川 -##州 -##巡 -##巢 -##工 -##左 -##巧 -##巨 -##巩 -##巫 -##差 -##己 -##已 -##巳 -##巴 -##巷 -##巻 -##巽 -##巾 -##巿 -##币 -##市 -##布 -##帅 -##帆 -##师 -##希 -##帐 -##帑 -##帕 -##帖 -##帘 -##帚 -##帛 -##帜 -##帝 -##帥 -##带 -##帧 -##師 -##席 -##帮 -##帯 -##帰 -##帳 -##帶 -##帷 -##常 -##帼 -##帽 -##幀 -##幂 -##幄 -##幅 -##幌 -##幔 -##幕 -##幟 -##幡 -##幢 -##幣 -##幫 -##干 -##平 -##年 -##并 -##幸 -##幹 -##幺 -##幻 -##幼 -##幽 -##幾 -##广 -##庁 -##広 -##庄 -##庆 -##庇 -##床 -##序 -##庐 -##库 -##应 -##底 -##庖 -##店 -##庙 -##庚 -##府 -##庞 -##废 -##庠 -##度 -##座 -##庫 -##庭 -##庵 -##庶 -##康 -##庸 -##庹 -##庾 -##廁 -##廂 -##廃 -##廈 -##廉 -##廊 -##廓 -##廖 -##廚 -##廝 -##廟 -##廠 -##廢 -##廣 -##廬 -##廳 -##延 -##廷 -##建 -##廿 -##开 -##弁 -##异 -##弃 -##弄 -##弈 -##弊 -##弋 -##式 -##弑 -##弒 -##弓 -##弔 -##引 -##弗 -##弘 -##弛 -##弟 -##张 -##弥 -##弦 -##弧 -##弩 -##弭 -##弯 -##弱 -##張 -##強 -##弹 -##强 -##弼 -##弾 -##彅 -##彆 -##彈 -##彌 -##彎 -##归 -##当 -##录 -##彗 -##彙 -##彝 -##形 -##彤 -##彥 -##彦 -##彧 -##彩 -##彪 -##彫 -##彬 -##彭 -##彰 -##影 -##彷 -##役 -##彻 -##彼 -##彿 -##往 -##征 -##径 -##待 -##徇 -##很 -##徉 -##徊 -##律 -##後 -##徐 -##徑 -##徒 -##従 -##徕 -##得 -##徘 -##徙 -##徜 -##從 -##徠 -##御 -##徨 -##復 -##循 -##徬 -##微 -##徳 -##徴 -##徵 -##德 -##徹 -##徼 -##徽 -##心 -##必 -##忆 -##忌 -##忍 -##忏 -##忐 -##忑 -##忒 -##忖 -##志 -##忘 -##忙 -##応 -##忠 -##忡 -##忤 -##忧 -##忪 -##快 -##忱 -##念 -##忻 -##忽 -##忿 -##怀 
-##态 -##怂 -##怅 -##怆 -##怎 -##怏 -##怒 -##怔 -##怕 -##怖 -##怙 -##怜 -##思 -##怠 -##怡 -##急 -##怦 -##性 -##怨 -##怪 -##怯 -##怵 -##总 -##怼 -##恁 -##恃 -##恆 -##恋 -##恍 -##恐 -##恒 -##恕 -##恙 -##恚 -##恢 -##恣 -##恤 -##恥 -##恨 -##恩 -##恪 -##恫 -##恬 -##恭 -##息 -##恰 -##恳 -##恵 -##恶 -##恸 -##恺 -##恻 -##恼 -##恿 -##悄 -##悅 -##悉 -##悌 -##悍 -##悔 -##悖 -##悚 -##悟 -##悠 -##患 -##悦 -##您 -##悩 -##悪 -##悬 -##悯 -##悱 -##悲 -##悴 -##悵 -##悶 -##悸 -##悻 -##悼 -##悽 -##情 -##惆 -##惇 -##惊 -##惋 -##惑 -##惕 -##惘 -##惚 -##惜 -##惟 -##惠 -##惡 -##惦 -##惧 -##惨 -##惩 -##惫 -##惬 -##惭 -##惮 -##惯 -##惰 -##惱 -##想 -##惴 -##惶 -##惹 -##惺 -##愁 -##愆 -##愈 -##愉 -##愍 -##意 -##愕 -##愚 -##愛 -##愜 -##感 -##愣 -##愤 -##愧 -##愫 -##愷 -##愿 -##慄 -##慈 -##態 -##慌 -##慎 -##慑 -##慕 -##慘 -##慚 -##慟 -##慢 -##慣 -##慧 -##慨 -##慫 -##慮 -##慰 -##慳 -##慵 -##慶 -##慷 -##慾 -##憂 -##憊 -##憋 -##憎 -##憐 -##憑 -##憔 -##憚 -##憤 -##憧 -##憨 -##憩 -##憫 -##憬 -##憲 -##憶 -##憾 -##懂 -##懇 -##懈 -##應 -##懊 -##懋 -##懑 -##懒 -##懦 -##懲 -##懵 -##懶 -##懷 -##懸 -##懺 -##懼 -##懾 -##懿 -##戀 -##戈 -##戊 -##戌 -##戍 -##戎 -##戏 -##成 -##我 -##戒 -##戕 -##或 -##战 -##戚 -##戛 -##戟 -##戡 -##戦 -##截 -##戬 -##戮 -##戰 -##戲 -##戳 -##戴 -##戶 -##户 -##戸 -##戻 -##戾 -##房 -##所 -##扁 -##扇 -##扈 -##扉 -##手 -##才 -##扎 -##扑 -##扒 -##打 -##扔 -##払 -##托 -##扛 -##扣 -##扦 -##执 -##扩 -##扪 -##扫 -##扬 -##扭 -##扮 -##扯 -##扰 -##扱 -##扳 -##扶 -##批 -##扼 -##找 -##承 -##技 -##抄 -##抉 -##把 -##抑 -##抒 -##抓 -##投 -##抖 -##抗 -##折 -##抚 -##抛 -##抜 -##択 -##抟 -##抠 -##抡 -##抢 -##护 -##报 -##抨 -##披 -##抬 -##抱 -##抵 -##抹 -##押 -##抽 -##抿 -##拂 -##拄 -##担 -##拆 -##拇 -##拈 -##拉 -##拋 -##拌 -##拍 -##拎 -##拐 -##拒 -##拓 -##拔 -##拖 -##拗 -##拘 -##拙 -##拚 -##招 -##拜 -##拟 -##拡 -##拢 -##拣 -##拥 -##拦 -##拧 -##拨 -##择 -##括 -##拭 -##拮 -##拯 -##拱 -##拳 -##拴 -##拷 -##拼 -##拽 -##拾 -##拿 -##持 -##挂 -##指 -##挈 -##按 -##挎 -##挑 -##挖 -##挙 -##挚 -##挛 -##挝 -##挞 -##挟 -##挠 -##挡 -##挣 -##挤 -##挥 -##挨 -##挪 -##挫 -##振 -##挲 -##挹 -##挺 -##挽 -##挾 -##捂 -##捅 -##捆 -##捉 -##捋 -##捌 -##捍 -##捎 -##捏 -##捐 -##捕 -##捞 -##损 -##捡 -##换 -##捣 -##捧 -##捨 -##捩 -##据 -##捱 -##捲 -##捶 -##捷 -##捺 -##捻 -##掀 -##掂 -##掃 -##掇 -##授 -##掉 -##掌 -##掏 -##掐 -##排 -##掖 -##掘 -##掙 -##掛 -##掠 -##採 -##探 -##掣 -##接 -##控 -##推 -##掩 -##措 -##掬 -##掰 -##掲 -##掳 -##掴 -##掷 -##掸 -##掺 -##揀 -##揃 -##揄 -##揆 -##揉 -##揍 -##描 -##提 -##插 -##揖 -##揚 -##換 -##握 -##揣 -##揩 -##揪 -##揭 -##揮 -##援 -##揶 -##揸 -##揹 -##揽 -##搀 -##搁 -##搂 -##搅 -##損 -##搏 -##搐 -##搓 -##搔 -##搖 -##搗 -##搜 -##搞 -##搡 -##搪 -##搬 -##搭 -##搵 -##搶 -##携 -##搽 -##摀 -##摁 -##摄 -##摆 -##摇 -##摈 -##摊 -##摒 -##摔 -##摘 -##摞 -##摟 -##摧 -##摩 -##摯 -##摳 -##摸 -##摹 -##摺 -##摻 -##撂 -##撃 -##撅 -##撇 -##撈 -##撐 -##撑 -##撒 -##撓 -##撕 -##撚 -##撞 -##撤 -##撥 -##撩 -##撫 -##撬 -##播 -##撮 -##撰 -##撲 -##撵 -##撷 -##撸 -##撻 -##撼 -##撿 -##擀 -##擁 -##擂 -##擄 -##擅 -##擇 -##擊 -##擋 -##操 -##擎 -##擒 -##擔 -##擘 -##據 -##擞 -##擠 -##擡 -##擢 -##擦 -##擬 -##擰 -##擱 -##擲 -##擴 -##擷 -##擺 -##擼 -##擾 -##攀 -##攏 -##攒 -##攔 -##攘 -##攙 -##攜 -##攝 -##攞 -##攢 -##攣 -##攤 -##攥 -##攪 -##攫 -##攬 -##支 -##收 -##攸 -##改 -##攻 -##放 -##政 -##故 -##效 -##敌 -##敍 -##敎 -##敏 -##救 -##敕 -##敖 -##敗 -##敘 -##教 -##敛 -##敝 -##敞 -##敢 -##散 -##敦 -##敬 -##数 -##敲 -##整 -##敵 -##敷 -##數 -##斂 -##斃 -##文 -##斋 -##斌 -##斎 -##斐 -##斑 -##斓 -##斗 -##料 -##斛 -##斜 -##斟 -##斡 -##斤 -##斥 -##斧 -##斩 -##斫 -##斬 -##断 -##斯 -##新 -##斷 -##方 -##於 -##施 -##旁 -##旃 -##旅 -##旋 -##旌 -##旎 -##族 -##旖 -##旗 -##无 -##既 -##日 -##旦 -##旧 -##旨 -##早 -##旬 -##旭 -##旮 -##旱 -##时 -##旷 -##旺 -##旻 -##昀 -##昂 -##昆 -##昇 -##昉 -##昊 -##昌 -##明 -##昏 -##易 -##昔 -##昕 -##昙 -##星 -##映 -##春 -##昧 -##昨 -##昭 -##是 -##昱 -##昴 -##昵 -##昶 -##昼 -##显 -##晁 -##時 -##晃 -##晉 -##晋 -##晌 -##晏 -##晒 -##晓 -##晔 -##晕 -##晖 -##晗 -##晚 -##晝 -##晞 -##晟 -##晤 -##晦 -##晨 -##晩 -##普 -##景 -##晰 -##晴 -##晶 -##晷 -##智 -##晾 -##暂 -##暄 -##暇 -##暈 -##暉 -##暌 -##暐 -##暑 -##暖 -##暗 -##暝 -##暢 -##暧 -##暨 -##暫 -##暮 -##暱 -##暴 -##暸 -##暹 -##曄 -##曆 -##曇 -##曉 -##曖 -##曙 -##曜 -##曝 -##曠 -##曦 -##曬 -##曰 
-##曲 -##曳 -##更 -##書 -##曹 -##曼 -##曾 -##替 -##最 -##會 -##月 -##有 -##朋 -##服 -##朐 -##朔 -##朕 -##朗 -##望 -##朝 -##期 -##朦 -##朧 -##木 -##未 -##末 -##本 -##札 -##朮 -##术 -##朱 -##朴 -##朵 -##机 -##朽 -##杀 -##杂 -##权 -##杆 -##杈 -##杉 -##李 -##杏 -##材 -##村 -##杓 -##杖 -##杜 -##杞 -##束 -##杠 -##条 -##来 -##杨 -##杭 -##杯 -##杰 -##東 -##杳 -##杵 -##杷 -##杼 -##松 -##板 -##极 -##构 -##枇 -##枉 -##枋 -##析 -##枕 -##林 -##枚 -##果 -##枝 -##枢 -##枣 -##枪 -##枫 -##枭 -##枯 -##枰 -##枱 -##枳 -##架 -##枷 -##枸 -##柄 -##柏 -##某 -##柑 -##柒 -##染 -##柔 -##柘 -##柚 -##柜 -##柞 -##柠 -##柢 -##查 -##柩 -##柬 -##柯 -##柱 -##柳 -##柴 -##柵 -##査 -##柿 -##栀 -##栃 -##栄 -##栅 -##标 -##栈 -##栉 -##栋 -##栎 -##栏 -##树 -##栓 -##栖 -##栗 -##校 -##栩 -##株 -##样 -##核 -##根 -##格 -##栽 -##栾 -##桀 -##桁 -##桂 -##桃 -##桅 -##框 -##案 -##桉 -##桌 -##桎 -##桐 -##桑 -##桓 -##桔 -##桜 -##桠 -##桡 -##桢 -##档 -##桥 -##桦 -##桧 -##桨 -##桩 -##桶 -##桿 -##梁 -##梅 -##梆 -##梏 -##梓 -##梗 -##條 -##梟 -##梢 -##梦 -##梧 -##梨 -##梭 -##梯 -##械 -##梳 -##梵 -##梶 -##检 -##棂 -##棄 -##棉 -##棋 -##棍 -##棒 -##棕 -##棗 -##棘 -##棚 -##棟 -##棠 -##棣 -##棧 -##森 -##棱 -##棲 -##棵 -##棹 -##棺 -##椁 -##椅 -##椋 -##植 -##椎 -##椒 -##検 -##椪 -##椭 -##椰 -##椹 -##椽 -##椿 -##楂 -##楊 -##楓 -##楔 -##楚 -##楝 -##楞 -##楠 -##楣 -##楨 -##楫 -##業 -##楮 -##極 -##楷 -##楸 -##楹 -##楼 -##楽 -##概 -##榄 -##榆 -##榈 -##榉 -##榔 -##榕 -##榖 -##榛 -##榜 -##榨 -##榫 -##榭 -##榮 -##榱 -##榴 -##榷 -##榻 -##槁 -##槃 -##構 -##槌 -##槍 -##槎 -##槐 -##槓 -##様 -##槛 -##槟 -##槤 -##槭 -##槲 -##槳 -##槻 -##槽 -##槿 -##樁 -##樂 -##樊 -##樑 -##樓 -##標 -##樞 -##樟 -##模 -##樣 -##権 -##横 -##樫 -##樯 -##樱 -##樵 -##樸 -##樹 -##樺 -##樽 -##樾 -##橄 -##橇 -##橋 -##橐 -##橘 -##橙 -##機 -##橡 -##橢 -##橫 -##橱 -##橹 -##橼 -##檀 -##檄 -##檎 -##檐 -##檔 -##檗 -##檜 -##檢 -##檬 -##檯 -##檳 -##檸 -##檻 -##櫃 -##櫚 -##櫛 -##櫥 -##櫸 -##櫻 -##欄 -##權 -##欒 -##欖 -##欠 -##次 -##欢 -##欣 -##欧 -##欲 -##欸 -##欺 -##欽 -##款 -##歆 -##歇 -##歉 -##歌 -##歎 -##歐 -##歓 -##歙 -##歛 -##歡 -##止 -##正 -##此 -##步 -##武 -##歧 -##歩 -##歪 -##歯 -##歲 -##歳 -##歴 -##歷 -##歸 -##歹 -##死 -##歼 -##殁 -##殃 -##殆 -##殇 -##殉 -##殊 -##残 -##殒 -##殓 -##殖 -##殘 -##殞 -##殡 -##殤 -##殭 -##殯 -##殲 -##殴 -##段 -##殷 -##殺 -##殼 -##殿 -##毀 -##毁 -##毂 -##毅 -##毆 -##毋 -##母 -##毎 -##每 -##毒 -##毓 -##比 -##毕 -##毗 -##毘 -##毙 -##毛 -##毡 -##毫 -##毯 -##毽 -##氈 -##氏 -##氐 -##民 -##氓 -##气 -##氖 -##気 -##氙 -##氛 -##氟 -##氡 -##氢 -##氣 -##氤 -##氦 -##氧 -##氨 -##氪 -##氫 -##氮 -##氯 -##氰 -##氲 -##水 -##氷 -##永 -##氹 -##氾 -##汀 -##汁 -##求 -##汆 -##汇 -##汉 -##汎 -##汐 -##汕 -##汗 -##汙 -##汛 -##汝 -##汞 -##江 -##池 -##污 -##汤 -##汨 -##汩 -##汪 -##汰 -##汲 -##汴 -##汶 -##汹 -##決 -##汽 -##汾 -##沁 -##沂 -##沃 -##沅 -##沈 -##沉 -##沌 -##沏 -##沐 -##沒 -##沓 -##沖 -##沙 -##沛 -##沟 -##没 -##沢 -##沣 -##沥 -##沦 -##沧 -##沪 -##沫 -##沭 -##沮 -##沱 -##河 -##沸 -##油 -##治 -##沼 -##沽 -##沾 -##沿 -##況 -##泄 -##泉 -##泊 -##泌 -##泓 -##法 -##泗 -##泛 -##泞 -##泠 -##泡 -##波 -##泣 -##泥 -##注 -##泪 -##泫 -##泮 -##泯 -##泰 -##泱 -##泳 -##泵 -##泷 -##泸 -##泻 -##泼 -##泽 -##泾 -##洁 -##洄 -##洋 -##洒 -##洗 -##洙 -##洛 -##洞 -##津 -##洩 -##洪 -##洮 -##洱 -##洲 -##洵 -##洶 -##洸 -##洹 -##活 -##洼 -##洽 -##派 -##流 -##浃 -##浄 -##浅 -##浆 -##浇 -##浊 -##测 -##济 -##浏 -##浑 -##浒 -##浓 -##浔 -##浙 -##浚 -##浜 -##浣 -##浦 -##浩 -##浪 -##浬 -##浮 -##浯 -##浴 -##海 -##浸 -##涂 -##涅 -##涇 -##消 -##涉 -##涌 -##涎 -##涓 -##涔 -##涕 -##涙 -##涛 -##涝 -##涞 -##涟 -##涠 -##涡 -##涣 -##涤 -##润 -##涧 -##涨 -##涩 -##涪 -##涮 -##涯 -##液 -##涵 -##涸 -##涼 -##涿 -##淀 -##淄 -##淅 -##淆 -##淇 -##淋 -##淌 -##淑 -##淒 -##淖 -##淘 -##淙 -##淚 -##淞 -##淡 -##淤 -##淦 -##淨 -##淩 -##淪 -##淫 -##淬 -##淮 -##深 -##淳 -##淵 -##混 -##淹 -##淺 -##添 -##淼 -##清 -##済 -##渉 -##渊 -##渋 -##渍 -##渎 -##渐 -##渔 -##渗 -##渙 -##渚 -##減 -##渝 -##渠 -##渡 -##渣 -##渤 -##渥 -##渦 -##温 -##測 -##渭 -##港 -##渲 -##渴 -##游 -##渺 -##渾 -##湃 -##湄 -##湊 -##湍 -##湖 -##湘 -##湛 -##湟 -##湧 -##湫 -##湮 -##湯 -##湳 -##湾 -##湿 -##満 -##溃 -##溅 -##溉 -##溏 -##源 -##準 -##溜 -##溝 -##溟 -##溢 -##溥 -##溧 -##溪 -##溫 -##溯 -##溱 -##溴 -##溶 -##溺 -##溼 -##滁 -##滂 -##滄 -##滅 -##滇 -##滋 -##滌 -##滑 -##滓 
-##滔 -##滕 -##滙 -##滚 -##滝 -##滞 -##滟 -##满 -##滢 -##滤 -##滥 -##滦 -##滨 -##滩 -##滬 -##滯 -##滲 -##滴 -##滷 -##滸 -##滾 -##滿 -##漁 -##漂 -##漆 -##漉 -##漏 -##漓 -##演 -##漕 -##漠 -##漢 -##漣 -##漩 -##漪 -##漫 -##漬 -##漯 -##漱 -##漲 -##漳 -##漸 -##漾 -##漿 -##潆 -##潇 -##潋 -##潍 -##潑 -##潔 -##潘 -##潛 -##潜 -##潞 -##潟 -##潢 -##潤 -##潦 -##潧 -##潭 -##潮 -##潰 -##潴 -##潸 -##潺 -##潼 -##澀 -##澄 -##澆 -##澈 -##澍 -##澎 -##澗 -##澜 -##澡 -##澤 -##澧 -##澱 -##澳 -##澹 -##激 -##濁 -##濂 -##濃 -##濑 -##濒 -##濕 -##濘 -##濛 -##濟 -##濠 -##濡 -##濤 -##濫 -##濬 -##濮 -##濯 -##濱 -##濺 -##濾 -##瀅 -##瀆 -##瀉 -##瀋 -##瀏 -##瀑 -##瀕 -##瀘 -##瀚 -##瀛 -##瀝 -##瀞 -##瀟 -##瀧 -##瀨 -##瀬 -##瀰 -##瀾 -##灌 -##灏 -##灑 -##灘 -##灝 -##灞 -##灣 -##火 -##灬 -##灭 -##灯 -##灰 -##灵 -##灶 -##灸 -##灼 -##災 -##灾 -##灿 -##炀 -##炁 -##炅 -##炉 -##炊 -##炎 -##炒 -##炔 -##炕 -##炖 -##炙 -##炜 -##炫 -##炬 -##炭 -##炮 -##炯 -##炳 -##炷 -##炸 -##点 -##為 -##炼 -##炽 -##烁 -##烂 -##烃 -##烈 -##烊 -##烏 -##烘 -##烙 -##烛 -##烟 -##烤 -##烦 -##烧 -##烨 -##烩 -##烫 -##烬 -##热 -##烯 -##烷 -##烹 -##烽 -##焉 -##焊 -##焕 -##焖 -##焗 -##焘 -##焙 -##焚 -##焜 -##無 -##焦 -##焯 -##焰 -##焱 -##然 -##焼 -##煅 -##煉 -##煊 -##煌 -##煎 -##煒 -##煖 -##煙 -##煜 -##煞 -##煤 -##煥 -##煦 -##照 -##煨 -##煩 -##煮 -##煲 -##煸 -##煽 -##熄 -##熊 -##熏 -##熒 -##熔 -##熙 -##熟 -##熠 -##熨 -##熬 -##熱 -##熵 -##熹 -##熾 -##燁 -##燃 -##燄 -##燈 -##燉 -##燊 -##燎 -##燒 -##燔 -##燕 -##燙 -##燜 -##營 -##燥 -##燦 -##燧 -##燭 -##燮 -##燴 -##燻 -##燼 -##燿 -##爆 -##爍 -##爐 -##爛 -##爪 -##爬 -##爭 -##爰 -##爱 -##爲 -##爵 -##父 -##爷 -##爸 -##爹 -##爺 -##爻 -##爽 -##爾 -##牆 -##片 -##版 -##牌 -##牍 -##牒 -##牙 -##牛 -##牝 -##牟 -##牠 -##牡 -##牢 -##牦 -##牧 -##物 -##牯 -##牲 -##牴 -##牵 -##特 -##牺 -##牽 -##犀 -##犁 -##犄 -##犊 -##犍 -##犒 -##犢 -##犧 -##犬 -##犯 -##状 -##犷 -##犸 -##犹 -##狀 -##狂 -##狄 -##狈 -##狎 -##狐 -##狒 -##狗 -##狙 -##狞 -##狠 -##狡 -##狩 -##独 -##狭 -##狮 -##狰 -##狱 -##狸 -##狹 -##狼 -##狽 -##猎 -##猕 -##猖 -##猗 -##猙 -##猛 -##猜 -##猝 -##猥 -##猩 -##猪 -##猫 -##猬 -##献 -##猴 -##猶 -##猷 -##猾 -##猿 -##獄 -##獅 -##獎 -##獐 -##獒 -##獗 -##獠 -##獣 -##獨 -##獭 -##獰 -##獲 -##獵 -##獷 -##獸 -##獺 -##獻 -##獼 -##獾 -##玄 -##率 -##玉 -##王 -##玑 -##玖 -##玛 -##玟 -##玠 -##玥 -##玩 -##玫 -##玮 -##环 -##现 -##玲 -##玳 -##玷 -##玺 -##玻 -##珀 -##珂 -##珅 -##珈 -##珉 -##珊 -##珍 -##珏 -##珐 -##珑 -##珙 -##珞 -##珠 -##珣 -##珥 -##珩 -##珪 -##班 -##珮 -##珲 -##珺 -##現 -##球 -##琅 -##理 -##琇 -##琉 -##琊 -##琍 -##琏 -##琐 -##琛 -##琢 -##琥 -##琦 -##琨 -##琪 -##琬 -##琮 -##琰 -##琲 -##琳 -##琴 -##琵 -##琶 -##琺 -##琼 -##瑀 -##瑁 -##瑄 -##瑋 -##瑕 -##瑗 -##瑙 -##瑚 -##瑛 -##瑜 -##瑞 -##瑟 -##瑠 -##瑣 -##瑤 -##瑩 -##瑪 -##瑯 -##瑰 -##瑶 -##瑾 -##璀 -##璁 -##璃 -##璇 -##璉 -##璋 -##璎 -##璐 -##璜 -##璞 -##璟 -##璧 -##璨 -##環 -##璽 -##璿 -##瓊 -##瓏 -##瓒 -##瓜 -##瓢 -##瓣 -##瓤 -##瓦 -##瓮 -##瓯 -##瓴 -##瓶 -##瓷 -##甄 -##甌 -##甕 -##甘 -##甙 -##甚 -##甜 -##生 -##產 -##産 -##甥 -##甦 -##用 -##甩 -##甫 -##甬 -##甭 -##甯 -##田 -##由 -##甲 -##申 -##电 -##男 -##甸 -##町 -##画 -##甾 -##畀 -##畅 -##界 -##畏 -##畑 -##畔 -##留 -##畜 -##畝 -##畢 -##略 -##畦 -##番 -##畫 -##異 -##畲 -##畳 -##畴 -##當 -##畸 -##畹 -##畿 -##疆 -##疇 -##疊 -##疏 -##疑 -##疔 -##疖 -##疗 -##疙 -##疚 -##疝 -##疟 -##疡 -##疣 -##疤 -##疥 -##疫 -##疮 -##疯 -##疱 -##疲 -##疳 -##疵 -##疸 -##疹 -##疼 -##疽 -##疾 -##痂 -##病 -##症 -##痈 -##痉 -##痊 -##痍 -##痒 -##痔 -##痕 -##痘 -##痙 -##痛 -##痞 -##痠 -##痢 -##痣 -##痤 -##痧 -##痨 -##痪 -##痫 -##痰 -##痱 -##痴 -##痹 -##痺 -##痼 -##痿 -##瘀 -##瘁 -##瘋 -##瘍 -##瘓 -##瘘 -##瘙 -##瘟 -##瘠 -##瘡 -##瘢 -##瘤 -##瘦 -##瘧 -##瘩 -##瘪 -##瘫 -##瘴 -##瘸 -##瘾 -##療 -##癇 -##癌 -##癒 -##癖 -##癜 -##癞 -##癡 -##癢 -##癣 -##癥 -##癫 -##癬 -##癮 -##癱 -##癲 -##癸 -##発 -##登 -##發 -##白 -##百 -##皂 -##的 -##皆 -##皇 -##皈 -##皋 -##皎 -##皑 -##皓 -##皖 -##皙 -##皚 -##皮 -##皰 -##皱 -##皴 -##皺 -##皿 -##盂 -##盃 -##盅 -##盆 -##盈 -##益 -##盎 -##盏 -##盐 -##监 -##盒 -##盔 -##盖 -##盗 -##盘 -##盛 -##盜 -##盞 -##盟 -##盡 -##監 -##盤 -##盥 -##盧 -##盪 -##目 -##盯 -##盱 -##盲 -##直 -##相 -##盹 -##盼 -##盾 -##省 -##眈 -##眉 -##看 -##県 -##眙 -##眞 -##真 -##眠 -##眦 -##眨 -##眩 -##眯 -##眶 -##眷 -##眸 -##眺 -##眼 -##眾 -##着 -##睁 -##睇 
-##睏 -##睐 -##睑 -##睛 -##睜 -##睞 -##睡 -##睢 -##督 -##睥 -##睦 -##睨 -##睪 -##睫 -##睬 -##睹 -##睽 -##睾 -##睿 -##瞄 -##瞅 -##瞇 -##瞋 -##瞌 -##瞎 -##瞑 -##瞒 -##瞓 -##瞞 -##瞟 -##瞠 -##瞥 -##瞧 -##瞩 -##瞪 -##瞬 -##瞭 -##瞰 -##瞳 -##瞻 -##瞼 -##瞿 -##矇 -##矍 -##矗 -##矚 -##矛 -##矜 -##矢 -##矣 -##知 -##矩 -##矫 -##短 -##矮 -##矯 -##石 -##矶 -##矽 -##矾 -##矿 -##码 -##砂 -##砌 -##砍 -##砒 -##研 -##砖 -##砗 -##砚 -##砝 -##砣 -##砥 -##砧 -##砭 -##砰 -##砲 -##破 -##砷 -##砸 -##砺 -##砼 -##砾 -##础 -##硅 -##硐 -##硒 -##硕 -##硝 -##硫 -##硬 -##确 -##硯 -##硼 -##碁 -##碇 -##碉 -##碌 -##碍 -##碎 -##碑 -##碓 -##碗 -##碘 -##碚 -##碛 -##碟 -##碣 -##碧 -##碩 -##碰 -##碱 -##碳 -##碴 -##確 -##碼 -##碾 -##磁 -##磅 -##磊 -##磋 -##磐 -##磕 -##磚 -##磡 -##磨 -##磬 -##磯 -##磲 -##磷 -##磺 -##礁 -##礎 -##礙 -##礡 -##礦 -##礪 -##礫 -##礴 -##示 -##礼 -##社 -##祀 -##祁 -##祂 -##祇 -##祈 -##祉 -##祎 -##祐 -##祕 -##祖 -##祗 -##祚 -##祛 -##祜 -##祝 -##神 -##祟 -##祠 -##祢 -##祥 -##票 -##祭 -##祯 -##祷 -##祸 -##祺 -##祿 -##禀 -##禁 -##禄 -##禅 -##禍 -##禎 -##福 -##禛 -##禦 -##禧 -##禪 -##禮 -##禱 -##禹 -##禺 -##离 -##禽 -##禾 -##禿 -##秀 -##私 -##秃 -##秆 -##秉 -##秋 -##种 -##科 -##秒 -##秘 -##租 -##秣 -##秤 -##秦 -##秧 -##秩 -##秭 -##积 -##称 -##秸 -##移 -##秽 -##稀 -##稅 -##程 -##稍 -##税 -##稔 -##稗 -##稚 -##稜 -##稞 -##稟 -##稠 -##稣 -##種 -##稱 -##稲 -##稳 -##稷 -##稹 -##稻 -##稼 -##稽 -##稿 -##穀 -##穂 -##穆 -##穌 -##積 -##穎 -##穗 -##穢 -##穩 -##穫 -##穴 -##究 -##穷 -##穹 -##空 -##穿 -##突 -##窃 -##窄 -##窈 -##窍 -##窑 -##窒 -##窓 -##窕 -##窖 -##窗 -##窘 -##窜 -##窝 -##窟 -##窠 -##窥 -##窦 -##窨 -##窩 -##窪 -##窮 -##窯 -##窺 -##窿 -##竄 -##竅 -##竇 -##竊 -##立 -##竖 -##站 -##竜 -##竞 -##竟 -##章 -##竣 -##童 -##竭 -##端 -##競 -##竹 -##竺 -##竽 -##竿 -##笃 -##笆 -##笈 -##笋 -##笏 -##笑 -##笔 -##笙 -##笛 -##笞 -##笠 -##符 -##笨 -##第 -##笹 -##笺 -##笼 -##筆 -##等 -##筊 -##筋 -##筍 -##筏 -##筐 -##筑 -##筒 -##答 -##策 -##筛 -##筝 -##筠 -##筱 -##筲 -##筵 -##筷 -##筹 -##签 -##简 -##箇 -##箋 -##箍 -##箏 -##箐 -##箔 -##箕 -##算 -##箝 -##管 -##箩 -##箫 -##箭 -##箱 -##箴 -##箸 -##節 -##篁 -##範 -##篆 -##篇 -##築 -##篑 -##篓 -##篙 -##篝 -##篠 -##篡 -##篤 -##篩 -##篪 -##篮 -##篱 -##篷 -##簇 -##簌 -##簍 -##簡 -##簦 -##簧 -##簪 -##簫 -##簷 -##簸 -##簽 -##簾 -##簿 -##籁 -##籃 -##籌 -##籍 -##籐 -##籟 -##籠 -##籤 -##籬 -##籮 -##籲 -##米 -##类 -##籼 -##籽 -##粄 -##粉 -##粑 -##粒 -##粕 -##粗 -##粘 -##粟 -##粤 -##粥 -##粧 -##粪 -##粮 -##粱 -##粲 -##粳 -##粵 -##粹 -##粼 -##粽 -##精 -##粿 -##糅 -##糊 -##糍 -##糕 -##糖 -##糗 -##糙 -##糜 -##糞 -##糟 -##糠 -##糧 -##糬 -##糯 -##糰 -##糸 -##系 -##糾 -##紀 -##紂 -##約 -##紅 -##紉 -##紊 -##紋 -##納 -##紐 -##紓 -##純 -##紗 -##紘 -##紙 -##級 -##紛 -##紜 -##素 -##紡 -##索 -##紧 -##紫 -##紮 -##累 -##細 -##紳 -##紹 -##紺 -##終 -##絃 -##組 -##絆 -##経 -##結 -##絕 -##絞 -##絡 -##絢 -##給 -##絨 -##絮 -##統 -##絲 -##絳 -##絵 -##絶 -##絹 -##綁 -##綏 -##綑 -##經 -##継 -##続 -##綜 -##綠 -##綢 -##綦 -##綫 -##綬 -##維 -##綱 -##網 -##綴 -##綵 -##綸 -##綺 -##綻 -##綽 -##綾 -##綿 -##緊 -##緋 -##総 -##緑 -##緒 -##緘 -##線 -##緝 -##緞 -##締 -##緣 -##編 -##緩 -##緬 -##緯 -##練 -##緹 -##緻 -##縁 -##縄 -##縈 -##縛 -##縝 -##縣 -##縫 -##縮 -##縱 -##縴 -##縷 -##總 -##績 -##繁 -##繃 -##繆 -##繇 -##繋 -##織 -##繕 -##繚 -##繞 -##繡 -##繩 -##繪 -##繫 -##繭 -##繳 -##繹 -##繼 -##繽 -##纂 -##續 -##纍 -##纏 -##纓 -##纔 -##纖 -##纜 -##纠 -##红 -##纣 -##纤 -##约 -##级 -##纨 -##纪 -##纫 -##纬 -##纭 -##纯 -##纰 -##纱 -##纲 -##纳 -##纵 -##纶 -##纷 -##纸 -##纹 -##纺 -##纽 -##纾 -##线 -##绀 -##练 -##组 -##绅 -##细 -##织 -##终 -##绊 -##绍 -##绎 -##经 -##绑 -##绒 -##结 -##绔 -##绕 -##绘 -##给 -##绚 -##绛 -##络 -##绝 -##绞 -##统 -##绡 -##绢 -##绣 -##绥 -##绦 -##继 -##绩 -##绪 -##绫 -##续 -##绮 -##绯 -##绰 -##绳 -##维 -##绵 -##绶 -##绷 -##绸 -##绻 -##综 -##绽 -##绾 -##绿 -##缀 -##缄 -##缅 -##缆 -##缇 -##缈 -##缉 -##缎 -##缓 -##缔 -##缕 -##编 -##缘 -##缙 -##缚 -##缜 -##缝 -##缠 -##缢 -##缤 -##缥 -##缨 -##缩 -##缪 -##缭 -##缮 -##缰 -##缱 -##缴 -##缸 -##缺 -##缽 -##罂 -##罄 -##罌 -##罐 -##网 -##罔 -##罕 -##罗 -##罚 -##罡 -##罢 -##罩 -##罪 -##置 -##罰 -##署 -##罵 -##罷 -##罹 -##羁 -##羅 -##羈 -##羊 -##羌 -##美 -##羔 -##羚 -##羞 -##羟 -##羡 -##羣 -##群 -##羥 -##羧 -##羨 -##義 -##羯 -##羲 -##羸 -##羹 -##羽 -##羿 -##翁 -##翅 -##翊 
-##翌 -##翎 -##習 -##翔 -##翘 -##翟 -##翠 -##翡 -##翦 -##翩 -##翰 -##翱 -##翳 -##翹 -##翻 -##翼 -##耀 -##老 -##考 -##耄 -##者 -##耆 -##耋 -##而 -##耍 -##耐 -##耒 -##耕 -##耗 -##耘 -##耙 -##耦 -##耨 -##耳 -##耶 -##耷 -##耸 -##耻 -##耽 -##耿 -##聂 -##聆 -##聊 -##聋 -##职 -##聒 -##联 -##聖 -##聘 -##聚 -##聞 -##聪 -##聯 -##聰 -##聲 -##聳 -##聴 -##聶 -##職 -##聽 -##聾 -##聿 -##肃 -##肄 -##肅 -##肆 -##肇 -##肉 -##肋 -##肌 -##肏 -##肓 -##肖 -##肘 -##肚 -##肛 -##肝 -##肠 -##股 -##肢 -##肤 -##肥 -##肩 -##肪 -##肮 -##肯 -##肱 -##育 -##肴 -##肺 -##肽 -##肾 -##肿 -##胀 -##胁 -##胃 -##胄 -##胆 -##背 -##胍 -##胎 -##胖 -##胚 -##胛 -##胜 -##胝 -##胞 -##胡 -##胤 -##胥 -##胧 -##胫 -##胭 -##胯 -##胰 -##胱 -##胳 -##胴 -##胶 -##胸 -##胺 -##能 -##脂 -##脅 -##脆 -##脇 -##脈 -##脉 -##脊 -##脍 -##脏 -##脐 -##脑 -##脓 -##脖 -##脘 -##脚 -##脛 -##脣 -##脩 -##脫 -##脯 -##脱 -##脲 -##脳 -##脸 -##脹 -##脾 -##腆 -##腈 -##腊 -##腋 -##腌 -##腎 -##腐 -##腑 -##腓 -##腔 -##腕 -##腥 -##腦 -##腩 -##腫 -##腭 -##腮 -##腰 -##腱 -##腳 -##腴 -##腸 -##腹 -##腺 -##腻 -##腼 -##腾 -##腿 -##膀 -##膈 -##膊 -##膏 -##膑 -##膘 -##膚 -##膛 -##膜 -##膝 -##膠 -##膦 -##膨 -##膩 -##膳 -##膺 -##膻 -##膽 -##膾 -##膿 -##臀 -##臂 -##臃 -##臆 -##臉 -##臊 -##臍 -##臓 -##臘 -##臟 -##臣 -##臥 -##臧 -##臨 -##自 -##臬 -##臭 -##至 -##致 -##臺 -##臻 -##臼 -##臾 -##舀 -##舂 -##舅 -##舆 -##與 -##興 -##舉 -##舊 -##舌 -##舍 -##舎 -##舐 -##舒 -##舔 -##舖 -##舗 -##舛 -##舜 -##舞 -##舟 -##航 -##舫 -##般 -##舰 -##舱 -##舵 -##舶 -##舷 -##舸 -##船 -##舺 -##舾 -##艇 -##艋 -##艘 -##艙 -##艦 -##艮 -##良 -##艰 -##艱 -##色 -##艳 -##艷 -##艹 -##艺 -##艾 -##节 -##芃 -##芈 -##芊 -##芋 -##芍 -##芎 -##芒 -##芙 -##芜 -##芝 -##芡 -##芥 -##芦 -##芩 -##芪 -##芫 -##芬 -##芭 -##芮 -##芯 -##花 -##芳 -##芷 -##芸 -##芹 -##芻 -##芽 -##芾 -##苁 -##苄 -##苇 -##苋 -##苍 -##苏 -##苑 -##苒 -##苓 -##苔 -##苕 -##苗 -##苛 -##苜 -##苞 -##苟 -##苡 -##苣 -##若 -##苦 -##苫 -##苯 -##英 -##苷 -##苹 -##苻 -##茁 -##茂 -##范 -##茄 -##茅 -##茉 -##茎 -##茏 -##茗 -##茜 -##茧 -##茨 -##茫 -##茬 -##茭 -##茯 -##茱 -##茲 -##茴 -##茵 -##茶 -##茸 -##茹 -##茼 -##荀 -##荃 -##荆 -##草 -##荊 -##荏 -##荐 -##荒 -##荔 -##荖 -##荘 -##荚 -##荞 -##荟 -##荠 -##荡 -##荣 -##荤 -##荥 -##荧 -##荨 -##荪 -##荫 -##药 -##荳 -##荷 -##荸 -##荻 -##荼 -##荽 -##莅 -##莆 -##莉 -##莊 -##莎 -##莒 -##莓 -##莖 -##莘 -##莞 -##莠 -##莢 -##莧 -##莪 -##莫 -##莱 -##莲 -##莴 -##获 -##莹 -##莺 -##莽 -##莿 -##菀 -##菁 -##菅 -##菇 -##菈 -##菊 -##菌 -##菏 -##菓 -##菖 -##菘 -##菜 -##菟 -##菠 -##菡 -##菩 -##華 -##菱 -##菲 -##菸 -##菽 -##萁 -##萃 -##萄 -##萊 -##萋 -##萌 -##萍 -##萎 -##萘 -##萝 -##萤 -##营 -##萦 -##萧 -##萨 -##萩 -##萬 -##萱 -##萵 -##萸 -##萼 -##落 -##葆 -##葉 -##著 -##葚 -##葛 -##葡 -##董 -##葦 -##葩 -##葫 -##葬 -##葭 -##葯 -##葱 -##葳 -##葵 -##葷 -##葺 -##蒂 -##蒋 -##蒐 -##蒔 -##蒙 -##蒜 -##蒞 -##蒟 -##蒡 -##蒨 -##蒲 -##蒸 -##蒹 -##蒻 -##蒼 -##蒿 -##蓁 -##蓄 -##蓆 -##蓉 -##蓋 -##蓑 -##蓓 -##蓖 -##蓝 -##蓟 -##蓦 -##蓬 -##蓮 -##蓼 -##蓿 -##蔑 -##蔓 -##蔔 -##蔗 -##蔘 -##蔚 -##蔡 -##蔣 -##蔥 -##蔫 -##蔬 -##蔭 -##蔵 -##蔷 -##蔺 -##蔻 -##蔼 -##蔽 -##蕁 -##蕃 -##蕈 -##蕉 -##蕊 -##蕎 -##蕙 -##蕤 -##蕨 -##蕩 -##蕪 -##蕭 -##蕲 -##蕴 -##蕻 -##蕾 -##薄 -##薅 -##薇 -##薈 -##薊 -##薏 -##薑 -##薔 -##薙 -##薛 -##薦 -##薨 -##薩 -##薪 -##薬 -##薯 -##薰 -##薹 -##藉 -##藍 -##藏 -##藐 -##藓 -##藕 -##藜 -##藝 -##藤 -##藥 -##藩 -##藹 -##藻 -##藿 -##蘆 -##蘇 -##蘊 -##蘋 -##蘑 -##蘚 -##蘭 -##蘸 -##蘼 -##蘿 -##虎 -##虏 -##虐 -##虑 -##虔 -##處 -##虚 -##虛 -##虜 -##虞 -##號 -##虢 -##虧 -##虫 -##虬 -##虱 -##虹 -##虻 -##虽 -##虾 -##蚀 -##蚁 -##蚂 -##蚊 -##蚌 -##蚓 -##蚕 -##蚜 -##蚝 -##蚣 -##蚤 -##蚩 -##蚪 -##蚯 -##蚱 -##蚵 -##蛀 -##蛆 -##蛇 -##蛊 -##蛋 -##蛎 -##蛐 -##蛔 -##蛙 -##蛛 -##蛟 -##蛤 -##蛭 -##蛮 -##蛰 -##蛳 -##蛹 -##蛻 -##蛾 -##蜀 -##蜂 -##蜃 -##蜆 -##蜇 -##蜈 -##蜊 -##蜍 -##蜒 -##蜓 -##蜕 -##蜗 -##蜘 -##蜚 -##蜜 -##蜡 -##蜢 -##蜥 -##蜱 -##蜴 -##蜷 -##蜻 -##蜿 -##蝇 -##蝈 -##蝉 -##蝌 -##蝎 -##蝕 -##蝗 -##蝙 -##蝟 -##蝠 -##蝦 -##蝨 -##蝴 -##蝶 -##蝸 -##蝼 -##螂 -##螃 -##融 -##螞 -##螢 -##螨 -##螯 -##螳 -##螺 -##蟀 -##蟄 -##蟆 -##蟋 -##蟎 -##蟑 -##蟒 -##蟠 -##蟬 -##蟲 -##蟹 -##蟻 -##蟾 -##蠅 -##蠍 -##蠔 -##蠕 -##蠛 -##蠟 -##蠡 -##蠢 -##蠣 -##蠱 -##蠶 -##蠹 -##蠻 -##血 -##衄 -##衅 -##衆 -##行 -##衍 -##術 -##衔 -##街 -##衙 -##衛 -##衝 -##衞 -##衡 -##衢 -##衣 
-##补 -##表 -##衩 -##衫 -##衬 -##衮 -##衰 -##衲 -##衷 -##衹 -##衾 -##衿 -##袁 -##袂 -##袄 -##袅 -##袈 -##袋 -##袍 -##袒 -##袖 -##袜 -##袞 -##袤 -##袪 -##被 -##袭 -##袱 -##裁 -##裂 -##装 -##裆 -##裊 -##裏 -##裔 -##裕 -##裘 -##裙 -##補 -##裝 -##裟 -##裡 -##裤 -##裨 -##裱 -##裳 -##裴 -##裸 -##裹 -##製 -##裾 -##褂 -##複 -##褐 -##褒 -##褓 -##褔 -##褚 -##褥 -##褪 -##褫 -##褲 -##褶 -##褻 -##襁 -##襄 -##襟 -##襠 -##襪 -##襬 -##襯 -##襲 -##西 -##要 -##覃 -##覆 -##覇 -##見 -##規 -##覓 -##視 -##覚 -##覦 -##覧 -##親 -##覬 -##観 -##覷 -##覺 -##覽 -##觀 -##见 -##观 -##规 -##觅 -##视 -##览 -##觉 -##觊 -##觎 -##觐 -##觑 -##角 -##觞 -##解 -##觥 -##触 -##觸 -##言 -##訂 -##計 -##訊 -##討 -##訓 -##訕 -##訖 -##託 -##記 -##訛 -##訝 -##訟 -##訣 -##訥 -##訪 -##設 -##許 -##訳 -##訴 -##訶 -##診 -##註 -##証 -##詆 -##詐 -##詔 -##評 -##詛 -##詞 -##詠 -##詡 -##詢 -##詣 -##試 -##詩 -##詫 -##詬 -##詭 -##詮 -##詰 -##話 -##該 -##詳 -##詹 -##詼 -##誅 -##誇 -##誉 -##誌 -##認 -##誓 -##誕 -##誘 -##語 -##誠 -##誡 -##誣 -##誤 -##誥 -##誦 -##誨 -##說 -##説 -##読 -##誰 -##課 -##誹 -##誼 -##調 -##諄 -##談 -##請 -##諏 -##諒 -##論 -##諗 -##諜 -##諡 -##諦 -##諧 -##諫 -##諭 -##諮 -##諱 -##諳 -##諷 -##諸 -##諺 -##諾 -##謀 -##謁 -##謂 -##謄 -##謊 -##謎 -##謐 -##謔 -##謗 -##謙 -##講 -##謝 -##謠 -##謨 -##謬 -##謹 -##謾 -##譁 -##證 -##譎 -##譏 -##識 -##譙 -##譚 -##譜 -##警 -##譬 -##譯 -##議 -##譲 -##譴 -##護 -##譽 -##讀 -##變 -##讓 -##讚 -##讞 -##计 -##订 -##认 -##讥 -##讧 -##讨 -##让 -##讪 -##讫 -##训 -##议 -##讯 -##记 -##讲 -##讳 -##讴 -##讶 -##讷 -##许 -##讹 -##论 -##讼 -##讽 -##设 -##访 -##诀 -##证 -##诃 -##评 -##诅 -##识 -##诈 -##诉 -##诊 -##诋 -##词 -##诏 -##译 -##试 -##诗 -##诘 -##诙 -##诚 -##诛 -##话 -##诞 -##诟 -##诠 -##诡 -##询 -##诣 -##诤 -##该 -##详 -##诧 -##诩 -##诫 -##诬 -##语 -##误 -##诰 -##诱 -##诲 -##说 -##诵 -##诶 -##请 -##诸 -##诺 -##读 -##诽 -##课 -##诿 -##谀 -##谁 -##调 -##谄 -##谅 -##谆 -##谈 -##谊 -##谋 -##谌 -##谍 -##谎 -##谏 -##谐 -##谑 -##谒 -##谓 -##谔 -##谕 -##谗 -##谘 -##谙 -##谚 -##谛 -##谜 -##谟 -##谢 -##谣 -##谤 -##谥 -##谦 -##谧 -##谨 -##谩 -##谪 -##谬 -##谭 -##谯 -##谱 -##谲 -##谴 -##谶 -##谷 -##豁 -##豆 -##豇 -##豈 -##豉 -##豊 -##豌 -##豎 -##豐 -##豔 -##豚 -##象 -##豢 -##豪 -##豫 -##豬 -##豹 -##豺 -##貂 -##貅 -##貌 -##貓 -##貔 -##貘 -##貝 -##貞 -##負 -##財 -##貢 -##貧 -##貨 -##販 -##貪 -##貫 -##責 -##貯 -##貰 -##貳 -##貴 -##貶 -##買 -##貸 -##費 -##貼 -##貽 -##貿 -##賀 -##賁 -##賂 -##賃 -##賄 -##資 -##賈 -##賊 -##賑 -##賓 -##賜 -##賞 -##賠 -##賡 -##賢 -##賣 -##賤 -##賦 -##質 -##賬 -##賭 -##賴 -##賺 -##購 -##賽 -##贅 -##贈 -##贊 -##贍 -##贏 -##贓 -##贖 -##贛 -##贝 -##贞 -##负 -##贡 -##财 -##责 -##贤 -##败 -##账 -##货 -##质 -##贩 -##贪 -##贫 -##贬 -##购 -##贮 -##贯 -##贰 -##贱 -##贲 -##贴 -##贵 -##贷 -##贸 -##费 -##贺 -##贻 -##贼 -##贾 -##贿 -##赁 -##赂 -##赃 -##资 -##赅 -##赈 -##赊 -##赋 -##赌 -##赎 -##赏 -##赐 -##赓 -##赔 -##赖 -##赘 -##赚 -##赛 -##赝 -##赞 -##赠 -##赡 -##赢 -##赣 -##赤 -##赦 -##赧 -##赫 -##赭 -##走 -##赳 -##赴 -##赵 -##赶 -##起 -##趁 -##超 -##越 -##趋 -##趕 -##趙 -##趟 -##趣 -##趨 -##足 -##趴 -##趵 -##趸 -##趺 -##趾 -##跃 -##跄 -##跆 -##跋 -##跌 -##跎 -##跑 -##跖 -##跚 -##跛 -##距 -##跟 -##跡 -##跤 -##跨 -##跩 -##跪 -##路 -##跳 -##践 -##跷 -##跹 -##跺 -##跻 -##踉 -##踊 -##踌 -##踏 -##踐 -##踝 -##踞 -##踟 -##踢 -##踩 -##踪 -##踮 -##踱 -##踴 -##踵 -##踹 -##蹂 -##蹄 -##蹇 -##蹈 -##蹉 -##蹊 -##蹋 -##蹑 -##蹒 -##蹙 -##蹟 -##蹣 -##蹤 -##蹦 -##蹩 -##蹬 -##蹭 -##蹲 -##蹴 -##蹶 -##蹺 -##蹼 -##蹿 -##躁 -##躇 -##躉 -##躊 -##躋 -##躍 -##躏 -##躪 -##身 -##躬 -##躯 -##躲 -##躺 -##軀 -##車 -##軋 -##軌 -##軍 -##軒 -##軟 -##転 -##軸 -##軼 -##軽 -##軾 -##較 -##載 -##輒 -##輓 -##輔 -##輕 -##輛 -##輝 -##輟 -##輩 -##輪 -##輯 -##輸 -##輻 -##輾 -##輿 -##轄 -##轅 -##轆 -##轉 -##轍 -##轎 -##轟 -##车 -##轧 -##轨 -##轩 -##转 -##轭 -##轮 -##软 -##轰 -##轲 -##轴 -##轶 -##轻 -##轼 -##载 -##轿 -##较 -##辄 -##辅 -##辆 -##辇 -##辈 -##辉 -##辊 -##辍 -##辐 -##辑 -##输 -##辕 -##辖 -##辗 -##辘 -##辙 -##辛 -##辜 -##辞 -##辟 -##辣 -##辦 -##辨 -##辩 -##辫 -##辭 -##辮 -##辯 -##辰 -##辱 -##農 -##边 -##辺 -##辻 -##込 -##辽 -##达 -##迁 -##迂 -##迄 -##迅 -##过 -##迈 -##迎 -##运 -##近 -##返 -##还 -##这 -##进 -##远 -##违 -##连 -##迟 -##迢 -##迤 -##迥 -##迦 -##迩 -##迪 -##迫 -##迭 -##述 -##迴 -##迷 -##迸 -##迹 -##迺 -##追 -##退 -##送 
-##适 -##逃 -##逅 -##逆 -##选 -##逊 -##逍 -##透 -##逐 -##递 -##途 -##逕 -##逗 -##這 -##通 -##逛 -##逝 -##逞 -##速 -##造 -##逢 -##連 -##逮 -##週 -##進 -##逵 -##逶 -##逸 -##逻 -##逼 -##逾 -##遁 -##遂 -##遅 -##遇 -##遊 -##運 -##遍 -##過 -##遏 -##遐 -##遑 -##遒 -##道 -##達 -##違 -##遗 -##遙 -##遛 -##遜 -##遞 -##遠 -##遢 -##遣 -##遥 -##遨 -##適 -##遭 -##遮 -##遲 -##遴 -##遵 -##遶 -##遷 -##選 -##遺 -##遼 -##遽 -##避 -##邀 -##邁 -##邂 -##邃 -##還 -##邇 -##邈 -##邊 -##邋 -##邏 -##邑 -##邓 -##邕 -##邛 -##邝 -##邢 -##那 -##邦 -##邨 -##邪 -##邬 -##邮 -##邯 -##邰 -##邱 -##邳 -##邵 -##邸 -##邹 -##邺 -##邻 -##郁 -##郅 -##郊 -##郎 -##郑 -##郜 -##郝 -##郡 -##郢 -##郤 -##郦 -##郧 -##部 -##郫 -##郭 -##郴 -##郵 -##郷 -##郸 -##都 -##鄂 -##鄉 -##鄒 -##鄔 -##鄙 -##鄞 -##鄢 -##鄧 -##鄭 -##鄰 -##鄱 -##鄲 -##鄺 -##酉 -##酊 -##酋 -##酌 -##配 -##酐 -##酒 -##酗 -##酚 -##酝 -##酢 -##酣 -##酥 -##酩 -##酪 -##酬 -##酮 -##酯 -##酰 -##酱 -##酵 -##酶 -##酷 -##酸 -##酿 -##醃 -##醇 -##醉 -##醋 -##醍 -##醐 -##醒 -##醚 -##醛 -##醜 -##醞 -##醣 -##醪 -##醫 -##醬 -##醮 -##醯 -##醴 -##醺 -##釀 -##釁 -##采 -##釉 -##释 -##釋 -##里 -##重 -##野 -##量 -##釐 -##金 -##釗 -##釘 -##釜 -##針 -##釣 -##釦 -##釧 -##釵 -##鈀 -##鈉 -##鈍 -##鈎 -##鈔 -##鈕 -##鈞 -##鈣 -##鈦 -##鈪 -##鈴 -##鈺 -##鈾 -##鉀 -##鉄 -##鉅 -##鉉 -##鉑 -##鉗 -##鉚 -##鉛 -##鉤 -##鉴 -##鉻 -##銀 -##銃 -##銅 -##銑 -##銓 -##銖 -##銘 -##銜 -##銬 -##銭 -##銮 -##銳 -##銷 -##銹 -##鋁 -##鋅 -##鋒 -##鋤 -##鋪 -##鋰 -##鋸 -##鋼 -##錄 -##錐 -##錘 -##錚 -##錠 -##錢 -##錦 -##錨 -##錫 -##錮 -##錯 -##録 -##錳 -##錶 -##鍊 -##鍋 -##鍍 -##鍛 -##鍥 -##鍰 -##鍵 -##鍺 -##鍾 -##鎂 -##鎊 -##鎌 -##鎏 -##鎔 -##鎖 -##鎗 -##鎚 -##鎧 -##鎬 -##鎮 -##鎳 -##鏈 -##鏖 -##鏗 -##鏘 -##鏞 -##鏟 -##鏡 -##鏢 -##鏤 -##鏽 -##鐘 -##鐮 -##鐲 -##鐳 -##鐵 -##鐸 -##鐺 -##鑄 -##鑊 -##鑑 -##鑒 -##鑣 -##鑫 -##鑰 -##鑲 -##鑼 -##鑽 -##鑾 -##鑿 -##针 -##钉 -##钊 -##钎 -##钏 -##钒 -##钓 -##钗 -##钙 -##钛 -##钜 -##钝 -##钞 -##钟 -##钠 -##钡 -##钢 -##钣 -##钤 -##钥 -##钦 -##钧 -##钨 -##钩 -##钮 -##钯 -##钰 -##钱 -##钳 -##钴 -##钵 -##钺 -##钻 -##钼 -##钾 -##钿 -##铀 -##铁 -##铂 -##铃 -##铄 -##铅 -##铆 -##铉 -##铎 -##铐 -##铛 -##铜 -##铝 -##铠 -##铡 -##铢 -##铣 -##铤 -##铨 -##铩 -##铬 -##铭 -##铮 -##铰 -##铲 -##铵 -##银 -##铸 -##铺 -##链 -##铿 -##销 -##锁 -##锂 -##锄 -##锅 -##锆 -##锈 -##锉 -##锋 -##锌 -##锏 -##锐 -##锑 -##错 -##锚 -##锟 -##锡 -##锢 -##锣 -##锤 -##锥 -##锦 -##锭 -##键 -##锯 -##锰 -##锲 -##锵 -##锹 -##锺 -##锻 -##镀 -##镁 -##镂 -##镇 -##镉 -##镌 -##镍 -##镐 -##镑 -##镕 -##镖 -##镗 -##镛 -##镜 -##镣 -##镭 -##镯 -##镰 -##镳 -##镶 -##長 -##长 -##門 -##閃 -##閉 -##開 -##閎 -##閏 -##閑 -##閒 -##間 -##閔 -##閘 -##閡 -##関 -##閣 -##閥 -##閨 -##閩 -##閱 -##閲 -##閹 -##閻 -##閾 -##闆 -##闇 -##闊 -##闌 -##闍 -##闔 -##闕 -##闖 -##闘 -##關 -##闡 -##闢 -##门 -##闪 -##闫 -##闭 -##问 -##闯 -##闰 -##闲 -##间 -##闵 -##闷 -##闸 -##闹 -##闺 -##闻 -##闽 -##闾 -##阀 -##阁 -##阂 -##阅 -##阆 -##阇 -##阈 -##阉 -##阎 -##阐 -##阑 -##阔 -##阕 -##阖 -##阙 -##阚 -##阜 -##队 -##阡 -##阪 -##阮 -##阱 -##防 -##阳 -##阴 -##阵 -##阶 -##阻 -##阿 -##陀 -##陂 -##附 -##际 -##陆 -##陇 -##陈 -##陋 -##陌 -##降 -##限 -##陕 -##陛 -##陝 -##陞 -##陟 -##陡 -##院 -##陣 -##除 -##陨 -##险 -##陪 -##陰 -##陲 -##陳 -##陵 -##陶 -##陷 -##陸 -##険 -##陽 -##隅 -##隆 -##隈 -##隊 -##隋 -##隍 -##階 -##随 -##隐 -##隔 -##隕 -##隘 -##隙 -##際 -##障 -##隠 -##隣 -##隧 -##隨 -##險 -##隱 -##隴 -##隶 -##隸 -##隻 -##隼 -##隽 -##难 -##雀 -##雁 -##雄 -##雅 -##集 -##雇 -##雉 -##雋 -##雌 -##雍 -##雎 -##雏 -##雑 -##雒 -##雕 -##雖 -##雙 -##雛 -##雜 -##雞 -##離 -##難 -##雨 -##雪 -##雯 -##雰 -##雲 -##雳 -##零 -##雷 -##雹 -##電 -##雾 -##需 -##霁 -##霄 -##霆 -##震 -##霈 -##霉 -##霊 -##霍 -##霎 -##霏 -##霑 -##霓 -##霖 -##霜 -##霞 -##霧 -##霭 -##霰 -##露 -##霸 -##霹 -##霽 -##霾 -##靂 -##靄 -##靈 -##青 -##靓 -##靖 -##静 -##靚 -##靛 -##靜 -##非 -##靠 -##靡 -##面 -##靥 -##靦 -##革 -##靳 -##靴 -##靶 -##靼 -##鞅 -##鞋 -##鞍 -##鞏 -##鞑 -##鞘 -##鞠 -##鞣 -##鞦 -##鞭 -##韆 -##韋 -##韌 -##韓 -##韜 -##韦 -##韧 -##韩 -##韬 -##韭 -##音 -##韵 -##韶 -##韻 -##響 -##頁 -##頂 -##頃 -##項 -##順 -##須 -##頌 -##預 -##頑 -##頒 -##頓 -##頗 -##領 -##頜 -##頡 -##頤 -##頫 -##頭 -##頰 -##頷 -##頸 -##頹 -##頻 -##頼 -##顆 -##題 -##額 -##顎 -##顏 -##顔 -##願 -##顛 -##類 -##顧 -##顫 -##顯 -##顱 -##顴 -##页 -##顶 -##顷 
-##项 -##顺 -##须 -##顼 -##顽 -##顾 -##顿 -##颁 -##颂 -##预 -##颅 -##领 -##颇 -##颈 -##颉 -##颊 -##颌 -##颍 -##颐 -##频 -##颓 -##颔 -##颖 -##颗 -##题 -##颚 -##颛 -##颜 -##额 -##颞 -##颠 -##颡 -##颢 -##颤 -##颦 -##颧 -##風 -##颯 -##颱 -##颳 -##颶 -##颼 -##飄 -##飆 -##风 -##飒 -##飓 -##飕 -##飘 -##飙 -##飚 -##飛 -##飞 -##食 -##飢 -##飨 -##飩 -##飪 -##飯 -##飲 -##飼 -##飽 -##飾 -##餃 -##餅 -##餉 -##養 -##餌 -##餐 -##餒 -##餓 -##餘 -##餚 -##餛 -##餞 -##餡 -##館 -##餮 -##餵 -##餾 -##饅 -##饈 -##饋 -##饌 -##饍 -##饑 -##饒 -##饕 -##饗 -##饞 -##饥 -##饨 -##饪 -##饬 -##饭 -##饮 -##饯 -##饰 -##饱 -##饲 -##饴 -##饵 -##饶 -##饷 -##饺 -##饼 -##饽 -##饿 -##馀 -##馁 -##馄 -##馅 -##馆 -##馈 -##馋 -##馍 -##馏 -##馒 -##馔 -##首 -##馗 -##香 -##馥 -##馨 -##馬 -##馭 -##馮 -##馳 -##馴 -##駁 -##駄 -##駅 -##駆 -##駐 -##駒 -##駕 -##駛 -##駝 -##駭 -##駱 -##駿 -##騁 -##騎 -##騏 -##験 -##騙 -##騨 -##騰 -##騷 -##驀 -##驅 -##驊 -##驍 -##驒 -##驕 -##驗 -##驚 -##驛 -##驟 -##驢 -##驥 -##马 -##驭 -##驮 -##驯 -##驰 -##驱 -##驳 -##驴 -##驶 -##驷 -##驸 -##驹 -##驻 -##驼 -##驾 -##驿 -##骁 -##骂 -##骄 -##骅 -##骆 -##骇 -##骈 -##骊 -##骋 -##验 -##骏 -##骐 -##骑 -##骗 -##骚 -##骛 -##骜 -##骞 -##骠 -##骡 -##骤 -##骥 -##骧 -##骨 -##骯 -##骰 -##骶 -##骷 -##骸 -##骼 -##髂 -##髅 -##髋 -##髏 -##髒 -##髓 -##體 -##髖 -##高 -##髦 -##髪 -##髮 -##髯 -##髻 -##鬃 -##鬆 -##鬍 -##鬓 -##鬚 -##鬟 -##鬢 -##鬣 -##鬥 -##鬧 -##鬱 -##鬼 -##魁 -##魂 -##魄 -##魅 -##魇 -##魍 -##魏 -##魔 -##魘 -##魚 -##魯 -##魷 -##鮑 -##鮨 -##鮪 -##鮭 -##鮮 -##鯉 -##鯊 -##鯖 -##鯛 -##鯨 -##鯰 -##鯽 -##鰍 -##鰓 -##鰭 -##鰲 -##鰻 -##鰾 -##鱈 -##鱉 -##鱔 -##鱗 -##鱷 -##鱸 -##鱼 -##鱿 -##鲁 -##鲈 -##鲍 -##鲑 -##鲛 -##鲜 -##鲟 -##鲢 -##鲤 -##鲨 -##鲫 -##鲱 -##鲲 -##鲶 -##鲷 -##鲸 -##鳃 -##鳄 -##鳅 -##鳌 -##鳍 -##鳕 -##鳖 -##鳗 -##鳝 -##鳞 -##鳥 -##鳩 -##鳳 -##鳴 -##鳶 -##鴉 -##鴕 -##鴛 -##鴦 -##鴨 -##鴻 -##鴿 -##鵑 -##鵜 -##鵝 -##鵡 -##鵬 -##鵰 -##鵲 -##鶘 -##鶩 -##鶯 -##鶴 -##鷗 -##鷲 -##鷹 -##鷺 -##鸚 -##鸞 -##鸟 -##鸠 -##鸡 -##鸢 -##鸣 -##鸥 -##鸦 -##鸨 -##鸪 -##鸭 -##鸯 -##鸳 -##鸵 -##鸽 -##鸾 -##鸿 -##鹂 -##鹃 -##鹄 -##鹅 -##鹈 -##鹉 -##鹊 -##鹌 -##鹏 -##鹑 -##鹕 -##鹘 -##鹜 -##鹞 -##鹤 -##鹦 -##鹧 -##鹫 -##鹭 -##鹰 -##鹳 -##鹵 -##鹹 -##鹼 -##鹽 -##鹿 -##麂 -##麋 -##麒 -##麓 -##麗 -##麝 -##麟 -##麥 -##麦 -##麩 -##麴 -##麵 -##麸 -##麺 -##麻 -##麼 -##麽 -##麾 -##黃 -##黄 -##黍 -##黎 -##黏 -##黑 -##黒 -##黔 -##默 -##黛 -##黜 -##黝 -##點 -##黠 -##黨 -##黯 -##黴 -##鼋 -##鼎 -##鼐 -##鼓 -##鼠 -##鼬 -##鼹 -##鼻 -##鼾 -##齁 -##齊 -##齋 -##齐 -##齒 -##齡 -##齢 -##齣 -##齦 -##齿 -##龄 -##龅 -##龈 -##龊 -##龋 -##龌 -##龍 -##龐 -##龔 -##龕 -##龙 -##龚 -##龛 -##龜 -##龟 -##︰ -##︱ -##︶ -##︿ -##﹁ -##﹂ -##﹍ -##﹏ -##﹐ -##﹑ -##﹒ -##﹔ -##﹕ -##﹖ -##﹗ -##﹙ -##﹚ -##﹝ -##﹞ -##﹡ -##﹣ -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##, -##- -##. -##/ -##: -##; -##< -##? 
-##@ -##[ -##\ -##] -##^ -##_ -##` -##f -##h -##j -##u -##w -##z -##{ -##} -##。 -##「 -##」 -##、 -##・ -##ッ -##ー -##イ -##ク -##シ -##ス -##ト -##ノ -##フ -##ラ -##ル -##ン -##゙ -##゚ -## ̄ -##¥ -##👍 -##🔥 -##😂 -##😎 diff --git a/TensorFlow/built-in/nlp/albert_xlarge_zh_ID2348_for_TensorFlow/test/train_performance_1p_finetune.sh b/TensorFlow/built-in/nlp/albert_xlarge_zh_ID2348_for_TensorFlow/test/train_performance_1p_finetune.sh index 319d60c9f6c2a12c56891cd89045a9a2d45a3d9d..5552d4802689ea63e0ca9ee0bd02429be75597a0 100644 --- a/TensorFlow/built-in/nlp/albert_xlarge_zh_ID2348_for_TensorFlow/test/train_performance_1p_finetune.sh +++ b/TensorFlow/built-in/nlp/albert_xlarge_zh_ID2348_for_TensorFlow/test/train_performance_1p_finetune.sh @@ -80,6 +80,7 @@ else mkdir -p $cur_path/test/output/$ASCEND_DEVICE_ID fi +cp -r $data_path/albert_config $cur_path/ #执行训练 cd $cur_path diff --git a/TensorFlow/built-in/recommendation/DCN_ID1986_for_TensorFlow/readme.md b/TensorFlow/built-in/recommendation/DCN_ID1986_for_TensorFlow/readme.md index 624b047b9a91024e833860e6afa0077eef68dfca..dfefc40dedd91414f5577b0662352be1fb83e91e 100644 --- a/TensorFlow/built-in/recommendation/DCN_ID1986_for_TensorFlow/readme.md +++ b/TensorFlow/built-in/recommendation/DCN_ID1986_for_TensorFlow/readme.md @@ -139,8 +139,11 @@ custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes(FLAGS.precision

Quick Start

## Dataset Preparation -1. Download the dataset: deepfm - +1. Download the criteo dataset and convert it to tfrecord format; for dataset download and format processing, refer to + + ``` + https://github.com/mindspore-ai/models/tree/master/official/recommend/wide_and_deep + ``` ## Model Training - Click "Download Now" and choose a suitable download method to download the source package - Start training. diff --git a/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/README.md b/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/README.md index 19106617a7959682666d3ed86c70646032c51f19..e110bd50ad5034b97c20cc6fdc3faf9cbca30005 100644 --- a/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/README.md +++ b/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/README.md @@ -29,6 +29,7 @@ **描述(Description): DIEN network training code based on the TensorFlow framework** +

Overview

- Reference paper: https://arxiv.org/abs/1809.03672 @@ -82,7 +83,7 @@ Related code example. ``` -config_proto = tf.ConfigProto(allow_soft_placement=True) + config_proto = tf.ConfigProto(allow_soft_placement=True) custom_op = config_proto.graph_options.rewrite_options.custom_optimizers.add() custom_op.name = 'NpuOptimizer' custom_op.parameter_map["use_off_line"].b = True diff --git a/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/script/model.py b/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/script/model.py index f1ce1624351d27b8268344d25e812f9be01700f3..6a1d9fea62436f6707c2f2a5343b8d5054fc5d74 100644 --- a/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/script/model.py +++ b/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/script/model.py @@ -111,7 +111,10 @@ class Model(object): if self.use_negsampling: self.loss += self.aux_loss tf.summary.scalar('loss', self.loss) - self.optimizer = tf.train.AdamOptimizer(learning_rate=self.lr).minimize(self.loss) + self.optimizer = tf.train.AdamOptimizer(learning_rate=self.lr) + loss_scale_manager = ExponentialUpdateLossScaleManager(init_loss_scale=2**32, incr_every_n_steps=1000, decr_every_n_nan_or_inf=2, decr_ratio=0.5) + self.optimizer = NPULossScaleOptimizer(self.optimizer, loss_scale_manager) + self.optimizer = self.optimizer.minimize(self.loss) # Accuracy metric self.accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.round(self.y_hat), self.target_ph), tf.float32)) diff --git a/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/script/train.py b/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/script/train.py index aebeda31562dfdb8892a97b7f65c36b29caef79e..1ebdf7b3ee1a87cc3e508ac083cb2e1a75d5f804 100644 --- a/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/script/train.py +++ b/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/script/train.py @@ -37,9 +37,9 @@ import random import sys from utils import * -EMBEDDING_DIM = 64 -HIDDEN_SIZE = 16 * 2 -ATTENTION_SIZE = 16 * 2 +EMBEDDING_DIM = 18 +HIDDEN_SIZE = 18 * 2 +ATTENTION_SIZE = 18 * 2 best_auc = 0.0 def prepare_data(input, target, maxlen = None, return_neg = False): @@ -158,6 +158,8 @@ def train( custom_op.name = 'NpuOptimizer' custom_op.parameter_map["enable_data_pre_proc"].b = True custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") + ### Configure MatMul ops as fp16-in, fp32-out + custom_op.parameter_map["customize_dtypes"].s = tf.compat.as_bytes("switch_config.txt") sess_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF sess_config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF diff --git a/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/switch_config.txt b/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/switch_config.txt new file mode 100644 index 0000000000000000000000000000000000000000..995f4cea2905f7a19343d8bf259579c3d4b4bca5 --- /dev/null +++ b/TensorFlow/built-in/recommendation/DIEN_ID0109_for_TensorFlow/switch_config.txt @@ -0,0 +1,3 @@ +OpType::MatMulV2:InputDtype:float16,float16,float32,OutputDtype:float32 +OpType::BatchMatMul:InputDtype:float16,float16,OutputDtype:float32 +OpType::BatchMatMulV2:InputDtype:float16,float16,OutputDtype:float32 \ No newline at end of file diff --git a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/README.md b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/README.md index b3709a2640001988d15af6bd05997116b4cbfb8d..455a59ca1ae7f32af8dcec191bef1af83138c33a 100644 ---
a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/README.md +++ b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/README.md @@ -1,36 +1,175 @@ -Deep Interest Evolution Network for Click-Through Rate Prediction -https://arxiv.org/abs/1809.03672 +- [Basic Information](#基本信息.md) +- [Overview](#概述.md) +- [Training Environment Preparation](#训练环境准备.md) +- [Quick Start](#快速上手.md) +- [Transfer Learning Guide](#迁移学习指导.md) +- [Advanced Reference](#高级参考.md) -prepare data -method 1 -You can get the data from the amazon website and process it using the script +

Basic Information

-sh prepare_data.sh -method 2 (recommended) -Because getting and processing the data is time-consuming, we have processed it and uploaded it for you. You can unzip it and use it directly. +**发布者(Publisher):Huawei** +**应用领域(Application Domain):Recommendation** + +**版本(Version):1.1** + +**修改时间(Modified):2022.04.08** + +**大小(Size):220Kb** + +**框架(Framework):TensorFlow 1.15.0** + +**模型格式(Model Format):ckpt** + +**精度(Precision):Mixed** + +**处理器(Processor):Ascend 910** + +**应用级别(Categories):Research** + +**描述(Description): DIEN network training code based on the TensorFlow framework** + +

Overview

+ +- Reference paper: + + https://arxiv.org/abs/1809.03672 + +- Reference implementation: + + https://github.com/mouna99/dien + +- Implementation adapted for the Ascend AI Processor: + + https://gitee.com/ascend/ModelZoo-TensorFlow/tree/master/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow + +- To obtain the code at the corresponding commit\_id via Git: + + ``` + git clone {repository_url} # clone the repository + cd {repository_name} # enter the model's code directory + git checkout {branch} # switch to the corresponding branch + git reset --hard {commit_id} # reset the code to the corresponding commit_id + cd {code_path} # switch to the model code path; if the repository contains only this model, no switch is needed + ``` + +## Default Configuration +- Network setup + + - Learning rate: 0.001 + - Optimizer: adam + - Single-card batch size: 128 + - Total steps: 25400 + +- Training hyper-parameters (single card): + - Batch size: 128 + - Learning rate\(LR\): 0.0001 + - Train epochs: 1 + - Train steps: 25400 + +## Supported Features + +| Feature | Supported | +| ---------- | -------- | +| Distributed training | No | +| Mixed precision | Yes | +| Data parallelism | No | + + +## Mixed Precision Training + +The Ascend 910 AI Processor provides automatic mixed precision: following a built-in optimization strategy, it automatically lowers selected float32 operators across the network to float16, improving system performance and reducing memory usage with very little loss of accuracy. + +## Enabling Mixed Precision +Related code example. + +``` + config_proto = tf.ConfigProto(allow_soft_placement=True) + custom_op = config_proto.graph_options.rewrite_options.custom_optimizers.add() + custom_op.name = 'NpuOptimizer' + custom_op.parameter_map["use_off_line"].b = True + custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") + config_proto.graph_options.rewrite_options.remapping = RewriterConfig.OFF + session_config = npu_config_proto(config_proto=config_proto) +``` +
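In this repository the same allow_mix_precision setting is sometimes paired with dynamic loss scaling so that fp16 gradients do not underflow: the DIEN_ID0109 variant enables it in script/model.py, while this model keeps the equivalent lines commented out. Below is a minimal sketch of the pattern, assuming `lr` and `loss` tensors already exist and that `from npu_bridge.npu_init import *` exports the loss-scale classes, as it does in script/model.py; the hyper-parameter values are copied from the ID0109 diff.

```
# Sketch: dynamic loss scaling around Adam, mirroring
# DIEN_ID0109_for_TensorFlow/script/model.py.
from npu_bridge.npu_init import *  # exports NPULossScaleOptimizer and
                                   # ExponentialUpdateLossScaleManager
import tensorflow as tf

optimizer = tf.train.AdamOptimizer(learning_rate=lr)
loss_scale_manager = ExponentialUpdateLossScaleManager(
    init_loss_scale=2**32,         # start large, shrink on overflow
    incr_every_n_steps=1000,       # grow after 1000 overflow-free steps
    decr_every_n_nan_or_inf=2,     # shrink after 2 NaN/Inf gradient steps
    decr_ratio=0.5)                # halve the scale when shrinking
optimizer = NPULossScaleOptimizer(optimizer, loss_scale_manager)
train_op = optimizer.minimize(loss)
```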

Training Environment Preparation

+ +- For hardware and runtime environment preparation, see the [CANN Software Installation Guide](https://support.huawei.com/enterprise/zh/ascend-computing/cann-pid-251168373?category=installation-update) +- Run the following command to install the dependencies. +``` +pip3 install -r requirements.txt +``` +Note: the dependency file requirements.txt is located in the model's root directory + + +

Quick Start

+ +## Dataset Preparation + +1. The dataset can be downloaded from the source link under "Reference implementation" +2. After downloading, the dataset can be unpacked into a local directory tar -jxvf data.tar.gz mv data/* . tar -jxvf data1.tar.gz mv data1/* . tar -jxvf data2.tar.gz mv data2/* . -When you see the files below, you can proceed to the next step. - -cat_voc.pkl -mid_voc.pkl -uid_voc.pkl -local_train_splitByUser -local_test_splitByUser -reviews-info -item-info -train model -python train.py train [model name] -The models below are supported: - -DNN -PNN -Wide (Wide&Deep NN) -DIN (https://arxiv.org/abs/1706.06978) -DIEN (https://arxiv.org/pdf/1809.03672.pdf) -Note: we use tensorflow 1.4. \ No newline at end of file +``` +## Model Training +- Click "Download Now" and choose a suitable download method to download the source package. +- Start training. + + 1. Before launching training, configure the environment variables required by the program. + + For environment variable configuration, see: + + [Ascend 910 training platform environment variable setup](https://gitee.com/ascend/modelzoo/wikis/Ascend%20910%E8%AE%AD%E7%BB%83%E5%B9%B3%E5%8F%B0%E7%8E%AF%E5%A2%83%E5%8F%98%E9%87%8F%E8%AE%BE%E7%BD%AE?sort_id=3148819) + + +- Single-card training + Taking a dataset placed under /data as an example (a combined launch sketch follows this section): + ``` + cd test + bash train_performance_1p.sh --data_path=/data (functionality and performance) + bash train_full_1p.sh --data_path=/data (full run) ## the full-run script is not yet working + ``` + +
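For convenience, the environment setup and launch steps can be combined into a single shell session. This is an illustrative sketch rather than a script shipped with the model: the `ASCEND_DEVICE_ID` value is an example, and any further variables listed on the wiki page above must still be exported.

```
# Illustrative single-card launch; device 0 is an example value.
export ASCEND_DEVICE_ID=0                        # logical ID of the NPU to use
cd test
bash train_performance_1p.sh --data_path=/data   # functionality and performance run
```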

Transfer Learning Guide

+ +- Dataset preparation. + + 1. Obtain the data. + See "Dataset Preparation" under "Quick Start" + +- Model training + + See the "Quick Start" section + +

Advanced Reference

+ +## Scripts and Sample Code + + ├── README.md // documentation + ├── requirements.txt // dependencies + ├── script // model code + │ ├──data_iterator.py + │ ├──Dice.py + │ ├──train.py + │ ├──model.py + ├── test + | |—— train_full_1p.sh // single-card full training script + | |—— train_performance_1p.sh // single-card performance script + +## Script Parameters + +``` +batch_size training batch size +learning_rate initial learning rate +train_epochs total number of training epochs + +``` + +## Training Process + +Launch single-card training with the commands given under "Model Training". +Set data_path in the training script (train_full_1p.sh) to the path of the training dataset. For the detailed procedure, see the example under "Model Training". \ No newline at end of file diff --git a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/model.py b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/model.py index 5e02e2e3e8081a612702f6492d1dffef9a7b177e..4137fc337540c186475d362963f99a2e296ed201 100644 --- a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/model.py +++ b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/model.py @@ -1,13 +1,40 @@ -from npu_bridge.estimator.npu.npu_dynamic_rnn import DynamicGRUV2 -from npu_bridge.estimator.npu.npu_dynamic_rnn import DynamicAUGRU +# +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from npu_bridge.npu_init import * import tensorflow as tf - -# from tensorflow.python.ops.rnn_cell import LSTMCell -# from tensorflow.python.ops.rnn import bidirectional_dynamic_rnn as bi_rnn +from tensorflow.python.ops.rnn_cell import GRUCell +from tensorflow.python.ops.rnn_cell import LSTMCell +#from tensorflow.python.ops.rnn import bidirectional_dynamic_rnn as bi_rnn #from tensorflow.python.ops.rnn import dynamic_rnn -from my_rnn import dynamic_rnn -# from tensorflow.contrib import rnn -from tensorflow.contrib.rnn import GRUCell +from rnn import dynamic_rnn +from tensorflow.contrib import rnn from utils import * from Dice import dice @@ -27,26 +54,26 @@ class Model(object): if use_negsampling: self.noclk_mid_batch_ph = tf.placeholder(tf.int32, [batch_size, maxlen, 5], name='noclk_mid_batch_ph') #generate 3 item IDs from negative sampling.
self.noclk_cat_batch_ph = tf.placeholder(tf.int32, [batch_size, maxlen, 5], name='noclk_cat_batch_ph') - + # Embedding layer with tf.name_scope('Embedding_layer'): self.uid_embeddings_var = tf.get_variable("uid_embedding_var", [n_uid, EMBEDDING_DIM]) tf.summary.histogram('uid_embeddings_var', self.uid_embeddings_var) - self.uid_batch_embedded = embedding_lookup_npu(self.uid_embeddings_var, self.uid_batch_ph) + self.uid_batch_embedded = tf.nn.embedding_lookup(self.uid_embeddings_var, self.uid_batch_ph) self.mid_embeddings_var = tf.get_variable("mid_embedding_var", [n_mid, EMBEDDING_DIM]) tf.summary.histogram('mid_embeddings_var', self.mid_embeddings_var) - self.mid_batch_embedded = embedding_lookup_npu(self.mid_embeddings_var, self.mid_batch_ph) - self.mid_his_batch_embedded = embedding_lookup_npu(self.mid_embeddings_var, self.mid_his_batch_ph) + self.mid_batch_embedded = tf.nn.embedding_lookup(self.mid_embeddings_var, self.mid_batch_ph) + self.mid_his_batch_embedded = tf.nn.embedding_lookup(self.mid_embeddings_var, self.mid_his_batch_ph) if self.use_negsampling: - self.noclk_mid_his_batch_embedded = embedding_lookup_npu(self.mid_embeddings_var, self.noclk_mid_batch_ph) + self.noclk_mid_his_batch_embedded = tf.nn.embedding_lookup(self.mid_embeddings_var, self.noclk_mid_batch_ph) self.cat_embeddings_var = tf.get_variable("cat_embedding_var", [n_cat, EMBEDDING_DIM]) tf.summary.histogram('cat_embeddings_var', self.cat_embeddings_var) - self.cat_batch_embedded = embedding_lookup_npu(self.cat_embeddings_var, self.cat_batch_ph) - self.cat_his_batch_embedded = embedding_lookup_npu(self.cat_embeddings_var, self.cat_his_batch_ph) + self.cat_batch_embedded = tf.nn.embedding_lookup(self.cat_embeddings_var, self.cat_batch_ph) + self.cat_his_batch_embedded = tf.nn.embedding_lookup(self.cat_embeddings_var, self.cat_his_batch_ph) if self.use_negsampling: - self.noclk_cat_his_batch_embedded = embedding_lookup_npu(self.cat_embeddings_var, self.noclk_cat_batch_ph) + self.noclk_cat_his_batch_embedded = tf.nn.embedding_lookup(self.cat_embeddings_var, self.noclk_cat_batch_ph) self.item_eb = tf.concat([self.mid_batch_embedded, self.cat_batch_embedded], 1) self.item_his_eb = tf.concat([self.mid_his_batch_embedded, self.cat_his_batch_embedded], 2) @@ -55,7 +82,7 @@ class Model(object): self.noclk_item_his_eb = tf.concat( [self.noclk_mid_his_batch_embedded[:, :, 0, :], self.noclk_cat_his_batch_embedded[:, :, 0, :]], -1)# 0 means only using the first negative item ID. 3 item IDs are inputed in the line 24. self.noclk_item_his_eb = tf.reshape(self.noclk_item_his_eb, - [-1, tf.shape(self.noclk_mid_his_batch_embedded)[1], EMBEDDING_DIM * 2])# cat embedding 18 concate item embedding 18. + [-1, tf.shape(self.noclk_mid_his_batch_embedded)[1], EMBEDDING_DIM*2])# cat embedding 18 concate item embedding 18. 
self.noclk_his_eb = tf.concat([self.noclk_mid_his_batch_embedded, self.noclk_cat_his_batch_embedded], -1) self.noclk_his_eb_sum_1 = tf.reduce_sum(self.noclk_his_eb, 2) @@ -63,7 +90,6 @@ class Model(object): def build_fcn_net(self, inp, use_dice = False): bn1 = tf.layers.batch_normalization(inputs=inp, name='bn1') - print(">>>>>>>>>>>>>>>>>>>>bn1", bn1.get_shape().as_list()) dnn1 = tf.layers.dense(bn1, 200, activation=None, name='f1') if use_dice: dnn1 = dice(dnn1, name='dice_1') @@ -85,15 +111,11 @@ class Model(object): if self.use_negsampling: self.loss += self.aux_loss tf.summary.scalar('loss', self.loss) - self.optimizer = tf.train.AdamOptimizer(learning_rate=self.lr, epsilon=1e-04).minimize(self.loss) - # self.optimizer = tf.train.AdamOptimizer(learning_rate=self.lr).minimize(self.loss) + self.optimizer = tf.train.AdamOptimizer(learning_rate=self.lr) + # loss_scale_manager = ExponentialUpdateLossScaleManager(init_loss_scale=2**32, incr_every_n_steps=1000, decr_every_n_nan_or_inf=2, decr_ratio=0.5) + # self.optimizer = NPULossScaleOptimizer(self.optimizer, loss_scale_manager) + self.optimizer = self.optimizer.minimize(self.loss) - - # self.optimizer = tf.train.AdamOptimizer(learning_rate=self.lr) - # grads = self.optimizer.compute_gradients(self.loss * (2**12)) - # grads = [(grad / (2**12), var) for grad, var in grads] - # grads = [(tf.clip_by_value(grad, -1., 1.), var) for grad, var in grads] - # self.optimizer = self.optimizer.apply_gradients(grads) # Accuracy metric self.accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.round(self.y_hat), self.target_ph), tf.float32)) tf.summary.scalar('accuracy', self.accuracy) @@ -112,7 +134,6 @@ class Model(object): return loss_ def auxiliary_net(self, in_, stag='auxiliary_net'): - print(">>>>>>>>>>>>>>>>>>>>>in_", in_.get_shape().as_list()) bn1 = tf.layers.batch_normalization(inputs=in_, name='bn1' + stag, reuse=tf.AUTO_REUSE) dnn1 = tf.layers.dense(bn1, 100, activation=None, name='f1' + stag, reuse=tf.AUTO_REUSE) dnn1 = tf.nn.sigmoid(dnn1) @@ -122,29 +143,9 @@ class Model(object): y_hat = tf.nn.softmax(dnn3) + 0.00000001 return y_hat + def train(self, sess, inps): if self.use_negsampling: - # outputs = sess.run(self.rnn_outputs, feed_dict={ - # self.uid_batch_ph: inps[0], - # self.mid_batch_ph: inps[1], - # self.cat_batch_ph: inps[2], - # self.mid_his_batch_ph: inps[3], - # self.cat_his_batch_ph: inps[4], - # self.mask: inps[5], - # self.target_ph: inps[6], - # self.seq_len_ph: inps[7], - # self.lr: inps[8], - # self.noclk_mid_batch_ph: inps[9], - # self.noclk_cat_batch_ph: inps[10], - # }) - # print(outputs) - # return - - # op = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) - # for key in op: - # if "dynamic_gru_v2" in key.name: - # print(key.name, sess.run(key)) - loss, accuracy, aux_loss, _ = sess.run([self.loss, self.accuracy, self.aux_loss, self.optimizer], feed_dict={ self.uid_batch_ph: inps[0], self.mid_batch_ph: inps[1], @@ -158,15 +159,9 @@ class Model(object): self.noclk_mid_batch_ph: inps[9], self.noclk_cat_batch_ph: inps[10], }) - # print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>mid_his_batch_ph") - # print(inps[3]) - # print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>embedding") - # print(item_his_eb_time_major) - # print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>output") - # print(outputs) return loss, accuracy, aux_loss else: - loss, accuracy, _, summary_str = sess.run([self.loss, self.accuracy, self.optimizer, self.merged], feed_dict={ + loss, accuracy, _ = sess.run([self.loss, self.accuracy, self.optimizer], feed_dict={ self.uid_batch_ph: inps[0], 
self.mid_batch_ph: inps[1], self.cat_batch_ph: inps[2], @@ -177,7 +172,7 @@ class Model(object): self.seq_len_ph: inps[7], self.lr: inps[8], }) - return loss, accuracy, 0, summary_str + return loss, accuracy, 0 def calculate(self, sess, inps): if self.use_negsampling: @@ -215,11 +210,163 @@ class Model(object): saver = tf.train.Saver() saver.restore(sess, save_path=path) print('model restored from %s' % path) - - def summary_op(self, summary_writer, summary_str, step): - summary_writer.add_summary(summary_str, global_step=step) -# DIEN +class Model_DIN_V2_Gru_att_Gru(Model): + def __init__(self, n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE, use_negsampling=False): + super(Model_DIN_V2_Gru_att_Gru, self).__init__(n_uid, n_mid, n_cat, + EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE, + use_negsampling) + + # RNN layer(-s) + with tf.name_scope('rnn_1'): + rnn_outputs, _ = dynamic_rnn(GRUCell(HIDDEN_SIZE), inputs=self.item_his_eb, + sequence_length=self.seq_len_ph, dtype=tf.float32, + scope="gru1") + tf.summary.histogram('GRU_outputs', rnn_outputs) + + # Attention layer + with tf.name_scope('Attention_layer_1'): + att_outputs, alphas = din_fcn_attention(self.item_eb, rnn_outputs, ATTENTION_SIZE, self.mask, + softmax_stag=1, stag='1_1', mode='LIST', return_alphas=True) + tf.summary.histogram('alpha_outputs', alphas) + + with tf.name_scope('rnn_2'): + rnn_outputs2, final_state2 = dynamic_rnn(GRUCell(HIDDEN_SIZE), inputs=att_outputs, + sequence_length=self.seq_len_ph, dtype=tf.float32, + scope="gru2") + tf.summary.histogram('GRU2_Final_State', final_state2) + + inp = tf.concat([self.uid_batch_embedded, self.item_eb, self.item_his_eb_sum, self.item_eb * self.item_his_eb_sum, final_state2], 1) + # Fully connected layer + self.build_fcn_net(inp, use_dice=True) + +class Model_DIN_V2_Gru_Gru_att(Model): + def __init__(self, n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE, use_negsampling=False): + super(Model_DIN_V2_Gru_Gru_att, self).__init__(n_uid, n_mid, n_cat, + EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE, + use_negsampling) + + # RNN layer(-s) + with tf.name_scope('rnn_1'): + rnn_outputs, _ = dynamic_rnn(GRUCell(HIDDEN_SIZE), inputs=self.item_his_eb, + sequence_length=self.seq_len_ph, dtype=tf.float32, + scope="gru1") + tf.summary.histogram('GRU_outputs', rnn_outputs) + + with tf.name_scope('rnn_2'): + rnn_outputs2, _ = dynamic_rnn(GRUCell(HIDDEN_SIZE), inputs=rnn_outputs, + sequence_length=self.seq_len_ph, dtype=tf.float32, + scope="gru2") + tf.summary.histogram('GRU2_outputs', rnn_outputs2) + + # Attention layer + with tf.name_scope('Attention_layer_1'): + att_outputs, alphas = din_fcn_attention(self.item_eb, rnn_outputs2, ATTENTION_SIZE, self.mask, + softmax_stag=1, stag='1_1', mode='LIST', return_alphas=True) + att_fea = tf.reduce_sum(att_outputs, 1) + tf.summary.histogram('att_fea', att_fea) + + inp = tf.concat([self.uid_batch_embedded, self.item_eb, self.item_his_eb_sum, self.item_eb * self.item_his_eb_sum, att_fea], 1) + self.build_fcn_net(inp, use_dice=True) + +class Model_WideDeep(Model): + def __init__(self, n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE, use_negsampling=False): + super(Model_WideDeep, self).__init__(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, + ATTENTION_SIZE, + use_negsampling) + + inp = tf.concat([self.uid_batch_embedded, self.item_eb, self.item_his_eb_sum], 1) + # Fully connected layer + bn1 = tf.layers.batch_normalization(inputs=inp, name='bn1') + dnn1 = tf.layers.dense(bn1, 200, activation=None, name='f1') + dnn1 
= prelu(dnn1, 'p1') + dnn2 = tf.layers.dense(dnn1, 80, activation=None, name='f2') + dnn2 = prelu(dnn2, 'p2') + dnn3 = tf.layers.dense(dnn2, 2, activation=None, name='f3') + d_layer_wide = tf.concat([tf.concat([self.item_eb,self.item_his_eb_sum], axis=-1), + self.item_eb * self.item_his_eb_sum], axis=-1) + d_layer_wide = tf.layers.dense(d_layer_wide, 2, activation=None, name='f_fm') + self.y_hat = tf.nn.softmax(dnn3 + d_layer_wide) + + with tf.name_scope('Metrics'): + # Cross-entropy loss and optimizer initialization + self.loss = - tf.reduce_mean(tf.log(self.y_hat) * self.target_ph) + tf.summary.scalar('loss', self.loss) + self.optimizer = tf.train.AdamOptimizer(learning_rate=self.lr).minimize(self.loss) + + # Accuracy metric + self.accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.round(self.y_hat), self.target_ph), tf.float32)) + tf.summary.scalar('accuracy', self.accuracy) + self.merged = tf.summary.merge_all() + + +class Model_DIN_V2_Gru_QA_attGru(Model): + def __init__(self, n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE, use_negsampling=False): + super(Model_DIN_V2_Gru_QA_attGru, self).__init__(n_uid, n_mid, n_cat, + EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE, + use_negsampling) + + # RNN layer(-s) + with tf.name_scope('rnn_1'): + rnn_outputs, _ = dynamic_rnn(GRUCell(HIDDEN_SIZE), inputs=self.item_his_eb, + sequence_length=self.seq_len_ph, dtype=tf.float32, + scope="gru1") + tf.summary.histogram('GRU_outputs', rnn_outputs) + + # Attention layer + with tf.name_scope('Attention_layer_1'): + att_outputs, alphas = din_fcn_attention(self.item_eb, rnn_outputs, ATTENTION_SIZE, self.mask, + softmax_stag=1, stag='1_1', mode='LIST', return_alphas=True) + tf.summary.histogram('alpha_outputs', alphas) + + with tf.name_scope('rnn_2'): + rnn_outputs2, final_state2 = dynamic_rnn(QAAttGRUCell(HIDDEN_SIZE), inputs=rnn_outputs, + att_scores = tf.expand_dims(alphas, -1), + sequence_length=self.seq_len_ph, dtype=tf.float32, + scope="gru2") + tf.summary.histogram('GRU2_Final_State', final_state2) + + inp = tf.concat([self.uid_batch_embedded, self.item_eb, self.item_his_eb_sum, self.item_eb * self.item_his_eb_sum, final_state2], 1) + self.build_fcn_net(inp, use_dice=True) + +class Model_DNN(Model): + def __init__(self, n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE, use_negsampling=False): + super(Model_DNN, self).__init__(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, + ATTENTION_SIZE, + use_negsampling) + + inp = tf.concat([self.uid_batch_embedded, self.item_eb, self.item_his_eb_sum], 1) + self.build_fcn_net(inp, use_dice=False) + +class Model_PNN(Model): + def __init__(self, n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE, use_negsampling=False): + super(Model_PNN, self).__init__(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, + ATTENTION_SIZE, + use_negsampling) + + inp = tf.concat([self.uid_batch_embedded, self.item_eb, self.item_his_eb_sum, + self.item_eb * self.item_his_eb_sum], 1) + + # Fully connected layer + self.build_fcn_net(inp, use_dice=False) + + +class Model_DIN(Model): + def __init__(self, n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE, use_negsampling=False): + super(Model_DIN, self).__init__(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, + ATTENTION_SIZE, + use_negsampling) + + # Attention layer + with tf.name_scope('Attention_layer'): + attention_output = din_attention(self.item_eb, self.item_his_eb, ATTENTION_SIZE, self.mask) + att_fea = tf.reduce_sum(attention_output, 1) + tf.summary.histogram('att_fea', att_fea) + 
inp = tf.concat([self.uid_batch_embedded, self.item_eb, self.item_his_eb_sum, self.item_eb * self.item_his_eb_sum, att_fea], -1) + # Fully connected layer + self.build_fcn_net(inp, use_dice=True) + + class Model_DIN_V2_Gru_Vec_attGru_Neg(Model): def __init__(self, batch_size, maxlen, n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE, use_negsampling=True): super(Model_DIN_V2_Gru_Vec_attGru_Neg, self).__init__(batch_size, maxlen, n_uid, n_mid, n_cat, @@ -228,49 +375,16 @@ class Model_DIN_V2_Gru_Vec_attGru_Neg(Model): # RNN layer(-s) with tf.name_scope('rnn_1'): - # rnn_outputs, _ = tf.nn.dynamic_rnn(GRUCell(HIDDEN_SIZE), inputs=self.item_his_eb, - # sequence_length=self.seq_len_ph, dtype=tf.float32, - # scope="gru1") - + from npu_bridge.estimator.npu.npu_dynamic_rnn import DynamicGRUV2 + from npu_bridge.estimator.npu.npu_dynamic_rnn import DynamicAUGRU item_his_eb_fp16 = tf.cast(self.item_his_eb, tf.float16, name="cast_fp16") item_his_eb_time_major = tf.transpose(item_his_eb_fp16, [1, 0, 2], name="transpose_time_major") gruv2 = DynamicGRUV2(HIDDEN_SIZE, dtype=tf.float16) rnn_outputs, _, _, _, _, _ = gruv2(item_his_eb_time_major) + # rnn_outputs, _, _, _, _, _ = gruv2(item_his_eb_time_major, seq_length=self.seq_len_ph) rnn_outputs_time_major = tf.transpose(rnn_outputs, [1, 0, 2], name="rnn_outputs_transpose_time_major") rnn_outputs = tf.cast(rnn_outputs_time_major, tf.float32) - # mask_dim = tf.expand_dims(self.mask, -1) - # rnn_outputs = rnn_outputs * mask_dim - # print("?????????????????rnn_outputs", rnn_outputs) - - # item_his_eb_fp16 = tf.cast(self.item_his_eb, tf.float16, name="cast_fp16") - # gru = tf.keras.layers.CuDNNGRU(HIDDEN_SIZE, return_sequences=True) - # rnn_outputs = gru(item_his_eb_fp16) - # # print(rnn_outputs) - # self.rnn_outputs = rnn_outputs - # rnn_outputs = tf.cast(rnn_outputs, tf.float32) - - # from tensorflow.contrib.cudnn_rnn.python.layers import CudnnGRU - # from tensorflow.python.framework import dtypes - - # item_his_eb_fp16 = tf.cast(self.item_his_eb, tf.float16, name="cast_fp16") - # gru = CudnnGRU(num_layers=1, num_units=HIDDEN_SIZE, dtype=dtypes.float16) - # rnn_outputs, _ = gru(inputs=item_his_eb_fp16, sequence_lengths=self.seq_len_ph) - # # print(rnn_outputs) - # self.rnn_outputs = rnn_outputs - # rnn_outputs = tf.cast(rnn_outputs, tf.float32) - - # rnn_outputs, _ = dynamic_rnn(GRUCell(HIDDEN_SIZE), inputs=self.item_his_eb, - # sequence_length=self.seq_len_ph, dtype=tf.float32, - # scope="gru1") - # self.rnn_outputs = rnn_outputs - - # item_his_eb_fp16 = tf.cast(self.item_his_eb, tf.float16, name="cast_fp16") - # rnn_outputs, _ = dynamic_rnn(GRUCell(HIDDEN_SIZE), inputs=item_his_eb_fp16, - # sequence_length=self.seq_len_ph, dtype=tf.float16, - # scope="gru1") - # rnn_outputs = tf.cast(rnn_outputs, tf.float32) - # self.rnn_outputs = rnn_outputs - + tf.summary.histogram('GRU_outputs', rnn_outputs) aux_loss_1 = self.auxiliary_loss(rnn_outputs[:, :-1, :], self.item_his_eb[:, 1:, :], @@ -291,29 +405,12 @@ class Model_DIN_V2_Gru_Vec_attGru_Neg(Model): alphas_fp16_time_major = tf.transpose(alphas_fp16, [1, 0], name="att_transpose_time_major") augru = DynamicAUGRU(HIDDEN_SIZE, dtype=tf.float16) - rnn_outputs2, _, _, _, _, _, _ = augru(rnn_outputs_time_major, alphas_fp16_time_major) - - rnn_outputs2_time_major = tf.transpose(rnn_outputs2, [1, 0, 2], name="gru2_rnn_outputs_transpose_time_major") - rnn_outputs2 = tf.cast(rnn_outputs2_time_major, tf.float32) - final_state2 = tf.batch_gather(rnn_outputs2, self.seq_len_ph[:, None] - 1) - final_state2 = 
tf.squeeze(final_state2, 1)
-
-            # rnn_outputs2, final_state2 = dynamic_rnn(VecAttGRUCell(HIDDEN_SIZE), inputs=rnn_outputs,
-            #                                          att_scores = tf.expand_dims(alphas, -1),
-            #                                          sequence_length=self.seq_len_ph, dtype=tf.float32,
-            #                                          scope="gru2")
-            # rnn_outputs2, final_state2 = dynamic_rnn(VecAttGRUCell(HIDDEN_SIZE), inputs=rnn_outputs,
-            #                                          att_scores = tf.expand_dims(alphas, -1),
-            #                                          dtype=tf.float32,
-            #                                          scope="gru2")
-            # final_state2 = tf.batch_gather(rnn_outputs2, self.seq_len_ph[:, None]-1)
-            # final_state2 = tf.squeeze(final_state2, 1)
-
-
-            # rnn_outputs2, final_state2 = tf.nn.dynamic_rnn(GRUCell(HIDDEN_SIZE), inputs=rnn_outputs,
-            #                                                sequence_length=self.seq_len_ph, dtype=tf.float32,
-            #                                                scope="gru2")
-            # print("@@@@@@@@@@@@@@@@@@@", rnn_outputs2.get_shape().as_list())
+            rnn_outputs2, _, _, _, _, _, _ = augru(rnn_outputs_time_major, alphas_fp16_time_major,
+                                                   seq_length=self.seq_len_ph)
+            # DynamicAUGRU returns time-major outputs; the last step is the final state.
+            rnn_outputs2 = rnn_outputs2[-1]
+            final_state2 = tf.cast(rnn_outputs2, tf.float32)
             tf.summary.histogram('GRU2_Final_State', final_state2)
 
         inp = tf.concat([self.uid_batch_embedded, self.item_eb, self.item_his_eb_sum, self.item_eb * self.item_his_eb_sum, final_state2], 1)
@@ -350,21 +447,11 @@ class Model_DIN_V2_Gru_Vec_attGru(Model):
         inp = tf.concat([self.uid_batch_embedded, self.item_eb, self.item_his_eb_sum, self.item_eb * self.item_his_eb_sum, final_state2], 1)
         self.build_fcn_net(inp, use_dice=True)
-
 @tf.custom_gradient
 def gather_npu(params, indices):
-    def grad(dy):
-        params_shape = tf.shape(params, out_type=tf.int64)
-        params_shape = tf.cast(params_shape, tf.int32)
-        grad_gather = tf.unsorted_segment_sum(dy, indices, params_shape[0])
-        return grad_gather, None
-    return tf.gather(params, indices), grad
-
-@tf.custom_gradient
-def embedding_lookup_npu(params, indices):
-    def grad(dy):
-        params_shape = tf.shape(params, out_type=tf.int64)
-        params_shape = tf.cast(params_shape, tf.int32)
-        grad_embedding_lookup = tf.unsorted_segment_sum(dy, indices, params_shape[0])
-        return grad_embedding_lookup, None
-    return tf.nn.embedding_lookup(params, indices), grad
\ No newline at end of file
+    def grad(dy):
+        params_shape = tf.shape(params, out_type=tf.int64)
+        params_shape = tf.cast(params_shape, tf.int32)
+        grad_gather = tf.unsorted_segment_sum(dy, indices, params_shape[0])
+        return grad_gather, None
+    return tf.gather(params, indices), grad
\ No newline at end of file
diff --git a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/rnn.py b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/rnn.py
new file mode 100644
index 0000000000000000000000000000000000000000..959bcec40858605c4d8762099e91eebe78a74af1
--- /dev/null
+++ b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/rnn.py
@@ -0,0 +1,1476 @@
+# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ============================================================================== + +"""RNN helpers for TensorFlow models. + + +@@bidirectional_dynamic_rnn +@@dynamic_rnn +@@raw_rnn +@@static_rnn +@@static_state_saving_rnn +@@static_bidirectional_rnn +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from npu_bridge.npu_init import * + +from tensorflow.python.framework import constant_op +from tensorflow.python.framework import dtypes +from tensorflow.python.framework import ops +from tensorflow.python.framework import tensor_shape +from tensorflow.python.ops import array_ops +from tensorflow.python.ops import control_flow_ops +from tensorflow.python.ops import math_ops +from tensorflow.python.ops import rnn_cell_impl +from tensorflow.python.ops import tensor_array_ops +from tensorflow.python.ops import variable_scope as vs +from tensorflow.python.util import nest + + +# pylint: disable=protected-access +_concat = rnn_cell_impl._concat +assert_like_rnncell = rnn_cell_impl.assert_like_rnncell +# pylint: enable=protected-access + + +def _transpose_batch_time(x): + """Transpose the batch and time dimensions of a Tensor. + + Retains as much of the static shape information as possible. + + Args: + x: A tensor of rank 2 or higher. + + Returns: + x transposed along the first two dimensions. + + Raises: + ValueError: if `x` is rank 1 or lower. + """ + x_static_shape = x.get_shape() + if x_static_shape.ndims is not None and x_static_shape.ndims < 2: + raise ValueError( + "Expected input tensor %s to have rank at least 2, but saw shape: %s" % + (x, x_static_shape)) + x_rank = array_ops.rank(x) + x_t = array_ops.transpose( + x, array_ops.concat( + ([1, 0], math_ops.range(2, x_rank)), axis=0)) + x_t.set_shape( + tensor_shape.TensorShape([ + x_static_shape[1].value, x_static_shape[0].value + ]).concatenate(x_static_shape[2:])) + return x_t + + +def _best_effort_input_batch_size(flat_input): + """Get static input batch size if available, with fallback to the dynamic one. + + Args: + flat_input: An iterable of time major input Tensors of shape [max_time, + batch_size, ...]. All inputs should have compatible batch sizes. + + Returns: + The batch size in Python integer if available, or a scalar Tensor otherwise. + + Raises: + ValueError: if there is any input with an invalid shape. + """ + for input_ in flat_input: + shape = input_.shape + if shape.ndims is None: + continue + if shape.ndims < 2: + raise ValueError( + "Expected input tensor %s to have rank at least 2" % input_) + batch_size = shape[1].value + if batch_size is not None: + return batch_size + # Fallback to the dynamic batch size of the first input. + return array_ops.shape(flat_input[0])[1] + + +def _infer_state_dtype(explicit_dtype, state): + """Infer the dtype of an RNN state. 
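+
+  Used by `_dynamic_rnn_loop` to pick a dtype for the zero outputs and the
+  output `TensorArray`s when the caller does not pass one explicitly.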
+ + Args: + explicit_dtype: explicitly declared dtype or None. + state: RNN's hidden state. Must be a Tensor or a nested iterable containing + Tensors. + + Returns: + dtype: inferred dtype of hidden state. + + Raises: + ValueError: if `state` has heterogeneous dtypes or is empty. + """ + if explicit_dtype is not None: + return explicit_dtype + elif nest.is_sequence(state): + inferred_dtypes = [element.dtype for element in nest.flatten(state)] + if not inferred_dtypes: + raise ValueError("Unable to infer dtype from empty state.") + all_same = all([x == inferred_dtypes[0] for x in inferred_dtypes]) + if not all_same: + raise ValueError( + "State has tensors of different inferred_dtypes. Unable to infer a " + "single representative dtype.") + return inferred_dtypes[0] + else: + return state.dtype + + +# pylint: disable=unused-argument +def _rnn_step( + time, sequence_length, min_sequence_length, max_sequence_length, + zero_output, state, call_cell, state_size, skip_conditionals=False): + """Calculate one step of a dynamic RNN minibatch. + + Returns an (output, state) pair conditioned on the sequence_lengths. + When skip_conditionals=False, the pseudocode is something like: + + if t >= max_sequence_length: + return (zero_output, state) + if t < min_sequence_length: + return call_cell() + + # Selectively output zeros or output, old state or new state depending + # on if we've finished calculating each row. + new_output, new_state = call_cell() + final_output = np.vstack([ + zero_output if time >= sequence_lengths[r] else new_output_r + for r, new_output_r in enumerate(new_output) + ]) + final_state = np.vstack([ + state[r] if time >= sequence_lengths[r] else new_state_r + for r, new_state_r in enumerate(new_state) + ]) + return (final_output, final_state) + + Args: + time: Python int, the current time step + sequence_length: int32 `Tensor` vector of size [batch_size] + min_sequence_length: int32 `Tensor` scalar, min of sequence_length + max_sequence_length: int32 `Tensor` scalar, max of sequence_length + zero_output: `Tensor` vector of shape [output_size] + state: Either a single `Tensor` matrix of shape `[batch_size, state_size]`, + or a list/tuple of such tensors. + call_cell: lambda returning tuple of (new_output, new_state) where + new_output is a `Tensor` matrix of shape `[batch_size, output_size]`. + new_state is a `Tensor` matrix of shape `[batch_size, state_size]`. + state_size: The `cell.state_size` associated with the state. + skip_conditionals: Python bool, whether to skip using the conditional + calculations. This is useful for `dynamic_rnn`, where the input tensor + matches `max_sequence_length`, and using conditionals just slows + everything down. + + Returns: + A tuple of (`final_output`, `final_state`) as given by the pseudocode above: + final_output is a `Tensor` matrix of shape [batch_size, output_size] + final_state is either a single `Tensor` matrix, or a tuple of such + matrices (matching length and shapes of input `state`). + + Raises: + ValueError: If the cell returns a state tuple whose length does not match + that returned by `state_size`. + """ + + # Convert state to a list for ease of use + flat_state = nest.flatten(state) + flat_zero_output = nest.flatten(zero_output) + + def _copy_one_through(output, new_output): + # If the state contains a scalar value we simply pass it through. 
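+    # (A rank-0 entry has no batch dimension to mask per example, so it is
+    # returned unchanged instead of going through array_ops.where below.)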
+ if output.shape.ndims == 0: + return new_output + copy_cond = (time >= sequence_length) + with ops.colocate_with(new_output): + return array_ops.where(copy_cond, output, new_output) + + def _copy_some_through(flat_new_output, flat_new_state): + # Use broadcasting select to determine which values should get + # the previous state & zero output, and which values should get + # a calculated state & output. + flat_new_output = [ + _copy_one_through(zero_output, new_output) + for zero_output, new_output in zip(flat_zero_output, flat_new_output)] + flat_new_state = [ + _copy_one_through(state, new_state) + for state, new_state in zip(flat_state, flat_new_state)] + return flat_new_output + flat_new_state + + def _maybe_copy_some_through(): + """Run RNN step. Pass through either no or some past state.""" + new_output, new_state = call_cell() + + nest.assert_same_structure(state, new_state) + + flat_new_state = nest.flatten(new_state) + flat_new_output = nest.flatten(new_output) + return control_flow_ops.cond( + # if t < min_seq_len: calculate and return everything + time < min_sequence_length, lambda: flat_new_output + flat_new_state, + # else copy some of it through + lambda: _copy_some_through(flat_new_output, flat_new_state)) + + # TODO(ebrevdo): skipping these conditionals may cause a slowdown, + # but benefits from removing cond() and its gradient. We should + # profile with and without this switch here. + if skip_conditionals: + # Instead of using conditionals, perform the selective copy at all time + # steps. This is faster when max_seq_len is equal to the number of unrolls + # (which is typical for dynamic_rnn). + new_output, new_state = call_cell() + nest.assert_same_structure(state, new_state) + new_state = nest.flatten(new_state) + new_output = nest.flatten(new_output) + final_output_and_state = _copy_some_through(new_output, new_state) + else: + empty_update = lambda: flat_zero_output + flat_state + final_output_and_state = control_flow_ops.cond( + # if t >= max_seq_len: copy all state through, output zeros + time >= max_sequence_length, empty_update, + # otherwise calculation is required: copy some or all of it through + _maybe_copy_some_through) + + if len(final_output_and_state) != len(flat_zero_output) + len(flat_state): + raise ValueError("Internal error: state and output were not concatenated " + "correctly.") + final_output = final_output_and_state[:len(flat_zero_output)] + final_state = final_output_and_state[len(flat_zero_output):] + + for output, flat_output in zip(final_output, flat_zero_output): + output.set_shape(flat_output.get_shape()) + for substate, flat_substate in zip(final_state, flat_state): + substate.set_shape(flat_substate.get_shape()) + + final_output = nest.pack_sequence_as( + structure=zero_output, flat_sequence=final_output) + final_state = nest.pack_sequence_as( + structure=state, flat_sequence=final_state) + + return final_output, final_state + + +def _reverse_seq(input_seq, lengths): + """Reverse a list of Tensors up to specified lengths. + + Args: + input_seq: Sequence of seq_len tensors of dimension (batch_size, n_features) + or nested tuples of tensors. + lengths: A `Tensor` of dimension batch_size, containing lengths for each + sequence in the batch. If "None" is specified, simply reverses + the list. 
+ + Returns: + time-reversed sequence + """ + if lengths is None: + return list(reversed(input_seq)) + + flat_input_seq = tuple(nest.flatten(input_) for input_ in input_seq) + + flat_results = [[] for _ in range(len(input_seq))] + for sequence in zip(*flat_input_seq): + input_shape = tensor_shape.unknown_shape( + ndims=sequence[0].get_shape().ndims) + for input_ in sequence: + input_shape.merge_with(input_.get_shape()) + input_.set_shape(input_shape) + + # Join into (time, batch_size, depth) + s_joined = array_ops.stack(sequence) + + # Reverse along dimension 0 + s_reversed = array_ops.reverse_sequence(s_joined, lengths, 0, 1) + # Split again into list + result = array_ops.unstack(s_reversed) + for r, flat_result in zip(result, flat_results): + r.set_shape(input_shape) + flat_result.append(r) + + results = [nest.pack_sequence_as(structure=input_, flat_sequence=flat_result) + for input_, flat_result in zip(input_seq, flat_results)] + return results + + +def bidirectional_dynamic_rnn(cell_fw, cell_bw, inputs, sequence_length=None, + initial_state_fw=None, initial_state_bw=None, + dtype=None, parallel_iterations=None, + swap_memory=False, time_major=False, scope=None): + """Creates a dynamic version of bidirectional recurrent neural network. + + Takes input and builds independent forward and backward RNNs. The input_size + of forward and backward cell must match. The initial state for both directions + is zero by default (but can be set optionally) and no intermediate states are + ever returned -- the network is fully unrolled for the given (passed in) + length(s) of the sequence(s) or completely unrolled if length(s) is not + given. + + Args: + cell_fw: An instance of RNNCell, to be used for forward direction. + cell_bw: An instance of RNNCell, to be used for backward direction. + inputs: The RNN inputs. + If time_major == False (default), this must be a tensor of shape: + `[batch_size, max_time, ...]`, or a nested tuple of such elements. + If time_major == True, this must be a tensor of shape: + `[max_time, batch_size, ...]`, or a nested tuple of such elements. + sequence_length: (optional) An int32/int64 vector, size `[batch_size]`, + containing the actual lengths for each of the sequences in the batch. + If not provided, all batch entries are assumed to be full sequences; and + time reversal is applied from time `0` to `max_time` for each sequence. + initial_state_fw: (optional) An initial state for the forward RNN. + This must be a tensor of appropriate type and shape + `[batch_size, cell_fw.state_size]`. + If `cell_fw.state_size` is a tuple, this should be a tuple of + tensors having shapes `[batch_size, s] for s in cell_fw.state_size`. + initial_state_bw: (optional) Same as for `initial_state_fw`, but using + the corresponding properties of `cell_bw`. + dtype: (optional) The data type for the initial states and expected output. + Required if initial_states are not provided or RNN states have a + heterogeneous dtype. + parallel_iterations: (Default: 32). The number of iterations to run in + parallel. Those operations which do not have any temporal dependency + and can be run in parallel, will be. This parameter trades off + time for space. Values >> 1 use more memory but take less time, + while smaller values use less memory but computations take longer. + swap_memory: Transparently swap the tensors produced in forward inference + but needed for back prop from GPU to CPU. 
This allows training RNNs + which would typically not fit on a single GPU, with very minimal (or no) + performance penalty. + time_major: The shape format of the `inputs` and `outputs` Tensors. + If true, these `Tensors` must be shaped `[max_time, batch_size, depth]`. + If false, these `Tensors` must be shaped `[batch_size, max_time, depth]`. + Using `time_major = True` is a bit more efficient because it avoids + transposes at the beginning and end of the RNN calculation. However, + most TensorFlow data is batch-major, so by default this function + accepts input and emits output in batch-major form. + scope: VariableScope for the created subgraph; defaults to + "bidirectional_rnn" + + Returns: + A tuple (outputs, output_states) where: + outputs: A tuple (output_fw, output_bw) containing the forward and + the backward rnn output `Tensor`. + If time_major == False (default), + output_fw will be a `Tensor` shaped: + `[batch_size, max_time, cell_fw.output_size]` + and output_bw will be a `Tensor` shaped: + `[batch_size, max_time, cell_bw.output_size]`. + If time_major == True, + output_fw will be a `Tensor` shaped: + `[max_time, batch_size, cell_fw.output_size]` + and output_bw will be a `Tensor` shaped: + `[max_time, batch_size, cell_bw.output_size]`. + It returns a tuple instead of a single concatenated `Tensor`, unlike + in the `bidirectional_rnn`. If the concatenated one is preferred, + the forward and backward outputs can be concatenated as + `tf.concat(outputs, 2)`. + output_states: A tuple (output_state_fw, output_state_bw) containing + the forward and the backward final states of bidirectional rnn. + + Raises: + TypeError: If `cell_fw` or `cell_bw` is not an instance of `RNNCell`. + """ + + assert_like_rnncell("cell_fw must be an instance of RNNCell", cell_fw) + assert_like_rnncell("cell_bw must be an instance of RNNCell", cell_bw) + #if not _like_rnncell(cell_fw): + # raise TypeError("cell_fw must be an instance of RNNCell") + #if not _like_rnncell(cell_bw): + # raise TypeError("cell_bw must be an instance of RNNCell") + + with vs.variable_scope(scope or "bidirectional_rnn"): + # Forward direction + with vs.variable_scope("fw") as fw_scope: + output_fw, output_state_fw = dynamic_rnn( + cell=cell_fw, inputs=inputs, sequence_length=sequence_length, + initial_state=initial_state_fw, dtype=dtype, + parallel_iterations=parallel_iterations, swap_memory=swap_memory, + time_major=time_major, scope=fw_scope) + + # Backward direction + if not time_major: + time_dim = 1 + batch_dim = 0 + else: + time_dim = 0 + batch_dim = 1 + + def _reverse(input_, seq_lengths, seq_dim, batch_dim): + if seq_lengths is not None: + return array_ops.reverse_sequence( + input=input_, seq_lengths=seq_lengths, + seq_dim=seq_dim, batch_dim=batch_dim) + else: + return array_ops.reverse(input_, axis=[seq_dim]) + + with vs.variable_scope("bw") as bw_scope: + inputs_reverse = _reverse( + inputs, seq_lengths=sequence_length, + seq_dim=time_dim, batch_dim=batch_dim) + tmp, output_state_bw = dynamic_rnn( + cell=cell_bw, inputs=inputs_reverse, sequence_length=sequence_length, + initial_state=initial_state_bw, dtype=dtype, + parallel_iterations=parallel_iterations, swap_memory=swap_memory, + time_major=time_major, scope=bw_scope) + + output_bw = _reverse( + tmp, seq_lengths=sequence_length, + seq_dim=time_dim, batch_dim=batch_dim) + + outputs = (output_fw, output_bw) + output_states = (output_state_fw, output_state_bw) + + return (outputs, output_states) + + +def dynamic_rnn(cell, inputs, att_scores=None, sequence_length=None, 
initial_state=None, + dtype=None, parallel_iterations=None, swap_memory=False, + time_major=False, scope=None): + """Creates a recurrent neural network specified by RNNCell `cell`. + + Performs fully dynamic unrolling of `inputs`. + + Example: + + ```python + # create a BasicRNNCell + rnn_cell = tf.nn.rnn_cell.BasicRNNCell(hidden_size) + + # 'outputs' is a tensor of shape [batch_size, max_time, cell_state_size] + + # defining initial state + initial_state = rnn_cell.zero_state(batch_size, dtype=tf.float32) + + # 'state' is a tensor of shape [batch_size, cell_state_size] + outputs, state = tf.nn.dynamic_rnn(rnn_cell, input_data, + initial_state=initial_state, + dtype=tf.float32) + ``` + + ```python + # create 2 LSTMCells + rnn_layers = [tf.nn.rnn_cell.LSTMCell(size) for size in [128, 256]] + + # create a RNN cell composed sequentially of a number of RNNCells + multi_rnn_cell = tf.nn.rnn_cell.MultiRNNCell(rnn_layers) + + # 'outputs' is a tensor of shape [batch_size, max_time, 256] + # 'state' is a N-tuple where N is the number of LSTMCells containing a + # tf.contrib.rnn.LSTMStateTuple for each cell + outputs, state = tf.nn.dynamic_rnn(cell=multi_rnn_cell, + inputs=data, + dtype=tf.float32) + ``` + + + Args: + cell: An instance of RNNCell. + inputs: The RNN inputs. + If `time_major == False` (default), this must be a `Tensor` of shape: + `[batch_size, max_time, ...]`, or a nested tuple of such + elements. + If `time_major == True`, this must be a `Tensor` of shape: + `[max_time, batch_size, ...]`, or a nested tuple of such + elements. + This may also be a (possibly nested) tuple of Tensors satisfying + this property. The first two dimensions must match across all the inputs, + but otherwise the ranks and other shape components may differ. + In this case, input to `cell` at each time-step will replicate the + structure of these tuples, except for the time dimension (from which the + time is taken). + The input to `cell` at each time step will be a `Tensor` or (possibly + nested) tuple of Tensors each with dimensions `[batch_size, ...]`. + sequence_length: (optional) An int32/int64 vector sized `[batch_size]`. + Used to copy-through state and zero-out outputs when past a batch + element's sequence length. So it's more for correctness than performance. + initial_state: (optional) An initial state for the RNN. + If `cell.state_size` is an integer, this must be + a `Tensor` of appropriate type and shape `[batch_size, cell.state_size]`. + If `cell.state_size` is a tuple, this should be a tuple of + tensors having shapes `[batch_size, s] for s in cell.state_size`. + dtype: (optional) The data type for the initial state and expected output. + Required if initial_state is not provided or RNN state has a heterogeneous + dtype. + parallel_iterations: (Default: 32). The number of iterations to run in + parallel. Those operations which do not have any temporal dependency + and can be run in parallel, will be. This parameter trades off + time for space. Values >> 1 use more memory but take less time, + while smaller values use less memory but computations take longer. + swap_memory: Transparently swap the tensors produced in forward inference + but needed for back prop from GPU to CPU. This allows training RNNs + which would typically not fit on a single GPU, with very minimal (or no) + performance penalty. + time_major: The shape format of the `inputs` and `outputs` Tensors. + If true, these `Tensors` must be shaped `[max_time, batch_size, depth]`. 
+ If false, these `Tensors` must be shaped `[batch_size, max_time, depth]`. + Using `time_major = True` is a bit more efficient because it avoids + transposes at the beginning and end of the RNN calculation. However, + most TensorFlow data is batch-major, so by default this function + accepts input and emits output in batch-major form. + scope: VariableScope for the created subgraph; defaults to "rnn". + + Returns: + A pair (outputs, state) where: + + outputs: The RNN output `Tensor`. + + If time_major == False (default), this will be a `Tensor` shaped: + `[batch_size, max_time, cell.output_size]`. + + If time_major == True, this will be a `Tensor` shaped: + `[max_time, batch_size, cell.output_size]`. + + Note, if `cell.output_size` is a (possibly nested) tuple of integers + or `TensorShape` objects, then `outputs` will be a tuple having the + same structure as `cell.output_size`, containing Tensors having shapes + corresponding to the shape data in `cell.output_size`. + + state: The final state. If `cell.state_size` is an int, this + will be shaped `[batch_size, cell.state_size]`. If it is a + `TensorShape`, this will be shaped `[batch_size] + cell.state_size`. + If it is a (possibly nested) tuple of ints or `TensorShape`, this will + be a tuple having the corresponding shapes. If cells are `LSTMCells` + `state` will be a tuple containing a `LSTMStateTuple` for each cell. + + Raises: + TypeError: If `cell` is not an instance of RNNCell. + ValueError: If inputs is None or an empty list. + """ + assert_like_rnncell("cell must be an instance of RNNCell", cell) + #if not _like_rnncell(cell): + # raise TypeError("cell must be an instance of RNNCell") + + # By default, time_major==False and inputs are batch-major: shaped + # [batch, time, depth] + # For internal calculations, we transpose to [time, batch, depth] + flat_input = nest.flatten(inputs) + + if not time_major: + # (B,T,D) => (T,B,D) + flat_input = [ops.convert_to_tensor(input_) for input_ in flat_input] + flat_input = tuple(_transpose_batch_time(input_) for input_ in flat_input) + + parallel_iterations = parallel_iterations or 32 + if sequence_length is not None: + sequence_length = math_ops.to_int32(sequence_length) + if sequence_length.get_shape().ndims not in (None, 1): + raise ValueError( + "sequence_length must be a vector of length batch_size, " + "but saw shape: %s" % sequence_length.get_shape()) + sequence_length = array_ops.identity( # Just to find it in the graph. + sequence_length, name="sequence_length") + + # Create a new scope in which the caching device is either + # determined by the parent scope, or is set to place the cached + # Variable using the same placement as for the rest of the RNN. 
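+  # Note: unlike upstream TensorFlow, this fork accepts an optional
+  # `att_scores` argument and forwards it to `_dynamic_rnn_loop`, which slices
+  # one score per time step for attention-gated GRU cells (e.g. DIEN's
+  # QAAttGRUCell/VecAttGRUCell).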
+  with vs.variable_scope(scope or "rnn") as varscope:
+    if varscope.caching_device is None:
+      varscope.set_caching_device(lambda op: op.device)
+    batch_size = _best_effort_input_batch_size(flat_input)
+
+    if initial_state is not None:
+      state = initial_state
+    else:
+      if not dtype:
+        raise ValueError("If there is no initial_state, you must give a dtype.")
+      state = cell.zero_state(batch_size, dtype)
+
+    def _assert_has_shape(x, shape):
+      x_shape = array_ops.shape(x)
+      packed_shape = array_ops.stack(shape)
+      return control_flow_ops.Assert(
+          math_ops.reduce_all(math_ops.equal(x_shape, packed_shape)),
+          ["Expected shape for Tensor %s is " % x.name,
+           packed_shape, " but saw shape: ", x_shape])
+
+    if sequence_length is not None:
+      # Perform some shape validation
+      with ops.control_dependencies(
+          [_assert_has_shape(sequence_length, [batch_size])]):
+        sequence_length = array_ops.identity(
+            sequence_length, name="CheckSeqLen")
+
+    inputs = nest.pack_sequence_as(structure=inputs, flat_sequence=flat_input)
+
+    (outputs, final_state) = _dynamic_rnn_loop(
+        cell,
+        inputs,
+        state,
+        parallel_iterations=parallel_iterations,
+        swap_memory=swap_memory,
+        att_scores=att_scores,
+        sequence_length=sequence_length,
+        dtype=dtype)
+
+    # Outputs of _dynamic_rnn_loop are always shaped [time, batch, depth].
+    # If we are performing batch-major calculations, transpose output back
+    # to shape [batch, time, depth]
+    if not time_major:
+      # (T,B,D) => (B,T,D)
+      outputs = nest.map_structure(_transpose_batch_time, outputs)
+
+    return (outputs, final_state)
+
+
+def _dynamic_rnn_loop(cell,
+                      inputs,
+                      initial_state,
+                      parallel_iterations,
+                      swap_memory,
+                      att_scores=None,
+                      sequence_length=None,
+                      dtype=None):
+  """Internal implementation of Dynamic RNN.
+
+  Args:
+    cell: An instance of RNNCell.
+    inputs: A `Tensor` of shape [time, batch_size, input_size], or a nested
+      tuple of such elements.
+    initial_state: A `Tensor` of shape `[batch_size, state_size]`, or if
+      `cell.state_size` is a tuple, then this should be a tuple of
+      tensors having shapes `[batch_size, s] for s in cell.state_size`.
+    parallel_iterations: Positive Python int.
+    swap_memory: A Python boolean.
+    att_scores: (optional) A batch-major `Tensor` of attention scores; the
+      slice for step `t` is passed to the cell together with the input.
+    sequence_length: (optional) An `int32` `Tensor` of shape [batch_size].
+    dtype: (optional) Expected dtype of output. If not specified, inferred from
+      initial_state.
+
+  Returns:
+    Tuple `(final_outputs, final_state)`.
+    final_outputs:
+      A `Tensor` of shape `[time, batch_size, cell.output_size]`. If
+      `cell.output_size` is a (possibly nested) tuple of ints or `TensorShape`
+      objects, then this returns a (possibly nested) tuple of Tensors matching
+      the corresponding shapes.
+    final_state:
+      A `Tensor`, or possibly nested tuple of Tensors, matching in length
+      and shapes to `initial_state`.
+
+  Raises:
+    ValueError: If the input depth cannot be inferred via shape inference
+      from the inputs.
+ """ + state = initial_state + assert isinstance(parallel_iterations, int), "parallel_iterations must be int" + + state_size = cell.state_size + + flat_input = nest.flatten(inputs) + flat_output_size = nest.flatten(cell.output_size) + + # Construct an initial output + input_shape = array_ops.shape(flat_input[0]) + time_steps = input_shape[0] + batch_size = _best_effort_input_batch_size(flat_input) + + inputs_got_shape = tuple(input_.get_shape().with_rank_at_least(3) + for input_ in flat_input) + + const_time_steps, const_batch_size = inputs_got_shape[0].as_list()[:2] + + for shape in inputs_got_shape: + if not shape[2:].is_fully_defined(): + raise ValueError( + "Input size (depth of inputs) must be accessible via shape inference," + " but saw value None.") + got_time_steps = shape[0].value + got_batch_size = shape[1].value + if const_time_steps != got_time_steps: + raise ValueError( + "Time steps is not the same for all the elements in the input in a " + "batch.") + if const_batch_size != got_batch_size: + raise ValueError( + "Batch_size is not the same for all the elements in the input.") + + # Prepare dynamic conditional copying of state & output + def _create_zero_arrays(size): + size = _concat(batch_size, size) + return array_ops.zeros( + array_ops.stack(size), _infer_state_dtype(dtype, state)) + + flat_zero_output = tuple(_create_zero_arrays(output) + for output in flat_output_size) + zero_output = nest.pack_sequence_as(structure=cell.output_size, + flat_sequence=flat_zero_output) + + if sequence_length is not None: + min_sequence_length = math_ops.reduce_min(sequence_length) + max_sequence_length = math_ops.reduce_max(sequence_length) + + time = array_ops.constant(0, dtype=dtypes.int32, name="time") + + with ops.name_scope("dynamic_rnn") as scope: + base_name = scope + + def _create_ta(name, dtype): + return tensor_array_ops.TensorArray(dtype=dtype, + size=time_steps, + tensor_array_name=base_name + name) + + output_ta = tuple(_create_ta("output_%d" % i, + _infer_state_dtype(dtype, state)) + for i in range(len(flat_output_size))) + input_ta = tuple(_create_ta("input_%d" % i, flat_input[i].dtype) + for i in range(len(flat_input))) + + input_ta = tuple(ta.unstack(input_) + for ta, input_ in zip(input_ta, flat_input)) + + def _time_step(time, output_ta_t, state, att_scores=None): + """Take a time step of the dynamic RNN. + + Args: + time: int32 scalar Tensor. + output_ta_t: List of `TensorArray`s that represent the output. + state: nested tuple of vector tensors that represent the state. + + Returns: + The tuple (time + 1, output_ta_t with updated flow, new_state). 
+ """ + + input_t = tuple(ta.read(time) for ta in input_ta) + # Restore some shape information + for input_, shape in zip(input_t, inputs_got_shape): + input_.set_shape(shape[1:]) + + input_t = nest.pack_sequence_as(structure=inputs, flat_sequence=input_t) + if att_scores is not None: + att_score = att_scores[:, time, :] + call_cell = lambda: cell(input_t, state, att_score) + else: + call_cell = lambda: cell(input_t, state) + + if sequence_length is not None: + (output, new_state) = _rnn_step( + time=time, + sequence_length=sequence_length, + min_sequence_length=min_sequence_length, + max_sequence_length=max_sequence_length, + zero_output=zero_output, + state=state, + call_cell=call_cell, + state_size=state_size, + skip_conditionals=True) + else: + (output, new_state) = call_cell() + + # Pack state if using state tuples + output = nest.flatten(output) + + output_ta_t = tuple( + ta.write(time, out) for ta, out in zip(output_ta_t, output)) + if att_scores is not None: + return (time + 1, output_ta_t, new_state, att_scores) + else: + return (time + 1, output_ta_t, new_state) + + if att_scores is not None: + _, output_final_ta, final_state, _ = control_flow_ops.while_loop( + cond=lambda time, *_: time < time_steps, + body=_time_step, + loop_vars=(time, output_ta, state, att_scores), + parallel_iterations=parallel_iterations, + swap_memory=swap_memory) + else: + _, output_final_ta, final_state = control_flow_ops.while_loop( + cond=lambda time, *_: time < time_steps, + body=_time_step, + loop_vars=(time, output_ta, state), + parallel_iterations=parallel_iterations, + swap_memory=swap_memory) + + # Unpack final output if not using output tuples. + final_outputs = tuple(ta.stack() for ta in output_final_ta) + + # Restore some shape information + for output, output_size in zip(final_outputs, flat_output_size): + shape = _concat( + [const_time_steps, const_batch_size], output_size, static=True) + output.set_shape(shape) + + final_outputs = nest.pack_sequence_as( + structure=cell.output_size, flat_sequence=final_outputs) + + return (final_outputs, final_state) + + +def raw_rnn(cell, loop_fn, + parallel_iterations=None, swap_memory=False, scope=None): + """Creates an `RNN` specified by RNNCell `cell` and loop function `loop_fn`. + + **NOTE: This method is still in testing, and the API may change.** + + This function is a more primitive version of `dynamic_rnn` that provides + more direct access to the inputs each iteration. It also provides more + control over when to start and finish reading the sequence, and + what to emit for the output. + + For example, it can be used to implement the dynamic decoder of a seq2seq + model. + + Instead of working with `Tensor` objects, most operations work with + `TensorArray` objects directly. + + The operation of `raw_rnn`, in pseudo-code, is basically the following: + + ```python + time = tf.constant(0, dtype=tf.int32) + (finished, next_input, initial_state, _, loop_state) = loop_fn( + time=time, cell_output=None, cell_state=None, loop_state=None) + emit_ta = TensorArray(dynamic_size=True, dtype=initial_state.dtype) + state = initial_state + while not all(finished): + (output, cell_state) = cell(next_input, state) + (next_finished, next_input, next_state, emit, loop_state) = loop_fn( + time=time + 1, cell_output=output, cell_state=cell_state, + loop_state=loop_state) + # Emit zeros and copy forward state for minibatch entries that are finished. 
+ state = tf.where(finished, state, next_state) + emit = tf.where(finished, tf.zeros_like(emit), emit) + emit_ta = emit_ta.write(time, emit) + # If any new minibatch entries are marked as finished, mark these. + finished = tf.logical_or(finished, next_finished) + time += 1 + return (emit_ta, state, loop_state) + ``` + + with the additional properties that output and state may be (possibly nested) + tuples, as determined by `cell.output_size` and `cell.state_size`, and + as a result the final `state` and `emit_ta` may themselves be tuples. + + A simple implementation of `dynamic_rnn` via `raw_rnn` looks like this: + + ```python + inputs = tf.placeholder(shape=(max_time, batch_size, input_depth), + dtype=tf.float32) + sequence_length = tf.placeholder(shape=(batch_size,), dtype=tf.int32) + inputs_ta = tf.TensorArray(dtype=tf.float32, size=max_time) + inputs_ta = inputs_ta.unstack(inputs) + + cell = tf.contrib.rnn.LSTMCell(num_units) + + def loop_fn(time, cell_output, cell_state, loop_state): + emit_output = cell_output # == None for time == 0 + if cell_output is None: # time == 0 + next_cell_state = cell.zero_state(batch_size, tf.float32) + else: + next_cell_state = cell_state + elements_finished = (time >= sequence_length) + finished = tf.reduce_all(elements_finished) + next_input = tf.cond( + finished, + lambda: tf.zeros([batch_size, input_depth], dtype=tf.float32), + lambda: inputs_ta.read(time)) + next_loop_state = None + return (elements_finished, next_input, next_cell_state, + emit_output, next_loop_state) + + outputs_ta, final_state, _ = raw_rnn(cell, loop_fn) + outputs = outputs_ta.stack() + ``` + + Args: + cell: An instance of RNNCell. + loop_fn: A callable that takes inputs + `(time, cell_output, cell_state, loop_state)` + and returns the tuple + `(finished, next_input, next_cell_state, emit_output, next_loop_state)`. + Here `time` is an int32 scalar `Tensor`, `cell_output` is a + `Tensor` or (possibly nested) tuple of tensors as determined by + `cell.output_size`, and `cell_state` is a `Tensor` + or (possibly nested) tuple of tensors, as determined by the `loop_fn` + on its first call (and should match `cell.state_size`). + The outputs are: `finished`, a boolean `Tensor` of + shape `[batch_size]`, `next_input`: the next input to feed to `cell`, + `next_cell_state`: the next state to feed to `cell`, + and `emit_output`: the output to store for this iteration. + + Note that `emit_output` should be a `Tensor` or (possibly nested) + tuple of tensors with shapes and structure matching `cell.output_size` + and `cell_output` above. The parameter `cell_state` and output + `next_cell_state` may be either a single or (possibly nested) tuple + of tensors. The parameter `loop_state` and + output `next_loop_state` may be either a single or (possibly nested) tuple + of `Tensor` and `TensorArray` objects. This last parameter + may be ignored by `loop_fn` and the return value may be `None`. If it + is not `None`, then the `loop_state` will be propagated through the RNN + loop, for use purely by `loop_fn` to keep track of its own state. + The `next_loop_state` parameter returned may be `None`. + + The first call to `loop_fn` will be `time = 0`, `cell_output = None`, + `cell_state = None`, and `loop_state = None`. For this call: + The `next_cell_state` value should be the value with which to initialize + the cell's state. It may be a final state from a previous RNN or it + may be the output of `cell.zero_state()`. It should be a + (possibly nested) tuple structure of tensors. 
+ If `cell.state_size` is an integer, this must be + a `Tensor` of appropriate type and shape `[batch_size, cell.state_size]`. + If `cell.state_size` is a `TensorShape`, this must be a `Tensor` of + appropriate type and shape `[batch_size] + cell.state_size`. + If `cell.state_size` is a (possibly nested) tuple of ints or + `TensorShape`, this will be a tuple having the corresponding shapes. + The `emit_output` value may be either `None` or a (possibly nested) + tuple structure of tensors, e.g., + `(tf.zeros(shape_0, dtype=dtype_0), tf.zeros(shape_1, dtype=dtype_1))`. + If this first `emit_output` return value is `None`, + then the `emit_ta` result of `raw_rnn` will have the same structure and + dtypes as `cell.output_size`. Otherwise `emit_ta` will have the same + structure, shapes (prepended with a `batch_size` dimension), and dtypes + as `emit_output`. The actual values returned for `emit_output` at this + initializing call are ignored. Note, this emit structure must be + consistent across all time steps. + + parallel_iterations: (Default: 32). The number of iterations to run in + parallel. Those operations which do not have any temporal dependency + and can be run in parallel, will be. This parameter trades off + time for space. Values >> 1 use more memory but take less time, + while smaller values use less memory but computations take longer. + swap_memory: Transparently swap the tensors produced in forward inference + but needed for back prop from GPU to CPU. This allows training RNNs + which would typically not fit on a single GPU, with very minimal (or no) + performance penalty. + scope: VariableScope for the created subgraph; defaults to "rnn". + + Returns: + A tuple `(emit_ta, final_state, final_loop_state)` where: + + `emit_ta`: The RNN output `TensorArray`. + If `loop_fn` returns a (possibly nested) set of Tensors for + `emit_output` during initialization, (inputs `time = 0`, + `cell_output = None`, and `loop_state = None`), then `emit_ta` will + have the same structure, dtypes, and shapes as `emit_output` instead. + If `loop_fn` returns `emit_output = None` during this call, + the structure of `cell.output_size` is used: + If `cell.output_size` is a (possibly nested) tuple of integers + or `TensorShape` objects, then `emit_ta` will be a tuple having the + same structure as `cell.output_size`, containing TensorArrays whose + elements' shapes correspond to the shape data in `cell.output_size`. + + `final_state`: The final cell state. If `cell.state_size` is an int, this + will be shaped `[batch_size, cell.state_size]`. If it is a + `TensorShape`, this will be shaped `[batch_size] + cell.state_size`. + If it is a (possibly nested) tuple of ints or `TensorShape`, this will + be a tuple having the corresponding shapes. + + `final_loop_state`: The final loop state as returned by `loop_fn`. + + Raises: + TypeError: If `cell` is not an instance of RNNCell, or `loop_fn` is not + a `callable`. + """ + + assert_like_rnncell("cell must be an instance of RNNCell", cell) + #if not _like_rnncell(cell): + # raise TypeError("cell must be an instance of RNNCell") + if not callable(loop_fn): + raise TypeError("loop_fn must be a callable") + + parallel_iterations = parallel_iterations or 32 + + # Create a new scope in which the caching device is either + # determined by the parent scope, or is set to place the cached + # Variable using the same placement as for the rest of the RNN. 
+ with vs.variable_scope(scope or "rnn") as varscope: + if varscope.caching_device is None: + varscope.set_caching_device(lambda op: op.device) + + time = constant_op.constant(0, dtype=dtypes.int32) + (elements_finished, next_input, initial_state, emit_structure, + init_loop_state) = loop_fn( + time, None, None, None) # time, cell_output, cell_state, loop_state + flat_input = nest.flatten(next_input) + + # Need a surrogate loop state for the while_loop if none is available. + loop_state = (init_loop_state if init_loop_state is not None + else constant_op.constant(0, dtype=dtypes.int32)) + + input_shape = [input_.get_shape() for input_ in flat_input] + static_batch_size = input_shape[0][0] + + for input_shape_i in input_shape: + # Static verification that batch sizes all match + static_batch_size.merge_with(input_shape_i[0]) + + batch_size = static_batch_size.value + if batch_size is None: + batch_size = array_ops.shape(flat_input[0])[0] + + nest.assert_same_structure(initial_state, cell.state_size) + state = initial_state + flat_state = nest.flatten(state) + flat_state = [ops.convert_to_tensor(s) for s in flat_state] + state = nest.pack_sequence_as(structure=state, + flat_sequence=flat_state) + + if emit_structure is not None: + flat_emit_structure = nest.flatten(emit_structure) + flat_emit_size = [emit.shape if emit.shape.is_fully_defined() else + array_ops.shape(emit) for emit in flat_emit_structure] + flat_emit_dtypes = [emit.dtype for emit in flat_emit_structure] + else: + emit_structure = cell.output_size + flat_emit_size = nest.flatten(emit_structure) + flat_emit_dtypes = [flat_state[0].dtype] * len(flat_emit_size) + + flat_emit_ta = [ + tensor_array_ops.TensorArray( + dtype=dtype_i, dynamic_size=True, size=0, name="rnn_output_%d" % i) + for i, dtype_i in enumerate(flat_emit_dtypes)] + emit_ta = nest.pack_sequence_as(structure=emit_structure, + flat_sequence=flat_emit_ta) + flat_zero_emit = [ + array_ops.zeros(_concat(batch_size, size_i), dtype_i) + for size_i, dtype_i in zip(flat_emit_size, flat_emit_dtypes)] + zero_emit = nest.pack_sequence_as(structure=emit_structure, + flat_sequence=flat_zero_emit) + + def condition(unused_time, elements_finished, *_): + return math_ops.logical_not(math_ops.reduce_all(elements_finished)) + + def body(time, elements_finished, current_input, + emit_ta, state, loop_state): + """Internal while loop body for raw_rnn. + + Args: + time: time scalar. + elements_finished: batch-size vector. + current_input: possibly nested tuple of input tensors. + emit_ta: possibly nested tuple of output TensorArrays. + state: possibly nested tuple of state tensors. + loop_state: possibly nested tuple of loop state tensors. + + Returns: + Tuple having the same size as Args but with updated values. + """ + (next_output, cell_state) = cell(current_input, state) + + nest.assert_same_structure(state, cell_state) + nest.assert_same_structure(cell.output_size, next_output) + + next_time = time + 1 + (next_finished, next_input, next_state, emit_output, + next_loop_state) = loop_fn( + next_time, next_output, cell_state, loop_state) + + nest.assert_same_structure(state, next_state) + nest.assert_same_structure(current_input, next_input) + nest.assert_same_structure(emit_ta, emit_output) + + # If loop_fn returns None for next_loop_state, just reuse the + # previous one. 
+ loop_state = loop_state if next_loop_state is None else next_loop_state + + def _copy_some_through(current, candidate): + """Copy some tensors through via array_ops.where.""" + def copy_fn(cur_i, cand_i): + with ops.colocate_with(cand_i): + return array_ops.where(elements_finished, cur_i, cand_i) + return nest.map_structure(copy_fn, current, candidate) + + emit_output = _copy_some_through(zero_emit, emit_output) + next_state = _copy_some_through(state, next_state) + + emit_ta = nest.map_structure( + lambda ta, emit: ta.write(time, emit), emit_ta, emit_output) + + elements_finished = math_ops.logical_or(elements_finished, next_finished) + + return (next_time, elements_finished, next_input, + emit_ta, next_state, loop_state) + + returned = control_flow_ops.while_loop( + condition, body, loop_vars=[ + time, elements_finished, next_input, + emit_ta, state, loop_state], + parallel_iterations=parallel_iterations, + swap_memory=swap_memory) + + (emit_ta, final_state, final_loop_state) = returned[-3:] + + if init_loop_state is None: + final_loop_state = None + + return (emit_ta, final_state, final_loop_state) + + +def static_rnn(cell, + inputs, + initial_state=None, + dtype=None, + sequence_length=None, + scope=None): + """Creates a recurrent neural network specified by RNNCell `cell`. + + The simplest form of RNN network generated is: + + ```python + state = cell.zero_state(...) + outputs = [] + for input_ in inputs: + output, state = cell(input_, state) + outputs.append(output) + return (outputs, state) + ``` + However, a few other options are available: + + An initial state can be provided. + If the sequence_length vector is provided, dynamic calculation is performed. + This method of calculation does not compute the RNN steps past the maximum + sequence length of the minibatch (thus saving computational time), + and properly propagates the state at an example's sequence length + to the final state output. + + The dynamic calculation performed is, at time `t` for batch row `b`, + + ```python + (output, state)(b, t) = + (t >= sequence_length(b)) + ? (zeros(cell.output_size), states(b, sequence_length(b) - 1)) + : cell(input(b, t), state(b, t - 1)) + ``` + + Args: + cell: An instance of RNNCell. + inputs: A length T list of inputs, each a `Tensor` of shape + `[batch_size, input_size]`, or a nested tuple of such elements. + initial_state: (optional) An initial state for the RNN. + If `cell.state_size` is an integer, this must be + a `Tensor` of appropriate type and shape `[batch_size, cell.state_size]`. + If `cell.state_size` is a tuple, this should be a tuple of + tensors having shapes `[batch_size, s] for s in cell.state_size`. + dtype: (optional) The data type for the initial state and expected output. + Required if initial_state is not provided or RNN state has a heterogeneous + dtype. + sequence_length: Specifies the length of each sequence in inputs. + An int32 or int64 vector (tensor) size `[batch_size]`, values in `[0, T)`. + scope: VariableScope for the created subgraph; defaults to "rnn". + + Returns: + A pair (outputs, state) where: + + - outputs is a length T list of outputs (one for each input), or a nested + tuple of such elements. + - state is the final state + + Raises: + TypeError: If `cell` is not an instance of RNNCell. + ValueError: If `inputs` is `None` or an empty list, or if the input depth + (column size) cannot be inferred from inputs via shape inference. 
+ """ + + assert_like_rnncell("cell must be an instance of RNNCell", cell) + #if not _like_rnncell(cell): + # raise TypeError("cell must be an instance of RNNCell") + if not nest.is_sequence(inputs): + raise TypeError("inputs must be a sequence") + if not inputs: + raise ValueError("inputs must not be empty") + + outputs = [] + # Create a new scope in which the caching device is either + # determined by the parent scope, or is set to place the cached + # Variable using the same placement as for the rest of the RNN. + with vs.variable_scope(scope or "rnn") as varscope: + if varscope.caching_device is None: + varscope.set_caching_device(lambda op: op.device) + + # Obtain the first sequence of the input + first_input = inputs + while nest.is_sequence(first_input): + first_input = first_input[0] + + # Temporarily avoid EmbeddingWrapper and seq2seq badness + # TODO(lukaszkaiser): remove EmbeddingWrapper + if first_input.get_shape().ndims != 1: + + input_shape = first_input.get_shape().with_rank_at_least(2) + fixed_batch_size = input_shape[0] + + flat_inputs = nest.flatten(inputs) + for flat_input in flat_inputs: + input_shape = flat_input.get_shape().with_rank_at_least(2) + batch_size, input_size = input_shape[0], input_shape[1:] + fixed_batch_size.merge_with(batch_size) + for i, size in enumerate(input_size): + if size.value is None: + raise ValueError( + "Input size (dimension %d of inputs) must be accessible via " + "shape inference, but saw value None." % i) + else: + fixed_batch_size = first_input.get_shape().with_rank_at_least(1)[0] + + if fixed_batch_size.value: + batch_size = fixed_batch_size.value + else: + batch_size = array_ops.shape(first_input)[0] + if initial_state is not None: + state = initial_state + else: + if not dtype: + raise ValueError("If no initial_state is provided, " + "dtype must be specified") + state = cell.zero_state(batch_size, dtype) + + if sequence_length is not None: # Prepare variables + sequence_length = ops.convert_to_tensor( + sequence_length, name="sequence_length") + if sequence_length.get_shape().ndims not in (None, 1): + raise ValueError( + "sequence_length must be a vector of length batch_size") + + def _create_zero_output(output_size): + # convert int to TensorShape if necessary + size = _concat(batch_size, output_size) + output = array_ops.zeros( + array_ops.stack(size), _infer_state_dtype(dtype, state)) + shape = _concat(fixed_batch_size.value, output_size, static=True) + output.set_shape(tensor_shape.TensorShape(shape)) + return output + + output_size = cell.output_size + flat_output_size = nest.flatten(output_size) + flat_zero_output = tuple( + _create_zero_output(size) for size in flat_output_size) + zero_output = nest.pack_sequence_as( + structure=output_size, flat_sequence=flat_zero_output) + + sequence_length = math_ops.to_int32(sequence_length) + min_sequence_length = math_ops.reduce_min(sequence_length) + max_sequence_length = math_ops.reduce_max(sequence_length) + + for time, input_ in enumerate(inputs): + if time > 0: + varscope.reuse_variables() + # pylint: disable=cell-var-from-loop + call_cell = lambda: cell(input_, state) + # pylint: enable=cell-var-from-loop + if sequence_length is not None: + (output, state) = _rnn_step( + time=time, + sequence_length=sequence_length, + min_sequence_length=min_sequence_length, + max_sequence_length=max_sequence_length, + zero_output=zero_output, + state=state, + call_cell=call_cell, + state_size=cell.state_size) + else: + (output, state) = call_cell() + + outputs.append(output) + + return (outputs, 
state) + + +def static_state_saving_rnn(cell, + inputs, + state_saver, + state_name, + sequence_length=None, + scope=None): + """RNN that accepts a state saver for time-truncated RNN calculation. + + Args: + cell: An instance of `RNNCell`. + inputs: A length T list of inputs, each a `Tensor` of shape + `[batch_size, input_size]`. + state_saver: A state saver object with methods `state` and `save_state`. + state_name: Python string or tuple of strings. The name to use with the + state_saver. If the cell returns tuples of states (i.e., + `cell.state_size` is a tuple) then `state_name` should be a tuple of + strings having the same length as `cell.state_size`. Otherwise it should + be a single string. + sequence_length: (optional) An int32/int64 vector size [batch_size]. + See the documentation for rnn() for more details about sequence_length. + scope: VariableScope for the created subgraph; defaults to "rnn". + + Returns: + A pair (outputs, state) where: + outputs is a length T list of outputs (one for each input) + states is the final state + + Raises: + TypeError: If `cell` is not an instance of RNNCell. + ValueError: If `inputs` is `None` or an empty list, or if the arity and + type of `state_name` does not match that of `cell.state_size`. + """ + state_size = cell.state_size + state_is_tuple = nest.is_sequence(state_size) + state_name_tuple = nest.is_sequence(state_name) + + if state_is_tuple != state_name_tuple: + raise ValueError("state_name should be the same type as cell.state_size. " + "state_name: %s, cell.state_size: %s" % (str(state_name), + str(state_size))) + + if state_is_tuple: + state_name_flat = nest.flatten(state_name) + state_size_flat = nest.flatten(state_size) + + if len(state_name_flat) != len(state_size_flat): + raise ValueError("#elems(state_name) != #elems(state_size): %d vs. %d" % + (len(state_name_flat), len(state_size_flat))) + + initial_state = nest.pack_sequence_as( + structure=state_size, + flat_sequence=[state_saver.state(s) for s in state_name_flat]) + else: + initial_state = state_saver.state(state_name) + + (outputs, state) = static_rnn( + cell, + inputs, + initial_state=initial_state, + sequence_length=sequence_length, + scope=scope) + + if state_is_tuple: + flat_state = nest.flatten(state) + state_name = nest.flatten(state_name) + save_state = [ + state_saver.save_state(name, substate) + for name, substate in zip(state_name, flat_state) + ] + else: + save_state = [state_saver.save_state(state_name, state)] + + with ops.control_dependencies(save_state): + last_output = outputs[-1] + flat_last_output = nest.flatten(last_output) + flat_last_output = [ + array_ops.identity(output) for output in flat_last_output + ] + outputs[-1] = nest.pack_sequence_as( + structure=last_output, flat_sequence=flat_last_output) + + return (outputs, state) + + +def static_bidirectional_rnn(cell_fw, + cell_bw, + inputs, + initial_state_fw=None, + initial_state_bw=None, + dtype=None, + sequence_length=None, + scope=None): + """Creates a bidirectional recurrent neural network. + + Similar to the unidirectional case above (rnn) but takes input and builds + independent forward and backward RNNs with the final forward and backward + outputs depth-concatenated, such that the output will have the format + [time][batch][cell_fw.output_size + cell_bw.output_size]. The input_size of + forward and backward cell must match. 
+  The initial state for both directions is zero by default (but can be set
+  optionally) and no intermediate states are ever returned -- the network is
+  fully unrolled for the given (passed in) length(s) of the sequence(s) or
+  completely unrolled if length(s) is not given.
+
+  Args:
+    cell_fw: An instance of RNNCell, to be used for forward direction.
+    cell_bw: An instance of RNNCell, to be used for backward direction.
+    inputs: A length T list of inputs, each a tensor of shape
+      [batch_size, input_size], or a nested tuple of such elements.
+    initial_state_fw: (optional) An initial state for the forward RNN.
+      This must be a tensor of appropriate type and shape
+      `[batch_size, cell_fw.state_size]`.
+      If `cell_fw.state_size` is a tuple, this should be a tuple of
+      tensors having shapes `[batch_size, s] for s in cell_fw.state_size`.
+    initial_state_bw: (optional) Same as for `initial_state_fw`, but using
+      the corresponding properties of `cell_bw`.
+    dtype: (optional) The data type for the initial state. Required if
+      either of the initial states are not provided.
+    sequence_length: (optional) An int32/int64 vector, size `[batch_size]`,
+      containing the actual lengths for each of the sequences.
+    scope: VariableScope for the created subgraph; defaults to
+      "bidirectional_rnn"
+
+  Returns:
+    A tuple (outputs, output_state_fw, output_state_bw) where:
+      outputs is a length `T` list of outputs (one for each input), which
+        are depth-concatenated forward and backward outputs.
+      output_state_fw is the final state of the forward rnn.
+      output_state_bw is the final state of the backward rnn.
+
+  Raises:
+    TypeError: If `cell_fw` or `cell_bw` is not an instance of `RNNCell`.
+    ValueError: If inputs is None or an empty list.
+  """
+
+  if not _like_rnncell(cell_fw):
+    raise TypeError("cell_fw must be an instance of RNNCell")
+  if not _like_rnncell(cell_bw):
+    raise TypeError("cell_bw must be an instance of RNNCell")
+  if not nest.is_sequence(inputs):
+    raise TypeError("inputs must be a sequence")
+  if not inputs:
+    raise ValueError("inputs must not be empty")
+
+  with vs.variable_scope(scope or "bidirectional_rnn"):
+    # Forward direction
+    with vs.variable_scope("fw") as fw_scope:
+      output_fw, output_state_fw = static_rnn(
+          cell_fw,
+          inputs,
+          initial_state_fw,
+          dtype,
+          sequence_length,
+          scope=fw_scope)
+
+    # Backward direction
+    with vs.variable_scope("bw") as bw_scope:
+      reversed_inputs = _reverse_seq(inputs, sequence_length)
+      tmp, output_state_bw = static_rnn(
+          cell_bw,
+          reversed_inputs,
+          initial_state_bw,
+          dtype,
+          sequence_length,
+          scope=bw_scope)
+
+  output_bw = _reverse_seq(tmp, sequence_length)
+  # Concat each of the forward/backward outputs
+  flat_output_fw = nest.flatten(output_fw)
+  flat_output_bw = nest.flatten(output_bw)
+
+  flat_outputs = tuple(
+      array_ops.concat([fw, bw], 1)
+      for fw, bw in zip(flat_output_fw, flat_output_bw))
+
+  outputs = nest.pack_sequence_as(
+      structure=output_fw, flat_sequence=flat_outputs)
+
+  return (outputs, output_state_fw, output_state_bw)
+
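For orientation, a hedged usage sketch of the API above (not part of the patch; TF1 graph mode with toy shapes, BasicLSTMCell standing in for any RNNCell, and static_bidirectional_rnn assumed importable from this module):

import tensorflow as tf

T, batch_size, input_size, num_units = 5, 32, 16, 24
inputs = [tf.placeholder(tf.float32, [batch_size, input_size]) for _ in range(T)]
seq_len = tf.placeholder(tf.int32, [batch_size])

cell_fw = tf.nn.rnn_cell.BasicLSTMCell(num_units)
cell_bw = tf.nn.rnn_cell.BasicLSTMCell(num_units)
outputs, state_fw, state_bw = static_bidirectional_rnn(
    cell_fw, cell_bw, inputs, dtype=tf.float32, sequence_length=seq_len)
# outputs is a length-T list of [batch_size, 2 * num_units] tensors:
# the forward and backward outputs depth-concatenated, as documented above.

diff --git a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/train.py b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/train.py
index b2cc74f593dc4b65adc90774015b560aea69b212..eef575ad88a07180b178207497a18a768f51dda9 100644
--- a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/train.py
+++ b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/train.py
@@ -1,21 +1,45 @@
-from npu_bridge.estimator import npu_ops
-from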
tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig - +# +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from npu_bridge.npu_init import * import numpy from data_iterator import DataIterator import tensorflow as tf -# tf.enable_eager_execution() - -# from model import * -from model_general import * +from model import * import time import random import sys from utils import * -EMBEDDING_DIM = 64 -HIDDEN_SIZE = 16 * 2 -ATTENTION_SIZE = 16 * 2 +EMBEDDING_DIM = 18 +HIDDEN_SIZE = 18 * 2 +ATTENTION_SIZE = 18 * 2 best_auc = 0.0 def prepare_data(input, target, maxlen = None, return_neg = False): @@ -55,7 +79,7 @@ def prepare_data(input, target, maxlen = None, return_neg = False): return None, None, None, None n_samples = len(seqs_mid) - # maxlen_x = numpy.max(lengths_x) + #maxlen_x = numpy.max(lengths_x) if maxlen is not None: maxlen_x = maxlen else: @@ -73,7 +97,6 @@ def prepare_data(input, target, maxlen = None, return_neg = False): cat_his[idx, :lengths_x[idx]] = s_y noclk_mid_his[idx, :lengths_x[idx], :] = no_sx noclk_cat_his[idx, :lengths_x[idx], :] = no_sy - # lengths_x[idx] = maxlen uids = numpy.array([inp[0] for inp in input]) mids = numpy.array([inp[1] for inp in input]) @@ -86,8 +109,6 @@ def prepare_data(input, target, maxlen = None, return_neg = False): return uids, mids, cats, mid_his, cat_his, mid_mask, numpy.array(target), numpy.array(lengths_x) def eval(sess, test_data, model, model_path): - # import pdb - # pdb.set_trace() loss_sum = 0. accuracy_sum = 0. 
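A minimal, self-contained sketch of the padding contract prepare_data implements above (hypothetical toy batch, numpy only; not part of the patch): histories longer than maxlen keep their most recent maxlen items, shorter ones are zero-padded, and a 0/1 mask marks the real positions, mirroring mid_his and mid_mask.

import numpy

def pad_histories(seqs, maxlen):
    # Mimics prepare_data's history handling: keep the last `maxlen`
    # behaviours, zero-pad the rest, and build the 0/1 validity mask.
    his = numpy.zeros((len(seqs), maxlen), dtype="int64")
    mask = numpy.zeros((len(seqs), maxlen), dtype="float32")
    for i, s in enumerate(seqs):
        s = s[-maxlen:]
        his[i, :len(s)] = s
        mask[i, :len(s)] = 1.0
    return his, mask

his, mask = pad_histories([[3, 7], [5, 1, 2, 9, 4]], maxlen=4)
# his  -> [[3, 7, 0, 0], [1, 2, 9, 4]]  (second row keeps its last 4 items)
# mask -> [[1, 1, 0, 0], [1, 1, 1, 1]]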
@@ -95,9 +116,7 @@ def eval(sess, test_data, model, model_path):
     nums = 0
     stored_arr = []
     for src, tgt in test_data:
-        # print("!!!!!!", num)
         nums += 1
-        # uids, mids, cats, mid_his, cat_his, mid_mask, target, sl, noclk_mids, noclk_cats = prepare_data(src, tgt, return_neg=True)
         uids, mids, cats, mid_his, cat_his, mid_mask, target, sl, noclk_mids, noclk_cats = prepare_data(src, tgt, maxlen=100, return_neg=True)
         prob, loss, acc, aux_loss = model.calculate(sess, [uids, mids, cats, mid_his, cat_his, mid_mask, target, sl, noclk_mids, noclk_cats])
         loss_sum += loss
@@ -126,28 +145,43 @@ def train(
         batch_size = 128,
         maxlen = 100,
         test_iter = 100,
-        save_iter = 100,
+        save_iter = 5000,
         model_type = 'DNN',
-        seed = 2,
+        seed = 2,
     ):
     model_path = "dnn_save_path/ckpt_noshuff" + model_type + str(seed)
     best_model_path = "dnn_best_model/ckpt_noshuff" + model_type + str(seed)
-    tensorboard_path = "tensorboard_log"
     gpu_options = tf.GPUOptions(allow_growth=True)
+    sess_config = tf.ConfigProto(gpu_options=gpu_options)
+    custom_op = sess_config.graph_options.rewrite_options.custom_optimizers.add()
+    custom_op.name = 'NpuOptimizer'
+    custom_op.parameter_map["enable_data_pre_proc"].b = True
+    custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
+    # Configure MatMul to take fp16 inputs and produce fp32 outputs (dtype overrides in switch_config.txt)
+    custom_op.parameter_map["customize_dtypes"].s = tf.compat.as_bytes("switch_config.txt")
+
+    sess_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF
+    sess_config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF
+
-    config = tf.ConfigProto()
-    custom_op = config.graph_options.rewrite_options.custom_optimizers.add()
-    custom_op.name = "NpuOptimizer"
-    custom_op.parameter_map["use_off_line"].b = True  # run training on the Ascend AI processor
-    custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes('allow_mix_precision')
-    config.graph_options.rewrite_options.remapping = RewriterConfig.OFF  # disable the remap pass
-    # custom_op.parameter_map["enable_dump"].b = True
-    # custom_op.parameter_map["dump_path"].s = tf.compat.as_bytes("/data1/d00564369/dien/dump_mix")
-    # custom_op.parameter_map["dump_step"].s = tf.compat.as_bytes("0-5")
-    # custom_op.parameter_map["dump_mode"].s = tf.compat.as_bytes("all")
+    with tf.Session(config=npu_config_proto(config_proto=sess_config)) as sess:
-    with tf.Session(config=config) as sess:
         train_data = DataIterator(train_file, uid_voc, mid_voc, cat_voc, batch_size, maxlen, shuffle_each_epoch=False)
         test_data = DataIterator(test_file, uid_voc, mid_voc, cat_voc, batch_size, maxlen)
         n_uid, n_mid, n_cat = train_data.get_n()
@@ -176,60 +210,40 @@ train(
         sess.run(tf.global_variables_initializer())
         sess.run(tf.local_variables_initializer())
         sys.stdout.flush()
-        print('test_auc: %.4f ---- test_loss: %.4f ----
test_accuracy: %.4f ---- test_aux_loss: %.4f' % eval(sess, test_data, model, best_model_path)) + #print(' test_auc: %.4f ---- test_loss: %.4f ---- test_accuracy: %.4f ---- test_aux_loss: %.4f' % eval(sess, test_data, model, best_model_path)) sys.stdout.flush() - # summary_writer = tf.summary.FileWriter(tensorboard_path, tf.get_default_graph()) - - start = time.time() + #start_time = time.time() iter = 0 - start_epoch = 0 - start_iter = 0 - a = 0 - lr = 0.001 * 0.5**(start_epoch) - - for itr in range(start_epoch, 3): + lr = 0.001 + for itr in range(3): loss_sum = 0.0 accuracy_sum = 0. aux_loss_sum = 0. - # for src, tgt in train_data: - for iter, (src, tgt) in enumerate(train_data, start=start_iter): - if a > 500: + for src, tgt in train_data: + if iter > 500: pass start_time = time.time() uids, mids, cats, mid_his, cat_his, mid_mask, target, sl, noclk_mids, noclk_cats = prepare_data(src, tgt, maxlen, return_neg=True) loss, acc, aux_loss = model.train(sess, [uids, mids, cats, mid_his, cat_his, mid_mask, target, sl, lr, noclk_mids, noclk_cats]) end_time = time.time() - print("step_time:", end_time - start_time) - # tf.io.write_graph(sess.graph_def, '/data1/d00564369/dien-npu', 'train_graph.pbtxt') loss_sum += loss accuracy_sum += acc aux_loss_sum += aux_loss - a += 1 + iter += 1 sys.stdout.flush() if (iter % test_iter) == 0: - avg_time_per_step = (time.time() - start) / test_iter - #avg_examples_per_second = (test_iter * batch_size)/(time.time() - start) avg_examples_per_second = batch_size/(end_time - start_time) - print("avg_time_per_step: ", avg_time_per_step) print("avg_examples_per_second: ", avg_examples_per_second) - print("step_time:", end_time - start_time) - - print('[epoch: %d, iter: %d] ----> train_loss: %.4f ---- train_accuracy: %.4f ---- train_aux_loss: %.4f' % \ - (itr, iter, loss_sum / test_iter, accuracy_sum / test_iter, aux_loss_sum / test_iter)) - print('test_auc: %.4f ----test_loss: %.4f ---- test_accuracy: %.4f ---- test_aux_loss: %.4f' % eval(sess, test_data, model, best_model_path)) - # model.summary_op(summary_writer, summary_str, iter) - start = time.time() + print('iter: %d ----> train_loss: %.4f ---- train_accuracy: %.4f ---- train_aux_loss: %.4f ---- perf: %.4f' % \ + (iter, loss_sum / test_iter, accuracy_sum / test_iter, aux_loss_sum / test_iter, end_time-start_time)) + #print(' test_auc: %.4f ----test_loss: %.4f ---- test_accuracy: %.4f ---- test_aux_loss: %.4f' % eval(sess, test_data, model, best_model_path)) loss_sum = 0.0 accuracy_sum = 0.0 aux_loss_sum = 0.0 - #if (iter % save_iter) == 0: - if (iter % 10000) == 0: - print('save model epoch {}, iter: {}'.format(itr, iter)) - model.save(sess, model_path + "--" + str(itr)+"--"+str(iter)) - - start_iter = 0 - + if (iter % save_iter) == 0: + print('save model iter: %d' %(iter)) + model.save(sess, model_path+"--"+str(iter)) lr *= 0.5 def test( @@ -241,14 +255,13 @@ def test( batch_size = 128, maxlen = 100, model_type = 'DNN', - seed = 2 + seed = 2 ): model_path = "dnn_best_model/ckpt_noshuff" + model_type + str(seed) gpu_options = tf.GPUOptions(allow_growth=True) - with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) as sess: + with tf.Session(config=npu_config_proto(config_proto=tf.ConfigProto(gpu_options=gpu_options))) as sess: train_data = DataIterator(train_file, uid_voc, mid_voc, cat_voc, batch_size, maxlen) - # tofix, in test and eval stage, last batch cannot be discarded test_data = DataIterator(test_file, uid_voc, mid_voc, cat_voc, batch_size, maxlen) n_uid, n_mid, n_cat = train_data.get_n() if 
model_type == 'DNN': @@ -256,7 +269,7 @@ def test( elif model_type == 'PNN': model = Model_PNN(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE) elif model_type == 'Wide': - model = Model_WideDeep(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE) + model = Model_WideDeep(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE) elif model_type == 'DIN': model = Model_DIN(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE) elif model_type == 'DIN-V2-gru-att-gru': @@ -268,7 +281,7 @@ def test( elif model_type == 'DIN-V2-gru-vec-attGru': model = Model_DIN_V2_Gru_Vec_attGru(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE) elif model_type == 'DIEN': - model = Model_DIN_V2_Gru_Vec_attGru_Neg(batch_size, maxlen, n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE) + model = Model_DIN_V2_Gru_Vec_attGru_Neg(None, maxlen, n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE) else: print ("Invalid model_type : %s", model_type) return @@ -283,7 +296,6 @@ if __name__ == '__main__': tf.set_random_seed(SEED) numpy.random.seed(SEED) random.seed(SEED) - if sys.argv[1] == 'train': train(model_type=sys.argv[2], seed=SEED) elif sys.argv[1] == 'test': diff --git a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/utils.py b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/utils.py index 6c5d5a372e3165f89bc66f58873174dbabaf6571..8f5df0af14c0572bcb7f35d56df09c21bd3cfeef 100644 --- a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/utils.py +++ b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/script/utils.py @@ -1,13 +1,42 @@ +# +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from npu_bridge.npu_init import * import tensorflow as tf -# from tensorflow.python.ops.rnn_cell import * +#from tensorflow.python.ops.rnn_cell import * from tensorflow.contrib.rnn import * -from tensorflow.contrib.rnn.python.ops.rnn_cell import _Linear -# from tensorflow.python.ops import math_ops -# from tensorflow.python.ops import init_ops -# from tensorflow.python.ops import array_ops -# from tensorflow.python.ops import variable_scope as vs -import math - +from tensorflow.contrib.rnn.python.ops.rnn_cell import _Linear +from tensorflow import keras +from tensorflow.python.ops import math_ops +from tensorflow.python.ops import init_ops +from tensorflow.python.ops import array_ops +from tensorflow.python.ops import variable_scope as vs class QAAttGRUCell(RNNCell): """Gated Recurrent Unit cell (cf. http://arxiv.org/abs/1406.1078). @@ -30,8 +59,7 @@ class QAAttGRUCell(RNNCell): bias_initializer=None): super(QAAttGRUCell, self).__init__(_reuse=reuse) self._num_units = num_units - # self._activation = activation or math_ops.tanh - self._activation = activation or tf.tanh + self._activation = activation or math_ops.tanh self._kernel_initializer = kernel_initializer self._bias_initializer = bias_initializer self._gate_linear = None @@ -53,9 +81,8 @@ class QAAttGRUCell(RNNCell): if self._gate_linear is None: bias_ones = self._bias_initializer if self._bias_initializer is None: - # bias_ones = init_ops.constant_initializer(1.0, dtype=inputs.dtype) - bias_ones = tf.constant_initializer(1.0) - with tf.variable_scope("gates"): # Reset gate and update gate. + bias_ones = init_ops.constant_initializer(1.0, dtype=inputs.dtype) + with vs.variable_scope("gates"): # Reset gate and update gate. self._gate_linear = _Linear( [inputs, state], 2 * self._num_units, @@ -63,13 +90,12 @@ class QAAttGRUCell(RNNCell): bias_initializer=bias_ones, kernel_initializer=self._kernel_initializer) - # value = math_ops.sigmoid(self._gate_linear([inputs, state])) - value = tf.sigmoid(self._gate_linear([inputs, state])) - r, u = tf.split(value=value, num_or_size_splits=2, axis=1) + value = math_ops.sigmoid(self._gate_linear([inputs, state])) + r, u = array_ops.split(value=value, num_or_size_splits=2, axis=1) r_state = r * state if self._candidate_linear is None: - with tf.variable_scope("candidate"): + with vs.variable_scope("candidate"): self._candidate_linear = _Linear( [inputs, r_state], self._num_units, @@ -101,8 +127,7 @@ class VecAttGRUCell(RNNCell): bias_initializer=None): super(VecAttGRUCell, self).__init__(_reuse=reuse) self._num_units = num_units - # self._activation = activation or math_ops.tanh - self._activation = activation or tf.tanh + self._activation = activation or math_ops.tanh self._kernel_initializer = kernel_initializer self._bias_initializer = bias_initializer self._gate_linear = None @@ -122,9 +147,8 @@ class VecAttGRUCell(RNNCell): if self._gate_linear is None: bias_ones = self._bias_initializer if self._bias_initializer is None: - bias_ones = tf.constant_initializer(1.0) - # bias_ones = init_ops.constant_initializer(1.0, dtype=inputs.dtype) - with tf.variable_scope("gates"): # Reset gate and update gate. + bias_ones = init_ops.constant_initializer(1.0, dtype=inputs.dtype) + with vs.variable_scope("gates"): # Reset gate and update gate. 
self._gate_linear = _Linear( [inputs, state], 2 * self._num_units, @@ -132,13 +156,12 @@ class VecAttGRUCell(RNNCell): bias_initializer=bias_ones, kernel_initializer=self._kernel_initializer) - # value = math_ops.sigmoid(self._gate_linear([inputs, state])) - value = tf.sigmoid(self._gate_linear([inputs, state])) - r, u = tf.split(value=value, num_or_size_splits=2, axis=1) + value = math_ops.sigmoid(self._gate_linear([inputs, state])) + r, u = array_ops.split(value=value, num_or_size_splits=2, axis=1) r_state = r * state if self._candidate_linear is None: - with tf.variable_scope("candidate"): + with vs.variable_scope("candidate"): self._candidate_linear = _Linear( [inputs, r_state], self._num_units, @@ -146,82 +169,10 @@ class VecAttGRUCell(RNNCell): bias_initializer=self._bias_initializer, kernel_initializer=self._kernel_initializer) c = self._activation(self._candidate_linear([inputs, r_state])) - print("????????????????? att", att_score.get_shape().as_list()) - print("????????????????? u", u.get_shape().as_list()) u = (1.0 - att_score) * u new_h = u * state + (1 - u) * c return new_h, new_h -class VecAttGRUCellV2(RNNCell): - """Gated Recurrent Unit cell (cf. http://arxiv.org/abs/1406.1078). - Args: - num_units: int, The number of units in the GRU cell. - activation: Nonlinearity to use. Default: `tanh`. - reuse: (optional) Python boolean describing whether to reuse variables - in an existing scope. If not `True`, and the existing scope already has - the given variables, an error is raised. - kernel_initializer: (optional) The initializer to use for the weight and - projection matrices. - bias_initializer: (optional) The initializer to use for the bias. - """ - - def __init__(self, - num_units, - activation=None, - reuse=None, - kernel_initializer=None, - bias_initializer=None): - super(VecAttGRUCell, self).__init__(_reuse=reuse) - self._num_units = num_units - # self._activation = activation or math_ops.tanh - self._activation = activation or tf.tanh - self._kernel_initializer = kernel_initializer - self._bias_initializer = bias_initializer - self._gate_linear = None - self._candidate_linear = None - - @property - def state_size(self): - return self._num_units - - @property - def output_size(self): - return self._num_units - def __call__(self, inputs, state, att_score): - return self.call(inputs, state, att_score) - def call(self, inputs, state, att_score=None): - """Gated recurrent unit (GRU) with nunits cells.""" - stdv = 1.0 / math.sqrt(self._num_units) - with tf.variable_scope("input_gates"): # Reset gate and update gate. - self._input_gate_linear = _Linear( - inputs, - 3 * self._num_units, - True, - bias_initializer=init_ops.random_uniform_initializer(-stdv, stdv), - kernel_initializer=init_ops.random_uniform_initializer(-stdv, stdv)) - with tf.variable_scope("hidden_gates"): # Reset gate and update gate. 
-            self._hidden_gate_linear = _Linear(
-                state,
-                3 * self._num_units,
-                True,
-                bias_initializer=init_ops.random_uniform_initializer(-stdv, stdv),
-                kernel_initializer=init_ops.random_uniform_initializer(-stdv, stdv))
-
-        # value = math_ops.sigmoid(self._gate_linear([inputs, state]))
-        input_value = self._input_gate_linear(inputs)
-        hidden_value = self._hidden_gate_linear(state)
-        i_i, i_r, i_n = tf.split(value=input_value, num_or_size_splits=3, axis=1)
-        h_i, h_r, h_n = tf.split(value=hidden_value, num_or_size_splits=3, axis=1)
-
-        i_t = tf.sigmoid(i_i + h_i)
-        u_t = (1.0 - att_score) * i_t
-        r_t = tf.sigmoid(i_r + h_r)
-        n_t = self._activation(i_n + r_t * h_n)
-        new_h = u_t * state + (1 - u_t) * n_t
-
-        return new_h, new_h
-
-
 def prelu(_x, scope=''):
     """parametric ReLU activation"""
     with tf.variable_scope(name_or_scope=scope, default_name="prelu"):
@@ -274,7 +225,7 @@ def attention(query, facts, attention_size, mask, stag='null', mode='LIST', soft
 
     if time_major:
         # (T,B,D) => (B,T,D)
-        facts = tf.transpose(facts, [1, 0, 2])
+        facts = array_ops.transpose(facts, [1, 0, 2])
     mask = tf.equal(mask, tf.ones_like(mask))
     hidden_size = facts.get_shape().as_list()[-1]  # D value - hidden size of the RNN layer
 
@@ -324,7 +275,7 @@ def din_attention(query, facts, attention_size, mask, stag='null', mode='SUM', s
 
     if time_major:
         # (T,B,D) => (B,T,D)
-        facts = tf.transpose(facts, [1, 0, 2])
+        facts = array_ops.transpose(facts, [1, 0, 2])
     mask = tf.equal(mask, tf.ones_like(mask))
     facts_size = facts.get_shape().as_list()[-1]  # D value - hidden size of the RNN layer
     querry_size = query.get_shape().as_list()[-1]
@@ -368,7 +319,7 @@ def din_fcn_attention(query, facts, attention_size, mask, stag='null', mode='SUM
 
     if time_major:
         # (T,B,D) => (B,T,D)
-        facts = tf.transpose(facts, [1, 0, 2])
+        facts = array_ops.transpose(facts, [1, 0, 2])
     # Trainable parameters
     mask = tf.equal(mask, tf.ones_like(mask))
     facts_size = facts.get_shape().as_list()[-1]  # D value - hidden size of the RNN layer
@@ -464,7 +415,7 @@ def din_fcn_shine(query, facts, attention_size, mask, stag='null', mode='SUM', s
 
     if time_major:
         # (T,B,D) => (B,T,D)
-        facts = tf.transpose(facts, [1, 0, 2])
+        facts = array_ops.transpose(facts, [1, 0, 2])
     # Trainable parameters
     mask = tf.equal(mask, tf.ones_like(mask))
     facts_size = facts.get_shape().as_list()[-1]  # D value - hidden size of the RNN layer
@@ -480,3 +431,4 @@ def din_fcn_shine(query, facts, attention_size, mask, stag='null', mode='SUM', s
 
     output = d_layer_2_all
     return output
+
diff --git a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/switch_config.txt b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/switch_config.txt
new file mode 100644
index 0000000000000000000000000000000000000000..995f4cea2905f7a19343d8bf259579c3d4b4bca5
--- /dev/null
+++ b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/switch_config.txt
@@ -0,0 +1,3 @@
+OpType::MatMulV2:InputDtype:float16,float16,float32,OutputDtype:float32
+OpType::BatchMatMul:InputDtype:float16,float16,OutputDtype:float32
+OpType::BatchMatMulV2:InputDtype:float16,float16,OutputDtype:float32
\ No newline at end of file
diff --git a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/test/train_full_1p.sh
index ecbd254578c47a73f2f127a173e509ffad884af4..81b63f390314053587c00bb31270fd67647b31b6 100644
--- a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/test/train_full_1p.sh
+++
b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/test/train_full_1p.sh
@@ -125,13 +125,12 @@ e2e_time=$(( $end_time - $start_time ))
 
 # Print the results; no modification needed
 echo "------------------ Final result ------------------"
 # # Output performance (FPS); review and adapt per model
-Time=`grep perf $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $14}'`
-FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${Time}'}'`
+FPS=`grep avg_examples_per_second $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F ":" 'END{print $2}'|sed s/[[:space:]]//g`
 # Print; no modification needed
 echo "Final Performance item/sec : $FPS"
 
 # Output training accuracy; review and adapt per model
-train_accuracy=`grep "train_accuracy" ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'`
+train_accuracy=`grep "train_accuracy" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "train_accuracy:" 'END{print $2}' | awk -F ' ' '{print $1}' |sed s/[[:space:]]//g`
 # Print; no modification needed
 echo "Final Train Accuracy : ${train_accuracy}"
 echo "E2E Training Duration sec : $e2e_time"
@@ -149,7 +148,7 @@ ActualFPS=${FPS}
 TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'/'${FPS}'}'`
 
 # Extract loss values from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
-grep train_loss $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print $5}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+grep "train_loss" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "train_loss:" '{print $2}' | awk -F ' ' '{print $1}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
 # Loss of the last iteration; no modification needed
 ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
 
diff --git a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/test/train_performance_1p.sh
index c9355df632285401a50ef3dcf58b08874b6f678f..bfc9a75e275ec7849269e3a67f98497511b6b9e4 100644
--- a/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/test/train_performance_1p.sh
+++ b/TensorFlow/built-in/recommendation/DIEN_ID3065_for_TensorFlow/test/train_performance_1p.sh
@@ -128,13 +128,12 @@ sed -i "s|break|pass|g" train.py
 
 # Print the results; no modification needed
 echo "------------------ Final result ------------------"
 # # Output performance (FPS); review and adapt per model
-Time=`grep perf $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $14}'`
-FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${Time}'}'`
+FPS=`grep avg_examples_per_second $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F ":" 'END{print $2}' |sed s/[[:space:]]//g`
 # Print; no modification needed
 echo "Final Performance item/sec : $FPS"
 
 # Output training accuracy; review and adapt per model
-train_accuracy=`grep "train_accuracy" ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'`
+train_accuracy=`grep "train_accuracy" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "train_accuracy:" 'END{print $2}' | awk -F ' ' '{print $1}'|sed s/[[:space:]]//g`
 # Print; no modification needed
 echo "Final Train Accuracy : ${train_accuracy}"
 echo "E2E Training Duration sec : $e2e_time"
@@ -152,46 +151,20 @@ ActualFPS=${FPS}
 TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'/'${FPS}'}'`
 
 # Extract loss values from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
-grep train_loss $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print $5}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+grep "train_loss"
$cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "train_loss:" '{print $2}' | awk -F ' ' '{print $1}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
 # Loss of the last iteration; no modification needed
 ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
 
 # Print key information into ${CaseName}.log; no modification needed
-#echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-#echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-#echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-#echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-#echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-#echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-#echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-#echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-#echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-#echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-
-## Collect error information
-# System error message
-error_msg="cannot import name 'DynamicAUGRU' from 'npu_bridge.estimator.npu.npu_dynamic_rnn'"
-# Check whether the error message matches the known historical failure; no modification needed here
-Status=`grep "${error_msg}" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | wc -l`
-# Failure stage, one of: graph preparation FAIL / graph split FAIL / graph optimization FAIL / graph compilation FAIL / graph execution FAIL / pipeline OK
-ModelStatus="graph execution FAIL"
-# DTS ticket number or issue link
-DTS_Number="AR0001TRFQ Florence V100R001C25_SF_02_020_SR_0800_AR_0001
-AR0001TRFR Florence V100R001C25_SF_02_020_SR_0800_AR_0002
-AR0001TRFS Florence V100R001C25_SF_02_020_SR_0800_AR_0003
-AR0001TRFT Florence V100R001C25_SF_02_020_SR_0801_AR_0001
-AR0001TRFU Florence V100R001C25_SF_02_020_SR_0801_AR_0002"
-
-# Print key information into CaseName.log; no modification needed here
 echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-echo "RankSize = ${RankSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
 echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
 echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-echo "ModelStatus = ${ModelStatus}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-echo "DTS_Number = ${DTS_Number}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-echo "Status = ${Status}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-echo "error_msg = ${error_msg}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
 echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
\ No newline at end of file
+
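Both scripts key on the literal field labels train.py now prints; a hedged self-check of that contract (plain Python, with a hypothetical sample line that follows the print format in script/train.py):

line = ("iter: 100 ----> train_loss: 0.6931 ---- train_accuracy: 0.7421 "
        "---- train_aux_loss: 0.6931 ---- perf: 0.0420")
# Mirrors: awk -F "train_accuracy:" '{print $2}' | awk -F ' ' '{print $1}'
value = line.split("train_accuracy:")[-1].split()[0]
assert value == "0.7421"

diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/__init__.py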
b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/__init__.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/__init__.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/__init__.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/contrib/__init__.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/contrib/__init__.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/contrib/__init__.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/contrib/__init__.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/contrib/rnn.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/contrib/rnn.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/contrib/rnn.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/contrib/rnn.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/contrib/rnn_v2.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/contrib/rnn_v2.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/contrib/rnn_v2.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/contrib/rnn_v2.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/contrib/utils.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/contrib/utils.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/contrib/utils.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/contrib/utils.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/__init__.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/__init__.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/__init__.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/__init__.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/feature_column.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/feature_column.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/feature_column.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/feature_column.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/inputs.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/inputs.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/inputs.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/inputs.py diff --git 
a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/__init__.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/__init__.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/__init__.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/__init__.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/afm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/afm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/afm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/afm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/autoint.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/autoint.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/autoint.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/autoint.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/ccpm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/ccpm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/ccpm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/ccpm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/dcn.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/dcn.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/dcn.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/dcn.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/deepfefm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/deepfefm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/deepfefm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/deepfefm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/deepfm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/deepfm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/deepfm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/deepfm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/fibinet.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/fibinet.py similarity 
index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/fibinet.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/fibinet.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/fnn.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/fnn.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/fnn.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/fnn.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/fwfm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/fwfm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/fwfm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/fwfm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/nfm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/nfm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/nfm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/nfm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/pnn.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/pnn.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/pnn.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/pnn.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/wdl.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/wdl.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/wdl.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/wdl.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/xdeepfm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/xdeepfm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/models/xdeepfm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/models/xdeepfm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/utils.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/utils.py similarity index 98% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/utils.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/utils.py index 
187354dd58ea20dc10819c825b047677dd920aab..639622f731b9f80c3b9e6922ae1e0c75bcab62a7 100644
--- a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/estimator/utils.py
+++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/estimator/utils.py
@@ -138,12 +138,13 @@ class Head(_Head):
             training_loss = loss + reg_loss
 
         eval_metric_ops = self._eval_metric_ops(labels, logits, pred, unweighted_loss)
-
+        training_hook = tf.train.LoggingTensorHook({"loss": training_loss}, every_n_iter=1)
         return tf.estimator.EstimatorSpec(
             mode=mode,
             predictions=predictions,
             loss=training_loss,
             train_op=train_op_fn(training_loss),
+            training_hooks=[training_hook],
             eval_metric_ops=eval_metric_ops,
             training_chief_hooks=training_chief_hooks)
 
diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/feature_column.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/feature_column.py
similarity index 100%
rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/feature_column.py
rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/feature_column.py
diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/inputs.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/inputs.py
similarity index 100%
rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/inputs.py
rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/inputs.py
diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/__init__.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/__init__.py
similarity index 100%
rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/__init__.py
rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/__init__.py
diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/activation.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/activation.py
similarity index 100%
rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/activation.py
rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/activation.py
diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/core.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/core.py
similarity index 100%
rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/core.py
rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/core.py
diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/interaction.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/interaction.py
similarity index 100%
rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/interaction.py
rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/interaction.py
diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/normalization.py
b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/normalization.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/normalization.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/normalization.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/sequence.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/sequence.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/sequence.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/sequence.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/utils.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/utils.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/layers/utils.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/layers/utils.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/__init__.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/__init__.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/__init__.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/__init__.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/afm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/afm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/afm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/afm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/autoint.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/autoint.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/autoint.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/autoint.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/ccpm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/ccpm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/ccpm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/ccpm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/dcn.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/dcn.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/dcn.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/dcn.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/dcnmix.py 
b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/dcnmix.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/dcnmix.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/dcnmix.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/deepfefm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/deepfefm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/deepfefm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/deepfefm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/deepfm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/deepfm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/deepfm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/deepfm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/difm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/difm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/difm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/difm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/fgcnn.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/fgcnn.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/fgcnn.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/fgcnn.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/fibinet.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/fibinet.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/fibinet.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/fibinet.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/flen.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/flen.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/flen.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/flen.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/fnn.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/fnn.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/fnn.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/fnn.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/fwfm.py 
b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/fwfm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/fwfm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/fwfm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/ifm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/ifm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/ifm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/ifm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/mlr.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/mlr.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/mlr.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/mlr.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/multitask/__init__.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/multitask/__init__.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/multitask/__init__.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/multitask/__init__.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/multitask/esmm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/multitask/esmm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/multitask/esmm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/multitask/esmm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/multitask/mmoe.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/multitask/mmoe.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/multitask/mmoe.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/multitask/mmoe.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/multitask/ple.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/multitask/ple.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/multitask/ple.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/multitask/ple.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/multitask/sharedbottom.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/multitask/sharedbottom.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/multitask/sharedbottom.py rename to 
TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/multitask/sharedbottom.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/nfm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/nfm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/nfm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/nfm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/onn.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/onn.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/onn.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/onn.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/pnn.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/pnn.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/pnn.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/pnn.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/sequence/__init__.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/sequence/__init__.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/sequence/__init__.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/sequence/__init__.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/sequence/bst.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/sequence/bst.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/sequence/bst.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/sequence/bst.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/sequence/dien.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/sequence/dien.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/sequence/dien.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/sequence/dien.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/sequence/din.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/sequence/din.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/sequence/din.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/sequence/din.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/sequence/dsin.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/sequence/dsin.py similarity index 100% rename from 
TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/sequence/dsin.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/sequence/dsin.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/wdl.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/wdl.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/wdl.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/wdl.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/xdeepfm.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/xdeepfm.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/models/xdeepfm.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/models/xdeepfm.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/utils.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/utils.py similarity index 100% rename from TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/deepctr/utils.py rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/deepctr/utils.py diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/movielens_sample.txt b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/movielens_sample.txt deleted file mode 100644 index 9ffa14824b834e57a9aed7e616a2a3fc8785c734..0000000000000000000000000000000000000000 --- a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/movielens_sample.txt +++ /dev/null @@ -1,201 +0,0 @@ -user_id,movie_id,rating,timestamp,title,genres,gender,age,occupation,zip -3299,235,4,968035345,Ed Wood (1994),Comedy|Drama,F,25,4,19119 -3630,3256,3,966536874,Patriot Games (1992),Action|Thriller,M,18,4,77005 -517,105,4,976203603,"Bridges of Madison County, The (1995)",Drama|Romance,F,25,14,55408 -785,2115,3,975430389,Indiana Jones and the Temple of Doom (1984),Action|Adventure,M,18,19,29307 -5848,909,5,957782527,"Apartment, The (1960)",Comedy|Drama,M,50,20,20009 -2996,2799,1,972769867,Problem Child 2 (1991),Comedy,M,18,0,63011 -3087,837,5,969738869,Matilda (1996),Children's|Comedy,F,1,1,90802 -872,3092,5,975273310,Chushingura (1962),Drama,M,50,1,20815 -4094,529,5,966223349,Searching for Bobby Fischer (1993),Drama,M,25,17,49017 -1868,3508,3,974694703,"Outlaw Josey Wales, The (1976)",Western,M,50,11,92346 -2913,1387,5,971769808,Jaws (1975),Action|Horror,F,35,20,98119 -380,3481,5,976316283,High Fidelity (2000),Comedy,M,25,2,92024 -2073,1784,5,974759084,As Good As It Gets (1997),Comedy|Drama,F,18,4,13148 -80,2059,3,977788576,"Parent Trap, The (1998)",Children's|Drama,M,56,1,49327 -3679,2557,1,976298130,I Stand Alone (Seul contre tous) (1998),Drama,M,25,4,68108 -2077,788,3,980013556,"Nutty Professor, The (1996)",Comedy|Fantasy|Romance|Sci-Fi,M,18,0,55112 -6036,2085,4,956716684,101 Dalmatians (1961),Animation|Children's,F,25,15,32603 -3675,532,3,966363610,Serial Mom (1994),Comedy|Crime|Horror,M,35,7,06680 -4566,3683,4,964489599,Blood Simple (1984),Drama|Film-Noir,M,35,17,19473 -2996,3763,3,972413564,F/X (1986),Action|Crime|Thriller,M,18,0,63011 -5831,2458,1,957898337,Armed and Dangerous 
(1986),Comedy|Crime,M,25,1,92120 -1869,1244,2,974695654,Manhattan (1979),Comedy|Drama|Romance,M,45,14,95148 -5389,2657,3,960328279,"Rocky Horror Picture Show, The (1975)",Comedy|Horror|Musical|Sci-Fi,M,45,7,01905 -1391,1535,3,974851275,Love! Valour! Compassion! (1997),Drama|Romance,M,35,15,20723 -3123,2407,3,969324381,Cocoon (1985),Comedy|Sci-Fi,M,25,2,90401 -4694,159,3,963602574,Clockers (1995),Drama,M,56,7,40505 -1680,1988,3,974709821,Hello Mary Lou: Prom Night II (1987),Horror,M,25,20,95380 -2002,1945,4,974677761,On the Waterfront (1954),Crime|Drama,F,56,13,02136-1522 -3430,2690,4,979949863,"Ideal Husband, An (1999)",Comedy,F,45,1,15208 -425,471,4,976284972,"Hudsucker Proxy, The (1994)",Comedy|Romance,M,25,12,55303 -1841,2289,2,974699637,"Player, The (1992)",Comedy|Drama,M,18,0,95037 -4964,2348,4,962619587,Sid and Nancy (1986),Drama,M,35,0,94110 -4520,2160,4,964883648,Rosemary's Baby (1968),Horror|Thriller,M,25,4,45810 -1265,2396,4,1011716691,Shakespeare in Love (1998),Comedy|Romance,F,18,20,49321 -2496,1278,5,974435324,Young Frankenstein (1974),Comedy|Horror,M,50,1,37932 -5511,2174,4,959787754,Beetlejuice (1988),Comedy|Fantasy,M,45,1,92407 -621,833,1,975799925,High School High (1996),Comedy,M,18,4,93560 -3045,2762,5,970189524,"Sixth Sense, The (1999)",Thriller,M,45,1,90631 -2050,2546,4,975522689,"Deep End of the Ocean, The (1999)",Drama,F,35,3,99504 -613,32,4,975812238,Twelve Monkeys (1995),Drama|Sci-Fi,M,35,20,10562 -366,1077,5,978471241,Sleeper (1973),Comedy|Sci-Fi,M,50,15,55126 -5108,367,4,962338215,"Mask, The (1994)",Comedy|Crime|Fantasy,F,25,9,93940 -4502,1960,4,965094644,"Last Emperor, The (1987)",Drama|War,M,50,0,01379 -5512,1801,5,959713840,"Man in the Iron Mask, The (1998)",Action|Drama|Romance,F,25,17,01701 -1861,2642,2,974699627,Superman III (1983),Action|Adventure|Sci-Fi,M,50,16,92129 -1667,1240,4,975016698,"Terminator, The (1984)",Action|Sci-Fi|Thriller,M,50,16,98516 -753,434,3,975460449,Cliffhanger (1993),Action|Adventure|Crime,M,1,10,42754 -1836,2736,5,974826228,Brighton Beach Memoirs (1986),Comedy,M,25,0,10016 -5626,474,5,959052158,In the Line of Fire (1993),Action|Thriller,M,56,16,32043 -1601,1396,4,978576948,Sneakers (1992),Crime|Drama|Sci-Fi,M,25,12,83001 -4725,1100,4,963369546,Days of Thunder (1990),Action|Romance,M,35,5,96707-1321 -2837,2396,5,972571456,Shakespeare in Love (1998),Comedy|Romance,M,18,0,49506 -1776,3882,4,1001558470,Bring It On (2000),Comedy,M,25,0,45801 -2820,457,2,972662398,"Fugitive, The (1993)",Action|Thriller,F,35,0,02138 -1834,2288,3,1038179198,"Thing, The (1982)",Action|Horror|Sci-Fi|Thriller,M,35,5,10990 -284,2716,4,976570902,Ghostbusters (1984),Comedy|Horror,M,25,12,91910 -2744,588,1,973215985,Aladdin (1992),Animation|Children's|Comedy|Musical,M,18,17,53818 -881,4,2,975264028,Waiting to Exhale (1995),Comedy|Drama,M,18,14,76401 -2211,916,3,974607067,Roman Holiday (1953),Comedy|Romance,M,45,6,01950 -2271,2671,4,1007158806,Notting Hill (1999),Comedy|Romance,M,50,14,13210 -1010,2953,1,975222613,Home Alone 2: Lost in New York (1992),Children's|Comedy,M,25,0,10310 -1589,2594,4,974735454,Open Your Eyes (Abre los ojos) (1997),Drama|Romance|Sci-Fi,M,25,0,95136 -1724,597,5,976441106,Pretty Woman (1990),Comedy|Romance,M,18,4,00961 -2590,2097,3,973840056,Something Wicked This Way Comes (1983),Children's|Horror,M,18,4,94044 -1717,1352,3,1009256707,Albino Alligator (1996),Crime|Thriller,F,50,6,30307 -1391,3160,2,974850796,Magnolia (1999),Drama,M,35,15,20723 -1941,1263,3,974954220,"Deer Hunter, The (1978)",Drama|War,M,35,17,94550 
-3526,2867,4,966906064,Fright Night (1985),Comedy|Horror,M,35,2,62263-3004 -5767,198,3,958192148,Strange Days (1995),Action|Crime|Sci-Fi,M,25,2,75287 -5355,590,4,960596927,Dances with Wolves (1990),Adventure|Drama|Western,M,56,0,78232 -5788,156,4,958108785,Blue in the Face (1995),Comedy,M,25,0,92646 -1078,1307,4,974938851,When Harry Met Sally... (1989),Comedy|Romance,F,45,9,95661 -3808,61,2,965973222,Eye for an Eye (1996),Drama|Thriller,M,25,7,60010 -974,3897,4,975106398,Almost Famous (2000),Comedy|Drama,M,35,19,94930 -5153,1290,4,961972292,Some Kind of Wonderful (1987),Drama|Romance,M,25,7,60046 -5732,2115,3,958434069,Indiana Jones and the Temple of Doom (1984),Action|Adventure,F,25,11,02111 -4627,2478,3,964110136,Three Amigos! (1986),Comedy|Western,M,56,1,45224 -1884,1831,2,975648062,Lost in Space (1998),Action|Sci-Fi|Thriller,M,45,20,93108 -4284,517,4,965277546,Rising Sun (1993),Action|Drama|Mystery,M,50,7,40601 -1383,468,2,975979732,"Englishman Who Went Up a Hill, But Came Down a Mountain, The (1995)",Comedy|Romance,F,25,7,19806 -2230,2873,3,974599097,Lulu on the Bridge (1998),Drama|Mystery|Romance,F,45,1,60302 -2533,2266,4,974055724,"Butcher's Wife, The (1991)",Comedy|Romance,F,25,3,49423 -6040,3224,5,956716750,Woman in the Dunes (Suna no onna) (1964),Drama,M,25,6,11106 -4384,2918,5,965171739,Ferris Bueller's Day Off (1986),Comedy,M,25,0,43623 -5156,3688,3,961946487,Porky's (1981),Comedy,M,18,14,10024 -615,296,3,975805801,Pulp Fiction (1994),Crime|Drama,M,50,17,32951 -2753,3045,3,973198964,Peter's Friends (1992),Comedy|Drama,F,50,20,27516 -2438,1125,5,974259943,"Return of the Pink Panther, The (1974)",Comedy,M,35,1,22903 -5746,1242,4,958354460,Glory (1989),Action|Drama|War,M,18,15,94061 -5157,3462,5,961944604,Modern Times (1936),Comedy,M,35,1,74012 -3402,1252,5,967433929,Chinatown (1974),Film-Noir|Mystery|Thriller,M,35,20,30306 -76,593,5,977847255,"Silence of the Lambs, The (1991)",Drama|Thriller,M,35,7,55413 -2067,1019,3,974658834,"20,000 Leagues Under the Sea (1954)",Adventure|Children's|Fantasy|Sci-Fi,M,50,16,06430 -2181,2020,3,979353437,Dangerous Liaisons (1988),Drama|Romance,M,25,0,45245 -3947,593,5,965691680,"Silence of the Lambs, The (1991)",Drama|Thriller,M,25,0,90019 -546,218,4,976069421,Boys on the Side (1995),Comedy|Drama,F,25,0,37211 -1246,3030,5,1032056405,Yojimbo (1961),Comedy|Drama|Western,M,18,4,98225 -4214,3186,5,965319143,"Girl, Interrupted (1999)",Drama,F,25,0,20121 -2841,680,3,982805796,Alphaville (1965),Sci-Fi,M,50,12,98056 -4205,3175,4,965321085,Galaxy Quest (1999),Adventure|Comedy|Sci-Fi,F,25,15,87801 -1120,1097,4,974911354,E.T. the Extra-Terrestrial (1982),Children's|Drama|Fantasy|Sci-Fi,M,18,4,95616 -5371,3194,3,960481000,"Way We Were, The (1973)",Drama,M,25,11,55408 -2695,1278,5,973310827,Young Frankenstein (1974),Comedy|Horror,M,35,11,46033 -3312,520,2,976673070,Robin Hood: Men in Tights (1993),Comedy,F,18,4,90039 -5039,1792,1,962513044,U.S. Marshalls (1998),Action|Thriller,F,35,4,97068 -4655,2146,3,963903103,St. 
Elmo's Fire (1985),Drama|Romance,F,25,1,92037 -3558,1580,5,966802528,Men in Black (1997),Action|Adventure|Comedy|Sci-Fi,M,18,17,66044 -506,3354,1,976208080,Mission to Mars (2000),Sci-Fi,M,25,16,55103-1006 -3568,1230,3,966745594,Annie Hall (1977),Comedy|Romance,M,25,0,98503 -2943,1197,5,971319983,"Princess Bride, The (1987)",Action|Adventure|Comedy|Romance,M,35,12,95864 -716,737,3,982881364,Barb Wire (1996),Action|Sci-Fi,M,18,4,98188 -5964,454,3,956999469,"Firm, The (1993)",Drama|Thriller,M,18,5,97202 -4802,1208,4,996034747,Apocalypse Now (1979),Drama|War,M,56,1,40601 -1106,3624,4,974920622,Shanghai Noon (2000),Action,M,18,4,90241 -3410,2565,3,967419652,"King and I, The (1956)",Musical,M,35,1,20653 -1273,3095,5,974814536,"Grapes of Wrath, The (1940)",Drama,M,35,2,19123 -1706,1916,4,974709448,Buffalo 66 (1998),Action|Comedy|Drama,M,25,20,19134 -4889,590,5,962909224,Dances with Wolves (1990),Adventure|Drama|Western,M,18,4,63108 -4966,2100,3,962609782,Splash (1984),Comedy|Fantasy|Romance,M,50,14,55407 -4238,1884,4,965343416,Fear and Loathing in Las Vegas (1998),Comedy|Drama,M,35,16,44691 -5365,1042,3,960502974,That Thing You Do! (1996),Comedy,M,18,12,90250 -415,1302,3,977501743,Field of Dreams (1989),Drama,F,35,0,55406 -4658,1009,5,963966553,Escape to Witch Mountain (1975),Adventure|Children's|Fantasy,M,25,4,99163 -854,345,3,975357801,"Adventures of Priscilla, Queen of the Desert, The (1994)",Comedy|Drama,F,25,16,44092 -2857,436,4,972509362,Color of Night (1994),Drama|Thriller,M,25,0,10469 -1835,1330,4,974878241,April Fool's Day (1986),Comedy|Horror,M,25,19,11501 -1321,2240,3,974778494,My Bodyguard (1980),Drama,F,25,14,34639 -3274,3698,2,979767184,"Running Man, The (1987)",Action|Adventure|Sci-Fi,M,25,20,02062 -5893,2144,3,957470619,Sixteen Candles (1984),Comedy,M,25,7,02139 -3436,2724,3,967328026,Runaway Bride (1999),Comedy|Romance,M,35,0,98503 -3315,2918,5,967942960,Ferris Bueller's Day Off (1986),Comedy,M,25,12,78731 -5056,2700,5,962488280,"South Park: Bigger, Longer and Uncut (1999)",Animation|Comedy,M,45,1,16673 -5256,208,2,961271616,Waterworld (1995),Action|Adventure,M,25,16,30269 -4290,1193,4,965274348,One Flew Over the Cuckoo's Nest (1975),Drama,M,25,17,98661 -1010,1379,2,975220259,Young Guns II (1990),Action|Comedy|Western,M,25,0,10310 -829,904,4,975368038,Rear Window (1954),Mystery|Thriller,M,1,19,53711 -5953,480,4,957143581,Jurassic Park (1993),Action|Adventure|Sci-Fi,M,1,10,21030 -4732,3016,4,963332896,Creepshow (1982),Horror,M,25,14,24450 -4815,3181,5,972240802,Titus (1999),Drama,F,50,18,04849 -1164,1894,2,1004486985,Six Days Seven Nights (1998),Adventure|Comedy|Romance,F,25,19,90020 -4373,3167,5,965180829,Carnal Knowledge (1971),Drama,M,50,12,32920 -5293,1374,4,961055887,Star Trek: The Wrath of Khan (1982),Action|Adventure|Sci-Fi,M,25,12,95030 -1579,3101,4,981272057,Fatal Attraction (1987),Thriller,M,25,0,60201 -2600,3147,5,973804787,"Green Mile, The (1999)",Drama|Thriller,M,25,14,19312 -1283,480,4,974793389,Jurassic Park (1993),Action|Adventure|Sci-Fi,F,18,1,94607 -3242,3062,5,968341175,"Longest Day, The (1962)",Action|Drama|War,M,50,13,94089 -3618,3374,3,967116272,Daughters of the Dust (1992),Drama,M,56,17,22657 -3762,1337,4,966434517,"Body Snatcher, The (1945)",Horror,M,50,6,11746 -1015,1184,3,975018699,Mediterraneo (1991),Comedy|War,M,35,3,11220 -4645,2344,5,963976808,Runaway Train (1985),Action|Adventure|Drama|Thriller,F,50,6,48094 -3184,1397,4,968709039,Bastard Out of Carolina (1996),Drama,F,25,18,21214 -1285,1794,4,974833328,Love and Death on Long Island 
(1997),Comedy|Drama,M,35,4,98125 -5521,3354,2,959833154,Mission to Mars (2000),Sci-Fi,F,25,6,02118 -1472,2278,3,974767792,Ronin (1998),Action|Crime|Thriller,M,25,7,90248 -5630,21,4,980085414,Get Shorty (1995),Action|Comedy|Drama,M,35,17,06854 -3710,3033,5,966272980,Spaceballs (1987),Comedy|Sci-Fi,M,1,10,02818 -192,761,1,977028390,"Phantom, The (1996)",Adventure,M,18,1,10977 -1285,1198,5,974880310,Raiders of the Lost Ark (1981),Action|Adventure,M,35,4,98125 -2174,1046,4,974613044,Beautiful Thing (1996),Drama|Romance,M,50,12,87505 -635,1270,4,975768106,Back to the Future (1985),Comedy|Sci-Fi,M,56,17,33785 -910,412,5,975207742,"Age of Innocence, The (1993)",Drama,F,50,0,98226 -1752,2021,4,975729332,Dune (1984),Fantasy|Sci-Fi,M,25,3,96813 -1408,198,4,974762924,Strange Days (1995),Action|Crime|Sci-Fi,M,25,0,90046 -4738,1242,4,963279051,Glory (1989),Action|Drama|War,M,56,1,23608 -1503,1971,2,974748897,"Nightmare on Elm Street 4: The Dream Master, A (1988)",Horror,M,25,12,92688 -3053,1296,3,970601837,"Room with a View, A (1986)",Drama|Romance,F,25,3,55102 -3471,3614,2,973297828,Honeymoon in Vegas (1992),Comedy|Romance,M,18,4,80302 -678,1972,3,988638700,"Nightmare on Elm Street 5: The Dream Child, A (1989)",Horror,M,25,0,34952 -3483,2561,3,986327282,True Crime (1999),Crime|Thriller,F,45,7,30260 -3910,3108,5,965756244,"Fisher King, The (1991)",Comedy|Drama|Romance,M,25,20,91505 -182,1089,1,977085647,Reservoir Dogs (1992),Crime|Thriller,M,18,4,03052 -1755,1653,3,1036917836,Gattaca (1997),Drama|Sci-Fi|Thriller,F,18,4,77005 -3589,70,2,966658567,From Dusk Till Dawn (1996),Action|Comedy|Crime|Horror|Thriller,F,45,0,80010 -471,3481,4,976222483,High Fidelity (2000),Comedy,M,35,7,08904 -1141,813,2,974878678,Larger Than Life (1996),Comedy,F,25,3,84770 -5227,1196,2,961476022,Star Wars: Episode V - The Empire Strikes Back (1980),Action|Adventure|Drama|Sci-Fi|War,M,18,10,64050 -1303,2344,2,974837844,Runaway Train (1985),Action|Adventure|Drama|Thriller,M,25,19,94111 -5080,3102,5,962412804,Jagged Edge (1985),Thriller,F,50,12,95472 -2023,1012,4,1006290836,Old Yeller (1957),Children's|Drama,M,18,4,56001 -3759,2151,5,966094413,"Gods Must Be Crazy II, The (1989)",Comedy,M,35,6,54751 -1685,2664,2,974709721,Invasion of the Body Snatchers (1956),Horror|Sci-Fi,M,35,12,95833 -4715,1221,4,963508830,"Godfather: Part II, The (1974)",Action|Crime|Drama,M,25,2,97205 -1591,350,5,974742941,"Client, The (1994)",Drama|Mystery|Thriller,M,50,7,26501 -4227,3635,3,965411938,"Spy Who Loved Me, The (1977)",Action,M,25,19,11414-2520 -1908,36,5,974697744,Dead Man Walking (1995),Drama,M,56,13,95129 -5365,1892,4,960503255,"Perfect Murder, A (1998)",Mystery|Thriller,M,18,12,90250 -1579,2420,4,981272235,"Karate Kid, The (1984)",Drama,M,25,0,60201 -1866,3948,5,974753321,Meet the Parents (2000),Comedy,M,25,7,94043 -4238,3543,4,965415533,Diner (1982),Comedy|Drama,M,35,16,44691 -3590,2000,5,966657892,Lethal Weapon (1987),Action|Comedy|Crime|Drama,F,18,15,02115 -3401,3256,5,980115327,Patriot Games (1992),Action|Thriller,M,35,7,76109 -3705,540,2,966287116,Sliver (1993),Thriller,M,45,7,30076 -4973,1246,3,962607149,Dead Poets Society (1989),Drama,F,56,2,949702 -4947,380,4,962651180,True Lies (1994),Action|Adventure|Comedy|Romance,M,35,17,90035 -2346,1416,4,974413811,Evita (1996),Drama|Musical,F,1,10,48105 -1427,3596,3,974840560,Screwed (2000),Comedy,M,25,12,21401 -3868,1626,3,965855033,Fire Down Below (1997),Action|Drama|Thriller,M,18,12,73112 -249,2369,3,976730191,Desperately Seeking Susan (1985),Comedy|Romance,F,18,14,48126 
-5720,349,4,958503395,Clear and Present Danger (1994),Action|Adventure|Thriller,M,25,0,60610 -877,1485,3,975270899,Liar Liar (1997),Comedy,M,25,0,90631 diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_classification_criteo.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_classification_criteo.py index 6462f295838e969ecd7a3f015298a68d1a50d9f0..877c0a1397535bf9d18c3f09497ec40a7ba81928 100644 --- a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_classification_criteo.py +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_classification_criteo.py @@ -36,10 +36,40 @@ from sklearn.preprocessing import LabelEncoder, MinMaxScaler from deepctr.models import DeepFM from deepctr.feature_column import SparseFeat, DenseFeat, get_feature_names +import argparse +import os if __name__ == "__main__": - npu_keras_sess = set_keras_session_npu_config() - data = pd.read_csv('./criteo_sample.txt') + + parser = argparse.ArgumentParser() + parser.add_argument('--data_dir', default="./", + help='data path for train') + parser.add_argument('--precision_mode', default='allow_fp32_to_fp16', + help='allow_fp32_to_fp16/force_fp16/ ' + 'must_keep_origin_dtype/allow_mix_precision.') + parser.add_argument('--data_dump_flag', action="store_true", + help='whether to enable dump data') + parser.add_argument('--data_dump_path', default="/home/data", + help='the path to save dump data') + parser.add_argument('--data_dump_step', default="0", + help='the step to dump') + args = parser.parse_args() + + sess_config = tf.ConfigProto() + custom_op = sess_config.graph_options.rewrite_options.custom_optimizers.add() + sess_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + sess_config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF + custom_op.name = "NpuOptimizer" + custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes(args.precision_mode) + if args.data_dump_flag: + print("start to config data dump... {}".format(args.data_dump_flag)) + custom_op.parameter_map["enable_dump"].b = True + custom_op.parameter_map["dump_path"].s = tf.compat.as_bytes(args.data_dump_path) + custom_op.parameter_map["dump_step"].s = tf.compat.as_bytes(args.data_dump_step) + custom_op.parameter_map["dump_mode"].s = tf.compat.as_bytes("all") + + npu_keras_sess = set_keras_session_npu_config(config=sess_config) + data = pd.read_csv(os.path.join(args.data_dir, 'criteo_sample.txt')) sparse_features = ['C' + str(i) for i in range(1, 27)] dense_features = ['I' + str(i) for i in range(1, 14)] diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_estimator_tfrecord_classification.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_estimator_tfrecord_classification.py index 1ce5fac75631b9fdae6a883575115e20825ca08c..9ead9c2cde12c4f0cf2a046eea3a02192a9c1c47 100644 --- a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_estimator_tfrecord_classification.py +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_estimator_tfrecord_classification.py @@ -29,13 +29,45 @@ # from npu_bridge.npu_init import * import tensorflow as tf - +from tensorflow import keras from tensorflow.python.ops.parsing_ops import FixedLenFeature from deepctr.estimator import DeepFMEstimator from deepctr.estimator.inputs import input_fn_tfrecord +import argparse +import os +def main(): -if __name__ ==
"__main__": + parser = argparse.ArgumentParser() + parser.add_argument('--data_dir', default="./", + help='data path for train') + parser.add_argument('--precision_mode', default='allow_fp32_to_fp16', + help='allow_fp32_to_fp16/force_fp16/ ' + 'must_keep_origin_dtype/allow_mix_precision.') + parser.add_argument('--profiling', default=False, + help='if or not profiling for performance debug, default is False') + parser.add_argument('--profiling_dump_path', default="/home/data", + help='the path to save profiling data') + args = parser.parse_args() + + sess_config = tf.ConfigProto() + custom_op = sess_config.graph_options.rewrite_options.custom_optimizers.add() + sess_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + sess_config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF + custom_op.name = "NpuOptimizer" + custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes(args.precision_mode) + + if args.profiling: + custom_op.parameter_map["profiling_mode"].b = True + custom_op.parameter_map["profiling_options"].s = tf.compat.as_bytes( + '{"output":"' + args.profiling_dump_path + '", \ + "training_trace":"on", \ + "task_trace":"on", \ + "aicpu":"on", \ + "aic_metrics":"PipeUtilization",\ + "fp_point":"concatenate_1/concat", \ + "bp_point":"training/Adam/gradients/gradients/AddN_38"}') + npu_keras_sess = set_keras_session_npu_config(config=sess_config) # 1.generate feature_column for linear part and dnn part sparse_features = ['C' + str(i) for i in range(1, 27)] @@ -59,17 +91,22 @@ if __name__ == "__main__": {k: FixedLenFeature(dtype=tf.float32, shape=1) for k in dense_features}) feature_description['label'] = FixedLenFeature(dtype=tf.float32, shape=1) - train_model_input = input_fn_tfrecord('./criteo_sample.tr.tfrecords', feature_description, 'label', batch_size=256, - num_epochs=1, shuffle_factor=10) + train_model_input = input_fn_tfrecord('./criteo_sample.tr.tfrecords', feature_description, 'label', batch_size=162, + num_epochs=5, shuffle_factor=10) test_model_input = input_fn_tfrecord('./criteo_sample.te.tfrecords', feature_description, 'label', batch_size=2 ** 14, num_epochs=1, shuffle_factor=0) # 3.Define Model,train,predict and evaluate model = DeepFMEstimator(linear_feature_columns, dnn_feature_columns, task='binary', config=tf.estimator.RunConfig(tf_random_seed=2021, save_summary_steps=0)) - + tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO) model.train(train_model_input) eval_result = model.evaluate(test_model_input) print(eval_result) + close_session(npu_keras_sess) + +if __name__ == "__main__": + main() + diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_flen.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_flen.py index e083ffaf75cdc07cbaef58f34973ee9eb1b039f9..6373b34841a92f88aec2761cfe9a7c38890d528f 100644 --- a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_flen.py +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_flen.py @@ -28,6 +28,7 @@ # limitations under the License. 
# from npu_bridge.npu_init import * +from tensorflow import keras import pandas as pd from sklearn.metrics import log_loss, roc_auc_score from sklearn.model_selection import train_test_split @@ -35,9 +36,42 @@ from sklearn.preprocessing import LabelEncoder from deepctr.feature_column import SparseFeat,get_feature_names from deepctr.models import FLEN +import argparse +import os -if __name__ == "__main__": - data = pd.read_csv('./avazu_sample.txt') +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--data_dir', default="./", + help='data path for train') + parser.add_argument('--precision_mode', default='allow_fp32_to_fp16', + help='allow_fp32_to_fp16/force_fp16/ ' + 'must_keep_origin_dtype/allow_mix_precision.') + parser.add_argument('--profiling', default=False, + help='whether or not to enable profiling for performance debugging, default is False') + parser.add_argument('--profiling_dump_path', default="/home/data", + help='the path to save profiling data') + args = parser.parse_args() + + sess_config = tf.ConfigProto() + custom_op = sess_config.graph_options.rewrite_options.custom_optimizers.add() + sess_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + sess_config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF + custom_op.name = "NpuOptimizer" + custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes(args.precision_mode) + + if args.profiling: + custom_op.parameter_map["profiling_mode"].b = True + custom_op.parameter_map["profiling_options"].s = tf.compat.as_bytes( + '{"output":"' + args.profiling_dump_path + '", \ + "training_trace":"on", \ + "task_trace":"on", \ + "aicpu":"on", \ + "aic_metrics":"PipeUtilization",\ + "fp_point":"concatenate_1/concat", \ + "bp_point":"training/Adam/gradients/gradients/AddN_38"}') + + npu_keras_sess = set_keras_session_npu_config(config=sess_config) + data = pd.read_csv(os.path.join(args.data_dir,'./avazu_sample.txt')) data['day'] = data['hour'].apply(lambda x: str(x)[4:6]) data['hour'] = data['hour'].apply(lambda x: str(x)[6:]) @@ -88,8 +122,11 @@ if __name__ == "__main__": metrics=['binary_crossentropy'], ) history = model.fit(train_model_input, train[target].values, - batch_size=256, epochs=10, verbose=2, validation_split=0.2, ) - pred_ans = model.predict(test_model_input, batch_size=256) + batch_size=64, epochs=10, verbose=1, validation_split=0.2, ) + pred_ans = model.predict(test_model_input, batch_size=4) print("test LogLoss", round(log_loss(test[target].values, pred_ans), 4)) print("test AUC", round(roc_auc_score(test[target].values, pred_ans), 4)) +if __name__ == "__main__": + main() + diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_multivalue_movielens.py b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_multivalue_movielens.py index a54fd6c851cd994d270d27961bf385b4d527e79c..9e61c348a04fd2d226455806a45ce231ff652cd8 100644 --- a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_multivalue_movielens.py +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/examples/run_multivalue_movielens.py @@ -28,6 +28,7 @@ # limitations under the License.
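
One pitfall worth flagging in the argparse blocks above: --profiling is declared with default=False and no type, while the test wrappers later in this diff invoke the scripts as --profiling=${profiling}. argparse therefore stores the *string* "False", which is truthy, so `if args.profiling:` would enable profiling even when the shell variable says False. A hedged sketch of the usual guard (the str2bool helper is illustrative, not part of this patch):

    # Parse shell-style boolean strings explicitly instead of trusting
    # truthiness; "False" passed from a shell wrapper is a non-empty string.
    import argparse

    def str2bool(value):
        if isinstance(value, bool):
            return value
        if value.lower() in ('true', '1', 'yes'):
            return True
        if value.lower() in ('false', '0', 'no'):
            return False
        raise argparse.ArgumentTypeError('expected a boolean, got %r' % value)

    parser = argparse.ArgumentParser()
    parser.add_argument('--profiling', type=str2bool, default=False,
                        help='whether or not to enable profiling')
    args = parser.parse_args(['--profiling=False'])
    assert args.profiling is False  # the raw string "False" would have been truthy
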
# from npu_bridge.npu_init import * +from tensorflow import keras import numpy as np import pandas as pd from sklearn.preprocessing import LabelEncoder @@ -35,6 +36,8 @@ from tensorflow.python.keras.preprocessing.sequence import pad_sequences from deepctr.feature_column import SparseFeat, VarLenSparseFeat,get_feature_names from deepctr.models import DeepFM +import argparse +import os def split(x): @@ -45,10 +48,39 @@ def split(x): key2index[key] = len(key2index) + 1 return list(map(lambda x: key2index[x], key_ans)) - if __name__ == "__main__": - npu_keras_sess = set_keras_session_npu_config() - data = pd.read_csv("./movielens_sample.txt") + parser = argparse.ArgumentParser() + parser.add_argument('--data_dir', default="./", + help='data path for train') + parser.add_argument('--precision_mode', default='allow_fp32_to_fp16', + help='allow_fp32_to_fp16/force_fp16/ ' + 'must_keep_origin_dtype/allow_mix_precision.') + parser.add_argument('--profiling', default=False, + help='whether or not to enable profiling for performance debugging, default is False') + parser.add_argument('--profiling_dump_path', default="/home/data", + help='the path to save profiling data') + args = parser.parse_args() + + sess_config = tf.ConfigProto() + custom_op = sess_config.graph_options.rewrite_options.custom_optimizers.add() + sess_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + sess_config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF + custom_op.name = "NpuOptimizer" + custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes(args.precision_mode) + + if args.profiling: + custom_op.parameter_map["profiling_mode"].b = True + custom_op.parameter_map["profiling_options"].s = tf.compat.as_bytes( + '{"output":"' + args.profiling_dump_path + '", \ + "training_trace":"on", \ + "task_trace":"on", \ + "aicpu":"on", \ + "aic_metrics":"PipeUtilization",\ + "fp_point":"concatenate_1/concat", \ + "bp_point":"training/Adam/gradients/gradients/AddN_38"}') + + npu_keras_sess = set_keras_session_npu_config(config=sess_config) + data = pd.read_csv(os.path.join(args.data_dir,"./movielens_sample.txt")) sparse_features = ["movie_id", "user_id", "gender", "age", "occupation", "zip", ] target = ['rating'] @@ -96,6 +128,6 @@ if __name__ == "__main__": model.compile("adam", "mse", metrics=['mse'], ) history = model.fit(model_input, data[target].values, - batch_size=256, epochs=10, verbose=2, validation_split=0.2, ) + batch_size=160, epochs=10, verbose=1, validation_split=0.2, ) close_session(npu_keras_sess) diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3057_FwFM_full_1p.sh b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3057_FwFM_full_1p.sh index 133ee0476685c328f1ae95a480d3064bc2138723..a6c8057d2bc0881b306c953a3361df3baba690d5 100644 --- a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3057_FwFM_full_1p.sh +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3057_FwFM_full_1p.sh @@ -120,7 +120,7 @@ echo "------------------ Final result ------------------" # #输出性能FPS,需要模型审视修改 Time=`cat $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|tr -d '\b\r'|grep -Eo "[0-9]*us/sample"|awk -F "us/sample" 'END {print $1}'` -FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${Time}'*1000000}'` +FPS=`awk 'BEGIN{printf "%.2f\n", 1 /'${Time}'*1000000}'` #打印,不需要修改 echo "Final Performance item/sec : $FPS" diff --git 
a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3057_FwFM_performance_1p.sh b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3057_FwFM_performance_1p.sh index 198e11fdf71d6d01f0e9a8ee9343b155fe8546bc..f192262101cc2881d480bfb3f5ac3a28ff13a438 100644 --- a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3057_FwFM_performance_1p.sh +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3057_FwFM_performance_1p.sh @@ -122,7 +122,7 @@ echo "------------------ Final result ------------------" # #输出性能FPS,需要模型审视修改 Time=`cat $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|tr -d '\b\r'|grep -Eo "[0-9]*us/sample"|awk -F "us/sample" 'END {print $1}'` -FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${Time}'*1000000}'` +FPS=`awk 'BEGIN{printf "%.2f\n", 1 /'${Time}'*1000000}'` #打印,不需要修改 echo "Final Performance item/sec : $FPS" diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3058_MMoE_full_1p.sh b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3058_MMoE_full_1p.sh index f99ec75a9fe14f4df80a8c29056ca5aabcd9560f..57eb2a9c552ab9a9aa81d34d0137c52a05a34b57 100644 --- a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3058_MMoE_full_1p.sh +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3058_MMoE_full_1p.sh @@ -120,7 +120,7 @@ echo "------------------ Final result ------------------" # #输出性能FPS,需要模型审视修改 Time=`cat $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|tr -d '\b\r'|grep -Eo "[0-9]*us/sample"|awk -F "us/sample" '{print $1}'|awk '{sum+=$1} END {print"",sum/NR}'|awk '{print $1}'` -FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${Time}'*1000000}'` +FPS=`awk 'BEGIN{printf "%.2f\n", 1 /'${Time}'*1000000}'` #打印,不需要修改 echo "Final Performance item/sec : $FPS" diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3058_MMoE_performance_1p.sh b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3058_MMoE_performance_1p.sh index a32f77fe128c32dde7c384cda8b2cd9cc921627d..77cce685ce5e0535792db212b895eaae27c87df5 100644 --- a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3058_MMoE_performance_1p.sh +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3058_MMoE_performance_1p.sh @@ -118,7 +118,7 @@ echo "------------------ Final result ------------------" # #输出性能FPS,需要模型审视修改 Time=`cat $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|tr -d '\b\r'|grep -Eo "[0-9]*us/sample"|awk -F "us/sample" '{print $1}'|awk '{sum+=$1} END {print"",sum/NR}'|awk '{print $1}'` -FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${Time}'*1000000}'` +FPS=`awk 'BEGIN{printf "%.2f\n", 1 /'${Time}'*1000000}'` #打印,不需要修改 echo "Final Performance item/sec : $FPS" diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3062_DeepFM_full_1p.sh b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3062_DeepFM_full_1p.sh index 8fb69eea2430c7aa0076fc011035289eef58cbe8..3340bf02547b0826fed5b695432b1448a95a9353 100644 --- a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3062_DeepFM_full_1p.sh +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3062_DeepFM_full_1p.sh @@ -29,8 +29,8 @@ 
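
The recurring one-line change in these test scripts replaces ${batch_size}/Time with 1/Time in the FPS awk expression. The Time value grepped from the training log is reported by Keras as microseconds per sample, i.e. it is already per-sample rather than per-batch, so throughput in items/sec is simply 1e6/Time; the old formula overstated FPS by exactly the batch size. A quick check with illustrative numbers:

    # Keras logs "Nus/sample": the timing is already per sample, not per batch.
    us_per_sample = 125.0   # illustrative value grepped from a training log
    batch_size = 162

    old_fps = batch_size / us_per_sample * 1e6  # 1,296,000 "items"/sec -- inflated
    new_fps = 1 / us_per_sample * 1e6           # 8,000 items/sec -- correct
    assert old_fps / new_fps == batch_size      # the old value was off by batch_size
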
learning_rate= precision_mode="allow_fp32_to_fp16" #维持参数,以下不需要修改 over_dump=False -data_dump_flag=False -data_dump_step="10" +data_dump_flag="" +data_dump_step="1" profiling=False # 帮助信息,不需要修改 @@ -40,8 +40,8 @@ if [[ $1 == --help || $1 == -h ]];then echo "parameter explain: --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) --over_dump if or not over detection, default is False - --data_dump_flag data dump flag, default is False - --data_dump_step data dump step, default is 10 + --data_dump_flag data dump flag, default is "" + --data_dump_step data dump step, default is 1 --profiling if or not profiling for performance debug, default is False --data_path source data of training -h/--help show help message @@ -101,7 +101,11 @@ do #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path - nohup python3 run_classification_criteo.py > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & + nohup python3 run_classification_criteo.py \ + --data_dir=${data_path} \ + --precision_mode=${precision_mode} \ + ${data_dump_flag} --data_dump_step=${data_dump_step} \ + --data_dump_path=${data_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & done wait @@ -114,7 +118,7 @@ echo "------------------ Final result ------------------" # #输出性能FPS,需要模型审视修改 Time=`cat $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|tr -d '\b\r'|grep -Eo "[0-9]*us/sample"|awk -F "us/sample" 'END{print $1}'` -FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${Time}'*1000000}'` +FPS=`awk 'BEGIN{printf "%.2f\n", 1 /'${Time}'*1000000}'` #打印,不需要修改 echo "Final Performance item/sec : $FPS" diff --git a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3062_DeepFM_performance_1p.sh b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3062_DeepFM_performance_1p.sh index 5af0754d360311aedd96924580ae477cc3450fd3..53fdf2d0970dd0e4d3362181cda056b79aca5c44 100644 --- a/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3062_DeepFM_performance_1p.sh +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3062_DeepFM_performance_1p.sh @@ -29,8 +29,8 @@ learning_rate= precision_mode="allow_fp32_to_fp16" #维持参数,以下不需要修改 over_dump=False -data_dump_flag=False -data_dump_step="10" +data_dump_flag="" +data_dump_step="1" profiling=False # 帮助信息,不需要修改 @@ -40,8 +40,8 @@ if [[ $1 == --help || $1 == -h ]];then echo "parameter explain: --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) --over_dump if or not over detection, default is False - --data_dump_flag data dump flag, default is False - --data_dump_step data dump step, default is 10 + --data_dump_flag data dump flag, default is "" + --data_dump_step data dump step, default is 1 --profiling if or not profiling for performance debug, default is False --data_path source data of training -h/--help show help message @@ -59,7 +59,8 @@ do over_dump_path=${cur_path}/output/overflow_dump mkdir -p ${over_dump_path} elif [[ $para == --data_dump_flag* ]];then - data_dump_flag=`echo ${para#*=}` + # data_dump_flag=`echo ${para#*=}` + data_dump_flag="--data_dump_flag" data_dump_path=${cur_path}/output/data_dump mkdir -p ${data_dump_path} elif [[ $para == --data_dump_step* ]];then @@ -101,7 +102,11 @@ 
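
The data-dump rework above is a two-sided handshake. The performance wrapper no longer forwards a True/False value: it rewrites its variable to either the empty string or the literal option text --data_dump_flag and splices ${data_dump_flag} into the python3 command line, which matches the action="store_true" declaration added to run_classification_criteo.py. A small sketch of why the two sides fit together (argument values illustrative):

    # Shell side: ${data_dump_flag} expands to nothing (dump off) or to the
    # bare flag "--data_dump_flag" (dump on). Python side: store_true.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--data_dump_flag', action='store_true',
                        help='whether to enable dump data')
    parser.add_argument('--data_dump_step', default='0')

    off = parser.parse_args(['--data_dump_step=1'])       # flag absent
    assert off.data_dump_flag is False

    on = parser.parse_args(['--data_dump_flag', '--data_dump_step=1'])
    assert on.data_dump_flag is True
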
do #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path - nohup python3 run_classification_criteo.py > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & + nohup python3 run_classification_criteo.py \ + --data_dir=${data_path} \ + --precision_mode=${precision_mode} \ + ${data_dump_flag} --data_dump_step=${data_dump_step} \ + --data_dump_path=${data_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & done wait sed -i "s|epochs=5|epochs=10|g" run_classification_criteo.py @@ -114,7 +119,7 @@ echo "------------------ Final result ------------------" # #输出性能FPS,需要模型审视修改 Time=`cat $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|tr -d '\b\r'|grep -Eo "[0-9]*us/sample"|awk -F "us/sample" 'END{print $1}'` -FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${Time}'*1000000}'` +FPS=`awk 'BEGIN{printf "%.2f\n", 1 /'${Time}'*1000000}'` #打印,不需要修改 echo "Final Performance item/sec : $FPS" diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_full_1p_lamb_phase2.sh b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3202_DeepFM_full_1p.sh similarity index 55% rename from TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_full_1p_lamb_phase2.sh rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3202_DeepFM_full_1p.sh index 33aded8415373db9243b5768552f29ef05c7093f..dd228ad5143e65cda8db2de44feabc28f64242d5 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3068_BertLarge-512_full_1p_lamb_phase2.sh +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3202_DeepFM_full_1p.sh @@ -1,171 +1,164 @@ -#!/bin/bash - -#当前路径,不需要修改 -cur_path=`pwd` - -#集合通信参数,不需要修改 -export RANK_SIZE=1 -export JOB_ID=99990001 -RANK_ID_START=0 - -# 数据集路径,保持为空,不需要修改 -data_path="" - -#基础参数,需要模型审视修改 -#网络名称,同目录名称 -Network="BertLarge-512_ID3068_for_TensorFlow" -#训练epoch -train_epochs=1 -#训练batch_size -batch_size=24 -#训练step -train_steps=100000 -#学习率 -learning_rate= - -#维测参数,precision_mode需要模型审视修改 -#precision_mode="allow_mix_precision" -#维持参数,以下不需要修改 -over_dump=False -data_dump_flag=False -data_dump_step="10" -profiling=False -autotune=False - -# 帮助信息,不需要修改 -if [[ $1 == --help || $1 == -h ]];then - echo"usage:./train_full_1p.sh " - echo " " - echo "parameter explain: - --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) - --over_dump if or not over detection, default is False - --data_dump_flag data dump flag, default is False - --data_dump_step data dump step, default is 10 - --profiling if or not profiling for performance debug, default is False - --autotune whether to enable autotune, default is False - --data_path source data of training - -h/--help show help message - " - exit 1 -fi - -#参数校验,不需要修改 -for para in $* -do - if [[ $para == --precision_mode* ]];then - precision_mode=`echo ${para#*=}` - elif [[ $para == --over_dump* ]];then - over_dump=`echo ${para#*=}` - over_dump_path=${cur_path}/output/overflow_dump - mkdir -p ${over_dump_path} - elif [[ $para == --data_dump_flag* ]];then - data_dump_flag=`echo ${para#*=}` - data_dump_path=${cur_path}/output/data_dump - mkdir -p ${data_dump_path} - elif [[ $para == --data_dump_step* ]];then - data_dump_step=`echo ${para#*=}` - elif [[ 
$para == --profiling* ]];then - profiling=`echo ${para#*=}` - profiling_dump_path=${cur_path}/output/profiling - mkdir -p ${profiling_dump_path} - elif [[ $para == --data_path* ]];then - data_path=`echo ${para#*=}` - fi -done - -#校验是否传入data_path,不需要修改 -if [[ $data_path == "" ]];then - echo "[Error] para \"data_path\" must be confing" - exit 1 -fi - -#训练开始时间,不需要修改 -start_time=$(date +%s) -#进入训练脚本目录,需要模型审视修改 -for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); -do - #设置环境变量,不需要修改 - echo "Device ID: $ASCEND_DEVICE_ID" - export RANK_ID=$RANK_ID - - #创建DeviceID输出目录,不需要修改 - if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then - rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} - else - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} - fi - - #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 - #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune - nohup python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_large_config.json \ - --max_seq_length=512 \ - --max_predictions_per_seq=76 \ - --train_batch_size=${batch_size} \ - --learning_rate=5e-5 \ - --num_warmup_steps=1000 \ - --num_train_steps=${train_steps} \ - --optimizer_type=lamb \ - --manual_fp16=True \ - --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ - --npu_bert_debug=False \ - --npu_bert_use_tdt=True \ - --do_train=True \ - --num_accumulation_steps=1 \ - --npu_bert_job_start_file= \ - --iterations_per_loop=1000 \ - --save_checkpoints_steps=1000 \ - --npu_bert_clip_by_global_norm=False \ - --distributed=False \ - --npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ - --over_dump=${over_dump} \ - --over_dump_path=${over_dump_path} \ - --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & -done -wait - -#训练结束时间,不需要修改 -end_time=$(date +%s) -e2e_time=$(( $end_time - $start_time )) - -#结果打印,不需要修改 -echo "------------------ Final result ------------------" -#输出性能FPS,需要模型审视修改 -ActualFPS=`grep Throughput ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk 'END {print $6}'` -TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ActualFPS}'}'` -#打印,不需要修改 -echo "Final Performance images/sec : $ActualFPS" - -#输出训练精度,需要模型审视修改 -TrainAccuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` -#打印,不需要修改 -echo "Final Train Accuracy : ${TrainAccuracy}" -echo "E2E Training Duration sec : $e2e_time" - -#稳定性精度看护结果汇总 -#训练用例信息,不需要修改 -BatchSize=${batch_size} -DeviceType=`uname -m` -CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' - - -#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 -grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt - -#最后一个迭代loss值,不需要修改 -ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` - -#关键信息打印到${CaseName}.log中,不需要修改 -echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "BatchSize = 
${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainAccuracy = ${TrainAccuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 + +export RANK_SIZE=1 +export JOB_ID=10087 +RANK_ID_START=0 +RankSize=1 +# 数据集路径,保持为空,不需要修改 +data_path="" +#export ASCEND_SLOG_PRINT_TO_STDOUT=1 + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="DeepFM_ID3202_for_TensorFlow" +#训练epoch +train_epochs=5 +#训练batch_size +batch_size=162 +#训练step +train_steps= +#学习率 +learning_rate= + +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_fp32_to_fp16" +#维持参数,以下不需要修改 +over_dump=False +data_dump_flag=False +data_dump_step="10" +profiling=False + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + over_dump_path=${cur_path}/output/overflow_dump + mkdir -p ${over_dump_path} + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + data_dump_path=${cur_path}/output/data_dump + mkdir -p ${data_dump_path} + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be confing" + exit 1 +fi + +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 +cd $cur_path/../examples + +sed -i "s|epochs=10|epochs=5|g" run_multivalue_movielens.py + +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + fi + + #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 + #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path + nohup python3 run_estimator_tfrecord_classification.py \ + --data_dir=${data_path} \ + --precision_mode=${precision_mode} \ + --profiling=${profiling} \ + --profiling_dump_path=${profiling_dump_path} > 
${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + + +#结果打印,不需要修改 +echo "------------------ Final result ------------------" +# #输出性能FPS,需要模型审视修改 +Time=`grep ":loss =" $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk '{ print $4}'| awk -F "(" '{print $2}'|tail -n 2|awk '{sum+=$1} END {print"",sum/NR}'|awk '{print $1}' +` +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${Time}'}'` +# #打印,不需要修改 +echo "Final Performance item/sec : $FPS" + +# #输出训练精度,需要模型审视修改 +train_accuracy=`grep "AUC = " $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log |awk '{print $9}'|awk -F "," '{print $1}'` + + + +# #输出训练精度,需要模型审视修改 + +# #打印,不需要修改 +#echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#性能看护结果汇总 +#训练用例信息,不需要修改 +BatchSize=${batch_size} +DeviceType=`uname -m` +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'accu' + +##获取性能数据,不需要修改 +#吞吐量 +TrainingTime=`awk 'BEGIN{printf "%.6f\n",'${BatchSize}'/'${FPS}'}'` + +ActualFPS=${FPS} +grep ":loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log| awk '{ print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_full_1p.sh b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3202_DeepFM_performance_1p.sh similarity index 55% rename from TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_full_1p.sh rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3202_DeepFM_performance_1p.sh index 13bcf4fcd178e644c01f00d86c76080cd8b50258..4fc7f9bb29fc5a75df32334133d684646da3cc78 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3207_BertLarge-512_full_1p.sh +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3202_DeepFM_performance_1p.sh @@ -1,171 +1,164 @@ -#!/bin/bash - -#当前路径,不需要修改 -cur_path=`pwd` - -#集合通信参数,不需要修改 -export RANK_SIZE=1 -export JOB_ID=99990001 -RANK_ID_START=0 - -# 数据集路径,保持为空,不需要修改 -data_path="" - -#基础参数,需要模型审视修改 -#网络名称,同目录名称 -Network="BertLarge-512_ID3207_for_TensorFlow" -#训练epoch -train_epochs=1 -#训练batch_size -batch_size=24 -#训练step -train_steps=100000 -#学习率 -learning_rate= - -#维测参数,precision_mode需要模型审视修改 
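
Both the removed BertLarge wrappers and the new DeepFM wrappers publish their results the same way: Key = Value lines appended to ${CaseName}.log under the device output directory. A small reader for that format, should downstream tooling need one (function name and example path are illustrative, not part of the repo):

    def read_case_log(path):
        """Parse 'Key = Value' result lines into a dict of strings."""
        results = {}
        with open(path) as fh:
            for line in fh:
                if ' = ' in line:
                    key, _, value = line.partition(' = ')
                    results[key.strip()] = value.strip()
        return results

    # e.g. read_case_log('output/0/DeepFM_ID3202_for_TensorFlow_bs162_1p_perf.log')['ActualFPS']
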
-#precision_mode="allow_mix_precision" -#维持参数,以下不需要修改 -over_dump=False -data_dump_flag=False -data_dump_step="10" -profiling=False -autotune=False - -# 帮助信息,不需要修改 -if [[ $1 == --help || $1 == -h ]];then - echo"usage:./train_full_1p.sh " - echo " " - echo "parameter explain: - --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) - --over_dump if or not over detection, default is False - --data_dump_flag data dump flag, default is False - --data_dump_step data dump step, default is 10 - --profiling if or not profiling for performance debug, default is False - --autotune whether to enable autotune, default is False - --data_path source data of training - -h/--help show help message - " - exit 1 -fi - -#参数校验,不需要修改 -for para in $* -do - if [[ $para == --precision_mode* ]];then - precision_mode=`echo ${para#*=}` - elif [[ $para == --over_dump* ]];then - over_dump=`echo ${para#*=}` - over_dump_path=${cur_path}/output/overflow_dump - mkdir -p ${over_dump_path} - elif [[ $para == --data_dump_flag* ]];then - data_dump_flag=`echo ${para#*=}` - data_dump_path=${cur_path}/output/data_dump - mkdir -p ${data_dump_path} - elif [[ $para == --data_dump_step* ]];then - data_dump_step=`echo ${para#*=}` - elif [[ $para == --profiling* ]];then - profiling=`echo ${para#*=}` - profiling_dump_path=${cur_path}/output/profiling - mkdir -p ${profiling_dump_path} - elif [[ $para == --data_path* ]];then - data_path=`echo ${para#*=}` - fi -done - -#校验是否传入data_path,不需要修改 -if [[ $data_path == "" ]];then - echo "[Error] para \"data_path\" must be confing" - exit 1 -fi - -#训练开始时间,不需要修改 -start_time=$(date +%s) -#进入训练脚本目录,需要模型审视修改 -for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); -do - #设置环境变量,不需要修改 - echo "Device ID: $ASCEND_DEVICE_ID" - export RANK_ID=$RANK_ID - - #创建DeviceID输出目录,不需要修改 - if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then - rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} - else - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} - fi - - #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 - #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune - nohup python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_large_config.json \ - --max_seq_length=512 \ - --max_predictions_per_seq=76 \ - --train_batch_size=${batch_size} \ - --learning_rate=5e-5 \ - --num_warmup_steps=1000 \ - --num_train_steps=${train_steps} \ - --optimizer_type=adam \ - --manual_fp16=True \ - --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ - --npu_bert_debug=False \ - --npu_bert_use_tdt=True \ - --do_train=True \ - --num_accumulation_steps=1 \ - --npu_bert_job_start_file= \ - --iterations_per_loop=1000 \ - --save_checkpoints_steps=1000 \ - --npu_bert_clip_by_global_norm=False \ - --distributed=False \ - --npu_bert_loss_scale=0 \ - --init_loss_scale_value=1 \ - --over_dump=${over_dump} \ - --over_dump_path=${over_dump_path} \ - --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & -done -wait - -#训练结束时间,不需要修改 -end_time=$(date +%s) -e2e_time=$(( $end_time - $start_time )) - -#结果打印,不需要修改 -echo "------------------ Final result ------------------" -#输出性能FPS,需要模型审视修改 -ActualFPS=`grep Throughput 
${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk 'END {print $6}'` -TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ActualFPS}'}'` -#打印,不需要修改 -echo "Final Performance images/sec : $ActualFPS" - -#输出训练精度,需要模型审视修改 -TrainAccuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` -#打印,不需要修改 -echo "Final Train Accuracy : ${TrainAccuracy}" -echo "E2E Training Duration sec : $e2e_time" - -#稳定性精度看护结果汇总 -#训练用例信息,不需要修改 -BatchSize=${batch_size} -DeviceType=`uname -m` -CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' - - -#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 -grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt - -#最后一个迭代loss值,不需要修改 -ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` - -#关键信息打印到${CaseName}.log中,不需要修改 -echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainAccuracy = ${TrainAccuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 + +export RANK_SIZE=1 +export JOB_ID=10087 +RANK_ID_START=0 +RankSize=1 +# 数据集路径,保持为空,不需要修改 +data_path="" +#export ASCEND_SLOG_PRINT_TO_STDOUT=1 + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="DeepFM_ID3202_for_TensorFlow" +#训练epoch +train_epochs=5 +#训练batch_size +batch_size=162 +#训练step +train_steps= +#学习率 +learning_rate= + +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_fp32_to_fp16" +#维持参数,以下不需要修改 +over_dump=False +data_dump_flag=False +data_dump_step="10" +profiling=False + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + over_dump_path=${cur_path}/output/overflow_dump + mkdir -p ${over_dump_path} + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + data_dump_path=${cur_path}/output/data_dump + mkdir -p ${data_dump_path} + elif [[ $para == --data_dump_step* ]];then + 
data_dump_step=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be confing" + exit 1 +fi + +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 +cd $cur_path/../examples + +sed -i "s|epochs=10|epochs=5|g" run_multivalue_movielens.py + +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt + fi + + #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 + #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path + nohup python3 run_estimator_tfrecord_classification.py \ + --data_dir=${data_path} \ + --precision_mode=${precision_mode} \ + --profiling=${profiling} \ + --profiling_dump_path=${profiling_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + + +#结果打印,不需要修改 +echo "------------------ Final result ------------------" +# #输出性能FPS,需要模型审视修改 +Time=`grep ":loss =" $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk '{ print $4}'| awk -F "(" '{print $2}'|tail -n 2|awk '{sum+=$1} END {print"",sum/NR}'|awk '{print $1}' +` +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${Time}'}'` +# #打印,不需要修改 +echo "Final Performance item/sec : $FPS" + +# #输出训练精度,需要模型审视修改 +train_accuracy=`grep "AUC = " $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log |awk '{print $9}'|awk -F "," '{print $1}'` + + + +# #输出训练精度,需要模型审视修改 + +# #打印,不需要修改 +#echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#性能看护结果汇总 +#训练用例信息,不需要修改 +BatchSize=${batch_size} +DeviceType=`uname -m` +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取性能数据,不需要修改 +#吞吐量 +TrainingTime=`awk 'BEGIN{printf "%.6f\n",'${BatchSize}'/'${FPS}'}'` + +ActualFPS=${FPS} +grep ":loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log| awk '{ print $3}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +#最后一个迭代loss值,不需要修改 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> 
$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_full_1p_lamb_phase2.sh b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3203_DeepFM_full_1p.sh similarity index 58% rename from TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_full_1p_lamb_phase2.sh rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3203_DeepFM_full_1p.sh index 29a38da1f88ccf21ec184b03366decf0b26bd7e6..c180165beb55d300753d44d50a5aefe1278da1df 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3069_BertBase-512_full_1p_lamb_phase2.sh +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3203_DeepFM_full_1p.sh @@ -4,47 +4,47 @@ cur_path=`pwd` #集合通信参数,不需要修改 + export RANK_SIZE=1 -export JOB_ID=99990001 +export JOB_ID=10087 RANK_ID_START=0 - +RankSize=1 # 数据集路径,保持为空,不需要修改 data_path="" +#export ASCEND_SLOG_PRINT_TO_STDOUT=1 #基础参数,需要模型审视修改 #网络名称,同目录名称 -Network="BertBase-512_ID3069_for_TensorFlow" +Network="DeepFM_ID3203_for_TensorFlow" #训练epoch -train_epochs=1 +train_epochs=10 #训练batch_size -batch_size=64 +batch_size=160 #训练step -train_steps=100000 +train_steps= #学习率 learning_rate= #维测参数,precision_mode需要模型审视修改 -#precision_mode="allow_mix_precision" +precision_mode="allow_fp32_to_fp16" #维持参数,以下不需要修改 over_dump=False data_dump_flag=False data_dump_step="10" profiling=False -autotune=False # 帮助信息,不需要修改 if [[ $1 == --help || $1 == -h ]];then - echo"usage:./train_full_1p.sh " + echo"usage:./train_full_1P.sh " echo " " echo "parameter explain: --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) - --over_dump if or not over detection, default is False - --data_dump_flag data dump flag, default is False - --data_dump_step data dump step, default is 10 - --profiling if or not profiling for performance debug, default is False - --autotune whether to enable autotune, default is False - --data_path source data of training - -h/--help show help message + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message " exit 1 fi @@ -81,48 +81,31 @@ fi #训练开始时间,不需要修改 start_time=$(date +%s) + #进入训练脚本目录,需要模型审视修改 +cd $cur_path/../examples + for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); do #设置环境变量,不需要修改 echo "Device ID: $ASCEND_DEVICE_ID" export RANK_ID=$RANK_ID - + #创建DeviceID输出目录,不需要修改 if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt else - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt fi - + #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 - #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune - nohup python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_base_config.json \ - --max_seq_length=512 \ - 
--max_predictions_per_seq=76 \ - --train_batch_size=${batch_size} \ - --learning_rate=5e-5 \ - --num_warmup_steps=1000 \ - --num_train_steps=${train_steps} \ - --optimizer_type=lamb \ - --manual_fp16=True \ - --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ - --npu_bert_debug=False \ - --npu_bert_use_tdt=True \ - --do_train=True \ - --num_accumulation_steps=1 \ - --npu_bert_job_start_file= \ - --iterations_per_loop=1000 \ - --save_checkpoints_steps=1000 \ - --npu_bert_clip_by_global_norm=False \ - --distributed=False \ - --npu_bert_loss_scale=0 \ - --over_dump=${over_dump} \ - --over_dump_path=${over_dump_path} \ - --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & + #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path + nohup python3 run_multivalue_movielens.py \ + --data_dir=${data_path} \ + --precision_mode=${precision_mode} \ + --profiling=${profiling} \ + --profiling_dump_path=${profiling_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & done wait @@ -132,27 +115,35 @@ e2e_time=$(( $end_time - $start_time )) #结果打印,不需要修改 echo "------------------ Final result ------------------" -#输出性能FPS,需要模型审视修改 -ActualFPS=`grep Throughput ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk 'END {print $6}'` -TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ActualFPS}'}'` -#打印,不需要修改 -echo "Final Performance images/sec : $ActualFPS" +# #输出性能FPS,需要模型审视修改 -#输出训练精度,需要模型审视修改 -TrainAccuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` +Time=`cat $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|tr -d '\b\r'|grep -Eo "[0-9]*us/sample"|awk -F "us/sample" 'END {print $1}'` +FPS=`awk 'BEGIN{printf "%.2f\n", 1 /'${Time}'*1000000}'` #打印,不需要修改 -echo "Final Train Accuracy : ${TrainAccuracy}" +echo "Final Performance item/sec : $FPS" + + +# #输出训练精度,需要模型审视修改 +#train_accuracy=`grep "test AUC" ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk '{print $3}'` +# #打印,不需要修改 +#echo "Final Train Accuracy : ${train_accuracy}" echo "E2E Training Duration sec : $e2e_time" -#稳定性精度看护结果汇总 +#性能看护结果汇总 #训练用例信息,不需要修改 BatchSize=${batch_size} DeviceType=`uname -m` CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' +##获取性能数据,不需要修改 +#吞吐量 + +ActualFPS=${FPS} +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'/'${FPS}'}'` #从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 -grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +cat $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|tr -d '\b\r'|grep -Eo " loss: [0-9]*\.[0-9]*"|awk -F " " '{print $2}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt #最后一个迭代loss值,不需要修改 ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` @@ -164,7 +155,8 @@ echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "CaseName = ${CaseName}" >> 
$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = None" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainAccuracy = ${TrainAccuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_full_1p.sh b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3203_DeepFM_performance_1p.sh similarity index 57% rename from TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_full_1p.sh rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3203_DeepFM_performance_1p.sh index 3238c377e44b8feffea4a142fb8dbeb3dbca4a05..f432f21ac4a6396ad97637534e4c6688c3f6ec69 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3206_BertBase-512_full_1p.sh +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3203_DeepFM_performance_1p.sh @@ -4,47 +4,47 @@ cur_path=`pwd` #集合通信参数,不需要修改 + export RANK_SIZE=1 -export JOB_ID=99990001 +export JOB_ID=10087 RANK_ID_START=0 - +RankSize=1 # 数据集路径,保持为空,不需要修改 data_path="" +#export ASCEND_SLOG_PRINT_TO_STDOUT=1 #基础参数,需要模型审视修改 #网络名称,同目录名称 -Network="BertBase-512_ID3206_for_TensorFlow" +Network="DeepFM_ID3203_for_TensorFlow" #训练epoch -train_epochs=1 +train_epochs=5 #训练batch_size -batch_size=64 +batch_size=160 #训练step -train_steps=100000 +train_steps= #学习率 learning_rate= #维测参数,precision_mode需要模型审视修改 -#precision_mode="allow_mix_precision" +precision_mode="allow_fp32_to_fp16" #维持参数,以下不需要修改 over_dump=False data_dump_flag=False data_dump_step="10" profiling=False -autotune=False # 帮助信息,不需要修改 if [[ $1 == --help || $1 == -h ]];then - echo"usage:./train_full_1p.sh " + echo"usage:./train_performance_1P.sh " echo " " echo "parameter explain: --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) - --over_dump if or not over detection, default is False - --data_dump_flag data dump flag, default is False - --data_dump_step data dump step, default is 10 - --profiling if or not profiling for performance debug, default is False - --autotune whether to enable autotune, default is False - --data_path source data of training - -h/--help show help message + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message " exit 1 fi @@ -81,48 +81,33 @@ fi #训练开始时间,不需要修改 start_time=$(date +%s) + #进入训练脚本目录,需要模型审视修改 +cd $cur_path/../examples + +sed -i "s|epochs=10|epochs=5|g" run_multivalue_movielens.py + for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); do #设置环境变量,不需要修改 echo "Device ID: $ASCEND_DEVICE_ID" export RANK_ID=$RANK_ID - + #创建DeviceID输出目录,不需要修改 if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then rm -rf 
${cur_path}/output/${ASCEND_DEVICE_ID} - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt else - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt fi - + #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 - #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune - nohup python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_base_config.json \ - --max_seq_length=512 \ - --max_predictions_per_seq=76 \ - --train_batch_size=${batch_size} \ - --learning_rate=5e-5 \ - --num_warmup_steps=1000 \ - --num_train_steps=${train_steps} \ - --optimizer_type=adam \ - --manual_fp16=True \ - --use_fp16_cls=True \ - --input_files_dir=${data_path}/train \ - --eval_files_dir=${data_path}/eval \ - --npu_bert_debug=False \ - --npu_bert_use_tdt=True \ - --do_train=True \ - --num_accumulation_steps=1 \ - --npu_bert_job_start_file= \ - --iterations_per_loop=1000 \ - --save_checkpoints_steps=1000 \ - --npu_bert_clip_by_global_norm=False \ - --distributed=False \ - --npu_bert_loss_scale=0 \ - --over_dump=${over_dump} \ - --over_dump_path=${over_dump_path} \ - --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & + #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path + nohup python3 run_multivalue_movielens.py \ + --data_dir=${data_path} \ + --precision_mode=${precision_mode} \ + --profiling=${profiling} \ + --profiling_dump_path=${profiling_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & done wait @@ -130,29 +115,39 @@ wait end_time=$(date +%s) e2e_time=$(( $end_time - $start_time )) +sed -i "s|epochs=5|epochs=10|g" run_multivalue_movielens.py + #结果打印,不需要修改 echo "------------------ Final result ------------------" -#输出性能FPS,需要模型审视修改 -ActualFPS=`grep Throughput ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk 'END {print $6}'` -TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ActualFPS}'}'` -#打印,不需要修改 -echo "Final Performance images/sec : $ActualFPS" +# #输出性能FPS,需要模型审视修改 -#输出训练精度,需要模型审视修改 -TrainAccuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` +Time=`cat $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|tr -d '\b\r'|grep -Eo "[0-9]*us/sample"|awk -F "us/sample" 'END {print $1}'` +FPS=`awk 'BEGIN{printf "%.2f\n", 1 /'${Time}'*1000000}'` #打印,不需要修改 -echo "Final Train Accuracy : ${TrainAccuracy}" +echo "Final Performance item/sec : $FPS" + + +# #输出训练精度,需要模型审视修改 +#train_accuracy=`grep "test AUC" ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk '{print $3}'` +# #打印,不需要修改 +#echo "Final Train Accuracy : ${train_accuracy}" echo "E2E Training Duration sec : $e2e_time" -#稳定性精度看护结果汇总 +#性能看护结果汇总 #训练用例信息,不需要修改 BatchSize=${batch_size} DeviceType=`uname -m` -CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取性能数据,不需要修改 +#吞吐量 +ActualFPS=${FPS} +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'/'${FPS}'}'` 
#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 -grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +cat $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|tr -d '\b\r'|grep -Eo " loss: [0-9]*\.[0-9]*"|awk -F " " '{print $2}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt #最后一个迭代loss值,不需要修改 ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` @@ -164,7 +159,8 @@ echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = None" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainAccuracy = ${TrainAccuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_full_1p.sh b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3204_FLEN_full_1p.sh similarity index 58% rename from TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_full_1p.sh rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3204_FLEN_full_1p.sh index 4e44bd480cfcef3b21fadb801c104eada561ca22..54d6021648f9e103a566525bf567678976bea124 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID0060_BertBase_full_1p.sh +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3204_FLEN_full_1p.sh @@ -4,47 +4,47 @@ cur_path=`pwd` #集合通信参数,不需要修改 + export RANK_SIZE=1 -export JOB_ID=99990001 +export JOB_ID=10087 RANK_ID_START=0 - +RankSize=1 # 数据集路径,保持为空,不需要修改 data_path="" +#export ASCEND_SLOG_PRINT_TO_STDOUT=1 #基础参数,需要模型审视修改 #网络名称,同目录名称 -Network="Bert-base_ID0060_for_TensorFlow" +Network="FLEN_ID3204_for_TensorFlow" #训练epoch -train_epochs=1 +train_epochs=10 #训练batch_size -batch_size=128 +batch_size=64 #训练step -train_steps=1000 +train_steps= #学习率 learning_rate= #维测参数,precision_mode需要模型审视修改 -#precision_mode="allow_mix_precision" +precision_mode="allow_fp32_to_fp16" #维持参数,以下不需要修改 over_dump=False data_dump_flag=False data_dump_step="10" profiling=False -autotune=False # 帮助信息,不需要修改 if [[ $1 == --help || $1 == -h ]];then - echo"usage:./train_full_1p.sh " + echo"usage:./train_full_1P.sh " echo " " echo "parameter explain: --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) - --over_dump if or not over detection, default is False - --data_dump_flag data dump flag, default is False - --data_dump_step data dump step, default is 10 - --profiling if or not profiling for performance debug, default is False - --autotune whether to enable autotune, default is False - --data_path source data of training - -h/--help show help 
message + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message " exit 1 fi @@ -81,48 +81,32 @@ fi #训练开始时间,不需要修改 start_time=$(date +%s) + #进入训练脚本目录,需要模型审视修改 +cd $cur_path/../examples + + for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); do #设置环境变量,不需要修改 echo "Device ID: $ASCEND_DEVICE_ID" export RANK_ID=$RANK_ID - + #创建DeviceID输出目录,不需要修改 if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt else - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt fi - + #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 - #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune - nohup python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_base_config.json \ - --max_seq_length=128 \ - --max_predictions_per_seq=20 \ - --train_batch_size=${batch_size} \ - --learning_rate=1e-4 \ - --num_warmup_steps=100 \ - --num_train_steps=${train_steps} \ - --optimizer_type=adam \ - --manual_fp16=True \ - --use_fp16_cls=True \ - --input_files_dir=${data_path}/train_phase1 \ - --eval_files_dir=${data_path}/eval_phase1 \ - --npu_bert_debug=False \ - --npu_bert_use_tdt=True \ - --do_train=True \ - --num_accumulation_steps=1 \ - --npu_bert_job_start_file= \ - --iterations_per_loop=100 \ - --save_checkpoints_steps=1000 \ - --npu_bert_clip_by_global_norm=False \ - --distributed=False \ - --npu_bert_loss_scale=0 \ - --over_dump=${over_dump} \ - --over_dump_path=${over_dump_path} \ - --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & + #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path + nohup python3 run_flen.py \ + --data_dir=${data_path} \ + --precision_mode=${precision_mode} \ + --profiling=${profiling} \ + --profiling_dump_path=${profiling_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & done wait @@ -130,33 +114,42 @@ wait end_time=$(date +%s) e2e_time=$(( $end_time - $start_time )) + #结果打印,不需要修改 echo "------------------ Final result ------------------" -#输出性能FPS,需要模型审视修改 -ActualFPS=`grep Throughput ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk 'END {print $6}'` -TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ActualFPS}'}'` -#打印,不需要修改 -echo "Final Performance images/sec : $ActualFPS" +# #输出性能FPS,需要模型审视修改 -#输出训练精度,需要模型审视修改 -train_accuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` +Time=`cat $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|tr -d '\b\r'|grep -Eo "[0-9]*us/sample"|awk -F "us/sample" 'END {print $1}'` +FPS=`awk 'BEGIN{printf "%.2f\n", 1 /'${Time}'*1000000}'` #打印,不需要修改 +echo "Final Performance item/sec : $FPS" + + +# #输出训练精度,需要模型审视修改 +train_accuracy=`grep "test AUC" 
${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk '{print $3}'` +# #打印,不需要修改 echo "Final Train Accuracy : ${train_accuracy}" echo "E2E Training Duration sec : $e2e_time" -#稳定性精度看护结果汇总 +#性能看护结果汇总 #训练用例信息,不需要修改 BatchSize=${batch_size} DeviceType=`uname -m` CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' +##获取性能数据,不需要修改 +#吞吐量 + +ActualFPS=${FPS} +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'/'${FPS}'}'` #从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 -grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +cat $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|tr -d '\b\r'|grep -Eo " loss: [0-9]*\.[0-9]*"|awk -F " " '{print $2}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt #最后一个迭代loss值,不需要修改 ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` -TrainAccuracy=${train_accuracy} + #关键信息打印到${CaseName}.log中,不需要修改 echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log @@ -164,7 +157,8 @@ echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainAccuracy = ${TrainAccuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_full_1p_lamb_phase1.sh b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3204_FLEN_performance_1p.sh similarity index 57% rename from TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_full_1p_lamb_phase1.sh rename to TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3204_FLEN_performance_1p.sh index f79248aa2dedc2ec5a6cec48ba428d0d0996b45d..a90a4f4399f9b000ea8d495517696c7a07e9f606 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3208_BertBase-128_full_1p_lamb_phase1.sh +++ b/TensorFlow/built-in/recommendation/DeepCTR_Series_for_TensorFlow/test/train_ID3204_FLEN_performance_1p.sh @@ -4,47 +4,47 @@ cur_path=`pwd` #集合通信参数,不需要修改 + export RANK_SIZE=1 -export JOB_ID=99990001 +export JOB_ID=10087 RANK_ID_START=0 - +RankSize=1 # 数据集路径,保持为空,不需要修改 data_path="" +#export ASCEND_SLOG_PRINT_TO_STDOUT=1 #基础参数,需要模型审视修改 #网络名称,同目录名称 -Network="BertBase-128_ID3208_for_TensorFlow" +Network="FLEN_ID3204_for_TensorFlow" #训练epoch -train_epochs=1 +train_epochs=5 #训练batch_size -batch_size=128 +batch_size=64 #训练step -train_steps=1000 +train_steps= #学习率 learning_rate= #维测参数,precision_mode需要模型审视修改 
-#precision_mode="allow_mix_precision" +precision_mode="allow_fp32_to_fp16" #维持参数,以下不需要修改 over_dump=False data_dump_flag=False data_dump_step="10" profiling=False -autotune=False # 帮助信息,不需要修改 if [[ $1 == --help || $1 == -h ]];then - echo"usage:./train_full_1p.sh " + echo"usage:./train_performance_1P.sh " echo " " echo "parameter explain: --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) - --over_dump if or not over detection, default is False - --data_dump_flag data dump flag, default is False - --data_dump_step data dump step, default is 10 - --profiling if or not profiling for performance debug, default is False - --autotune whether to enable autotune, default is False - --data_path source data of training - -h/--help show help message + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message " exit 1 fi @@ -81,48 +81,33 @@ fi #训练开始时间,不需要修改 start_time=$(date +%s) + #进入训练脚本目录,需要模型审视修改 +cd $cur_path/../examples + +sed -i "s|epochs=10|epochs=5|g" run_flen.py + for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); do #设置环境变量,不需要修改 echo "Device ID: $ASCEND_DEVICE_ID" export RANK_ID=$RANK_ID - + #创建DeviceID输出目录,不需要修改 if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt else - mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt fi - + #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 - #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune - nohup python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_base_config.json \ - --max_seq_length=128 \ - --max_predictions_per_seq=20 \ - --train_batch_size=${batch_size} \ - --learning_rate=1e-4 \ - --num_warmup_steps=100 \ - --num_train_steps=${train_steps} \ - --optimizer_type=lamb \ - --manual_fp16=True \ - --use_fp16_cls=True \ - --input_files_dir=${data_path}/train_phase1 \ - --eval_files_dir=${data_path}/eval_phase1 \ - --npu_bert_debug=False \ - --npu_bert_use_tdt=True \ - --do_train=True \ - --num_accumulation_steps=1 \ - --npu_bert_job_start_file= \ - --iterations_per_loop=100 \ - --save_checkpoints_steps=1000 \ - --npu_bert_clip_by_global_norm=False \ - --distributed=False \ - --npu_bert_loss_scale=0 \ - --over_dump=${over_dump} \ - --over_dump_path=${over_dump_path} \ - --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & + #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path + nohup python3 run_flen.py \ + --data_dir=${data_path} \ + --precision_mode=${precision_mode} \ + --profiling=${profiling} \ + --profiling_dump_path=${profiling_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & done wait @@ -130,33 +115,43 @@ wait end_time=$(date +%s) e2e_time=$(( $end_time - $start_time )) +sed -i "s|epochs=5|epochs=10|g" 
run_flen.py + #结果打印,不需要修改 echo "------------------ Final result ------------------" -#输出性能FPS,需要模型审视修改 -ActualFPS=`grep Throughput ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk 'END {print $6}'` -TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ActualFPS}'}'` -#打印,不需要修改 -echo "Final Performance images/sec : $ActualFPS" +# #输出性能FPS,需要模型审视修改 -#输出训练精度,需要模型审视修改 -train_accuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'` +Time=`cat $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|tr -d '\b\r'|grep -Eo "[0-9]*us/sample"|awk -F "us/sample" 'END {print $1}'` +FPS=`awk 'BEGIN{printf "%.2f\n", 1 /'${Time}'*1000000}'` #打印,不需要修改 +echo "Final Performance item/sec : $FPS" + + +# #输出训练精度,需要模型审视修改 +train_accuracy=`grep "test AUC" ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk '{print $3}'` +# #打印,不需要修改 echo "Final Train Accuracy : ${train_accuracy}" echo "E2E Training Duration sec : $e2e_time" -#稳定性精度看护结果汇总 +#性能看护结果汇总 #训练用例信息,不需要修改 BatchSize=${batch_size} DeviceType=`uname -m` -CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取性能数据,不需要修改 +#吞吐量 +ActualFPS=${FPS} +#单迭代训练时长 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'/'${FPS}'}'` #从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 -grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +cat $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|tr -d '\b\r'|grep -Eo " loss: [0-9]*\.[0-9]*"|awk -F " " '{print $2}' > $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt #最后一个迭代loss值,不需要修改 ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` -TrainAccuracy=${train_accuracy} + #关键信息打印到${CaseName}.log中,不需要修改 echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log @@ -164,7 +159,8 @@ echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainAccuracy = ${TrainAccuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID0028_for_TensorFlow/configs/config.py b/TensorFlow/built-in/recommendation/WideDeep_ID0028_for_TensorFlow/configs/config.py index d2cd1a0f13cb7d3c8d2b4d03fdfb4bf93da3c920..aceafd8d3ecabd7f96ce940ac936349cae79302a 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID0028_for_TensorFlow/configs/config.py +++ 
b/TensorFlow/built-in/recommendation/WideDeep_ID0028_for_TensorFlow/configs/config.py @@ -40,9 +40,9 @@ multi_hot_flags = [False] multi_hot_len = 1 ### #n_epoches =50 -#iterations_per_loop = 10 +iterations_per_loop = 10 n_epoches = 1 -iterations_per_loop = 1 +# iterations_per_loop = 1 #one_step = 50/iterations_per_loop # for one step debug one_step = 0 line_per_sample = 1000 diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID0028_for_TensorFlow/test/train_full_8p.sh b/TensorFlow/built-in/recommendation/WideDeep_ID0028_for_TensorFlow/test/train_full_8p.sh index d83ef0dd5a3ce601505e87ebd03fe12c487e1045..81ff7c389e571bd87b0930259ec132d57af31165 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID0028_for_TensorFlow/test/train_full_8p.sh +++ b/TensorFlow/built-in/recommendation/WideDeep_ID0028_for_TensorFlow/test/train_full_8p.sh @@ -14,7 +14,7 @@ RANK_ID_START=0 data_path="" sed -i "s/n_epoches = 1/n_epoches = 50/g" `grep -rl "n_epoches = 1" ${cur_path}/../configs/config.py` -sed -i "s/iterations_per_loop = 1/iterations_per_loop = 10/g" `grep -rl "iterations_per_loop = 1" ${cur_path}/../configs/config.py` +#sed -i "s/iterations_per_loop = 1/iterations_per_loop = 10/g" `grep -rl "iterations_per_loop = 1" ${cur_path}/../configs/config.py` #基础参数 需要模型审视修改 #网络名称,同目录名称 Network="WideDeep_ID0028_for_TensorFlow" diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID0028_for_TensorFlow/train.py b/TensorFlow/built-in/recommendation/WideDeep_ID0028_for_TensorFlow/train.py index 6852b54388f51c8d6e4b3347ebd381fdc23ee41e..c7c74ab324475a23d804c64111402acda4c281ce 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID0028_for_TensorFlow/train.py +++ b/TensorFlow/built-in/recommendation/WideDeep_ID0028_for_TensorFlow/train.py @@ -91,7 +91,7 @@ train_para = { 'test_per_epoch': config.test_size, 'batch_size': data_para['batch_size'], 'early_stop_epochs': 50, - # 'iterations_per_loop': config.iterations_per_loop + 'iterations_per_loop': config.iterations_per_loop } # set PIN model param @@ -383,6 +383,7 @@ if __name__ == '__main__': custom_op.parameter_map["min_group_size"].b = 1 custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") custom_op.parameter_map["hcom_parallel"].b = True + custom_op.parameter_map["iterations_per_loop"].i = config.iterations_per_loop if args.over_dump is True: print("NPU overflow dump is enabled") diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/README.md b/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/README.md index 584719121a63dcf0b7d38fbb7f9e5b632a4e31f9..323f46a9cafb983a34b7220f4871d97d761d3639 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/README.md +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/README.md @@ -29,6 +29,7 @@ **描述(Description):Wide&Deep是一个同时具有Memorization和Generalization功能的CTR预估模型,该模型主要由广义线性模型(Wide网络)和深度神经网络(Deep网络)组成,对于推荐系统来说,Wide线性模型可以通过交叉特征转换来记忆稀疏特征之间的交互,Deep神经网络可以通过低维嵌入来泛化未出现的特征交互。与单一的线性模型(Wide-only)和深度模型(Deep-only)相比,Wide&Deep可以显著提高CTR预估的效果,从而提高APP的下载量。 +
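The description paragraph above (a wide linear model that memorizes crossed sparse features, plus a deep network that generalizes through low-dimensional embeddings) maps directly onto the stock wide-and-deep estimator in TensorFlow 1.15. A minimal sketch follows; the feature names, bucket sizes, and layer widths are hypothetical placeholders, not the columns or hyperparameters this repository actually uses:

```python
import tensorflow as tf  # TF 1.15, the framework version these models target

# Wide side: hashed categorical and cross features memorize observed co-occurrences.
user = tf.feature_column.categorical_column_with_hash_bucket("user_id", hash_bucket_size=10000)
item = tf.feature_column.categorical_column_with_hash_bucket("item_id", hash_bucket_size=10000)
cross = tf.feature_column.crossed_column(["user_id", "item_id"], hash_bucket_size=100000)

# Deep side: low-dimensional embeddings generalize to unseen feature interactions.
deep_cols = [
    tf.feature_column.embedding_column(user, dimension=16),
    tf.feature_column.embedding_column(item, dimension=16),
    tf.feature_column.numeric_column("age"),
]

model = tf.estimator.DNNLinearCombinedClassifier(
    linear_feature_columns=[user, item, cross],  # Wide: generalized linear model
    dnn_feature_columns=deep_cols,               # Deep: MLP over dense embeddings
    dnn_hidden_units=[1024, 512, 256])
# model.train(input_fn=...) with a tf.data input_fn that yields these feature keys.
```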

<h2 id="概述.md">概述</h2>

- 参考论文: https://arxiv.org/abs/1606.07792 @@ -37,7 +38,7 @@ - 适配昇腾 AI 处理器的实现: - https://gitee.com/chen-yucheng113/research_TF/tree/master/built-in/TensorFlow/Research/debugging_model/WideDeep_ID2712_for_TensorFlow + https://gitee.com/ascend/ModelZoo-TensorFlow/tree/master/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow - 通过Git获取对应commit\_id的代码方法如下: diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_full_1p.sh index afa8a4be3f556ab1f0f31296ce18808dfe49f54a..b5db5a9dcf41778aa7acf22c0f48b8e7dad69905 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_full_1p.sh +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_full_1p.sh @@ -16,7 +16,7 @@ RANK_ID_START=0 #Batch Size batch_size=131072 #网络名称,同目录名称 -Network="WideDeep_TF_ID2712_for_TensorFlow" +Network="WideDeep_ID2712_for_TensorFlow" #Device数量,单卡默认为1 RankSize=1 diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_full_4p.sh b/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_full_4p.sh index 5b740ba40a1ad42b66cbbeac00cce9bbf50ab0f4..4e899e3266772f1485aee0e3ed831f4e981f1b42 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_full_4p.sh +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_full_4p.sh @@ -6,7 +6,7 @@ cur_path=`pwd` #集合通信参数,不需要修改 -export HCCL_CONNECT_TIMEOUT=300 +export HCCL_CONNECT_TIMEOUT=1200 #集合通信参数,不需要修改 export RANK_SIZE=4 @@ -20,7 +20,7 @@ ASCEND_DEVICE_ID_START=0 #Batch Size batch_size=131072 #网络名称,同目录名称 -Network="WideDeep_TF_ID2712_for_TensorFlow" +Network="WideDeep_ID2712_for_TensorFlow" #Device数量,单卡默认为1 RankSize=1 diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_full_8p.sh b/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_full_8p.sh index 953c6a895c34a662e9bd5145413209f1c228195b..6beb473a5f93d9041575d4f3df8f05fb91c1f01f 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_full_8p.sh +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_full_8p.sh @@ -6,7 +6,7 @@ cur_path=`pwd` #集合通信参数,不需要修改 -export HCCL_CONNECT_TIMEOUT=300 +export HCCL_CONNECT_TIMEOUT=1200 #集合通信参数,不需要修改 export RANK_SIZE=8 @@ -20,7 +20,7 @@ ASCEND_DEVICE_ID_START=0 #Batch Size batch_size=131072 #网络名称,同目录名称 -Network="WideDeep_TF_ID2712_for_TensorFlow" +Network="WideDeep_ID2712_for_TensorFlow" #Device数量,单卡默认为1 RankSize=1 diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_performance_4p.sh b/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_performance_4p.sh index be35617020cbb16f85e21961533089b5dd0d6c51..8296478223a22d687cde6e67747bab12d72d0946 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_performance_4p.sh +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_performance_4p.sh @@ -5,7 +5,7 @@ cur_path=`pwd` #export ASCEND_SLOG_PRINT_TO_STDOUT=1 #export GE_USE_STATIC_MEMORY=1 -export HCCL_CONNECT_TIMEOUT=300 +export HCCL_CONNECT_TIMEOUT=1200 #集合通信参数,不需要修改 export RANK_SIZE=4 @@ -24,8 +24,8 @@ RankSize=1 #参数配置 data_path="/npu/traindata/ID2940_CarPeting_TF_WideDeep_TF" -train_size=13107200 -display_step=1 +train_size=52428800 +display_step=10 
n_epoches=4 #维持参数,以下不需要修改 diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_performance_8p.sh b/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_performance_8p.sh index 55f195dd29fb41a89944b3bd9a63fc3663d1c2c5..bb0e0a8447cf22315bfaecd5e471ff866e14643e 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_performance_8p.sh +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2712_for_TensorFlow/test/train_performance_8p.sh @@ -5,7 +5,7 @@ cur_path=`pwd` #export ASCEND_SLOG_PRINT_TO_STDOUT=1 #export GE_USE_STATIC_MEMORY=1 -export HCCL_CONNECT_TIMEOUT=300 +export HCCL_CONNECT_TIMEOUT=1200 #集合通信参数,不需要修改 export RANK_SIZE=8 @@ -24,8 +24,8 @@ RankSize=1 #参数配置 data_path="/npu/traindata/ID2940_CarPeting_TF_WideDeep_TF" -train_size=13107200 -display_step=1 +train_size=52428800 +display_step=10 n_epoches=4 #维持参数,以下不需要修改 diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/README.md b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/README.md index 034ac59e535556fed064af4577d3e6a0693f97a1..2cd80db12a83b631bd5c158e1f5e0fffbd22088b 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/README.md +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/README.md @@ -1,492 +1,155 @@ -# Wide & Deep Recommender Model Training For TensorFlow - -This repository provides a script and recipe to train the Wide and Deep Recommender model to achieve state-of-the-art accuracy and is tested and maintained by NVIDIA. - -## Table Of Contents - -- [Model overview](#model-overview) - * [Model architecture](#model-architecture) - * [Applications and dataset](#applications-and-dataset) - * [Default configuration](#default-configuration) - * [Feature support matrix](#feature-support-matrix) - * [Features](#features) - * [Mixed precision](#mixed-precision) - * [Enabling mixed precision](#enabling-mixed-precision) - * [Impact of mixed precision on training accuracy](#impact-of-mixed-precision-on-training-accuracy) - * [Impact of mixed precision on inference accuracy](#impact-of-mixed-precision-on-inference-accuracy) - * [Enabling TF32](#enabling-tf32) - * [Glossary](#glossary) -- [Setup](#setup) - * [Requirements](#requirements) -- [Quick Start Guide](#quick-start-guide) -- [Advanced](#advanced) - * [Scripts and sample code](#scripts-and-sample-code) - * [Parameters](#parameters) - * [Command-line options](#command-line-options) - * [Getting the data](#getting-the-data) - * [Dataset guidelines](#dataset-guidelines) - * [Spark preprocessing](#spark-preprocessing) - * [Training process](#training-process) -- [Performance](#performance) - * [Benchmarking](#benchmarking) - * [Training performance benchmark](#training-performance-benchmark) - * [Results](#results) - * [Training accuracy results](#training-accuracy-results) - * [Training accuracy: NVIDIA DGX A100 (8x A100 40GB)](#training-accuracy-nvidia-dgx-a100-8x-a100-40gb) - * [Training accuracy: NVIDIA DGX-1 (8x V100 16GB)](#training-accuracy-nvidia-dgx-1-8x-v100-16gb) - * [Training accuracy plots](#training-accuracy-plots) - * [Training stability test](#training-stability-test) - * [Training performance results](#training-performance-results) - * [Training performance: NVIDIA DGX A100 (8x A100 40GB)](#training-performance-nvidia-dgx-a100-8x-a100-40gb) - * [Training performance: NVIDIA DGX-1 (8x V100 16GB)](#training-performance-nvidia-dgx-1-8x-v100-16gb) -- [Release notes](#release-notes) - * 
[Changelog](#changelog)
- * [Known issues](#known-issues)
+- [基本信息](#基本信息.md)
+- [概述](#概述.md)
+- [训练环境准备](#训练环境准备.md)
+- [快速上手](#快速上手.md)
+- [迁移学习指导](#迁移学习指导.md)
+- [高级参考](#高级参考.md)
+
+<h2 id="基本信息.md">基本信息</h2>
+
-## Model overview
+**发布者(Publisher):Huawei**
-Recommendation systems drive engagement on many of the most popular online platforms. As the volume of data available to power these systems grows exponentially, data scientists are increasingly turning from more traditional machine learning methods to highly expressive deep learning models to improve the quality of their recommendations. Google's [Wide & Deep Learning for Recommender Systems](https://arxiv.org/abs/1606.07792) has emerged as a popular model for these problems both for its robustness to signal sparsity as well as its user-friendly implementation in [TensorFlow](https://www.tensorflow.org/api_docs/python/tf/estimator/DNNLinearCombinedClassifier).
+**应用领域(Application Domain):Recommendation**
-The difference between this Wide & Deep Recommender Model and the model from the paper is the size of the Deep part of the model. Originally, in Google's paper, the fully connected part was three layers of 1024, 512, and 256 neurons. Our model consists of 5 layers each of 1024 neurons.
+**版本(Version):1.1**
-The model enables you to train a recommender model that combines the memorization of the Wide part and generalization of the Deep part of the network.
+**修改时间(Modified):2022.04.08**
-This model is trained with mixed precision using Tensor Cores on NVIDIA Volta, Turing and the NVIDIA Ampere GPU architectures. Therefore, researchers can get results 1.49 times faster than training without Tensor Cores, while experiencing the benefits of mixed precision training. This model is tested against each NGC monthly container release to ensure consistent accuracy and performance over time.
+**大小(Size):140KB**
-### Model architecture
+**框架(Framework):TensorFlow 1.15.0**
-Wide & Deep refers to a class of networks that use the output of two parts working in parallel - wide model and deep model - to make predictions of recommenders. The wide model is a generalized linear model of features together with their transforms. The deep model is a series of 5 hidden MLP layers of 1024 neurons each beginning with a dense embedding of features. The architecture is presented in Figure 1.
+**模型格式(Model Format):ckpt**
-
-Figure 1. The architecture of the Wide & Deep model.
-
+**精度(Precision):Mixed**
-### Applications and dataset
+**处理器(Processor):昇腾910**
-As a reference dataset, we used a subset of [the features engineered](https://github.com/gabrielspmoreira/kaggle_outbrain_click_prediction_google_cloud_ml_engine) by the 19th place finisher in the [Kaggle Outbrain Click Prediction Challenge](https://www.kaggle.com/c/outbrain-click-prediction/). This competition challenged competitors to predict the likelihood with which a particular ad on a website's display would be clicked on. Competitors were given information about the user, display, document, and ad in order to train their models. More information can be found [here](https://www.kaggle.com/c/outbrain-click-prediction/data).
+**应用级别(Categories):Official**
+**描述(Description):Wide&Deep是一个同时具有Memorization和Generalization功能的CTR预估模型,该模型主要由广义线性模型(Wide网络)和深度神经网络(Deep网络)组成,对于推荐系统来说,Wide线性模型可以通过交叉特征转换来记忆稀疏特征之间的交互,Deep神经网络可以通过低维嵌入来泛化未出现的特征交互。与单一的线性模型(Wide-only)和深度模型(Deep-only)相比,Wide&Deep可以显著提高CTR预估的效果,从而提高APP的下载量。**
-### Default configuration
+
+<h2 id="概述.md">概述</h2>
+
+- 参考论文: + https://arxiv.org/abs/1606.07792 -For reference, and to give context to the acceleration numbers described below, some important properties of our features and model are as follows: +- 参考实现: + https://github.com/NVIDIA/DeepLearningExamples/tree/master/TensorFlow/Recommendation/WideAndDeep -- Features - - Request Level - - 16 scalar numeric features `(shape=(1,)`) - - 12 one-hot categorical features (all `int` dtype) - - 5 indicator embeddings with sizes 2, 2, 3, 3, 6 - - 7 trainable embeddings - - all except two have an embedding size of 64 (remaining two have 128), though it's important to note for *all* categorical features that we *do not* leverage that information to short-circuit the lookups by treating them as a single multi-hot lookup. Our API is fully general to any combination of embedding sizes. - - all use hash bucketing with `num_buckets=` 300k, 100k, 4k, 2.5k, 2k, 1k, and 300 respectively - - 3 multi-hot categorical features (all `int` dtype) - - all trainable embeddings - - all with embedding size 64 - - all use hash bucketing with `num_buckets=` 10k, 350, and 100 respectively - - Item Level - - 16 scalar numeric features - - 4 one hot categorical features (all `int` dtype) - - embedding sizes of 128, 64, 64, 64 respectively - - hash bucketing with `num_buckets=` 250k, 4k, 2.5k, and 1k respectively - - 3 multi-hot categorical features (all `int` dtype) - - all with embedding size 64 - - hash bucketing with `num_buckets=` 10k, 350, and 100 respectively - - All features are used in both wide *and* deep branches of the network +- 适配昇腾 AI 处理器的实现: -- Model - - Total embedding dimension is 1328 - - 5 hidden layers each with size 1024 - - Output dimension is 1 (probability of click) + https://gitee.com/ascend/ModelZoo-TensorFlow/tree/master/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow -### Feature support matrix - -The following features are supported by this model: - -| Feature | Wide & Deep -|-----------------------|-------------------------- -|Horovod Multi-GPU | Yes -|Automatic mixed precision (AMP) | Yes - -#### Features - -Horovod - -Horovod is a distributed training framework for TensorFlow, Keras, PyTorch and MXNet. The goal of Horovod is to make distributed deep learning fast and easy to use. For more information about how to get started with Horovod, see the [Horovod: Official repository](/horovod/horovod). - -Multi-GPU training with Horovod - -Our model uses Horovod to implement efficient multi-GPU training with NCCL. For details, see example sources in this repository or see the [TensorFlow tutorial](/horovod/horovod/#usage). - - -### Mixed precision - -Mixed precision is the combined use of different numerical precisions in a computational method. [Mixed precision](arxiv.org/abs/1710.03740) training offers significant computational speedup by performing operations in half-precision format while storing minimal information in single-precision to retain as much information as possible in critical parts of the network. Since the introduction of [Tensor Cores](developer./tensor-cores) in the Volta and Turing architecture, significant training speedups are experienced by switching to mixed precision -- up to 3x overall speedup on the most arithmetically intense model architectures. Using mixed precision training requires two steps: -1. Porting the model to use the FP16 data type where appropriate. -2. Adding loss scaling to preserve small gradient values. 
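As a self-contained TF 1.15 illustration of those two steps (using the stock graph-rewrite API, which is not the `--amp` flag this container documents), both can be applied in a single call that re-casts eligible ops to FP16 and wraps the optimizer with dynamic loss scaling:

```python
import tensorflow as tf  # TF 1.15

x = tf.random.normal([32, 10])
y = tf.random.normal([32, 1])
pred = tf.layers.dense(x, 1)                  # toy model, for illustration only
loss = tf.losses.mean_squared_error(y, pred)

opt = tf.train.AdamOptimizer(learning_rate=1e-4)
# Step 1 (FP16 casts) and step 2 (dynamic loss scaling) in one call:
opt = tf.train.experimental.enable_mixed_precision_graph_rewrite(opt, loss_scale="dynamic")
train_op = opt.minimize(loss)
```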
- -The ability to train deep learning networks with lower precision was introduced in the Pascal architecture and first supported in [CUDA 8](devblogs./parallelforall/tag/fp16/) in the NVIDIA Deep Learning SDK. - -For information about: -- How to train using mixed precision, see the [Mixed Precision Training](arxiv.org/abs/1710.03740) paper and [Training With Mixed Precision](docs./deeplearning/sdk/mixed-precision-training/index.html) documentation. -- Techniques used for mixed precision training, see the [Mixed-Precision Training of Deep Neural Networks](devblogs./mixed-precision-training-deep-neural-networks/) blog. -- How to access and enable AMP for TensorFlow, see [Using TF-AMP](docs./deeplearning/dgx/tensorflow-user-guide/index.html#tfamp) from the TensorFlow User Guide. - -#### Enabling mixed precision - -To enable Wide & Deep training to use mixed precision you don't need to perform input quantization, only an additional flag `--amp` to the training script is needed (see [Quick Start Guide](#quick-start-guide)). - -##### Impact of mixed precision on training accuracy -The accuracy of training, measured with MAP@12 metric was not impacted by enabling mixed precision. The obtained results were statistically similar (i.e. similar run-to-run variance was observed, with standard deviation of the level of `0.002`). - -##### Impact of mixed precision on inference accuracy -For our reference model, the average absolute error on the probability of interaction induced by reduced precision inference is `0.0002`, producing a near-perfect fit between predictions produced by full and mixed precision models. Moreover, this error is uncorrelated with the magnitude of the predicted value, which means for most predictions of interest (i.e. greater than `0.01` or `0.1` likelihood of interaction), the relative magnitude of the error is approaching the noise floor of the problem. - -#### Enabling TF32 - -TensorFloat-32 (TF32) is the new math mode in [NVIDIA A100](www./en-us/data-center/a100/) GPUs for handling the matrix math also called tensor operations. TF32 running on Tensor Cores in A100 GPUs can provide up to 10x speedups compared to single-precision floating-point math (FP32) on Volta GPUs. - -TF32 Tensor Cores can speed up networks using FP32, typically with no loss of accuracy. It is more robust than FP16 for models which require high dynamic range for weights or activations. - -For more information, refer to the [TensorFloat-32 in the A100 GPU Accelerates AI Training, HPC up to 20x](blogs./blog/2020/05/14/tensorfloat-32-precision-format/) blog post. - -TF32 is supported in the NVIDIA Ampere GPU architecture and is enabled by default. - - -### Glossary - -Request level features: Features that describe the person or object _to which_ we wish to make recommendations. - -Item level features: Features that describe those objects which we are considering recommending. - -## Setup - -The following section lists the requirements that you need to meet in order to start training the Wide & Deep model. - -### Requirements - -This repository contains Dockerfile which extends the TensorFlow NGC container and encapsulates some dependencies. 
Aside from these dependencies, ensure you have the following components:
-- [NVIDIA Docker](https://github.com/NVIDIA/nvidia-docker)
-- [20.10-tf1-py3](https://ngc.nvidia.com/catalog/containers/nvidia:tensorflow) NGC container
-- Supported GPUs:
-  - [NVIDIA Volta architecture](https://www.nvidia.com/en-us/data-center/volta-gpu-architecture/)
-  - [NVIDIA Turing architecture](https://www.nvidia.com/en-us/geforce/turing/)
-  - [NVIDIA Ampere architecture](https://www.nvidia.com/en-us/data-center/nvidia-ampere-gpu-architecture/)
-
-For more information about how to get started with NGC containers, see the following sections from the NVIDIA GPU Cloud Documentation and the Deep Learning Documentation:
-- [Getting Started Using NVIDIA GPU Cloud](https://docs.nvidia.com/ngc/ngc-getting-started-guide/index.html)
-- [Accessing And Pulling From The NGC Container Registry](https://docs.nvidia.com/deeplearning/frameworks/user-guide/index.html#accessing_registry)
-- [Running TensorFlow](https://docs.nvidia.com/deeplearning/frameworks/tensorflow-release-notes/running.html#running)
-
-For those unable to use the TensorFlow NGC container, to set up the required environment or create your own container, see the versioned [NVIDIA Container Support Matrix](https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html).
-
-## Quick Start Guide
-
-To train your model using mixed or TF32 precision with Tensor Cores or using FP32, perform the following steps using the default parameters of the Wide & Deep model on the Outbrain dataset. For the specifics concerning training and inference, see the [Advanced](#advanced) section.
-
-1. Clone the repository.
-
-```
-git clone https://github.com/NVIDIA/DeepLearningExamples
-cd DeepLearningExamples/TensorFlow/Recommendation/WideAndDeep
-```
-
-2. Download the Outbrain dataset.
+- To obtain the code at a given commit_id via Git:
+
+  ```
+  git clone {repository_url}        # clone the repository
+  cd {repository_name}              # enter the model's repository directory
+  git checkout {branch}             # check out the corresponding branch
+  git reset --hard {commit_id}      # reset the code to the corresponding commit_id
+  cd {code_path}                    # change to the model code path; skip this step if the repository contains only this model
+  ```
+### Default configuration
-
-The Outbrain dataset can be downloaded from [Kaggle](https://www.kaggle.com/c/outbrain-click-prediction/data) (requires a Kaggle account).
-Unzip the downloaded archive, e.g. to `/raid/outbrain/orig`, and set the `HOST_OUTBRAIN_PATH` variable to the parent directory:
+
+1. Training hyperparameters (single device; a quick steps-per-epoch sanity check follows at the end of this section):
-
-```bash
-HOST_OUTBRAIN_PATH=/raid/outbrain
-```
+
+   batch_size: 131072
+   pos_weight: 1.0
+   train_per_epoch: 59761827
+   test_per_epoch: 1048576
-
-3. Build the Wide & Deep TensorFlow NGC container.
-```bash
-docker build . -t wide_deep
-```
+
+### Supported features
-
-4. Start an interactive session in the NGC container to run preprocessing/training/inference.
+
+| Feature | Supported |
+|-------|------|
+| Distributed training | Yes |
+| Mixed precision | Yes |
-
-```bash
-docker run --runtime=nvidia --privileged --rm -ti -v ${HOST_OUTBRAIN_PATH}:/outbrain wide_deep /bin/bash
-```
-5. Start preprocessing.
+
+## Mixed precision training
-
-```bash
-bash scripts/preproc.sh 4096
-```
-The result of the preprocessing scripts is prebatched TFRecords. The argument to the script is the prebatch size (4096 is the default).
+
+The Ascend 910 AI Processor provides an automatic mixed precision capability: following a built-in optimization strategy, it automatically lowers the precision of selected float32 operators in the graph to float16, which improves system performance and reduces memory usage with very little accuracy loss.
-
-6. Start training.
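As a quick sanity check on the single-device defaults listed under "Default configuration" above (an illustrative back-of-the-envelope computation, not code from the repository):

```python
# Derive per-epoch step counts from the documented defaults.
batch_size = 131072
train_per_epoch = 59761827
test_per_epoch = 1048576

steps_per_epoch = -(-train_per_epoch // batch_size)  # ceiling division -> 456
eval_steps = test_per_epoch // batch_size            # divides exactly -> 8
print(steps_per_epoch, eval_steps)                   # 456 8
```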
+## Enabling mixed precision
+Related code example:
-Single GPU:
-```bash
-python -m trainer.task --gpu
 ```
-8 GPU:
-```bash
-mpiexec --allow-run-as-root --bind-to socket -np 8 python -m trainer.task --gpu --hvd
+    config_proto = tf.ConfigProto(allow_soft_placement=True)
+    custom_op = config_proto.graph_options.rewrite_options.custom_optimizers.add()
+    custom_op.name = 'NpuOptimizer'
+    custom_op.parameter_map["use_off_line"].b = True
+    custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
+    config_proto.graph_options.rewrite_options.remapping = RewriterConfig.OFF
+    session_config = npu_config_proto(config_proto=config_proto)
 ```
-
-If you want to run validation or inference, you can either use the checkpoint obtained from the training
-commands above, or download the pretrained checkpoint from NGC.
-
-In order to download the checkpoint from NGC, visit the [ngc.nvidia.com](https://ngc.nvidia.com) website and
-browse the available models.
-Download the checkpoint files and unzip them to some path, e.g. to `/raid/outbrain/checkpoints/`
-(which is the default path for storing the checkpoints during training).
-
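The snippet above only builds a session config. For an Estimator-based script such as `trainer/task.py`, that config still has to reach the sessions the Estimator creates; a minimal sketch, assuming a TF 1.x environment and omitting the Ascend-specific `npu_config_proto` wrapper:

```python
import tensorflow as tf
from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig

# Same NpuOptimizer / allow_mix_precision setup as the example above.
config_proto = tf.ConfigProto(allow_soft_placement=True)
custom_op = config_proto.graph_options.rewrite_options.custom_optimizers.add()
custom_op.name = 'NpuOptimizer'
custom_op.parameter_map["use_off_line"].b = True
custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
config_proto.graph_options.rewrite_options.remapping = RewriterConfig.OFF

# Hand the session config to the Estimator so every tf.Session it creates
# runs with the NPU mixed-precision settings.
run_config = tf.estimator.RunConfig(session_config=config_proto)
```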

Preparing the training environment

-7. Start validation/evaluation.
-
-In order to validate the checkpoint on the evaluation set, run the `task.py` script with the `--evaluate` flag:
-
-```bash
-python -m trainer.task --gpu --evaluate --model_dir /outbrain/checkpoints
+- For hardware and runtime environment preparation, see the [CANN Software Installation Guide](https://support.huawei.com/enterprise/zh/ascend-computing/cann-pid-251168373?category=installation-update).
+- Run the following command to install the dependencies.
 ```
-
-8. Start inference/predictions.
-
-In order to run inference and predict the results, run the `task.py`
-script with the `--predict` flag:
-
-```bash
-python -m trainer.task --gpu --predict --model_dir /outbrain/checkpoints
+pip3 install -r requirements.txt
 ```
+Note: the dependency file requirements.txt is located in the root directory of the model.
+

Quick start

-## Advanced
-
-The following sections provide greater details of the dataset, running training, and the training results.
-
-### Scripts and sample code
-
-These are the important scripts in this repository:
-* `trainer/task.py` - Python script for training the Wide & Deep recommender model
-* `trainer/features.py` - Python file describing the request- and item-level features
-
-### Parameters
+### Dataset preparation
-
-These are the important parameters in the `trainer/task.py` script:
+
+- Please refer to the "Reference implementation" above and download the Outbrain dataset from the original source.
 ```
---model_dir: Path to model checkpoint directory
---deep_hidden_units: [DEEP_LAYER1 DEEP_LAYER2 ...] hidden units per layer, separated by spaces
---prebatch_size: Number of samples in each pre-batch in tfrecords
---global_batch_size: Training batch size (across all GPUs, must be a multiple of prebatch_size)
---eval_batch_size: Evaluation batch size (must be a multiple of prebatch_size)
---num_epochs: Number of epochs to train
---linear_learning_rate: Learning rate for the wide part of the model
---linear_l1_regularization: L1 regularization for the wide part of the model
---linear_l2_regularization: L2 regularization for the wide part of the model
---deep_learning_rate: Learning rate for the deep part of the model
---deep_dropout: Dropout probability for the deep model
---deep_warmup_epochs: Number of epochs with linear learning rate warmup
---predict: Perform only the prediction on the validation set, do not train
---evaluate: Perform only the evaluation on the validation set, do not train
---gpu: Run computations on GPU
---amp: Enable Automatic Mixed Precision
---xla: Enable XLA
---hvd: Use Horovod for multi-GPU training
---eval_epoch_interval: Interval, in epochs, between evaluations
-```
-
-### Command-line options
-
-To see the full list of available options and their descriptions, use the `-h` or `--help` command-line option:
-```bash
-python -m trainer.task --help
-```
-
-
-### Getting the data
-
-The Outbrain dataset can be downloaded from [Kaggle](https://www.kaggle.com/c/outbrain-click-prediction/data) (requires a Kaggle account).
-
-
-#### Dataset guidelines
-
-The dataset contains a sample of users’ page views and clicks, as observed on multiple publisher sites. Viewed pages and clicked recommendations have additional semantic attributes of the documents.
-The dataset contains sets of content recommendations served to a specific user in a specific context. Each context (i.e. a set of recommended ads) is given a `display_id`. In each such recommendation set, the user has clicked on exactly one of the ads.
-
-The original data is stored in several separate files:
-- `page_views.csv` - log of users visiting documents (2B rows, ~100GB uncompressed)
-- `clicks_train.csv` - data showing which ad was clicked in each recommendation set (87M rows)
-- `clicks_test.csv` - used only for the submission in the original Kaggle contest
-- `events.csv` - metadata about the context of each recommendation set (23M rows)
-- `promoted_content.csv` - metadata about the ads
-- `document_meta.csv`, `document_topics.csv`, `document_entities.csv`, `document_categories.csv` - metadata about the documents
-
-During the preprocessing stage the data is transformed into 59M rows of tabular data with 54 features and eventually saved in pre-batched TFRecord format.
-
-
-#### Spark preprocessing
-
-The original dataset is preprocessed using Spark scripts from the `preproc` directory.
The preprocessing consists of the following operations:
-- separating out the validation set for cross-validation
-- filling missing data with the most frequent value
-- generating the user profiles from the page views data
-- joining the tables for the ad clicks data
-- computing click-through rates (CTR) for ads grouped by different contexts
-- computing cosine similarity between the features of the clicked ads and the viewed ads
-- math transformations of the numeric features (taking logarithm, scaling, binning)
-- storing the resulting set of features in TFRecord format
-
-The `preproc1-4.py` preprocessing scripts use PySpark.
-In the Docker image, we have installed Spark 2.3.1 as a standalone Spark cluster.
-The `preproc1.py` script splits the data into a training set and a validation set.
-The `preproc2.py` script generates the user profiles from the page views data.
-The `preproc3.py` script computes the click-through rates (CTR) and cosine similarities between the features.
-The `preproc4.py` script performs the math transformations and generates the final TFRecord files.
-The data in the output files is pre-batched (with the default batch size of 4096) to avoid the overhead
-of the TFRecord format, which otherwise is not suitable for tabular data --
-it stores a separate dictionary with each feature name in plain text for every data entry.
-
-The preprocessing includes some very resource-intensive operations, such as joining tables of 2B+ rows.
-Such operations may not fit into RAM; therefore, we decided to use Spark, which is a suitable tool
-for handling tabular operations on large data.
-Note that the Spark job requires about 1 TB of disk space and 500 GB of RAM to perform the preprocessing.
-For more information about Spark, please refer to the
-[Spark documentation](https://spark.apache.org/docs/2.3.1/).
-
-
-### Training process
-
-The training can be started by running the `trainer/task.py` script. By default, the script is in train mode. Other training-related
-configs are also present in `trainer/task.py` and can be seen using the command `python -m trainer.task --help`. Training happens for `--num_epochs` epochs with a DNNLinearCombinedClassifier estimator for the model. The model has a wide linear part and a deep feed-forward network, and the networks are built according to the default configuration.
-
-Two separate optimizers are used to optimize the wide and the deep part of the network (a minimal sketch follows below):
-
-- The FTRL (Follow The Regularized Leader) optimizer is used to optimize the wide part of the network.
-- The Adagrad optimizer is used to optimize the deep part of the network.
-
-The training log will contain information about:
-
-- Loss value after every 100 steps.
-- Training throughput if the `--benchmark` option is selected.
-- Evaluation metrics after every `--eval_epoch_interval` epochs.
-
-Checkpoints are stored with every evaluation at the `--model_dir` location.
-
-## Performance
-
-The performance measurements in this document were conducted at the time of publication and may not reflect the performance achieved from NVIDIA’s latest software release. For the most up-to-date performance measurements, go to [NVIDIA Data Center Deep Learning Product Performance](https://developer.nvidia.com/deep-learning-performance-training-inference).
-
-### Benchmarking
-
-The following section shows how to run benchmarks measuring the model performance in training mode.
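A minimal sketch of the two-optimizer estimator construction described under "Training process" above. The feature columns are stand-ins (the real ones are defined in `trainer/features.py`), and the learning rates are illustrative rather than the repository defaults:

```python
import tensorflow as tf

# Stand-in columns; the real wide/deep columns come from trainer/features.py.
site = tf.feature_column.categorical_column_with_hash_bucket('site_id', 1000)
ctr = tf.feature_column.numeric_column('ad_ctr')
wide_columns = [site]
deep_columns = [ctr, tf.feature_column.embedding_column(site, dimension=64)]

estimator = tf.estimator.DNNLinearCombinedClassifier(
    linear_feature_columns=wide_columns,
    # FTRL optimizes the wide (linear) part of the network ...
    linear_optimizer=tf.train.FtrlOptimizer(learning_rate=0.01),
    dnn_feature_columns=deep_columns,
    # ... while Adagrad optimizes the deep feed-forward part.
    dnn_optimizer=tf.train.AdagradOptimizer(learning_rate=0.01),
    # Five hidden layers of 1024 units, matching the model description above.
    dnn_hidden_units=[1024, 1024, 1024, 1024, 1024])
```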
- -#### Training performance benchmark - -We provide 8 scripts to benchmark the performance of training: -```bash -bash scripts/DGXA100_benchmark_training_tf32_1gpu.sh -bash scripts/DGXA100_benchmark_training_amp_1gpu.sh -bash scripts/DGXA100_benchmark_training_tf32_8gpu.sh -bash scripts/DGXA100_benchmark_training_amp_8gpu.sh -bash scripts/DGX1_benchmark_training_fp32_1gpu.sh -bash scripts/DGX1_benchmark_training_amp_1gpu.sh -bash scripts/DGX1_benchmark_training_fp32_8gpu.sh -bash scripts/DGX1_benchmark_training_amp_8gpu.sh -``` - -### Results - -The following sections provide details on how we achieved our performance and -accuracy in training. - -#### Training accuracy results - -##### Training accuracy: NVIDIA DGX A100 (8x A100 40GB) - -Our results were obtained by running the `trainer/task.py` training script in the TensorFlow NGC container on NVIDIA DGX A100 with (8x A100 40GB) GPUs. - -|**GPUs**|**Batch size / GPU**|**Accuracy - TF32 (MAP@12)**|**Accuracy - mixed precision (MAP@12)**|**Time to train - TF32 (minutes)**|**Time to train - mixed precision (minutes)**|**Time to train speedup (TF32 to mixed precision)**| -|-------:|-------------------:|----------------------------:|---------------------------------------:|-----------------------------------------------:|----------------------:|---------------------------------:| -| 1 | 131,072 | 0.67683 | 0.67632 | 341 | 359 | [-](#known-issues) | -| 8 | 16,384 | 0.67709 | 0.67721 | 93 | 107 | [-](#known-issues) | - -To achieve the same results, follow the steps in the [Quick Start Guide](#quick-start-guide). - -##### Training accuracy: NVIDIA DGX-1 (8x V100 16GB) - -Our results were obtained by running the `trainer/task.py` training script in the TensorFlow NGC container on NVIDIA DGX-1 with (8x V100 16GB) GPUs. - -|**GPUs**|**Batch size / GPU**|**Accuracy - FP32 (MAP@12)**|**Accuracy - mixed precision (MAP@12)**|**Time to train - FP32 (minutes)**|**Time to train - mixed precision (minutes)**|**Time to train speedup (FP32 to mixed precision)**| -|-------:|-------------------:|----------------------------:|---------------------------------------:|-----------------------------------------------:|----------------------:|---------------------------------:| -| 1 | 131,072 | 0.67648 | 0.67744 | 654 | 440 | 1.49 | -| 8 | 16,384 | 0.67692 | 0.67725 | 190 | 185 | 1.03 | - -To achieve the same results, follow the steps in the [Quick Start Guide](#quick-start-guide). - -##### Training accuracy plots - -Models trained with FP32, TF32 and Automatic Mixed Precision (AMP) achieve similar precision. - -![MAP12](img/lc20.06.png) - -##### Training stability test - -The Wide and Deep model was trained for 54,713 training steps, starting -from 6 different initial random seeds for each setup. The training was performed in the 20.10-tf1-py3 NGC container on -NVIDIA DGX A100 40GB and DGX-1 16GB machines with and without mixed precision enabled. -After training, the models were evaluated on the validation set. The following -table summarizes the final MAP@12 score on the validation set. 
-
-||**Average MAP@12**|**Standard deviation**|**Minimum**|**Maximum**|
-|:-------|-----------------:|---------------------:|----------:|----------:|
-| DGX A100 TF32 | 0.67709 | 0.00094 | 0.67463 | 0.67813 |
-| DGX A100 mixed precision | 0.67721 | 0.00048 | 0.67643 | 0.67783 |
-| DGX-1 FP32 | 0.67692 | 0.00060 | 0.67587 | 0.67791 |
-| DGX-1 mixed precision | 0.67725 | 0.00064 | 0.67561 | 0.67803 |
-
-
-#### Training performance results
-
-
-##### Training performance: NVIDIA DGX A100 (8x A100 40GB)
+### Model training
-
-Our results were obtained by running the benchmark scripts from the `scripts` directory in the TensorFlow NGC container on NVIDIA DGX A100 (8x A100 40GB) GPUs. Improving model scaling for multi-GPU is [under development](#known-issues).
+- Click "Download Now" and choose a suitable download method to obtain the source package.
+- Start training
+
+  1. Before launching training, first configure the environment variables required by the program.
-
-|**GPUs**|**Batch size / GPU**|**Throughput - TF32 (samples/s)**|**Throughput - mixed precision (samples/s)**|**Strong scaling - TF32**|**Strong scaling - mixed precision**|
-|-------:|-------------------:|----------------------------:|---------------------------------------:|----------------------:|---------------------------------:|
-| 1 | 131,072 | 349,879 | 332,529 | 1.00 | 1.00 |
-| 8 | 16,384 | 1,283,457 | 1,111,976 | 3.67 | 3.34 |
+
+     For the environment variable configuration, see:
-
-##### Training performance: NVIDIA DGX-1 (8x V100 16GB)
+
+     [Ascend 910 training platform environment variable setup](https://gitee.com/ascend/modelzoo/wikis/Ascend%20910%E8%AE%AD%E7%BB%83%E5%B9%B3%E5%8F%B0%E7%8E%AF%E5%A2%83%E5%8F%98%E9%87%8F%E8%AE%BE%E7%BD%AE?sort_id=3148819)
-
-Our results were obtained by running the benchmark scripts from the `scripts` directory in the TensorFlow NGC container on NVIDIA DGX-1 (8x V100 16GB) GPUs. Improving model scaling for multi-GPU is [under development](#known-issues).
+- Single-device training
+  Assuming the dataset is placed under /data:
+  ```
+  cd test
+  bash train_performance_1p.sh --data_path=/data    (functionality and performance)
+  bash train_full_1p.sh --data_path=/data           (full training)
+  ```
+

Advanced reference

-|**GPUs**|**Batch size / GPU**|**Throughput - FP32 (samples/s)**|**Throughput - mixed precision (samples/s)**|**Throughput speedup (FP32 to mixed precision)**|**Strong scaling - FP32**|**Strong scaling - mixed precision**|
-|-------:|-------------------:|----------------------------:|---------------------------------------:|-----------------------------------------------:|----------------------:|---------------------------------:|
-| 1 | 131,072 | 182,510 | 271,366 | 1.49 | 1.00 | 1.00 |
-| 8 | 16,384 | 626,301 | 643,334 | 1.03 | 3.43 | 2.37 |
+## Scripts and sample code
-## Release notes
+├── README.md                       // code documentation
+├── requirements.txt                // dependencies to install
+├── make_docker.sh
+├── configs
+│   ├── config.py                   // parameter configuration
+├── widedeep                        // model structure
+│   ├── WideDeep_fp16_huifeng.py
+│   ├── data_utils.py
+│   ├── features.py
+│   ├── tf_util.py
+├── test
+│   ├── train_full_1p.sh            // single-device full-training launch script
+│   ├── train_full_8p.sh            // 8-device full-training launch script
+│   ├── train_performance_1p.sh     // single-device performance launch script
+│   ├── train_performance_8p.sh     // 8-device performance launch script
+│   ├── 8p.json                     // JSON configuration file with the 8-device IPs
-### Changelog
+### Script parameters
-November 2020
-- Updated performance tables to include numbers from the 20.10-tf1-py3 NGC container
+Set these in `configs/config.py` (an illustrative parser sketch follows at the end of this README).
-June 2020
-- Updated performance tables to include A100 results
+--record_path        train data dir, default: path/to/data
+--num_inputs         number of features of the dataset, default: 39
+--batch_size         mini-batch size, default: 128
+--n_epoches          number of training epochs
-April 2020
-- Improved Spark preprocessing scripts performance
+## Training process
-March 2020
-- Initial release
+Launch single-device training with the training commands given under "Model training".
+Set data_path in the training script (train_full_1p.sh) to the path of the training dataset. For the detailed procedure, see the example under "Model training".
-### Known issues
-- Limited tf.feature_column support
-- Limited scaling for multi-GPU because of inefficient handling of embedding operations (multiple memory transfers between CPU and GPU); work is in progress to run all the operations on the GPU.
-- In this model, TF32 precision can in some cases be as fast as FP16 precision on Ampere GPUs.
-This is because TF32 also uses Tensor Cores and doesn't need any additional logic
-such as maintaining FP32 master weights and casts.
-However, please note that W&D is, by modern recommender standards, a very small model.
-Larger models should still see significant benefits of using FP16 math.
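To make the script-parameter list above concrete, here is a hedged, illustrative parser. The authoritative definitions live in `configs/config.py` and may differ in names, defaults, and types; in particular, the original description of `--n_epoches` ("initial learning rate, default: 0.06") appears to be a copy-paste error, so no default is assumed here:

```python
import argparse

# Illustrative mirror of the documented parameters; configs/config.py is
# the source of truth for the real names, types, and defaults.
parser = argparse.ArgumentParser(description='Wide & Deep training config')
parser.add_argument('--record_path', default='path/to/data',
                    help='train data dir')
parser.add_argument('--num_inputs', type=int, default=39,
                    help='number of features of the dataset')
parser.add_argument('--batch_size', type=int, default=128,
                    help='mini-batch size')
parser.add_argument('--n_epoches', type=int,
                    help='number of training epochs')

args = parser.parse_args()
print(args.record_path, args.batch_size)
```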
\ No newline at end of file diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p.json b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p.json index 96078ec8d6a851ca15a96cf8b68938913cf9c798..28426dea5096e8246f782ac2e253794aadbb79a6 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p.json +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p.json @@ -1,18 +1,8 @@ { - "group_count": "1", - "group_list": - [ - { - "devices": [ - { - "device_id": "0", - "device_ip": "192.1.2.8", - "rank_id": "0", - } - ], - "server_id": "10.155.111.118" - } - ], - "status": "completed", - "version":"1.0" -} \ No newline at end of file +"server_count":"1", +"server_list":[{ + "device":[{"device_id":"0","device_ip":"192.168.1.195","rank_id":"0"}], + "server_id":"127.0.0.1"}], +"status":"completed", +"version":"1.0" +} diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_0.json b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_0.json index 96078ec8d6a851ca15a96cf8b68938913cf9c798..28426dea5096e8246f782ac2e253794aadbb79a6 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_0.json +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_0.json @@ -1,18 +1,8 @@ { - "group_count": "1", - "group_list": - [ - { - "devices": [ - { - "device_id": "0", - "device_ip": "192.1.2.8", - "rank_id": "0", - } - ], - "server_id": "10.155.111.118" - } - ], - "status": "completed", - "version":"1.0" -} \ No newline at end of file +"server_count":"1", +"server_list":[{ + "device":[{"device_id":"0","device_ip":"192.168.1.195","rank_id":"0"}], + "server_id":"127.0.0.1"}], +"status":"completed", +"version":"1.0" +} diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_1.json b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_1.json index 8270cbe0e347f7e644c199804764a52690d8a456..d3e1c570bc4f79bcecbbfde405107a021be18098 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_1.json +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_1.json @@ -1,18 +1,8 @@ { - "group_count": "1", - "group_list": - [ - { - "devices": [ - { - "device_id": "1", - "device_ip": "192.1.2.8", - "rank_id": "1", - } - ], - "server_id": "10.155.111.118" - } - ], - "status": "completed", - "version":"1.0" -} \ No newline at end of file +"server_count":"1", +"server_list":[{ + "device":[{"device_id":"1","device_ip":"192.168.1.195","rank_id":"0"}], + "server_id":"127.0.0.1"}], +"status":"completed", +"version":"1.0" +} diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_2.json b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_2.json index 0438819ddf1e1c1425f5f3a706a5e5f7e9da0746..93c4a960c311ffba53d679224927aa01043e2328 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_2.json +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_2.json @@ -1,18 +1,8 @@ { - "group_count": "1", - "group_list": - [ - { - "devices": [ - { - "device_id": "2", - "device_ip": "192.1.2.8", - "rank_id": "2", - } - ], - "server_id": "10.155.111.118" - } - ], - "status": "completed", - "version":"1.0" -} \ No newline at end of file +"server_count":"1", +"server_list":[{ + 
"device":[{"device_id":"2","device_ip":"192.168.1.195","rank_id":"0"}], + "server_id":"127.0.0.1"}], +"status":"completed", +"version":"1.0" +} diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_3.json b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_3.json index ca9a5ff4ecdf11d5bed59358ccfeea9850d56697..4ed1fcf81f66bb111c3970bd91ee6358806eebe3 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_3.json +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_3.json @@ -1,18 +1,8 @@ { - "group_count": "1", - "group_list": - [ - { - "devices": [ - { - "device_id": "3", - "device_ip": "192.1.2.8", - "rank_id": "3", - } - ], - "server_id": "10.155.111.118" - } - ], - "status": "completed", - "version":"1.0" -} \ No newline at end of file +"server_count":"1", +"server_list":[{ + "device":[{"device_id":"3","device_ip":"192.168.1.195","rank_id":"0"}], + "server_id":"127.0.0.1"}], +"status":"completed", +"version":"1.0" +} diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_4.json b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_4.json index 90872c5e1a567ef0ba4145e9644005f87d4c1174..cdde74396d0e2ebb40ab88e835e0b60b4a713434 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_4.json +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_4.json @@ -1,18 +1,8 @@ { - "group_count": "1", - "group_list": - [ - { - "devices": [ - { - "device_id": "4", - "device_ip": "192.4.2.9", - "rank_id": "4", - } - ], - "server_id": "10.155.111.118" - } - ], - "status": "completed", - "version":"1.0" -} \ No newline at end of file +"server_count":"1", +"server_list":[{ + "device":[{"device_id":"4","device_ip":"192.168.1.195","rank_id":"0"}], + "server_id":"127.0.0.1"}], +"status":"completed", +"version":"1.0" +} diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_5.json b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_5.json index 1b1322f115c2a3dae81f85e46565068e87c8e50f..c0a7890298636b6b73b3f065dafff4c8602b5719 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_5.json +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_5.json @@ -1,18 +1,8 @@ { - "group_count": "1", - "group_list": - [ - { - "devices": [ - { - "device_id": "5", - "device_ip": "192.4.2.9", - "rank_id": "5", - } - ], - "server_id": "10.155.111.118" - } - ], - "status": "completed", - "version":"1.0" -} \ No newline at end of file +"server_count":"1", +"server_list":[{ + "device":[{"device_id":"5","device_ip":"192.168.1.195","rank_id":"0"}], + "server_id":"127.0.0.1"}], +"status":"completed", +"version":"1.0" +} diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_6.json b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_6.json index ea26227f6d4e233f61c3471a6b773f4ac432f4af..2c4b3211286d0efa409e74db5b2447ac23a79e36 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_6.json +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_6.json @@ -1,18 +1,8 @@ { - "group_count": "1", - "group_list": - [ - { - "devices": [ - { - "device_id": "6", - "device_ip": "192.4.2.9", - "rank_id": "6", - } - ], - "server_id": "10.155.111.118" - } - ], - "status": 
"completed", - "version":"1.0" -} \ No newline at end of file +"server_count":"1", +"server_list":[{ + "device":[{"device_id":"6","device_ip":"192.168.1.195","rank_id":"0"}], + "server_id":"127.0.0.1"}], +"status":"completed", +"version":"1.0" +} diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_7.json b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_7.json index 1ccc40abf4265a459877b44705d097bc9ab3db21..01399b72c57294348981db482d9e621f68669d77 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_7.json +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/1p_7.json @@ -1,18 +1,8 @@ { - "group_count": "1", - "group_list": - [ - { - "devices": [ - { - "device_id": "7", - "device_ip": "192.4.2.9", - "rank_id": "7", - } - ], - "server_id": "10.155.111.118" - } - ], - "status": "completed", - "version":"1.0" -} \ No newline at end of file +"server_count":"1", +"server_list":[{ + "device":[{"device_id":"7","device_ip":"192.168.1.195","rank_id":"0"}], + "server_id":"127.0.0.1"}], +"status":"completed", +"version":"1.0" +} diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/8p.json b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/8p.json index d13441e11288704f3e3ad5087dadd019d5481a15..3c329456ba1da36824150806dfbaae1b7beffa20 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/8p.json +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/config/8p.json @@ -11,10 +11,10 @@ {"device_id":"4","device_ip":"192.4.2.9","rank_id":"4"}, {"device_id":"5","device_ip":"192.4.2.9","rank_id":"5"}, {"device_id":"6","device_ip":"192.4.2.9","rank_id":"6"}, - {"device_id":"7","device_ip":"192.4.2.9","rank_id":"7"}, + {"device_id":"7","device_ip":"192.4.2.9","rank_id":"7"} ], - "server_id":"10.155.111.118" + "server_id":"127.0.0.1" } ], "status":"completed", diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/test/train_performance_1p.sh index 64ab2617e0318fcc0494239c6617026099635949..e5db039a1b812fe9099f2c8ccf020cbe280b16ea 100644 --- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/test/train_performance_1p.sh +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/test/train_performance_1p.sh @@ -27,6 +27,9 @@ RankSize=1 #训练epoch,可选 train_epochs=1 +#迭代下沉循环次数 +iteration_per_loop=0 + #参数配置 data_path="" @@ -93,36 +96,40 @@ else mkdir -p $cur_path/output/$ASCEND_DEVICE_ID/ckpt fi -if [ -d $cur_path/../config/1p_$ASCEND_DEVICE.json ];then - export RANK_TABLE_FILE=$cur_path/../config/1p_$ASCEND_DEVICE.json - export RANK_ID=$ASCEND_DEVICE_ID +if [ -f $cur_path/../config/1p_$ASCEND_DEVICE_ID.json ];then + export RANK_TABLE_FILE=$cur_path/../config/1p_$ASCEND_DEVICE_ID.json + export RANK_ID=0 else export RANK_TABLE_FILE=$cur_path/../config/1p_0.json export RANK_ID=0 fi + wait cd $cur_path/../ start=$(date +%s) -python3 -m trainer.task --gpu \ +python3 -m trainer.task \ --Adam \ + --iteration_per_loop=$iteration_per_loop \ --train_data_pattern=$data_path/outbrain/tfrecords/train/part* \ --eval_data_pattern=$data_path/outbrain/tfrecords/eval/part* \ --model_dir=$cur_path/output/$ASCEND_DEVICE_ID/ckpt \ --transformed_metadata_path=$data_path/outbrain/tfrecords \ - --num_epochs=$train_epochs > 
$cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 &
+  --num_epochs=$train_epochs \
+  --benchmark \
+  --global_batch_size=$batch_size > $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log 2>&1 &
 wait
 
 end=$(date +%s)
-e2etime=$(( $end - $start ))
+e2e_time=$(( $end - $start ))
 
 #print the results, no modification needed
 echo "------------------ Final result ------------------"
 #output performance FPS; review and adjust per model
-Time=`grep "INFO:tensorflow:global_step/sec: " $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log| awk -F' ' '{print $2}' | tail -n 2 | head -n +1`
-FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'*'${Time}'}'`
-
+#Time=`grep "INFO:tensorflow:global_step/sec: " $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log| awk -F' ' '{print $2}' | tail -n 2 | head -n +1`
+#FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'*'${Time}'}'`
+FPS=`grep train_throughput $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk -F "train_throughput :" '{print $2}' | sed s/[[:space:]]//g`
 
 #print, no modification needed
 echo "Final Performance images/sec : $FPS"
@@ -143,7 +150,7 @@ CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf'
 #throughput, no modification needed
 ActualFPS=${FPS}
 #training time per iteration, no modification needed
-TrainingTime=`awk -v x=320 -v y="$FPS" 'BECIN{printf "%3.f\n",y/x}'`
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${batch_size}'/'${FPS}'}'`
 
 #extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
 loss=`grep 'INFO:tensorflow:loss' $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | tr -d '\b\r' | grep -Eo "INFO:tensorflow:loss = [0-9]*\.[0-9]*" | awk -F' = ' '{print $2}'`
diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/trainer/task.py b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/trainer/task.py
index c22742f76797eca15694dd5b3995a332513e198b..dc8bc7bbc2038d522b28f696bc560f80135859ce 100644
--- a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/trainer/task.py
+++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/trainer/task.py
@@ -27,11 +27,11 @@ import tensorflow as tf
 import tensorflow_transform as tft
 from tensorflow.core.protobuf import rewriter_config_pb2
 from trainer import features
-from utils.dataloader import separate_input_fn
-from utils.hooks.benchmark_hooks import BenchmarkLoggingHook
-from utils.metrics import map_custom_metric, map_custom_metric_with_leak
-from utils.schedulers import learning_rate_scheduler
-
+from util.dataloader import separate_input_fn
+from util.hooks.benchmark_hooks import BenchmarkLoggingHook
+from util.metrics import map_custom_metric, map_custom_metric_with_leak
+from util.schedulers import learning_rate_scheduler
+from util.dnn_linear_combined import DNNLinearCombinedClassifier
 
 MODEL_TYPES = ['wide', 'deep', 'wide_n_deep']
 WIDE, DEEP, WIDE_N_DEEP = MODEL_TYPES
@@ -239,6 +239,11 @@ def create_parser():
       help='Number of steps for train performance benchmark',
       type=int,
       default=100)
+  parser.add_argument(
+      '--iteration_per_loop',
+      help='Number of iters per loop',
+      type=int,
+      default=0)
 
   return parser
 
@@ -262,7 +267,7 @@
       optimizer=deep_optimizer)
   elif model_type == WIDE_N_DEEP:
-    estimator = tf.estimator.DNNLinearCombinedClassifier(
+    estimator = DNNLinearCombinedClassifier(
       config=npu_run_config_init(run_config=run_config),
       linear_feature_columns=wide_columns,
       linear_optimizer=wide_optimizer,
@@ -329,15 +334,14 @@ def main(FLAGS):
       log_device_placement=FLAGS.log_device_placement
     )
   else:
-    #session_config =
tf.compat.v1.ConfigProto( - # device_count={'GPU': 0}, - # log_device_placement=FLAGS.log_device_placement - #) session_config = tf.ConfigProto() custom_op = session_config.graph_options.rewrite_options.custom_optimizers.add() custom_op.name = "NpuOptimizer" custom_op.parameter_map["use_off_line"].b = True custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") + if FLAGS.iteration_per_loop: + custom_op.parameter_map["enable_data_pre_proc"].b = True + custom_op.parameter_map["iterations_per_loop"].i = FLAGS.iteration_per_loop session_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF session_config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF @@ -361,12 +365,7 @@ def main(FLAGS): int(FLAGS.eval_epoch_interval * steps_per_epoch) count_steps = FLAGS.benchmark_steps + 1 if FLAGS.benchmark else 100 - run_config = tf.estimator.RunConfig(model_dir=model_dir, save_summary_steps=0) \ - .replace(session_config=session_config, - save_checkpoints_steps=save_checkpoints_steps, - save_summary_steps=count_steps, - log_step_count_steps=count_steps, - keep_checkpoint_max=1) + run_config = tf.estimator.RunConfig(model_dir=model_dir, save_summary_steps=0, session_config=session_config, save_checkpoints_steps=save_checkpoints_steps, log_step_count_steps=count_steps, keep_checkpoint_max=1) def wide_optimizer(): opt = tf.compat.v1.train.FtrlOptimizer( @@ -431,6 +430,8 @@ def main(FLAGS): estimator = tf.estimator.add_metrics(estimator, map_custom_metric_with_leak) hooks = [] + if FLAGS.iteration_per_loop: + hooks.append(npu_hook.SetIterationsVarHook(FLAGS.iteration_per_loop)) if FLAGS.hvd: hooks.append(NPUBroadcastGlobalVariablesHook(0, int(os.getenv('RANK_ID', '0')))) @@ -475,6 +476,7 @@ def main(FLAGS): else: # training if FLAGS.benchmark: + print("================is benchmark, not eval") benchmark_hook = BenchmarkLoggingHook(global_batch_size=FLAGS.global_batch_size, warmup_steps=FLAGS.benchmark_warmup_steps) hooks.append(benchmark_hook) @@ -482,6 +484,7 @@ def main(FLAGS): train_throughput = benchmark_hook.mean_throughput.value() dllogger.log(data={'train_throughput': train_throughput}, step=tuple()) else: + print('train and eval') train_spec = tf.estimator.TrainSpec(input_fn=train_input_fn, max_steps=max_steps, hooks=hooks) @@ -498,18 +501,20 @@ def main(FLAGS): if __name__ == '__main__': + FLAGS = create_parser().parse_args() session_config = tf.ConfigProto() custom_op = session_config.graph_options.rewrite_options.custom_optimizers.add() custom_op.name = "NpuOptimizer" custom_op.parameter_map["use_off_line"].b = True custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") - custom_op.parameter_map["graph_memory_max_size"].s= tf.compat.as_bytes(str(16 * 1024 * 1024 * 1024)) - custom_op.parameter_map["variable_memory_max_size"].s = tf.compat.as_bytes(str(15 * 1024 * 1024 * 1024)) + if FLAGS.iteration_per_loop: + print('>>>>>>>>> iteration per loop var: %d'%(FLAGS.iteration_per_loop)) + custom_op.parameter_map["enable_data_pre_proc"].b = True + custom_op.parameter_map["iterations_per_loop"].i = FLAGS.iteration_per_loop session_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF session_config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF (npu_sess, npu_shutdown) = init_resource(config=session_config) - FLAGS = create_parser().parse_args() main(FLAGS) shutdown_resource(npu_sess, npu_shutdown) close_session(npu_sess) diff --git 
a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/utils/dataloader.py b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/dataloader.py similarity index 100% rename from TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/utils/dataloader.py rename to TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/dataloader.py diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/dnn_linear_combined.py b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/dnn_linear_combined.py new file mode 100644 index 0000000000000000000000000000000000000000..08b0e2d88ef25cb41e67c05eaaa7eb5f531b8d50 --- /dev/null +++ b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/dnn_linear_combined.py @@ -0,0 +1,1152 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""TensorFlow estimators for Linear and DNN joined training models.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import math + +import six + +from tensorflow.python.framework import ops +from tensorflow.python.keras.utils import losses_utils +from tensorflow.python.ops import control_flow_ops +from tensorflow.python.ops import nn +from tensorflow.python.ops import partitioned_variables +from tensorflow.python.ops import state_ops +from tensorflow.python.ops import variable_scope +from tensorflow.python.ops.losses import losses +from tensorflow.python.summary import summary +from tensorflow.python.training import sync_replicas_optimizer +from tensorflow.python.training import training_util +from tensorflow.python.util.tf_export import estimator_export +from tensorflow_estimator.python.estimator import estimator +from tensorflow_estimator.python.estimator.canned import dnn +from tensorflow_estimator.python.estimator.canned import head as head_lib +from tensorflow_estimator.python.estimator.canned import linear +from tensorflow_estimator.python.estimator.canned import optimizers +from tensorflow_estimator.python.estimator.head import head_utils +from tensorflow_estimator.python.estimator.head import regression_head +from tensorflow_estimator.python.estimator.mode_keys import ModeKeys + +# The default learning rates are a historical artifact of the initial +# implementation. +_DNN_LEARNING_RATE = 0.001 +_LINEAR_LEARNING_RATE = 0.005 + + +def _check_no_sync_replicas_optimizer(optimizer): + if isinstance(optimizer, sync_replicas_optimizer.SyncReplicasOptimizer): + raise ValueError( + 'SyncReplicasOptimizer does not support multi optimizers case. ' + 'Therefore, it is not supported in DNNLinearCombined model. ' + 'If you want to use this optimizer, please use either DNN or Linear ' + 'model.') + + +def _linear_learning_rate(num_linear_feature_columns): + """Returns the default learning rate of the linear model. 
+ + The calculation is a historical artifact of this initial implementation, but + has proven a reasonable choice. + + Args: + num_linear_feature_columns: The number of feature columns of the linear + model. + + Returns: + A float. + """ + default_learning_rate = 1. / math.sqrt(num_linear_feature_columns) + return min(_LINEAR_LEARNING_RATE, default_learning_rate) + + +def _add_layer_summary(value, tag): + summary.scalar('%s/fraction_of_zero_values' % tag, nn.zero_fraction(value)) + summary.histogram('%s/activation' % tag, value) + + +def _validate_feature_columns(linear_feature_columns, dnn_feature_columns): + """Validates feature columns DNNLinearCombinedRegressor.""" + linear_feature_columns = linear_feature_columns or [] + dnn_feature_columns = dnn_feature_columns or [] + feature_columns = ( + list(linear_feature_columns) + list(dnn_feature_columns)) + if not feature_columns: + raise ValueError('Either linear_feature_columns or dnn_feature_columns ' + 'must be defined.') + return feature_columns + + +def _dnn_linear_combined_model_fn_v2( + features, + labels, + mode, + head, + linear_feature_columns=None, + linear_optimizer='Ftrl', + dnn_feature_columns=None, + dnn_optimizer='Adagrad', + dnn_hidden_units=None, + dnn_activation_fn=nn.relu, + dnn_dropout=None, + config=None, + batch_norm=False, + linear_sparse_combiner='sum', + loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE): + """Deep Neural Net and Linear combined model_fn. + + Args: + features: dict of `Tensor`. + labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of dtype + `int32` or `int64` in the range `[0, n_classes)`. + mode: Defines whether this is training, evaluation or prediction. See + `ModeKeys`. + head: A `Head` instance. + linear_feature_columns: An iterable containing all the feature columns used + by the Linear model. + linear_optimizer: string, `Optimizer` object, or callable that defines the + optimizer to use for training the Linear model. Defaults to the Ftrl + optimizer. + dnn_feature_columns: An iterable containing all the feature columns used by + the DNN model. + dnn_optimizer: string, `Optimizer` object, or callable that defines the + optimizer to use for training the DNN model. Defaults to the Adagrad + optimizer. + dnn_hidden_units: List of hidden units per DNN layer. + dnn_activation_fn: Activation function applied to each DNN layer. If `None`, + will use `tf.nn.relu`. + dnn_dropout: When not `None`, the probability we will drop out a given DNN + coordinate. + config: `RunConfig` object to configure the runtime settings. + batch_norm: Whether to use batch normalization after each hidden layer. + linear_sparse_combiner: A string specifying how to reduce the linear model + if a categorical column is multivalent. One of "mean", "sqrtn", and + "sum". + loss_reduction: One of `tf.keras.losses.Reduction` except `NONE`. Describes + how to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`. + + Returns: + An `EstimatorSpec` instance. + + Raises: + ValueError: If both `linear_feature_columns` and `dnn_features_columns` + are empty at the same time, or `input_layer_partitioner` is missing, + or features has the wrong type. + """ + if not isinstance(features, dict): + raise ValueError('features should be a dictionary of `Tensor`s. ' + 'Given type: {}'.format(type(features))) + if not linear_feature_columns and not dnn_feature_columns: + raise ValueError( + 'Either linear_feature_columns or dnn_feature_columns must be defined.') + + del config + + # Build DNN Logits. 
+ if not dnn_feature_columns: + dnn_logits = None + else: + if mode == ModeKeys.TRAIN: + dnn_optimizer = optimizers.get_optimizer_instance_v2( + dnn_optimizer, learning_rate=_DNN_LEARNING_RATE) + _check_no_sync_replicas_optimizer(dnn_optimizer) + + if not dnn_hidden_units: + raise ValueError( + 'dnn_hidden_units must be defined when dnn_feature_columns is ' + 'specified.') + dnn_logits, dnn_trainable_variables, dnn_update_ops = ( + dnn._dnn_model_fn_builder_v2( # pylint: disable=protected-access + units=head.logits_dimension, + hidden_units=dnn_hidden_units, + feature_columns=dnn_feature_columns, + activation_fn=dnn_activation_fn, + dropout=dnn_dropout, + batch_norm=batch_norm, + features=features, + mode=mode)) + + if not linear_feature_columns: + linear_logits = None + else: + if mode == ModeKeys.TRAIN: + linear_optimizer = optimizers.get_optimizer_instance_v2( + linear_optimizer, + learning_rate=_linear_learning_rate(len(linear_feature_columns))) + _check_no_sync_replicas_optimizer(linear_optimizer) + + linear_logits, linear_trainable_variables = ( + linear._linear_model_fn_builder_v2( # pylint: disable=protected-access + units=head.logits_dimension, + feature_columns=linear_feature_columns, + sparse_combiner=linear_sparse_combiner, + features=features)) + _add_layer_summary(linear_logits, 'linear') + + # Combine logits and build full model. + if dnn_logits is not None and linear_logits is not None: + logits = dnn_logits + linear_logits + elif dnn_logits is not None: + logits = dnn_logits + else: + logits = linear_logits + + def _train_op_fn(loss): + """Returns the op to optimize the loss.""" + train_ops = [] + # Scale loss by number of replicas. + if loss_reduction == losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE: + loss = losses_utils.scale_loss_for_distribution(loss) + + if dnn_logits is not None: + train_ops.extend( + dnn_optimizer.get_updates( + loss, + dnn_trainable_variables)) + if dnn_update_ops is not None: + train_ops.extend(dnn_update_ops) + if linear_logits is not None: + train_ops.extend( + linear_optimizer.get_updates( + loss, + linear_trainable_variables)) + train_op = control_flow_ops.group(*train_ops) + return train_op + + # In TRAIN mode, asssign global_step variable to optimizer.iterations to + # make global_step increased correctly, as Hooks relies on global step as + # step counter. Note that, Only one model's optimizer needs this assignment. + if mode == ModeKeys.TRAIN: + if dnn_logits is not None: + dnn_optimizer.iterations = training_util.get_or_create_global_step() + else: + linear_optimizer.iterations = training_util.get_or_create_global_step() + + return head.create_estimator_spec( + features=features, + mode=mode, + labels=labels, + train_op_fn=_train_op_fn, + logits=logits) + + +def _dnn_linear_combined_model_fn(features, + labels, + mode, + head, + linear_feature_columns=None, + linear_optimizer='Ftrl', + dnn_feature_columns=None, + dnn_optimizer='Adagrad', + dnn_hidden_units=None, + dnn_activation_fn=nn.relu, + dnn_dropout=None, + input_layer_partitioner=None, + config=None, + batch_norm=False, + linear_sparse_combiner='sum'): + """Deep Neural Net and Linear combined model_fn. + + Args: + features: dict of `Tensor`. + labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of dtype + `int32` or `int64` in the range `[0, n_classes)`. + mode: Defines whether this is training, evaluation or prediction. + See `ModeKeys`. + head: A `Head` instance. + linear_feature_columns: An iterable containing all the feature columns used + by the Linear model. 
+ linear_optimizer: string, `Optimizer` object, or callable that defines the + optimizer to use for training the Linear model. Defaults to the Ftrl + optimizer. + dnn_feature_columns: An iterable containing all the feature columns used by + the DNN model. + dnn_optimizer: string, `Optimizer` object, or callable that defines the + optimizer to use for training the DNN model. Defaults to the Adagrad + optimizer. + dnn_hidden_units: List of hidden units per DNN layer. + dnn_activation_fn: Activation function applied to each DNN layer. If `None`, + will use `tf.nn.relu`. + dnn_dropout: When not `None`, the probability we will drop out a given DNN + coordinate. + input_layer_partitioner: Partitioner for input layer. + config: `RunConfig` object to configure the runtime settings. + batch_norm: Whether to use batch normalization after each hidden layer. + linear_sparse_combiner: A string specifying how to reduce the linear model + if a categorical column is multivalent. One of "mean", "sqrtn", and + "sum". + Returns: + An `EstimatorSpec` instance. + + Raises: + ValueError: If both `linear_feature_columns` and `dnn_features_columns` + are empty at the same time, or `input_layer_partitioner` is missing, + or features has the wrong type. + """ + if not isinstance(features, dict): + raise ValueError('features should be a dictionary of `Tensor`s. ' + 'Given type: {}'.format(type(features))) + if not linear_feature_columns and not dnn_feature_columns: + raise ValueError( + 'Either linear_feature_columns or dnn_feature_columns must be defined.') + + num_ps_replicas = config.num_ps_replicas if config else 0 + input_layer_partitioner = input_layer_partitioner or ( + partitioned_variables.min_max_variable_partitioner( + max_partitions=num_ps_replicas, + min_slice_size=64 << 20)) + + # Build DNN Logits. 
+ dnn_parent_scope = 'dnn' + + if not dnn_feature_columns: + dnn_logits = None + else: + dnn_optimizer = optimizers.get_optimizer_instance( + dnn_optimizer, learning_rate=_DNN_LEARNING_RATE) + _check_no_sync_replicas_optimizer(dnn_optimizer) + if not dnn_hidden_units: + raise ValueError( + 'dnn_hidden_units must be defined when dnn_feature_columns is ' + 'specified.') + dnn_partitioner = ( + partitioned_variables.min_max_variable_partitioner( + max_partitions=num_ps_replicas)) + with variable_scope.variable_scope( + dnn_parent_scope, + values=tuple(six.itervalues(features)), + partitioner=dnn_partitioner) as scope: + dnn_absolute_scope = scope.name + dnn_logit_fn = dnn.dnn_logit_fn_builder( + units=head.logits_dimension, + hidden_units=dnn_hidden_units, + feature_columns=dnn_feature_columns, + activation_fn=dnn_activation_fn, + dropout=dnn_dropout, + batch_norm=batch_norm, + input_layer_partitioner=input_layer_partitioner) + dnn_logits = dnn_logit_fn(features=features, mode=mode) + + linear_parent_scope = 'linear' + + if not linear_feature_columns: + linear_logits = None + else: + linear_optimizer = optimizers.get_optimizer_instance( + linear_optimizer, + learning_rate=_linear_learning_rate(len(linear_feature_columns))) + _check_no_sync_replicas_optimizer(linear_optimizer) + with variable_scope.variable_scope( + linear_parent_scope, + values=tuple(six.itervalues(features)), + partitioner=input_layer_partitioner) as scope: + linear_absolute_scope = scope.name + logit_fn = linear.linear_logit_fn_builder( + units=head.logits_dimension, + feature_columns=linear_feature_columns, + sparse_combiner=linear_sparse_combiner) + linear_logits = logit_fn(features=features) + _add_layer_summary(linear_logits, scope.name) + + # Combine logits and build full model. + if dnn_logits is not None and linear_logits is not None: + logits = dnn_logits + linear_logits + elif dnn_logits is not None: + logits = dnn_logits + else: + logits = linear_logits + + def _train_op_fn(loss): + """Returns the op to optimize the loss.""" + train_ops = [] + global_step = training_util.get_global_step() + if dnn_logits is not None: + train_ops.append( + dnn_optimizer.minimize( + loss, + var_list=ops.get_collection( + ops.GraphKeys.TRAINABLE_VARIABLES, + scope=dnn_absolute_scope))) + if linear_logits is not None: + train_ops.append( + linear_optimizer.minimize( + loss, + var_list=ops.get_collection( + ops.GraphKeys.TRAINABLE_VARIABLES, + scope=linear_absolute_scope))) + + train_op = control_flow_ops.group(*train_ops, name='IterationOp') + with ops.control_dependencies([train_op]): + return state_ops.assign_add(global_step, 1).op + + return head.create_estimator_spec( + features=features, + mode=mode, + labels=labels, + train_op_fn=_train_op_fn, + logits=logits) + + +@estimator_export('estimator.DNNLinearCombinedClassifier', v1=[]) +class DNNLinearCombinedClassifierV2(estimator.EstimatorV2): + """An estimator for TensorFlow Linear and DNN joined classification models. + + Note: This estimator is also known as wide-n-deep. + + Example: + + ```python + numeric_feature = numeric_column(...) + categorical_column_a = categorical_column_with_hash_bucket(...) + categorical_column_b = categorical_column_with_hash_bucket(...) + + categorical_feature_a_x_categorical_feature_b = crossed_column(...) + categorical_feature_a_emb = embedding_column( + categorical_column=categorical_feature_a, ...) + categorical_feature_b_emb = embedding_column( + categorical_id_column=categorical_feature_b, ...) 
+ + estimator = DNNLinearCombinedClassifier( + # wide settings + linear_feature_columns=[categorical_feature_a_x_categorical_feature_b], + linear_optimizer=tf.train.FtrlOptimizer(...), + # deep settings + dnn_feature_columns=[ + categorical_feature_a_emb, categorical_feature_b_emb, + numeric_feature], + dnn_hidden_units=[1000, 500, 100], + dnn_optimizer=tf.train.ProximalAdagradOptimizer(...), + # warm-start settings + warm_start_from="/path/to/checkpoint/dir") + + # To apply L1 and L2 regularization, you can set dnn_optimizer to: + tf.train.ProximalAdagradOptimizer( + learning_rate=0.1, + l1_regularization_strength=0.001, + l2_regularization_strength=0.001) + # To apply learning rate decay, you can set dnn_optimizer to a callable: + lambda: tf.AdamOptimizer( + learning_rate=tf.exponential_decay( + learning_rate=0.1, + global_step=tf.get_global_step(), + decay_steps=10000, + decay_rate=0.96) + # It is the same for linear_optimizer. + + # Input builders + def input_fn_train: + # Returns tf.data.Dataset of (x, y) tuple where y represents label's class + # index. + pass + def input_fn_eval: + # Returns tf.data.Dataset of (x, y) tuple where y represents label's class + # index. + pass + def input_fn_predict: + # Returns tf.data.Dataset of (x, None) tuple. + pass + estimator.train(input_fn=input_fn_train, steps=100) + metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10) + predictions = estimator.predict(input_fn=input_fn_predict) + ``` + + Input of `train` and `evaluate` should have following features, + otherwise there will be a `KeyError`: + + * for each `column` in `dnn_feature_columns` + `linear_feature_columns`: + - if `column` is a `_CategoricalColumn`, a feature with `key=column.name` + whose `value` is a `SparseTensor`. + - if `column` is a `_WeightedCategoricalColumn`, two features: the first + with `key` the id column name, the second with `key` the weight column + name. Both features' `value` must be a `SparseTensor`. + - if `column` is a `_DenseColumn`, a feature with `key=column.name` + whose `value` is a `Tensor`. + + Loss is calculated by using softmax cross entropy. + + @compatibility(eager) + Estimators can be used while eager execution is enabled. Note that `input_fn` + and all hooks are executed inside a graph context, so they have to be written + to be compatible with graph mode. Note that `input_fn` code using `tf.data` + generally works in both graph and eager modes. + @end_compatibility + """ + + def __init__(self, + model_dir=None, + linear_feature_columns=None, + linear_optimizer='Ftrl', + dnn_feature_columns=None, + dnn_optimizer='Adagrad', + dnn_hidden_units=None, + dnn_activation_fn=nn.relu, + dnn_dropout=None, + n_classes=2, + weight_column=None, + label_vocabulary=None, + config=None, + warm_start_from=None, + loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE, + batch_norm=False, + linear_sparse_combiner='sum'): + """Initializes a DNNLinearCombinedClassifier instance. + + Args: + model_dir: Directory to save model parameters, graph and etc. This can + also be used to load checkpoints from the directory into a estimator + to continue training a previously saved model. + linear_feature_columns: An iterable containing all the feature columns + used by linear part of the model. All items in the set must be + instances of classes derived from `FeatureColumn`. + linear_optimizer: An instance of `tf.Optimizer` used to apply gradients to + the linear part of the model. 
Can also be a string (one of 'Adagrad', + 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or callable. Defaults to FTRL + optimizer. + dnn_feature_columns: An iterable containing all the feature columns used + by deep part of the model. All items in the set must be instances of + classes derived from `FeatureColumn`. + dnn_optimizer: An instance of `tf.Optimizer` used to apply gradients to + the deep part of the model. Can also be a string (one of 'Adagrad', + 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or callable. Defaults to Adagrad + optimizer. + dnn_hidden_units: List of hidden units per layer. All layers are fully + connected. + dnn_activation_fn: Activation function applied to each layer. If None, + will use `tf.nn.relu`. + dnn_dropout: When not None, the probability we will drop out + a given coordinate. + n_classes: Number of label classes. Defaults to 2, namely binary + classification. Must be > 1. + weight_column: A string or a `_NumericColumn` created by + `tf.feature_column.numeric_column` defining feature column representing + weights. It is used to down weight or boost examples during training. It + will be multiplied by the loss of the example. If it is a string, it is + used as a key to fetch weight tensor from the `features`. If it is a + `_NumericColumn`, raw tensor is fetched by key `weight_column.key`, + then weight_column.normalizer_fn is applied on it to get weight tensor. + label_vocabulary: A list of strings represents possible label values. If + given, labels must be string type and have any value in + `label_vocabulary`. If it is not given, that means labels are + already encoded as integer or float within [0, 1] for `n_classes=2` and + encoded as integer values in {0, 1,..., n_classes-1} for `n_classes`>2 . + Also there will be errors if vocabulary is not provided and labels are + string. + config: RunConfig object to configure the runtime settings. + warm_start_from: A string filepath to a checkpoint to warm-start from, or + a `WarmStartSettings` object to fully configure warm-starting. If the + string filepath is provided instead of a `WarmStartSettings`, then all + weights are warm-started, and it is assumed that vocabularies and Tensor + names are unchanged. + loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how + to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`. + batch_norm: Whether to use batch normalization after each hidden layer. + linear_sparse_combiner: A string specifying how to reduce the linear model + if a categorical column is multivalent. One of "mean", "sqrtn", and + "sum" -- these are effectively different ways to do example-level + normalization, which can be useful for bag-of-words features. For more + details, see `tf.feature_column.linear_model`. + + Raises: + ValueError: If both linear_feature_columns and dnn_features_columns are + empty at the same time. 
+ """ + self._feature_columns = _validate_feature_columns( + linear_feature_columns=linear_feature_columns, + dnn_feature_columns=dnn_feature_columns) + + head = head_utils.binary_or_multi_class_head( + n_classes, weight_column=weight_column, + label_vocabulary=label_vocabulary, + loss_reduction=loss_reduction) + + def _model_fn(features, labels, mode, config): + """Call the _dnn_linear_combined_model_fn.""" + return _dnn_linear_combined_model_fn_v2( + features=features, + labels=labels, + mode=mode, + head=head, + linear_feature_columns=linear_feature_columns, + linear_optimizer=linear_optimizer, + dnn_feature_columns=dnn_feature_columns, + dnn_optimizer=dnn_optimizer, + dnn_hidden_units=dnn_hidden_units, + dnn_activation_fn=dnn_activation_fn, + dnn_dropout=dnn_dropout, + config=config, + batch_norm=batch_norm, + linear_sparse_combiner=linear_sparse_combiner, + loss_reduction=loss_reduction) + + super(DNNLinearCombinedClassifierV2, self).__init__( + model_fn=_model_fn, + model_dir=model_dir, + config=config, + warm_start_from=warm_start_from) + + +#@estimator_export(v1=['estimator.DNNLinearCombinedClassifier']) # pylint: disable=missing-docstring +class DNNLinearCombinedClassifier(estimator.Estimator): + __doc__ = DNNLinearCombinedClassifierV2.__doc__.replace( + 'SUM_OVER_BATCH_SIZE', 'SUM') + + def __init__(self, + model_dir=None, + linear_feature_columns=None, + linear_optimizer='Ftrl', + dnn_feature_columns=None, + dnn_optimizer='Adagrad', + dnn_hidden_units=None, + dnn_activation_fn=nn.relu, + dnn_dropout=None, + n_classes=2, + weight_column=None, + label_vocabulary=None, + input_layer_partitioner=None, + config=None, + warm_start_from=None, + loss_reduction=losses.Reduction.SUM, + batch_norm=False, + linear_sparse_combiner='sum'): + self._feature_columns = _validate_feature_columns( + linear_feature_columns=linear_feature_columns, + dnn_feature_columns=dnn_feature_columns) + + head = head_lib._binary_logistic_or_multi_class_head( # pylint: disable=protected-access + n_classes, weight_column, label_vocabulary, loss_reduction) + + def _model_fn(features, labels, mode, config): + """Call the _dnn_linear_combined_model_fn.""" + return _dnn_linear_combined_model_fn( + features=features, + labels=labels, + mode=mode, + head=head, + linear_feature_columns=linear_feature_columns, + linear_optimizer=linear_optimizer, + dnn_feature_columns=dnn_feature_columns, + dnn_optimizer=dnn_optimizer, + dnn_hidden_units=dnn_hidden_units, + dnn_activation_fn=dnn_activation_fn, + dnn_dropout=dnn_dropout, + input_layer_partitioner=input_layer_partitioner, + config=config, + batch_norm=batch_norm, + linear_sparse_combiner=linear_sparse_combiner) + + super(DNNLinearCombinedClassifier, self).__init__( + model_fn=_model_fn, + model_dir=model_dir, + config=config, + warm_start_from=warm_start_from) + + +def _init_dnn_linear_combined_estimator( + head, + linear_feature_columns, + linear_optimizer, + dnn_feature_columns, + dnn_optimizer, + dnn_hidden_units, + dnn_activation_fn, + dnn_dropout, + input_layer_partitioner, + linear_sparse_combiner): + """Helper function for the initialization of DNNLinearCombinedEstimator.""" + linear_feature_columns = linear_feature_columns or [] + dnn_feature_columns = dnn_feature_columns or [] + feature_columns = ( + list(linear_feature_columns) + list(dnn_feature_columns)) + if not feature_columns: + raise ValueError('Either linear_feature_columns or dnn_feature_columns ' + 'must be defined.') + + def _model_fn(features, labels, mode, config): + """Call the 
_dnn_linear_combined_model_fn."""
+    return _dnn_linear_combined_model_fn(
+        features=features,
+        labels=labels,
+        mode=mode,
+        head=head,
+        linear_feature_columns=linear_feature_columns,
+        linear_optimizer=linear_optimizer,
+        dnn_feature_columns=dnn_feature_columns,
+        dnn_optimizer=dnn_optimizer,
+        dnn_hidden_units=dnn_hidden_units,
+        dnn_activation_fn=dnn_activation_fn,
+        dnn_dropout=dnn_dropout,
+        input_layer_partitioner=input_layer_partitioner,
+        config=config,
+        linear_sparse_combiner=linear_sparse_combiner)
+  return feature_columns, _model_fn
+
+
+# TODO(b/117517419): Update these contrib references once head moves to core.
+# Also references to the "_Head" class need to be replaced with "Head".
+@estimator_export('estimator.DNNLinearCombinedEstimator', v1=[])
+class DNNLinearCombinedEstimatorV2(estimator.EstimatorV2):
+  """An estimator for TensorFlow Linear and DNN joined models with custom head.
+
+  Note: This estimator is also known as wide-n-deep.
+
+  Example:
+
+  ```python
+  numeric_feature = numeric_column(...)
+  categorical_column_a = categorical_column_with_hash_bucket(...)
+  categorical_column_b = categorical_column_with_hash_bucket(...)
+
+  categorical_feature_a_x_categorical_feature_b = crossed_column(...)
+  categorical_feature_a_emb = embedding_column(
+      categorical_column=categorical_feature_a, ...)
+  categorical_feature_b_emb = embedding_column(
+      categorical_column=categorical_feature_b, ...)
+
+  estimator = DNNLinearCombinedEstimator(
+      head=tf.contrib.estimator.multi_label_head(n_classes=3),
+      # wide settings
+      linear_feature_columns=[categorical_feature_a_x_categorical_feature_b],
+      linear_optimizer=tf.train.FtrlOptimizer(...),
+      # deep settings
+      dnn_feature_columns=[
+          categorical_feature_a_emb, categorical_feature_b_emb,
+          numeric_feature],
+      dnn_hidden_units=[1000, 500, 100],
+      dnn_optimizer=tf.train.ProximalAdagradOptimizer(...))
+
+  # To apply L1 and L2 regularization, you can set dnn_optimizer to:
+  tf.train.ProximalAdagradOptimizer(
+      learning_rate=0.1,
+      l1_regularization_strength=0.001,
+      l2_regularization_strength=0.001)
+  # To apply learning rate decay, you can set dnn_optimizer to a callable:
+  lambda: tf.train.AdamOptimizer(
+      learning_rate=tf.train.exponential_decay(
+          learning_rate=0.1,
+          global_step=tf.train.get_global_step(),
+          decay_steps=10000,
+          decay_rate=0.96))
+  # It is the same for linear_optimizer.
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents label's class
+    # index.
+    pass
+  def input_fn_predict():
+    # Returns tf.data.Dataset of (x, None) tuple.
+    pass
+  estimator.train(input_fn=input_fn_train, steps=100)
+  metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+  otherwise there will be a `KeyError`:
+
+  * for each `column` in `dnn_feature_columns` + `linear_feature_columns`:
+    - if `column` is a `_CategoricalColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `_WeightedCategoricalColumn`, two features: the first
+      with `key` the id column name, the second with `key` the weight column
+      name. Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `_DenseColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
+
+  Loss and predicted output are determined by the specified `head`.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               head,
+               model_dir=None,
+               linear_feature_columns=None,
+               linear_optimizer='Ftrl',
+               dnn_feature_columns=None,
+               dnn_optimizer='Adagrad',
+               dnn_hidden_units=None,
+               dnn_activation_fn=nn.relu,
+               dnn_dropout=None,
+               config=None,
+               linear_sparse_combiner='sum'):
+    """Initializes a DNNLinearCombinedEstimator instance.
+
+    Args:
+      head: A `_Head` instance constructed with a method such as
+        `tf.contrib.estimator.multi_label_head`.
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        to continue training a previously saved model.
+      linear_feature_columns: An iterable containing all the feature columns
+        used by the linear part of the model. All items in the set must be
+        instances of classes derived from `FeatureColumn`.
+      linear_optimizer: An instance of `tf.Optimizer` used to apply gradients
+        to the linear part of the model. Can also be a string (one of
+        'Adagrad', 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or a callable. Defaults
+        to the FTRL optimizer.
+      dnn_feature_columns: An iterable containing all the feature columns used
+        by the deep part of the model. All items in the set must be instances
+        of classes derived from `FeatureColumn`.
+      dnn_optimizer: An instance of `tf.Optimizer` used to apply gradients to
+        the deep part of the model. Can also be a string (one of 'Adagrad',
+        'Adam', 'Ftrl', 'RMSProp', 'SGD'), or a callable. Defaults to the
+        Adagrad optimizer.
+      dnn_hidden_units: List of hidden units per layer. All layers are fully
+        connected.
+      dnn_activation_fn: Activation function applied to each layer. If None,
+        will use `tf.nn.relu`.
+      dnn_dropout: When not None, the probability that a given coordinate will
+        be dropped out.
+      config: RunConfig object to configure the runtime settings.
+      linear_sparse_combiner: A string specifying how to reduce the linear
+        model if a categorical column is multivalent. One of "mean", "sqrtn",
+        and "sum" -- these are effectively different ways to do example-level
+        normalization, which can be useful for bag-of-words features. For more
+        details, see `tf.feature_column.linear_model`.
+
+    Raises:
+      ValueError: If both linear_feature_columns and dnn_feature_columns are
+        empty at the same time.
+ """ + self._feature_columns = _validate_feature_columns( + linear_feature_columns=linear_feature_columns, + dnn_feature_columns=dnn_feature_columns) + + def _model_fn(features, labels, mode, config): + """Call the _dnn_linear_combined_model_fn.""" + return _dnn_linear_combined_model_fn_v2( + features=features, + labels=labels, + mode=mode, + head=head, + linear_feature_columns=linear_feature_columns, + linear_optimizer=linear_optimizer, + dnn_feature_columns=dnn_feature_columns, + dnn_optimizer=dnn_optimizer, + dnn_hidden_units=dnn_hidden_units, + dnn_activation_fn=dnn_activation_fn, + dnn_dropout=dnn_dropout, + config=config, + linear_sparse_combiner=linear_sparse_combiner) + + super(DNNLinearCombinedEstimatorV2, self).__init__( + model_fn=_model_fn, + model_dir=model_dir, + config=config) + + +@estimator_export(v1=['estimator.DNNLinearCombinedEstimator']) # pylint: disable=missing-docstring +class DNNLinearCombinedEstimator(estimator.Estimator): + __doc__ = DNNLinearCombinedEstimatorV2.__doc__ + + def __init__(self, + head, + model_dir=None, + linear_feature_columns=None, + linear_optimizer='Ftrl', + dnn_feature_columns=None, + dnn_optimizer='Adagrad', + dnn_hidden_units=None, + dnn_activation_fn=nn.relu, + dnn_dropout=None, + input_layer_partitioner=None, + config=None, + linear_sparse_combiner='sum'): + self._feature_columns = _validate_feature_columns( + linear_feature_columns=linear_feature_columns, + dnn_feature_columns=dnn_feature_columns) + + def _model_fn(features, labels, mode, config): + """Call the _dnn_linear_combined_model_fn.""" + return _dnn_linear_combined_model_fn( + features=features, + labels=labels, + mode=mode, + head=head, + linear_feature_columns=linear_feature_columns, + linear_optimizer=linear_optimizer, + dnn_feature_columns=dnn_feature_columns, + dnn_optimizer=dnn_optimizer, + dnn_hidden_units=dnn_hidden_units, + dnn_activation_fn=dnn_activation_fn, + dnn_dropout=dnn_dropout, + input_layer_partitioner=input_layer_partitioner, + config=config, + linear_sparse_combiner=linear_sparse_combiner) + + super(DNNLinearCombinedEstimator, self).__init__( + model_fn=_model_fn, + model_dir=model_dir, + config=config) + + +@estimator_export('estimator.DNNLinearCombinedRegressor', v1=[]) +class DNNLinearCombinedRegressorV2(estimator.EstimatorV2): + """An estimator for TensorFlow Linear and DNN joined models for regression. + + Note: This estimator is also known as wide-n-deep. + + Example: + + ```python + numeric_feature = numeric_column(...) + categorical_column_a = categorical_column_with_hash_bucket(...) + categorical_column_b = categorical_column_with_hash_bucket(...) + + categorical_feature_a_x_categorical_feature_b = crossed_column(...) + categorical_feature_a_emb = embedding_column( + categorical_column=categorical_feature_a, ...) + categorical_feature_b_emb = embedding_column( + categorical_column=categorical_feature_b, ...) 
+
+  estimator = DNNLinearCombinedRegressor(
+      # wide settings
+      linear_feature_columns=[categorical_feature_a_x_categorical_feature_b],
+      linear_optimizer=tf.train.FtrlOptimizer(...),
+      # deep settings
+      dnn_feature_columns=[
+          categorical_feature_a_emb, categorical_feature_b_emb,
+          numeric_feature],
+      dnn_hidden_units=[1000, 500, 100],
+      dnn_optimizer=tf.train.ProximalAdagradOptimizer(...),
+      # warm-start settings
+      warm_start_from="/path/to/checkpoint/dir")
+
+  # To apply L1 and L2 regularization, you can set dnn_optimizer to:
+  tf.train.ProximalAdagradOptimizer(
+      learning_rate=0.1,
+      l1_regularization_strength=0.001,
+      l2_regularization_strength=0.001)
+  # To apply learning rate decay, you can set dnn_optimizer to a callable:
+  lambda: tf.train.AdamOptimizer(
+      learning_rate=tf.train.exponential_decay(
+          learning_rate=0.1,
+          global_step=tf.train.get_global_step(),
+          decay_steps=10000,
+          decay_rate=0.96))
+  # It is the same for linear_optimizer.
+
+  # Input builders
+  def input_fn_train():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents the label
+    # value.
+    pass
+  def input_fn_eval():
+    # Returns tf.data.Dataset of (x, y) tuple where y represents the label
+    # value.
+    pass
+  def input_fn_predict():
+    # Returns tf.data.Dataset of (x, None) tuple.
+    pass
+  estimator.train(input_fn=input_fn_train, steps=100)
+  metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
+  predictions = estimator.predict(input_fn=input_fn_predict)
+  ```
+
+  Input of `train` and `evaluate` should have the following features,
+  otherwise there will be a `KeyError`:
+
+  * for each `column` in `dnn_feature_columns` + `linear_feature_columns`:
+    - if `column` is a `_CategoricalColumn`, a feature with `key=column.name`
+      whose `value` is a `SparseTensor`.
+    - if `column` is a `_WeightedCategoricalColumn`, two features: the first
+      with `key` the id column name, the second with `key` the weight column
+      name. Both features' `value` must be a `SparseTensor`.
+    - if `column` is a `_DenseColumn`, a feature with `key=column.name`
+      whose `value` is a `Tensor`.
+
+  Loss is calculated by using mean squared error.
+
+  @compatibility(eager)
+  Estimators can be used while eager execution is enabled. Note that `input_fn`
+  and all hooks are executed inside a graph context, so they have to be written
+  to be compatible with graph mode. Note that `input_fn` code using `tf.data`
+  generally works in both graph and eager modes.
+  @end_compatibility
+  """
+
+  def __init__(self,
+               model_dir=None,
+               linear_feature_columns=None,
+               linear_optimizer='Ftrl',
+               dnn_feature_columns=None,
+               dnn_optimizer='Adagrad',
+               dnn_hidden_units=None,
+               dnn_activation_fn=nn.relu,
+               dnn_dropout=None,
+               label_dimension=1,
+               weight_column=None,
+               config=None,
+               warm_start_from=None,
+               loss_reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
+               batch_norm=False,
+               linear_sparse_combiner='sum'):
+    """Initializes a DNNLinearCombinedRegressor instance.
+
+    Args:
+      model_dir: Directory to save model parameters, graph, etc. This can
+        also be used to load checkpoints from the directory into an estimator
+        to continue training a previously saved model.
+      linear_feature_columns: An iterable containing all the feature columns
+        used by the linear part of the model. All items in the set must be
+        instances of classes derived from `FeatureColumn`.
+      linear_optimizer: An instance of `tf.Optimizer` used to apply gradients
+        to the linear part of the model. Can also be a string (one of
+        'Adagrad', 'Adam', 'Ftrl', 'RMSProp', 'SGD'), or a callable. Defaults
+        to the FTRL optimizer.
+      dnn_feature_columns: An iterable containing all the feature columns used
+        by the deep part of the model. All items in the set must be instances
+        of classes derived from `FeatureColumn`.
+      dnn_optimizer: An instance of `tf.Optimizer` used to apply gradients to
+        the deep part of the model. Can also be a string (one of 'Adagrad',
+        'Adam', 'Ftrl', 'RMSProp', 'SGD'), or a callable. Defaults to the
+        Adagrad optimizer.
+      dnn_hidden_units: List of hidden units per layer. All layers are fully
+        connected.
+      dnn_activation_fn: Activation function applied to each layer. If None,
+        will use `tf.nn.relu`.
+      dnn_dropout: When not None, the probability that a given coordinate will
+        be dropped out.
+      label_dimension: Number of regression targets per example. This is the
+        size of the last dimension of the labels and logits `Tensor` objects
+        (typically, these have shape `[batch_size, label_dimension]`).
+      weight_column: A string or a `_NumericColumn` created by
+        `tf.feature_column.numeric_column` defining the feature column that
+        represents weights. It is used to down weight or boost examples during
+        training, and is multiplied by the loss of the example. If it is a
+        string, it is used as a key to fetch the weight tensor from the
+        `features`. If it is a `_NumericColumn`, the raw tensor is fetched by
+        key `weight_column.key`, then `weight_column.normalizer_fn` is applied
+        on it to get the weight tensor.
+      config: RunConfig object to configure the runtime settings.
+      warm_start_from: A string filepath to a checkpoint to warm-start from,
+        or a `WarmStartSettings` object to fully configure warm-starting. If
+        the string filepath is provided instead of a `WarmStartSettings`, then
+        all weights are warm-started, and it is assumed that vocabularies and
+        Tensor names are unchanged.
+      loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes
+        how to reduce training loss over the batch. Defaults to
+        `SUM_OVER_BATCH_SIZE`.
+      batch_norm: Whether to use batch normalization after each hidden layer.
+      linear_sparse_combiner: A string specifying how to reduce the linear
+        model if a categorical column is multivalent. One of "mean", "sqrtn",
+        and "sum" -- these are effectively different ways to do example-level
+        normalization, which can be useful for bag-of-words features. For more
+        details, see `tf.feature_column.linear_model`.
+
+    Raises:
+      ValueError: If both linear_feature_columns and dnn_feature_columns are
+        empty at the same time.
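+
+    For illustration only, two equivalent ways to specify `weight_column`
+    (the feature key "w" is an assumption):
+
+    ```python
+    weight_column = "w"  # fetches features["w"] as the weight tensor
+    # or, with a normalizer applied to the raw tensor:
+    weight_column = tf.feature_column.numeric_column(
+        "w", normalizer_fn=lambda t: t / 10.0)
+    ```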
+ """ + self._feature_columns = _validate_feature_columns( + linear_feature_columns=linear_feature_columns, + dnn_feature_columns=dnn_feature_columns) + + head = regression_head.RegressionHead( + label_dimension=label_dimension, + weight_column=weight_column, + loss_reduction=loss_reduction) + + def _model_fn(features, labels, mode, config): + """Call the _dnn_linear_combined_model_fn.""" + return _dnn_linear_combined_model_fn_v2( + features=features, + labels=labels, + mode=mode, + head=head, + linear_feature_columns=linear_feature_columns, + linear_optimizer=linear_optimizer, + dnn_feature_columns=dnn_feature_columns, + dnn_optimizer=dnn_optimizer, + dnn_hidden_units=dnn_hidden_units, + dnn_activation_fn=dnn_activation_fn, + dnn_dropout=dnn_dropout, + config=config, + batch_norm=batch_norm, + linear_sparse_combiner=linear_sparse_combiner) + + super(DNNLinearCombinedRegressorV2, self).__init__( + model_fn=_model_fn, + model_dir=model_dir, + config=config, + warm_start_from=warm_start_from) + + +@estimator_export(v1=['estimator.DNNLinearCombinedRegressor']) # pylint: disable=missing-docstring +class DNNLinearCombinedRegressor(estimator.Estimator): + __doc__ = DNNLinearCombinedRegressorV2.__doc__.replace( + 'SUM_OVER_BATCH_SIZE', 'SUM') + + def __init__(self, + model_dir=None, + linear_feature_columns=None, + linear_optimizer='Ftrl', + dnn_feature_columns=None, + dnn_optimizer='Adagrad', + dnn_hidden_units=None, + dnn_activation_fn=nn.relu, + dnn_dropout=None, + label_dimension=1, + weight_column=None, + input_layer_partitioner=None, + config=None, + warm_start_from=None, + loss_reduction=losses.Reduction.SUM, + batch_norm=False, + linear_sparse_combiner='sum'): + self._feature_columns = _validate_feature_columns( + linear_feature_columns=linear_feature_columns, + dnn_feature_columns=dnn_feature_columns) + + head = head_lib._regression_head( # pylint: disable=protected-access + label_dimension=label_dimension, + weight_column=weight_column, + loss_reduction=loss_reduction) + + def _model_fn(features, labels, mode, config): + """Call the _dnn_linear_combined_model_fn.""" + return _dnn_linear_combined_model_fn( + features=features, + labels=labels, + mode=mode, + head=head, + linear_feature_columns=linear_feature_columns, + linear_optimizer=linear_optimizer, + dnn_feature_columns=dnn_feature_columns, + dnn_optimizer=dnn_optimizer, + dnn_hidden_units=dnn_hidden_units, + dnn_activation_fn=dnn_activation_fn, + dnn_dropout=dnn_dropout, + input_layer_partitioner=input_layer_partitioner, + config=config, + batch_norm=batch_norm, + linear_sparse_combiner=linear_sparse_combiner) + + super(DNNLinearCombinedRegressor, self).__init__( + model_fn=_model_fn, + model_dir=model_dir, + config=config, + warm_start_from=warm_start_from) diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/utils/hooks/benchmark_hooks.py b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/hooks/benchmark_hooks.py similarity index 100% rename from TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/utils/hooks/benchmark_hooks.py rename to TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/hooks/benchmark_hooks.py diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/utils/hooks/training_hooks.py b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/hooks/training_hooks.py similarity index 100% rename from TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/utils/hooks/training_hooks.py rename 
to TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/hooks/training_hooks.py diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/utils/metrics.py b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/metrics.py similarity index 100% rename from TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/utils/metrics.py rename to TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/metrics.py diff --git a/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/utils/schedulers.py b/TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/schedulers.py similarity index 100% rename from TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/utils/schedulers.py rename to TensorFlow/built-in/recommendation/WideDeep_ID2940_for_TensorFlow/util/schedulers.py diff --git a/TensorFlow/built-in/recommendation/deepFM_unkownshape_ID0091_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/built-in/recommendation/deepFM_unkownshape_ID0091_for_TensorFlow/test/train_full_1p.sh index 81e47ad1016826a6bc2f92ba1d6a182fa4da476f..21a0df2a07b3d4693cebb762edd70db7aafaa101 100644 --- a/TensorFlow/built-in/recommendation/deepFM_unkownshape_ID0091_for_TensorFlow/test/train_full_1p.sh +++ b/TensorFlow/built-in/recommendation/deepFM_unkownshape_ID0091_for_TensorFlow/test/train_full_1p.sh @@ -102,7 +102,9 @@ fi - +#参数修改 +sed -i 's|"device\_id"\:"0"|"device_\id"\:"'$ASCEND_DEVICE_ID'"|g' $cur_path/../configs/hccl.json +wait #训练开始时间,不需要修改 @@ -114,7 +116,7 @@ for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); do #设置环境变量,不需要修改 echo "Device ID: $ASCEND_DEVICE_ID" - export RANK_ID=$ASCEND_DEVICE_ID + export RANK_ID=$RANK_ID_START #自行设置变量 export RANK_TABLE_FILE=$cur_path/../configs/hccl.json diff --git a/TensorFlow/contrib/cv/3D-POSE-BASELINE_ID0795_for_TensorFlow/author.txt b/TensorFlow/contrib/cv/3D-POSE-BASELINE_ID0795_for_TensorFlow/author.txt new file mode 100644 index 0000000000000000000000000000000000000000..44656b06ed50b5b77cf3f9be868b99f1c29185f6 --- /dev/null +++ b/TensorFlow/contrib/cv/3D-POSE-BASELINE_ID0795_for_TensorFlow/author.txt @@ -0,0 +1,4 @@ +Yi Li, Lei Xie +Nanjing University +Nanjing, Jiangsu, China +yili@smail.nju.edu.cn, lxie@nju.edu.cn \ No newline at end of file diff --git a/TensorFlow/contrib/cv/ADAGAN_ID2115_for_TensorFlow/README.md b/TensorFlow/contrib/cv/ADAGAN_ID2115_for_TensorFlow/README.md index aff092c78f1ab1e0cb6bdf942db6799bc5a120c1..3ad066c0956ca2d105868d70ef5848c00aa8b501 100644 --- a/TensorFlow/contrib/cv/ADAGAN_ID2115_for_TensorFlow/README.md +++ b/TensorFlow/contrib/cv/ADAGAN_ID2115_for_TensorFlow/README.md @@ -141,24 +141,7 @@ Epoch 1/15: 0%| | 0/1000 [00:00性能指标 + +| gpu | npu | +|-----------|-------------| +|92.4 (it/s)|125.51 (it/s)| diff --git a/TensorFlow/contrib/cv/ADAGAN_ID2115_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/ADAGAN_ID2115_for_TensorFlow/test/train_performance_1p.sh index 1bbb9a1daff5ad772f5af0dfb7a8db6980d01130..163217427d76372c3aa213224cb7f77631d6611d 100644 --- a/TensorFlow/contrib/cv/ADAGAN_ID2115_for_TensorFlow/test/train_performance_1p.sh +++ b/TensorFlow/contrib/cv/ADAGAN_ID2115_for_TensorFlow/test/train_performance_1p.sh @@ -125,9 +125,11 @@ e2e_time=$(( $end_time - $start_time )) #结果打印,不需要修改 echo "------------------ Final result ------------------" +##获取性能数据,不需要修改 +#吞吐量 +ActualFPS=`cat ${cur_path}test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | grep -Eo 
"[0-9]*\.[0-9]*it/s" | tail -n 10 | awk -F "i" '{print $1}' | awk '{sum+=$1} END {print "", sum/NR}' | awk '{print $1}'` #输出性能FPS,需要模型审视修改 -TrainingTime1=`grep "Perf: " $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log |awk 'END {print $2}'` -TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${TrainingTime1}'/'3'}'` +TrainingTime=`awk 'BEGIN{printf "%.2f\n", '1'/'${ActualFPS}'}'` #性能看护结果汇总 #训练用例信息,不需要修改 @@ -135,10 +137,6 @@ BatchSize=${batch_size} DeviceType=`uname -m` CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' -##获取性能数据,不需要修改 -#吞吐量 -ActualFPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${TrainingTime}'}'` - #获取模型精度 train_accuracy=`grep "C= " $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log |awk 'END {print $8}'` diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.DS_Store b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..903925f4243e1957bb69edcbb07a63bcf3bfe7dc Binary files /dev/null and b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.DS_Store differ diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.gitignore b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..894a44cc066a027465cd26d634948d56d13af9af --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.gitignore @@ -0,0 +1,104 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/.gitignore b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..eaf91e2ac647df635a09f01b8a2a254252aae8d7 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/.gitignore @@ -0,0 +1,3 @@ +# Default ignored files +/shelf/ +/workspace.xml diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/BigGAN-tensorflow.iml b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/BigGAN-tensorflow.iml new file mode 100644 index 0000000000000000000000000000000000000000..1d426b97b2b08fbefe4ef33ed06ae522c3e65504 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/BigGAN-tensorflow.iml @@ -0,0 +1,15 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/inspectionProfiles/profiles_settings.xml b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 0000000000000000000000000000000000000000..105ce2da2d6447d11dfe32bfb846c3d5b199fc99 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/misc.xml b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/misc.xml new file mode 100644 index 0000000000000000000000000000000000000000..7e45480a8572c6d832d9cdf5eb92a77555fbeabb --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/misc.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/modules.xml b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/modules.xml new file mode 100644 index 0000000000000000000000000000000000000000..0c178f724249a0840cfadb27e50e556187af101c --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/vcs.xml b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/vcs.xml new file mode 100644 index 0000000000000000000000000000000000000000..9661ac713428efbad557d3ba3a62216b5bb7d226 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git 
a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/LICENSE b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..5135900a8875c8b79c245aaa94f915b25e33b8c9
--- /dev/null
+++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 MingtaoGuo
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/README.md b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..52b37eefc5cf6149f9f859d36545bb073ec6a0b6
--- /dev/null
+++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/README.md
@@ -0,0 +1,220 @@
+- [Basic Information](#基本信息.md)
+- [Overview](#概述.md)
+- [Training Environment Preparation](#训练环境准备.md)
+- [Quick Start](#快速上手.md)
+- [Training Results](#训练结果.md)
+- [Advanced Reference](#高级参考.md)
+
+<h2 id="基本信息.md">Basic Information</h2>
+
+**Publisher: Huawei**
+
+**Application Domain: Computer Vision**
+
+**Version: 1.0**
+
+**Modified: 2022.04.15**
+
+**Size: 2.57MB**
+
+**Framework: TensorFlow 1.15.0**
+
+**Model Format: ckpt**
+
+**Precision: fp32**
+
+**Processor: Ascend 910**
+
+**Categories: Research**
+
+**Description: Training code for conditional image generation based on the TensorFlow framework**

+<h2 id="概述.md">Overview</h2>
+
+BigGAN is a GAN for conditional image generation that can produce high-quality images of a specified class. BigGAN inherits the main ideas of SAGAN: it uses self-attention modules to strengthen the network's ability to capture global features, and it uses the hinge loss, spectral normalization and TTUR to improve training stability and efficiency. On top of that, BigGAN explores techniques for large-scale GAN training through extensive experiments and substantially improves model performance by enlarging the batch size as well as the depth and width of the network. To perform conditional image generation more effectively, BigGAN supplies class information to the Generator through a shared embedding, skip-z connections and conditional batch normalization, and to the Discriminator through a projection, which further improves performance. In addition, BigGAN introduces the truncation trick, together with orthogonal regularization to stabilize it, to balance image quality against diversity.
+
+- Reference paper:
+
+  https://arxiv.org/abs/1809.11096
+
+- Reference implementation:
+
+  https://github.com/MingtaoGuo/BigGAN-tensorflow
+
+- Implementation adapted to the Ascend AI processor:
+
+  https://gitee.com/ascend/ModelZoo-TensorFlow/tree/master/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow
+
+- To obtain the code at the corresponding commit_id via Git:
+
+  ```
+  git clone {repository_url}        # clone the repository
+  cd {repository_name}              # enter the model's repository directory
+  git checkout {branch}             # switch to the corresponding branch
+  git reset --hard {commit_id}      # reset the code to the corresponding commit_id
+  cd {code_path}                    # enter the model code path; skip this step if the repository contains only this model
+  ```
+
+## Default Configuration
+
+- Training data preprocessing (using the Cifar-10 training set as an example, for reference only):
+
+  - Input image size: $32\times32$
+  - Input image format: .mat
+  - Number of generated image classes: 10
+- Training hyperparameters
+
+  - Batch size: 64
+  - Train steps: 100000 (full training) / 1000 (performance test)
+  - Generator lr: 1e-4; Discriminator lr: 4e-4; beta1: 0.0; beta2: 0.9
+  - Discriminator train steps: 2
+  - Orthogonal regularization strength: 1e-4
+  - Truncation threshold: 2.0
+- Model architecture hyperparameters
+  - Base channel: 96
+  - Latent space dimensionality: 120
+  - Shared embedding dimensionality: 128
+
+## Supported Features
+
+| Feature | Supported |
+| ---------- | -------- |
+| Distributed training | No |
+| Mixed precision | No |
+| Data parallelism | No |
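+
+To make the truncation trick above concrete, the following is a minimal sketch
+of truncated latent sampling. It assumes scipy is available; the repository's
+own `utils.truncated_noise_sample` may differ in its details.
+
+```
+import numpy as np
+from scipy.stats import truncnorm
+
+def truncated_noise_sample(batch_size, z_dim, truncation=2.0):
+    # Sample z from a standard normal truncated to [-truncation, truncation];
+    # a smaller threshold trades sample diversity for per-sample quality.
+    return truncnorm.rvs(-truncation, truncation,
+                         size=(batch_size, z_dim)).astype(np.float32)
+
+z = truncated_noise_sample(64, 120, truncation=2.0)  # defaults listed above
+```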

+<h2 id="训练环境准备.md">Training Environment Preparation</h2>
+
+1. For hardware environment preparation, see the corresponding hardware product documentation "[Driver and Firmware Installation and Upgrade Guide](https://support.huawei.com/enterprise/zh/category/ai-computing-platform-pid-1557196528909)". The firmware and driver matching the CANN version must be installed on the device.
+2. Install Docker on the host and log in to the [Ascend Hub](https://ascendhub.huawei.com/#/detail?name=ascend-tensorflow-arm) to obtain the container image.
+
+   The images supported by this model are listed in [Table 1](#zh-cn_topic_0000001074498056_table1519011227314).
+
+   **Table 1** Image list
+
+   | Image name | Image version | Compatible CANN version |
+   | ---------- | ------------- | ----------------------- |
+   | ascend-tensorflow-arm | 20.2.0 | 20.2 |

+<h2 id="快速上手.md">Quick Start</h2>
+
+- Dataset preparation
+1. The model is trained on the Cifar-10 / ImageNet dataset; please obtain the dataset yourself.
+
+## Model Training
+
+- Click "Download Now" and choose a suitable way to download the source package.
+
+- Before starting training, configure the environment variables required by the program.
+
+  For the environment variable configuration, see:
+
+  [Ascend 910 training platform environment variable setup](https://gitee.com/ascend/modelzoo/wikis/Ascend%20910%E8%AE%AD%E7%BB%83%E5%B9%B3%E5%8F%B0%E7%8E%AF%E5%A2%83%E5%8F%98%E9%87%8F%E8%AE%BE%E7%BD%AE?sort_id=3148819)
+
+- Single-device training
+
+  1. Configure the training parameters.
+
+     In the script test/train_full_1p.sh, first configure parameters such as batch_size, data_path and output_path. Set data_path according to your actual path, or pass it as a command-line argument when launching training.
+
+     ```
+     batch_size=64
+     data_path="../dataset"
+     output_path="../output"
+     ```
+
+  2. Start training.
+
+     Launch single-device training (script: AnimeFaceGAN_ID1062_for_Tensorflow/test/train_full_1p.sh):
+
+     ```
+     bash train_full_1p.sh --data_path=../dataset --output_path=../output
+     ```
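+
+For ModelArts runs, boot_modelarts.py (added later in this commit) assembles
+the same launch command programmatically. A condensed sketch of what it does,
+with the script's default paths:
+
+```
+import os
+
+bash_header = 'test/train_full_1p_modelarts.sh'
+arg_url = '--data_path=/cache/dataset --output_path=/cache/result --obs_url=./output'
+os.system('bash %s %s' % (bash_header, arg_url))
+```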

+<h2 id="训练结果.md">Training Results</h2>
+
+- Accuracy comparison
+
+| Accuracy metric | Paper | GPU (measured) | NPU (measured) |
+| --------------- | -------- | ------- | ------- |
+| Inception score | 9.22 | 6.66 | 6.98 |
+| FID | 14.73 | 45.06 | 38.47 |
+
+- Performance comparison
+
+| Performance metric | GPU (measured) | NPU (measured) |
+| ---------- | ------- | ------- |
+| StepTime | 347ms | 732ms |
+
+***
+
+- $32\times32$ images generated by the NPU-trained model
+
+  ![horse2car](assets/horse2car.gif)
+
+  ![frog2dog](assets/frog2dog.gif)
+
+  ![truck2bird](assets/truck2bird.gif)
+
+  ![](assets/gen_image.jpg)
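+
+The IS / FID numbers above can be recomputed with the helpers added in
+calc_IS_FID.py in this commit. The following is a condensed sketch of that
+script's own `__main__`, using its default paths:
+
+```
+import numpy as np
+from argparse import Namespace
+from calc_IS_FID import get_FID, get_IS
+from utils import read_images
+
+cfg = Namespace(chip="gpu", gpu="0", batch_size=100,
+                precalculated_path="./metrics/res/stats_tf/fid_stats_cifar10_train.npz")
+imgs = read_images("../output/test/fake/32")   # list of HxWxC images in 0~255
+fid = get_FID(np.array(imgs).astype(np.float32), cfg)
+is_mean, is_std = get_IS(imgs, cfg, splits=10)
+```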

+<h2 id="高级参考.md">Advanced Reference</h2>
+
+## Scripts and Sample Code
+
+```
+├── train.py                       // network training and test code
+├── README.md                      // documentation
+├── pb_frozen.py                   // freezes the trained model into a pb model
+├── test_pb.py                     // tests the pb model
+├── requirements.txt               // Python dependencies for training
+├── utils.py                       // utility functions
+├── ops.py                         // BigGAN building blocks
+├── networks_32.py                 // network architecture for training on 32x32 images
+├── networks_64.py                 // network architecture for training on 64x64 images
+├── networks_128.py                // network architecture for training on 128x128 images
+├── help_modelarts.py              // ModelArts training utilities
+├── boot_modelarts.py              // ModelArts training entry
+├── generate_fake_img.py           // online inference code
+├── calc_IS_FID.py                 // computes IS and FID
+├── input2bin.py                   // converts inputs to .bin for offline inference
+├── test_om.py                     // tests offline inference accuracy
+├── test
+│   ├── train_performance_1p.sh    // single-device performance training script
+│   ├── train_full_1p.sh           // single-device full training script
+│   ├── train_full_1p_modelarts.sh // ModelArts full training script
+├── scripts
+│   ├── run_1p.sh                  // ModelArts training script
+│   ├── run_cpu.sh                 // CPU training script
+│   ├── run_gpu.sh                 // GPU training script
+│   ├── run_msprof.sh              // parses profiling data
+├── metrics                        // IS / FID computation code
+│   ├── ...
+```
+
+## Training Process
+
+1. Start single-device training with the training command described in "Model Training".
+
+2. The reference model checkpoint path is ../output/model/xx/model.ckpt, where "xx" is the training image size (32/64/128).
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/assets/frog2dog.gif b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/assets/frog2dog.gif
new file mode 100644
index 0000000000000000000000000000000000000000..55a4d8e1a9f5a85588e708f0875d32e097fcde9e
Binary files /dev/null and b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/assets/frog2dog.gif differ
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/assets/gen_image.jpg b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/assets/gen_image.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..447b710c54910c27fc12728f6cc35c58dfebec84
Binary files /dev/null and b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/assets/gen_image.jpg differ
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/assets/horse2car.gif b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/assets/horse2car.gif
new file mode 100644
index 0000000000000000000000000000000000000000..f3ec334b6158962e44bcc45a77be5ce25495d340
Binary files /dev/null and b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/assets/horse2car.gif differ
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/assets/truck2bird.gif b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/assets/truck2bird.gif
new file mode 100644
index 0000000000000000000000000000000000000000..1795e7db637bff623ebee4a87dd631ea2e23e6e5
Binary files /dev/null and b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/assets/truck2bird.gif differ
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/boot_modelarts.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/boot_modelarts.py
new file mode 100644
index 0000000000000000000000000000000000000000..237461a8bac70e64ff82a27a91bd9e71f39c0d55
--- /dev/null
+++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/boot_modelarts.py
@@ -0,0 +1,57 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+This is the boot file for the ModelArts platform.
+First, the training datasets are copied from OBS to ModelArts.
+Then, the training shell command string is assembled and executed via 'os.system()'.
+"""
+import os
+import argparse
+from help_modelarts import obs_data2modelarts
+
+print(os.system('env'))
+
+if __name__ == '__main__':
+    # Note: the code dir is not the same as the work dir on the ModelArts platform!
+    code_dir = os.path.dirname(__file__)
+    work_dir = os.getcwd()
+    print("===>>>code_dir:{}, work_dir:{}".format(code_dir, work_dir))
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--train_url", type=str, default="./output", help="output path in OBS")
+    parser.add_argument("--data_url", type=str, default="./dataset", help="data path in OBS")
+    parser.add_argument("--modelarts_data_dir", type=str, default="/cache/dataset",
+                        help="data path in ModelArts platform")
+    parser.add_argument("--modelarts_result_dir", type=str, default="/cache/result",
+                        help="output path in ModelArts platform")
+    # parser.add_argument("--num_gpus", type=int, default=1, help="number of gpu")
+    config = parser.parse_args()
+
+    print("--------config----------")
+    for k in list(vars(config).keys()):
+        print("key:{}: value:{}".format(k, vars(config)[k]))
+    print("--------config----------")
+
+    # copy dataset from obs to modelarts
+    obs_data2modelarts(config)
+
+    # start to train on the ModelArts platform
+    if not os.path.exists(config.modelarts_result_dir):
+        os.makedirs(config.modelarts_result_dir)
+    bash_header = os.path.join(code_dir, 'test/train_full_1p_modelarts.sh')
+    arg_url = '--data_path=%s --output_path=%s --obs_url=%s' % (config.modelarts_data_dir, config.modelarts_result_dir,
+                                                                config.train_url)
+    bash_command = 'bash %s %s' % (bash_header, arg_url)
+    print("bash command:", bash_command)
+    os.system(bash_command)
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/calc_IS_FID.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/calc_IS_FID.py
new file mode 100644
index 0000000000000000000000000000000000000000..ad1adc179979373b3083f846bf3974b1bf8395a9
--- /dev/null
+++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/calc_IS_FID.py
@@ -0,0 +1,78 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import argparse
+import numpy as np
+import tensorflow as tf
+import os
+# download https://github.com/lzhbrian/metrics to calculate IS and FID
+from metrics.inception_score_official_tf import get_inception_score
+from metrics.fid_official_tf import calculate_activation_statistics, calculate_frechet_distance
+from utils import read_images, session_config
+
+
+def get_FID(images, arg):
+    # load precalculated statistics for the real dataset
+    f = np.load(arg.precalculated_path)
+    mu1, sigma1 = f['mu'][:], f['sigma'][:]
+    f.close()
+
+    # session configuration
+    config = session_config(arg)
+
+    # calc from an image ndarray:
+    # images should be a Numpy array of shape (N, H, W, C) with values in 0~255
+    with tf.Session(config=config) as sess:
+        sess.run(tf.global_variables_initializer())
+        mu2, sigma2 = calculate_activation_statistics(images, sess, batch_size=arg.batch_size)
+        return calculate_frechet_distance(mu1, sigma1, mu2, sigma2)
+
+
+def get_IS(images_list, arg, splits=10):
+    return get_inception_score(images_list, splits=splits, sess_config=session_config(arg))
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--chip", type=str, default="gpu", help="run on which chip, cpu or gpu or npu")
+    parser.add_argument("--fake_img_path", type=str, default="../output/test/fake/32", help="fake image path")
+    parser.add_argument("--gpu", type=str, default="0", help="GPU to use (leave blank for CPU only)")
+    parser.add_argument("--batch_size", type=int, default=100, help="batch size")
+    parser.add_argument("--precalculated_path", type=str, default="./metrics/res/stats_tf/fid_stats_cifar10_train.npz",
+                        help="precalculated statistics for datasets, used in FID")
+    args = parser.parse_args()
+
+    os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
+
+    image_list = read_images(args.fake_img_path)
+    image = np.array(image_list).astype(np.float32)
+
+    fid_score = get_FID(image, args)
+    is_mean, is_std = get_IS(image_list, args, splits=10)
+
+    print("IS : (%f, %f)" % (is_mean, is_std))
+    print("FID : %f" % fid_score)
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/fusion_switch.cfg b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/fusion_switch.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..89d8736b8b86fa16ee319bce45a16cb5616a50fe
--- /dev/null
+++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/fusion_switch.cfg
@@ -0,0 +1,10 @@
+{
+    "Switch":{
+        "GraphFusion":{
+            "ALL":"off"
+        },
+        "UBFusion":{
+            "ALL":"off"
+        }
+    }
+}
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/generate_fake_img.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/generate_fake_img.py
new file mode 100644
index 0000000000000000000000000000000000000000..d334f2573b51d9269d4b120b7d722ffbc2f4e164
--- /dev/null
+++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/generate_fake_img.py
@@ -0,0 +1,205 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import tensorflow as tf
+import numpy as np
+from PIL import Image
+from utils import truncated_noise_sample, restore_img
+import datetime
+import os
+import argparse
+import math
+import shutil
+import imageio
+import cv2
+
+
+def consecutive_category_morphing(arg, img_path, session, fake_img_morphing_op, z_op, y_op, y_end_op, alpha_op,
+                                  class1=0, class2=1, fps=2):
+    if os.path.exists(img_path):
+        shutil.rmtree(img_path)  # delete previous images
+    os.makedirs(img_path)
+
+    Z = truncated_noise_sample(arg.batch_size, arg.z_dim, arg.truncation)
+
+    count = 0
+    img_paths = []
+    for Alpha in [i / 10.0 for i in range(10, -1, -1)]:
+        Alpha = np.ones([arg.batch_size, 1]) * Alpha
+        fake = session.run(fake_img_morphing_op, feed_dict={z_op: Z, y_op: class1 * np.ones([arg.batch_size]),
+                                                            y_end_op: class2 * np.ones([arg.batch_size]),
+                                                            alpha_op: Alpha})
+        # display a batch of images in a grid
+        grid_size = int(arg.batch_size ** 0.5)
+        concat_img = np.zeros([grid_size * arg.img_h, grid_size * arg.img_w, 3])
+        c = 0
+        for i in range(grid_size):
+            for j in range(grid_size):
+                resized_img = cv2.resize(fake[c], dsize=(arg.img_h, arg.img_w), interpolation=cv2.INTER_LINEAR)
+                concat_img[i * arg.img_h: i * arg.img_h + arg.img_h, j * arg.img_w: j * arg.img_w + arg.img_w] = resized_img
+                c += 1
+        # save the current frame into the requested output directory
+        frame_path = os.path.join(img_path, "%dto%d_%d.jpg" % (class1, class2, count))
+        Image.fromarray(np.uint8(restore_img(concat_img))).save(frame_path)
+        img_paths.append(frame_path)
+        count += 1
+
+    # make gif
+    gif_images = []
+    for path in img_paths:
+        gif_images.append(imageio.imread(path))
+    gif_path = os.path.join(img_path, "%dto%d.gif" % (class1, class2))
+    imageio.mimsave(gif_path, gif_images, fps=fps)
+
+
+def generate_img_of_one_class(arg, class_labels, img_name, img_path, session, fake_img_op, z_op, y_op):
+    Z = truncated_noise_sample(arg.batch_size, arg.z_dim, arg.truncation)
+    fake = session.run(fake_img_op, feed_dict={z_op: Z, y_op: class_labels})
+
+    # display a batch of images in a grid
+    grid_size = int(arg.batch_size ** 0.5)
+    concat_img = np.zeros([grid_size * arg.img_h, grid_size * arg.img_w, 3])
+    c = 0
+    for i in range(grid_size):
+        for j in range(grid_size):
+            resized_img = cv2.resize(fake[c], dsize=(arg.img_h, arg.img_w), interpolation=cv2.INTER_LINEAR)
+            concat_img[i * arg.img_h: i * arg.img_h + arg.img_h, j * arg.img_w: j * arg.img_w + arg.img_w] = resized_img
+            c += 1
+    Image.fromarray(np.uint8(restore_img(concat_img))).save(os.path.join(img_path, img_name))
+
+
+def generate_img_by_class(arg, img_path, session, fake_img_op, z_op, y_op):
+    """For each class, generate some images and display them in a grid."""
+    if os.path.exists(img_path):
+        shutil.rmtree(img_path)  # delete previous images
+    os.makedirs(img_path)
+
+    for nums_c in range(arg.num_classes):
+        class_labels = nums_c * np.ones([arg.batch_size])
+        img_name = "%d.jpg" % nums_c
+        generate_img_of_one_class(arg, class_labels, img_name, img_path, session, fake_img_op, z_op, y_op)
+
+
+def generate_img(arg, img_path, session, fake_img_op, z_op, y_op):
+    """Generate fake images with random classes."""
+    if os.path.exists(img_path):
+        shutil.rmtree(img_path)  # delete previous images
+    os.makedirs(img_path)
+
+    # use a true division here so the requested number of images is reached
+    for b in range(math.ceil(arg.gen_num / arg.batch_size)):
+        Z = truncated_noise_sample(arg.batch_size, arg.z_dim, arg.truncation)
+        fake = session.run(fake_img_op, feed_dict={z_op: Z, y_op: np.random.randint(arg.num_classes, size=arg.batch_size)})
+
+        for i in range(arg.batch_size):
+            img = cv2.resize(fake[i], dsize=(arg.img_h, arg.img_w), interpolation=cv2.INTER_LINEAR)
+            Image.fromarray(np.uint8(restore_img(img))).save(os.path.join(img_path, "%d_fake.jpg" % (b * arg.batch_size + i)))
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    # platform arguments (Huawei Ascend)
+    parser.add_argument("--chip", type=str, default="gpu", help="run on which chip, cpu or gpu or npu")
+    # data arguments
+    parser.add_argument("--gen_num", type=int, default=5000, help="number of generated images")
+    parser.add_argument("--output", type=str, default=os.path.join("..", "output"), help="output path")
+    parser.add_argument("-b", "--batch_size", type=int, default=64, help="batch size")
+    parser.add_argument("-c", "--num_classes", type=int, default=10, help="number of classes")
+    parser.add_argument("--img_h", type=int, default=32, help="image height")
+    parser.add_argument("--img_w", type=int, default=32, help="image width")
+    parser.add_argument("--train_img_size", type=int, default=32,
+                        help="image will be resized to this size when training")
+    # model arguments
+    parser.add_argument("--base_channel", type=int, default=96, help="base channel number for G and D")
+    parser.add_argument("--z_dim", type=int, default=120, help="latent space dimensionality")
+    parser.add_argument("--truncation", type=float, default=2.0, help="truncation threshold")
+    parser.add_argument("--ema", type=bool, default=True, help="use exponential moving average for G")
+    parser.add_argument("--shared_dim", type=int, default=128, help="shared embedding dimensionality")
+    # function arguments
+    parser.add_argument("--function", type=str, default="fake",
+                        help="generate fake images or do category morphing (fake / morphing)")
+    parser.add_argument("--morphing_class", type=str, default="0_1",
+                        help="generate category morphing images between two classes")
+    args = parser.parse_args()
+
+    # use different architectures for different image sizes
+    if args.train_img_size == 128:
+        from networks_128 import Generator, Discriminator
+    elif args.train_img_size == 64:
+        from networks_64 import Generator, Discriminator
+    elif args.train_img_size == 32:
+        from networks_32 import Generator, Discriminator
+
+    # get current time
+    now = datetime.datetime.now()
+    now_str = now.strftime('%Y_%m_%d_%H_%M_%S')
+    # check output dir
+    test_path = os.path.join(args.output, "test")
+    fake_img_path = os.path.join(test_path, "fake",
str(args.train_img_size)) + image_of_each_class_path = os.path.join(test_path, "image_of_each_class", str(args.train_img_size)) + category_morphing_path = os.path.join(test_path, "category_morphing", str(args.train_img_size)) + # get model path + model_path = os.path.join(args.output, "model", str(args.train_img_size), "model.ckpt") + ema_model_path = os.path.join(args.output, "model", str(args.train_img_size), "ema.ckpt") + resume_path = ema_model_path if args.ema else model_path + + if args.chip == "gpu": + config = tf.ConfigProto(allow_soft_placement=True) + config.gpu_options.allow_growth = True + elif args.chip == 'cpu': + config = tf.ConfigProto() + + train_phase = tf.Variable(tf.constant(False, dtype=tf.bool), name="train_phase") + # train_phase = tf.placeholder(tf.bool) # is training or not + z = tf.placeholder(tf.float32, [args.batch_size, args.z_dim]) # latent vector + y = tf.placeholder(tf.int32, [None]) # class info + y_end = tf.placeholder(tf.int32, [None]) # category morphing + alpha = tf.placeholder(tf.float32, [None, 1]) + + G = Generator("generator", args.base_channel) + with tf.variable_scope("generator", reuse=tf.AUTO_REUSE): + embed_w = tf.get_variable("embed_w", [args.num_classes, args.shared_dim], initializer=tf.orthogonal_initializer()) + + if args.function == "fake": + fake_img = G(z, train_phase, y, embed_w, args.num_classes) + elif args.function == "morphing": + fake_img_morphing = G(z, train_phase, y, embed_w, args.num_classes, y_end, alpha) + + with tf.Session(config=config) as sess: + sess.run(tf.global_variables_initializer()) + # load model + saver = tf.train.Saver(tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, "generator")) + saver.restore(sess, resume_path) + + if args.function == "fake": + # generate fake images + generate_img(args, fake_img_path, sess, fake_img, z, y) + # generate fake images for each class + generate_img_by_class(args, image_of_each_class_path, sess, fake_img, z, y) + elif args.function == "morphing": + # category morphing + classes = args.morphing_class.split("_") + consecutive_category_morphing(args, category_morphing_path, sess, fake_img_morphing, z, y, y_end, alpha, + class1=int(classes[0]), class2=int(classes[1]), fps=2) diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/help_modelarts.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/help_modelarts.py new file mode 100644 index 0000000000000000000000000000000000000000..c717183d6d7d215fb006dbec0d676c92c74474ed --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/help_modelarts.py @@ -0,0 +1,52 @@ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import os +import datetime +import moxing as mox + +def obs_data2modelarts(config): + """ + Copy train data from obs to modelarts by using moxing api. 
+ """ + start = datetime.datetime.now() + print("===>>>Copy files from obs:{} to modelarts dir:{}".format(config.data_url, config.modelarts_data_dir)) + mox.file.copy_parallel(src_url=config.data_url, dst_url=config.modelarts_data_dir) + end = datetime.datetime.now() + print("===>>>Copy from obs to modelarts, time use:{}(s)".format((end - start).seconds)) + files = os.listdir(config.modelarts_data_dir) + print("===>>>Files:", files) + + +def modelarts_result2obs(config): + """ + Copy debug data from modelarts to obs. + According to the swich flags, the debug data may contains auto tune repository, + dump data for precision comparision, even the computation graph and profiling data. + """ + ## copy result from modelarts to obs + obs_result_dir = os.path.join(config.obs_dir, 'result') + if not mox.file.exists(obs_result_dir): + mox.file.make_dirs(obs_result_dir) + mox.file.copy_parallel(src_url=config.output, dst_url=obs_result_dir) + print("===>>>Copy Event or Checkpoint from modelarts dir:{} to obs:{}".format(config.output, obs_result_dir)) + + ## Copy profiling data. Comment this snippets if npu_profiling is off. + if config.profiling: + modelarts_profiling_dir = config.profiling_dir + print("Profiling dir:", modelarts_profiling_dir) + obs_profiling_dir = os.path.join(config.obs_dir, 'npu_profiling') + if not mox.file.exists(obs_profiling_dir): + mox.file.make_dirs(obs_profiling_dir) + mox.file.copy_parallel(modelarts_profiling_dir, obs_profiling_dir) + print("===>>>Profiling data:{} on OBS dir:{}".format(mox.file.list_directory(obs_profiling_dir), obs_profiling_dir)) diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/input2bin.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/input2bin.py new file mode 100644 index 0000000000000000000000000000000000000000..1fb52dd212dd9cff9065de74160761493faf4641 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/input2bin.py @@ -0,0 +1,59 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import argparse +from utils import truncated_noise_sample, check_dir +import numpy as np +import os + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + # data arguments + parser.add_argument("--gen_num", type=int, default=5000, help="number of generated images") + parser.add_argument("--output", type=str, default="../output", help="output path") + parser.add_argument("-c", "--num_classes", type=int, default=10, help="number of classes") + parser.add_argument("--img_h", type=int, default=32, help="image height") + parser.add_argument("--img_w", type=int, default=32, help="image width") + parser.add_argument("--train_img_size", type=int, default=32, + help="image will be resized to this size when training") + # model arguments + parser.add_argument("--z_dim", type=int, default=120, help="latent space dimensionality") + parser.add_argument("--truncation", type=float, default=2.0, help="truncation threshold") + args = parser.parse_args() + + bin_path = os.path.join(args.output, "input_bin", str(args.train_img_size)) + z_bin_path = os.path.join(bin_path, "z") + y_bin_path = os.path.join(bin_path, "y") + check_dir(z_bin_path) + check_dir(y_bin_path) + + for i in range(args.gen_num): + z = truncated_noise_sample(1, args.z_dim, args.truncation) + y = np.random.randint(args.num_classes, size=(1, 1)) + z.tofile(os.path.join(z_bin_path, str(i) + ".bin")) + y.tofile(os.path.join(y_bin_path, str(i) + ".bin")) diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/modelzoo_level.txt b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/modelzoo_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..aac47fa5aa57194dbb4cb1d825da033987898f41 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/modelzoo_level.txt @@ -0,0 +1,6 @@ +GPUStatus:OK +NPUMigrationStatus:OK +FuncStatus:OK +PrecisionStatus:OK +AutoTune:NOK +PerfStatus:POK \ No newline at end of file diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/networks_128.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/networks_128.py new file mode 100644 index 0000000000000000000000000000000000000000..eeb1b0fd349e063d5935c22e9dae3ca1ff7ab7e8 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/networks_128.py @@ -0,0 +1,113 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +from ops import * + + +class Generator: + def __init__(self, name, base_channel): + self.name = name + self.base_channel = base_channel + + def __call__(self, inputs, train_phase, y, embed_w, nums_class, y_end=None, alpha=1.0): + """ + Args: + inputs: a noise vector. shape: [batch_size, z_dim] + train_phase: is training or not + y: class info + embed_w: weight for shared embedding + nums_class: number of image classes + """ + # hierarchical latent space: split z into one chunk per resolution + z_dim = int(inputs.shape[-1]) + nums_layer = 6 + remain = z_dim % nums_layer + chunk_size = (z_dim - remain) // nums_layer + z_split = tf.split(inputs, [chunk_size] * (nums_layer - 1) + [chunk_size + remain], axis=1) + y = tf.one_hot(y, nums_class) + + if not y_end is None: + # category morphing + y_end = tf.one_hot(y_end, nums_class) + y = y * alpha + y_end * (1 - alpha) + + embed_y = tf.matmul(y, embed_w) # shared embedding + inputs = tf.concat([z_split[0], embed_y], axis=1) + + ch = self.base_channel # base channel number per layer + out_channels = [ch * i for i in [16, 8, 4, 2, 1]] + + with tf.variable_scope(name_or_scope=self.name, reuse=tf.AUTO_REUSE): + inputs = dense("dense", inputs, 4*4*out_channels[0]) + inputs = tf.reshape(inputs, [-1, 4, 4, out_channels[0]]) + inputs = G_Resblock("ResBlock1", inputs, out_channels[0], train_phase, z_split[1], embed_y) + inputs = G_Resblock("ResBlock2", inputs, out_channels[1], train_phase, z_split[2], embed_y) + inputs = G_Resblock("ResBlock3", inputs, out_channels[2], train_phase, z_split[3], embed_y) + inputs = G_Resblock("ResBlock4", inputs, out_channels[3], train_phase, z_split[4], embed_y) + inputs = non_local("Non-local", inputs, None, is_sn=True) + inputs = G_Resblock("ResBlock5", inputs, out_channels[4], train_phase, z_split[5], embed_y) + inputs = tf.nn.relu(conditional_batchnorm(inputs, train_phase, "BN")) # batch normalization + inputs = conv("conv", inputs, k_size=3, nums_out=3, strides=1, is_sn=True) + return tf.nn.tanh(inputs) + + def var_list(self): + return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, self.name) + + +class Discriminator: + def __init__(self, name, base_channel): + self.name = name + self.base_channel = base_channel + + def __call__(self, inputs, train_phase, y, nums_class, update_collection=None): + """ + Args: + inputs: an image. 
shape: [batch_size, 128, 128, 3] + y: class info (scalar) + nums_class: number of image classes + """ + ch = self.base_channel # base channel number per layer + out_channels = [ch * i for i in [1, 2, 4, 8, 16, 16]] + + with tf.variable_scope(name_or_scope=self.name, reuse=tf.AUTO_REUSE): + inputs = D_Resblock("ResBlock1", inputs, out_channels[0], train_phase, update_collection, is_down=True) # [N, 64, 64, ch] + inputs = non_local("Non-local", inputs, update_collection, True) + inputs = D_Resblock("ResBlock2", inputs, out_channels[1], train_phase, update_collection, is_down=True) # [N, 32, 32, 2*ch] + inputs = D_Resblock("ResBlock3", inputs, out_channels[2], train_phase, update_collection, is_down=True) # [N, 16, 16, 4*ch] + inputs = D_Resblock("ResBlock4", inputs, out_channels[3], train_phase, update_collection, is_down=True) # [N, 8, 8, 8*ch] + inputs = D_Resblock("ResBlock5", inputs, out_channels[4], train_phase, update_collection, is_down=True) # [N, 4, 4, 16*ch] + inputs = D_Resblock("ResBlock6", inputs, out_channels[5], train_phase, update_collection, is_down=False) + inputs = tf.nn.relu(inputs) + inputs = global_sum_pooling(inputs) # [N, 16*ch] + temp = d_projection(inputs, y, nums_class, update_collection) # [N, 1] + inputs = dense("dense", inputs, 1, update_collection, is_sn=True) # [N, 1] + inputs = temp + inputs + return inputs + + def var_list(self): + return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, self.name) + diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/networks_32.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/networks_32.py new file mode 100644 index 0000000000000000000000000000000000000000..bde002cb173bc2d27cdd1abcab5410b68a6537ae --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/networks_32.py @@ -0,0 +1,109 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from ops import * + + +class Generator: + def __init__(self, name, base_channel): + self.name = name + self.base_channel = base_channel + + def __call__(self, inputs, train_phase, y, embed_w, nums_class, y_end=None, alpha=1.0): + """ + Args: + inputs: a noise vector. 
shape: [batch_size, z_dim] + train_phase: is training or not + y: class info + embed_w: weight for shared embedding + nums_class: number of image classes + """ + # hierarchical latent space: split z into one chunk per resolution + z_dim = int(inputs.shape[-1]) + nums_layer = 4 + remain = z_dim % nums_layer + chunk_size = (z_dim - remain) // nums_layer + z_split = tf.split(inputs, [chunk_size] * (nums_layer - 1) + [chunk_size + remain], axis=1) + y = tf.one_hot(y, nums_class) + + if not y_end is None: + # category morphing + y_end = tf.one_hot(y_end, nums_class) + y = y * alpha + y_end * (1 - alpha) + + embed_y = tf.matmul(y, embed_w) # shared embedding + inputs = tf.concat([z_split[0], embed_y], axis=1) + + ch = self.base_channel # base channel number per layer + out_channels = [ch * i for i in [4, 4, 4]] + + with tf.variable_scope(name_or_scope=self.name, reuse=tf.AUTO_REUSE): + inputs = dense("dense", inputs, 4*4*out_channels[0]) + inputs = tf.reshape(inputs, [-1, 4, 4, out_channels[0]]) # [N, 4, 4, out_channels[0]] + inputs = G_Resblock("ResBlock1", inputs, out_channels[0], train_phase, z_split[1], embed_y) # [N, 8, 8, out_channels[0]] + inputs = G_Resblock("ResBlock2", inputs, out_channels[1], train_phase, z_split[2], embed_y) # [N, 16, 16, out_channels[1]] + inputs = non_local("Non-local", inputs, None, is_sn=True) + inputs = G_Resblock("ResBlock3", inputs, out_channels[2], train_phase, z_split[3], embed_y) # [N, 32, 32, out_channels[2]] + inputs = tf.nn.relu(conditional_batchnorm(inputs, train_phase, "BN")) # batch normalization + inputs = conv("conv", inputs, k_size=3, nums_out=3, strides=1, is_sn=True) + return tf.nn.tanh(inputs) + + def var_list(self): + return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, self.name) + + +class Discriminator: + def __init__(self, name, base_channel): + self.name = name + self.base_channel = base_channel + + def __call__(self, inputs, train_phase, y, nums_class, update_collection=None): + """ + Args: + inputs: an image. 
shape: [batch_size, 32, 32, 3] + y: class info (scalar) + nums_class: number of image classes + """ + ch = self.base_channel # base channel number per layer + out_channels = [ch * i for i in [4, 4, 4, 4]] + + with tf.variable_scope(name_or_scope=self.name, reuse=tf.AUTO_REUSE): + inputs = D_Resblock("ResBlock1", inputs, out_channels[0], train_phase, update_collection, is_down=True) + inputs = non_local("Non-local", inputs, update_collection, True) + inputs = D_Resblock("ResBlock2", inputs, out_channels[1], train_phase, update_collection, is_down=True) + inputs = D_Resblock("ResBlock3", inputs, out_channels[2], train_phase, update_collection, is_down=False) + inputs = D_Resblock("ResBlock4", inputs, out_channels[3], train_phase, update_collection, is_down=False) + inputs = tf.nn.relu(inputs) + inputs = global_sum_pooling(inputs) # [N, ch] + temp = d_projection(inputs, y, nums_class, update_collection) # [N, 1] + inputs = dense("dense", inputs, 1, update_collection, is_sn=True) # [N, 1] + inputs = temp + inputs + return inputs + + def var_list(self): + return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, self.name) + diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/networks_64.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/networks_64.py new file mode 100644 index 0000000000000000000000000000000000000000..e2712b7870a637ace4cfef3633f4ef43900c3ab0 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/networks_64.py @@ -0,0 +1,111 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from ops import * + + +class Generator: + def __init__(self, name, base_channel): + self.name = name + self.base_channel = base_channel + + def __call__(self, inputs, train_phase, y, embed_w, nums_class, y_end=None, alpha=1.0): + """ + Args: + inputs: a noise vector. 
shape: [batch_size, z_dim] + train_phase: is training or not + y: class info + embed_w: weight for shared embedding + nums_class: number of image classes + """ + # hierarchical latent space: split z into one chunk per resolution + z_dim = int(inputs.shape[-1]) + nums_layer = 5 + remain = z_dim % nums_layer + chunk_size = (z_dim - remain) // nums_layer + z_split = tf.split(inputs, [chunk_size] * (nums_layer - 1) + [chunk_size + remain], axis=1) + y = tf.one_hot(y, nums_class) + + if not y_end is None: + # category morphing + y_end = tf.one_hot(y_end, nums_class) + y = y * alpha + y_end * (1 - alpha) + + embed_y = tf.matmul(y, embed_w) # shared embedding + inputs = tf.concat([z_split[0], embed_y], axis=1) + + ch = self.base_channel # base channel number per layer + out_channels = [ch * i for i in [16, 8, 4, 2]] + + with tf.variable_scope(name_or_scope=self.name, reuse=tf.AUTO_REUSE): + inputs = dense("dense", inputs, 4*4*out_channels[0]) + inputs = tf.reshape(inputs, [-1, 4, 4, out_channels[0]]) + inputs = G_Resblock("ResBlock1", inputs, out_channels[0], train_phase, z_split[1], embed_y) + inputs = G_Resblock("ResBlock2", inputs, out_channels[1], train_phase, z_split[2], embed_y) + inputs = G_Resblock("ResBlock3", inputs, out_channels[2], train_phase, z_split[3], embed_y) + inputs = non_local("Non-local", inputs, None, is_sn=True) + inputs = G_Resblock("ResBlock4", inputs, out_channels[3], train_phase, z_split[4], embed_y) + inputs = tf.nn.relu(conditional_batchnorm(inputs, train_phase, "BN")) # batch normalization + inputs = conv("conv", inputs, k_size=3, nums_out=3, strides=1, is_sn=True) + return tf.nn.tanh(inputs) + + def var_list(self): + return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, self.name) + + +class Discriminator: + def __init__(self, name, base_channel): + self.name = name + self.base_channel = base_channel + + def __call__(self, inputs, train_phase, y, nums_class, update_collection=None): + """ + Args: + inputs: an image. 
shape: [batch_size, 64, 64, 3] + y: class info (scalar) + nums_class: number of image classes + """ + ch = self.base_channel # base channel number per layer + out_channels = [ch * i for i in [1, 2, 4, 8, 16]] + + with tf.variable_scope(name_or_scope=self.name, reuse=tf.AUTO_REUSE): + inputs = D_Resblock("ResBlock1", inputs, out_channels[0], train_phase, update_collection, is_down=True) + inputs = non_local("Non-local", inputs, update_collection, True) + inputs = D_Resblock("ResBlock2", inputs, out_channels[1], train_phase, update_collection, is_down=True) + inputs = D_Resblock("ResBlock3", inputs, out_channels[2], train_phase, update_collection, is_down=True) + inputs = D_Resblock("ResBlock4", inputs, out_channels[3], train_phase, update_collection, is_down=True) + inputs = D_Resblock("ResBlock5", inputs, out_channels[4], train_phase, update_collection, is_down=False) + inputs = tf.nn.relu(inputs) + inputs = global_sum_pooling(inputs) + temp = d_projection(inputs, y, nums_class, update_collection) # [N, 1] + inputs = dense("dense", inputs, 1, update_collection, is_sn=True) # [N, 1] + inputs = temp + inputs + return inputs + + def var_list(self): + return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, self.name) + diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/ops.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/ops.py new file mode 100644 index 0000000000000000000000000000000000000000..59842db82a31748f5ae2ec78a70c80a094fe3340 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/ops.py @@ -0,0 +1,305 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
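+
+# Illustrative NumPy cross-check (not called anywhere in the repo) of the
+# power iteration used by spectral_normalization below: with enough
+# iterations, sigma converges to the largest singular value of w, while the
+# layer itself relies on n_itr=1 plus slowly changing weights.
+def _power_iteration_sanity_check(n_in=64, n_out=32, n_itr=50):
+    import numpy as np
+    w = np.random.randn(n_in, n_out)   # a flattened weight matrix [in, out]
+    u = np.random.randn(1, n_out)      # estimate of the right singular vector
+    v = None                           # estimate of the left singular vector
+    for _ in range(n_itr):
+        v = u.dot(w.T)
+        v /= np.linalg.norm(v)
+        u = v.dot(w)
+        u /= np.linalg.norm(u)
+    sigma = float(v.dot(w).dot(u.T))
+    return sigma, float(np.linalg.svd(w, compute_uv=False)[0])  # nearly equal
+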
+import tensorflow as tf + + +def spectral_normalization(name, weight, n_itr=1, update_collection=None): + """ + Args: + weight: shape -> fc: [in_dim, out_dim] + conv: [h, w, c_in, c_out] + """ + w_shape = weight.shape.as_list() + weight = tf.reshape(weight, [-1, w_shape[-1]]) # treat conv weight as a 2-D matrix: [h*w*c_in, c_out] + + # power iteration method + u = tf.get_variable(name + 'u', [1, w_shape[-1]], initializer=tf.truncated_normal_initializer(), + trainable=False) + u_hat = u # right singular vector + v_hat = None # left singular vector + # Because the weights change slowly, we only need to perform a single power iteration + # on the current version of these vectors for each step of learning + for _ in range(n_itr): + v_hat = tf.nn.l2_normalize(tf.matmul(u_hat, tf.transpose(weight))) + u_hat = tf.nn.l2_normalize(tf.matmul(v_hat, weight)) + + # spectral normalization + sigma = tf.squeeze(tf.matmul(tf.matmul(v_hat, weight), tf.transpose(u_hat))) + weight /= sigma + + if update_collection is None: + with tf.control_dependencies([u.assign(u_hat)]): + w_norm = tf.reshape(weight, w_shape) # get original shape + else: + w_norm = tf.reshape(weight, w_shape) + if update_collection != 'NO_OPS': + tf.add_to_collection(update_collection, u.assign(u_hat)) + + return w_norm + + +def conv(name, inputs, nums_out, k_size, strides, update_collection=None, is_sn=False): + """convolution layer (with spectral normalization)""" + nums_in = inputs.shape[-1] # num of input channels + with tf.variable_scope(name): + w = tf.get_variable("w", [k_size, k_size, nums_in, nums_out], initializer=tf.orthogonal_initializer()) + b = tf.get_variable("b", [nums_out], initializer=tf.constant_initializer([0.0])) + if is_sn: + w = spectral_normalization("sn", w, update_collection=update_collection) + op = tf.nn.conv2d(inputs, w, strides=[1, strides, strides, 1], padding="SAME") + return tf.nn.bias_add(op, b) + + +def dense(name, inputs, nums_out, update_collection=None, is_sn=False): + """fully connected layer (with spectral normalization)""" + nums_in = inputs.shape[-1] + with tf.variable_scope(name): + w = tf.get_variable("w", [nums_in, nums_out], initializer=tf.orthogonal_initializer()) + b = tf.get_variable("b", [nums_out], initializer=tf.constant_initializer([0.0])) + if is_sn: + w = spectral_normalization("sn", w, update_collection=update_collection) + return tf.nn.bias_add(tf.matmul(inputs, w), b) + + +def conditional_batchnorm(x, train_phase, name, split_z=None, embed_y=None): + """implementation of shared embedding and skip-z in the BigGAN paper + + Args: + split_z: vector -> one chunk of the noise vector "z" + embed_y: class info (shared embedding) + """ + with tf.variable_scope(name): + epsilon = 1e-5 # variance epsilon for batch norm + decay = 0.9 # decay rate for exponential moving average in batch norm + + if embed_y is None: + # batch normalization + beta = tf.get_variable(name=name + 'beta', shape=[x.shape[-1]], + initializer=tf.constant_initializer([0.]), trainable=True) + gamma = tf.get_variable(name=name + 'gamma', shape=[x.shape[-1]], + initializer=tf.constant_initializer([1.]), trainable=True) + else: + # conditional batch normalization + z = tf.concat([split_z, embed_y], axis=1) # get conditional vector + # use conditional vector to get batchNorm gains and biases + gamma = dense("gamma", z, x.shape[-1], is_sn=True) # scale + beta = dense("beta", z, x.shape[-1], is_sn=True) # offset + gamma = tf.reshape(gamma, [-1, 1, 1, x.shape[-1]]) + beta = tf.reshape(beta, [-1, 1, 1, x.shape[-1]]) + + # 
calculate batch mean and variance + batch_mean, batch_var = tf.nn.moments(x, [0, 1, 2], name='moments', keep_dims=True) + + ema = tf.train.ExponentialMovingAverage(decay=decay) + + def mean_var_with_update(): + ema_apply_op = ema.apply([batch_mean, batch_var]) + with tf.control_dependencies([ema_apply_op]): + return tf.identity(batch_mean), tf.identity(batch_var) + + mean, var = tf.cond(train_phase, mean_var_with_update, + lambda: (ema.average(batch_mean), ema.average(batch_var))) + normed = tf.nn.batch_normalization(x, mean, var, beta, gamma, epsilon) + return normed + + +def down_sampling(inputs): + """down-sampling: avg pool with zero-padding (out_size = in_size / 2) + """ + return tf.nn.avg_pool(inputs, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding="SAME") + + +def up_sampling(inputs): + """nearest-neighbors up-sampling (out_size = in_size * 2) + """ + h, w = inputs.shape[1], inputs.shape[2] + return tf.image.resize_nearest_neighbor(inputs, [h * 2, w * 2]) + + +def non_local(name, inputs, update_collection, is_sn): + """attention module + + This implementation is different from the bigGAN paper. Please check this paper: Non-local Neural Networks. + It also uses down sampling to reduce computation. + """ + h, w, num_channels = inputs.shape[1], inputs.shape[2], inputs.shape[3] + location_num = h * w + down_sampled_num = location_num // 4 # after down sampling, feature map shrinks to a quarter of its size + + with tf.variable_scope(name): + # theta: [h*w, c//8] + theta = conv("f", inputs, num_channels // 8, 1, 1, update_collection, is_sn) + theta = tf.reshape(theta, [-1, location_num, num_channels // 8]) + # phi: [d_h*d_w, c//8] + phi = conv("h", inputs, num_channels // 8, 1, 1, update_collection, is_sn) + phi = down_sampling(phi) + phi = tf.reshape(phi, [-1, down_sampled_num, num_channels // 8]) + # attention map: [h*w, d_h*d_w] + attn = tf.matmul(theta, phi, transpose_b=True) + attn = tf.nn.softmax(attn) + # g: [d_h*d_w, c//2] + g = conv("g", inputs, num_channels // 2, 1, 1, update_collection, is_sn) + g = down_sampling(g) + g = tf.reshape(g, [-1, down_sampled_num, num_channels // 2]) + # attn_g: [h*w, c//2] + attn_g = tf.matmul(attn, g) + attn_g = tf.reshape(attn_g, [-1, h, w, num_channels // 2]) + # attn_g: [h*w, c] + attn_g = conv("attn", attn_g, num_channels, 1, 1, update_collection, is_sn) + + sigma = tf.get_variable("sigma_ratio", [], initializer=tf.constant_initializer(0.0)) + return inputs + sigma * attn_g + + +def non_local_bigGAN(name, inputs, update_collection, is_sn): + """attention module + + This implementation follows the bigGAN paper. 
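+
+    Illustrative NumPy sketch of the shapes involved (not repo code; the
+    softmax here is written over the key axis, as in SAGAN)::
+
+        import numpy as np
+        N, H, W, C = 2, 4, 4, 16
+        f = np.random.rand(N, C // 8, H * W)    # key
+        g = np.random.rand(N, C // 8, H * W)    # query
+        h = np.random.rand(N, C, H * W)         # value
+        s = np.matmul(f.transpose(0, 2, 1), g)  # [N, H*W, H*W] logits
+        beta = np.exp(s) / np.exp(s).sum(axis=1, keepdims=True)
+        o = np.matmul(h, beta)                  # [N, C, H*W] attended values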
+ """ + H = inputs.shape[1] + W = inputs.shape[2] + C = inputs.shape[3] + C_ = C // 8 + inputs_ = tf.transpose(inputs, perm=[0, 3, 1, 2]) + inputs_ = tf.reshape(inputs_, [-1, C, H * W]) + with tf.variable_scope(name): + f = conv("f", inputs, C_, 1, 1, update_collection, is_sn) # key + g = conv("g", inputs, C_, 1, 1, update_collection, is_sn) # query + h = conv("h", inputs, C, 1, 1, update_collection, is_sn) # value + f = tf.transpose(f, [0, 3, 1, 2]) + f = tf.reshape(f, [-1, C_, H * W]) + g = tf.transpose(g, [0, 3, 1, 2]) + g = tf.reshape(g, [-1, C_, H * W]) + h = tf.transpose(h, [0, 3, 1, 2]) + h = tf.reshape(h, [-1, C, H * W]) + # attention map + s = tf.matmul(f, g, transpose_a=True) + beta = tf.nn.softmax(s, dim=0) + o = tf.matmul(h, beta) + gamma = tf.get_variable("gamma", [], initializer=tf.constant_initializer(0.)) + y = gamma * o + inputs_ + y = tf.reshape(y, [-1, C, H, W]) + y = tf.transpose(y, perm=[0, 2, 3, 1]) + return y + + +def global_sum_pooling(inputs): + """global sum pooling + + Args: + inputs -> shape: [N, H, W, C] + + Returns: + shape: [N, C] + """ + return tf.reduce_sum(inputs, axis=[1, 2], keep_dims=False) + + +def Hinge_loss(real_logits, fake_logits): + d_loss = -tf.reduce_mean(tf.minimum(0., -1.0 + real_logits)) - tf.reduce_mean(tf.minimum(0., -1.0 - fake_logits)) + g_loss = -tf.reduce_mean(fake_logits) + return d_loss, g_loss + + +def ortho_reg(vars_list): + """apply orthogonal regularization to convolutional layers + """ + s = 0 + for var in vars_list: + if "w" in var.name and var.shape.__len__() == 4: + # w shape: [k_size, k_size, in_channels, out_channels] + nums_kernel = int(var.shape[-1]) + w = tf.transpose(var, perm=[3, 0, 1, 2]) # [out_channels, k_size, k_size, in_channels] + w = tf.reshape(w, [nums_kernel, -1]) # [out_channels, k_size*k_size*in_channels] + ones = tf.ones([nums_kernel, nums_kernel]) + eyes = tf.eye(nums_kernel, nums_kernel) + y = tf.matmul(w, w, transpose_b=True) * (ones - eyes) + s += tf.nn.l2_loss(y) + return s + + +def d_projection(global_pooled, y, nums_class, update_collection=None): + """paper: cGANs with Projection Discriminator + + Args: + global_pooled: hidden layer after global sum pooling. shape -> [N, C] + y: class info (a scalar, not one-hot encoding!) + nums_class: number of classes + """ + w = global_pooled.shape[-1] + v = tf.get_variable("v", [nums_class, w], initializer=tf.orthogonal_initializer()) + v = tf.transpose(v) + # V^T acts like a fully connected layer, so we need to perform spectral norm on V^T instead of V + v = spectral_normalization("embed", v, update_collection=update_collection) + v = tf.transpose(v) + # Embed(y); same as V^Ty (, assuming y is a one-hot vector) + temp = tf.nn.embedding_lookup(v, y) + # Embed(y) . 
h + temp = tf.reduce_sum(temp * global_pooled, axis=1, keep_dims=True) + return temp + + +def G_Resblock(name, inputs, nums_out, train_phase, split_z, embed_y, is_up=True): + """A residual block in BigGAN's generator""" + with tf.variable_scope(name): + temp = tf.identity(inputs) + inputs = conditional_batchnorm(inputs, train_phase, "bn1", split_z, embed_y) + inputs = tf.nn.relu(inputs) + if is_up: + inputs = up_sampling(inputs) + inputs = conv("conv1", inputs, nums_out, 3, 1, is_sn=True) + inputs = conditional_batchnorm(inputs, train_phase, "bn2", split_z, embed_y) + inputs = tf.nn.relu(inputs) + inputs = conv("conv2", inputs, nums_out, 3, 1, is_sn=True) + # skip connection + if is_up: + temp = up_sampling(temp) + temp = conv("identity", temp, nums_out, 1, 1, is_sn=True) + return inputs + temp + + +def D_Resblock(name, inputs, nums_out, train_phase, update_collection=None, is_down=True, use_bn=False): + """A residual block in BigGAN's discriminator""" + with tf.variable_scope(name): + temp = tf.identity(inputs) + if use_bn: + inputs = conditional_batchnorm(inputs, train_phase, "BN1") + inputs = tf.nn.relu(inputs) + inputs = conv("conv1", inputs, nums_out, 3, 1, update_collection, is_sn=True) + if use_bn: + inputs = conditional_batchnorm(inputs, train_phase, "BN2") + inputs = tf.nn.relu(inputs) + inputs = conv("conv2", inputs, nums_out, 3, 1, update_collection, is_sn=True) + if is_down: + inputs = down_sampling(inputs) + # skip connection + temp = conv("identity", temp, nums_out, 1, 1, update_collection, is_sn=True) + temp = down_sampling(temp) + else: + temp = conv("identity", temp, nums_out, 1, 1, update_collection, is_sn=True) + return inputs + temp diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/pb_frozen.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/pb_frozen.py new file mode 100644 index 0000000000000000000000000000000000000000..8457e14e2ac32743da06cf542fe34547741becfa --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/pb_frozen.py @@ -0,0 +1,137 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
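+
+# Illustrative loader (not called anywhere in the repo) for the frozen graph
+# this script produces; the tensor names "z:0", "y:0" and "output:0" come
+# from the placeholders and identity node defined below, and the default
+# path mirrors final_pb_path for the 32px model.
+def _run_frozen_graph_check(pb_file="../output/pb_model/32/final_model.pb", z_dim=120):
+    import numpy as np
+    import tensorflow as tf
+    with tf.gfile.GFile(pb_file, "rb") as f:
+        graph_def = tf.GraphDef()
+        graph_def.ParseFromString(f.read())
+    with tf.Graph().as_default() as graph:
+        tf.import_graph_def(graph_def, name="")
+        with tf.Session(graph=graph) as sess:
+            img = sess.run("output:0", feed_dict={
+                "z:0": np.random.randn(1, z_dim).astype(np.float32),
+                "y:0": np.zeros((1, 1), dtype=np.int32)})
+    return img.shape  # e.g. (1, 32, 32, 3) for the 32px generator
+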
+import tensorflow as tf +from tensorflow.python.tools import freeze_graph +from tensorflow.python.framework import graph_util +import os +import argparse + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + # data arguments + parser.add_argument("--gen_num", type=int, default=5000, help="number of generated images") + parser.add_argument("--output", type=str, default="../output", help="output path") + parser.add_argument("-b", "--batch_size", type=int, default=64, help="batch size") + parser.add_argument("-c", "--num_classes", type=int, default=10, help="number of classes") + parser.add_argument("--img_h", type=int, default=32, help="image height") + parser.add_argument("--img_w", type=int, default=32, help="image width") + parser.add_argument("--train_img_size", type=int, default=32, + help="image will be resized to this size when training") + # model arguments + parser.add_argument("--base_channel", type=int, default=96, help="base channel number for G and D") + parser.add_argument("--z_dim", type=int, default=120, help="latent space dimensionality") + parser.add_argument("--ema", type=bool, default=False, help="use exponential moving average for G") + parser.add_argument("--shared_dim", type=int, default=128, help="shared embedding dimensionality") + args = parser.parse_args() + + # use different architectures for different image sizes + if args.train_img_size == 128: + from networks_128 import Generator, Discriminator + elif args.train_img_size == 64: + from networks_64 import Generator, Discriminator + elif args.train_img_size == 32: + from networks_32 import Generator, Discriminator + + # model path + base_path = os.path.join(args.output, "model", str(args.train_img_size)) + model_path = os.path.join(base_path, "model.ckpt") + ema_model_path = os.path.join(base_path, "ema.ckpt") + ckpt_path = ema_model_path if args.ema else model_path + + # pb path + pb_path = os.path.join(args.output, "pb_model", str(args.train_img_size)) + graph_pb_path = os.path.join(pb_path, "tmp_model.pb") + model_pb_path = os.path.join(pb_path, "model.pb") + final_pb_path = os.path.join(pb_path, "final_model.pb") + + tf.reset_default_graph() + train_phase = tf.Variable(tf.constant(False, dtype=tf.bool), name="train_phase") + # train_phase = tf.placeholder(tf.bool) # is training or not + z = tf.placeholder(tf.float32, [None, args.z_dim], name="z") # latent vector + y = tf.placeholder(tf.int32, [None, 1], name="y") # class info + y = tf.reshape(y, [-1]) + + G = Generator("generator", args.base_channel) + with tf.variable_scope("generator", reuse=tf.AUTO_REUSE): + embed_w = tf.get_variable("embed_w", [args.num_classes, args.shared_dim], initializer=tf.orthogonal_initializer()) + + fake_img = G(z, train_phase, y, embed_w, args.num_classes) + output = tf.identity(fake_img, name="output") + + with tf.Session() as sess: + tf.train.write_graph(sess.graph_def, pb_path, "tmp_model.pb") + # freeze model + freeze_graph.freeze_graph( + input_graph=graph_pb_path, + input_saver='', + input_binary=False, + input_checkpoint=ckpt_path, + output_node_names="output", + restore_op_name='save/restore_all', + filename_tensor_name='save/Const:0', + output_graph=model_pb_path, + clear_devices=False, + initializer_nodes='') + + # see https://blog.csdn.net/u011765925/article/details/103038349 and + # https://github.com/onnx/tensorflow-onnx/issues/77 + tf.reset_default_graph() + with tf.gfile.FastGFile(model_pb_path, "rb") as f: + graph_def = tf.GraphDef() + graph_def.ParseFromString(f.read()) + + for node in 
graph_def.node: + if node.op == 'RefSwitch': + node.op = 'Switch' + for index in range(len(node.input)): + if 'moving_' in node.input[index]: + node.input[index] = node.input[index] + '/read' + elif node.op == 'AssignSub': + node.op = 'Sub' + if 'use_locking' in node.attr: + del node.attr['use_locking'] + elif node.op == 'Assign': + node.op = 'Identity' + if 'use_locking' in node.attr: + del node.attr['use_locking'] + if 'validate_shape' in node.attr: + del node.attr['validate_shape'] + if len(node.input) == 2: + # input0: ref: Should be from a Variable node. May be uninitialized. + # input1: value: The value to be assigned to the variable. + node.input[0] = node.input[1] + del node.input[1] + elif node.op == 'AssignAdd': + node.op = 'Add' + if 'use_locking' in node.attr: + del node.attr['use_locking'] + with tf.Session() as sess: + converted_graph_def = graph_util.convert_variables_to_constants(sess, graph_def, ['output']) + tf.train.write_graph(converted_graph_def, pb_path, "final_model.pb", as_text=False) + diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/requirements.txt b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..4bdba62afb1db6af510bb2e5b435e5372037cfdd --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/requirements.txt @@ -0,0 +1,33 @@ +absl-py==0.11.0 +astor==0.8.1 +cached-property==1.5.2 +cycler==0.10.0 +gast==0.2.2 +google-pasta==0.2.0 +grpcio==1.35.0 +h5py==3.1.0 +imageio==2.16.2 +importlib-metadata==3.4.0 +Keras-Applications==1.0.8 +Keras-Preprocessing==1.1.2 +kiwisolver==1.3.1 +Markdown==3.3.3 +matplotlib==3.3.4 +numpy==1.20.0 +opencv-python==4.5.5.64 +opt-einsum==3.3.0 +Pillow==9.1.0 +protobuf==3.14.0 +pyparsing==2.4.7 +python-dateutil==2.8.1 +scipy==1.7.3 +six==1.15.0 +tensorboard==1.15.0 +tensorflow-estimator==1.15.1 +tensorflow-gpu==1.15.0 +termcolor==1.1.0 +tqdm==4.56.0 +typing-extensions==3.7.4.3 +Werkzeug==1.0.1 +wrapt==1.12.1 +zipp==3.4.0 diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/scripts/run_1p.sh b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/scripts/run_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..f63debacb18ef4be2bc46d6172b4cf796a0c2824 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/scripts/run_1p.sh @@ -0,0 +1,28 @@ +#!/bin/bash +### Do not need to Configure CANN Environment on Modelarts Platform, because it has been set already. 
+### Modelarts Platform command for train
+export TF_CPP_MIN_LOG_LEVEL=2          ## TensorFlow log verbosity
+export ASCEND_SLOG_PRINT_TO_STDOUT=0   ## Print device log on terminal: on(1), off(0)
+
+code_dir=${1}
+data_dir=${2}
+result_dir=${3}
+obs_url=${4}
+
+current_time=`date "+%Y-%m-%d-%H-%M-%S"`
+
+python3.7 ${code_dir}/train.py \
+    --dataset=${data_dir} \
+    --output=${result_dir} \
+    --obs_dir=${obs_url} \
+    --chip=npu \
+    --platform=modelarts \
+    --num_classes=10 \
+    --img_h=32 \
+    --img_w=32 \
+    --train_img_size=32 \
+    --train_itr=100000 \
+    --batch_size=64 \
+# --use_fp16 \
+# --profiling \
+# --load_model \
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/scripts/run_cpu.sh b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/scripts/run_cpu.sh
new file mode 100644
index 0000000000000000000000000000000000000000..ed67da96bb25c546085ac15ae44656cebabb0473
--- /dev/null
+++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/scripts/run_cpu.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+python train.py \
+    --dataset=../dataset \
+    --output=../output \
+    --chip=cpu \
+    --platform=linux \
+    --num_classes=10 \
+    --img_h=32 \
+    --img_w=32 \
+    --train_img_size=32 \
+    --batch_size=64 \
+    --train_itr=100000 \
+    # --load_model
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/scripts/run_gpu.sh b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/scripts/run_gpu.sh
new file mode 100644
index 0000000000000000000000000000000000000000..db6a3eb47ac45d2fcd5fa93e28670214805bfa73
--- /dev/null
+++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/scripts/run_gpu.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+#set env
+### GPU Platform command for train
+# export CUDA_VISIBLE_DEVICES=0
+# export LD_LIBRARY_PATH=/usr/local/cuda/lib64:/usr/local/cuda/extras/CUPTI/lib64:${LD_LIBRARY_PATH}
+
+current_time=`date "+%Y-%m-%d-%H-%M-%S"`
+
+python train.py \
+    --dataset=../dataset \
+    --output=../output \
+    --chip=gpu \
+    --platform=linux \
+    --num_classes=10 \
+    --img_h=32 \
+    --img_w=32 \
+    --train_img_size=32 \
+    --batch_size=64 \
+    --train_itr=100000 \
+    # --load_model
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/scripts/run_msprof.sh b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/scripts/run_msprof.sh
new file mode 100644
index 0000000000000000000000000000000000000000..4081bca18b12b332813a4631e1c7f684c654bfa6
--- /dev/null
+++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/scripts/run_msprof.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+set -e
+### Before running this script, make sure you have generated profiling data and installed the CANN toolkit package.
+### Refer to: https://support.huaweicloud.com/Development-tg-cann202training1/atlasprofilingtrain_16_0015.html
+### $1 is the absolute directory of profiling data.
+### start commands sample: sh scripts/run_msprof.sh /home/npu_profiling + +PROFILING_DIR=$1 + +## Be careful the $MSPROF_DIR, you may change it on different plateform +## arm architecture, `uname -a` +# MSPROF_DIR=/home/HwHiAiUser/Ascend/ascend-toolkit/latest/arm64-linux/toolkit/tools/profiler/profiler_tool/analysis/msprof +## x86 architecture, `uname -a` For Ai1S platform +MSPROF_DIR=/usr/local/Ascend/ascend-toolkit/latest/x86_64-linux/toolkit/tools/profiler/profiler_tool/analysis/msprof + +python3.7 ${MSPROF_DIR}/msprof.py import -dir ${PROFILING_DIR} +echo "===>>>[OK] msprof sqlite.\n" + +python3.7 ${MSPROF_DIR}/msprof.py query -dir ${PROFILING_DIR} +echo "===>>>[OK] msprof query.\n" + +python3.7 ${MSPROF_DIR}/msprof.py export timeline -dir ${PROFILING_DIR} +echo "===>>>[OK] msprof timeline.\n" + +python3.7 ${MSPROF_DIR}/msprof.py export summary -dir ${PROFILING_DIR} +echo "===>>>[OK] msprof summary.\n" \ No newline at end of file diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test/train_full_1p.sh b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test/train_full_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..56a7283fe678d1658fd9954614cda2b138300730 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test/train_full_1p.sh @@ -0,0 +1,213 @@ +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" +obs_url="" + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --obs_url # output path in OBS + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --obs_url* ]];then + obs_url=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +modelarts_flag=${MODELARTS_MODEL_PATH} +if [ x"${modelarts_flag}" != x ]; +then + echo "running without etp..." 
+ print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` + print_log="/home/ma-user/modelarts/log/${print_log_name}" +fi +echo "### get your log here : ${print_log}" + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} +cp -r ${data_path}/metrics ./ + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 基础参数,需要模型审视修改 +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +batch_size=64 + +if [ x"${modelarts_flag}" != x ]; +then + python3.7 ${cur_path}/../train.py \ + --dataset=${data_path}/dataset \ + --output=${output_path} \ + --chip=npu \ + --platform=linux \ + --num_classes=10 \ + --img_h=32 \ + --img_w=32 \ + --train_img_size=32 \ + --train_itr=100000 \ + --batch_size=${batch_size} \ +# --use_fp16 +else + python3.7 ${cur_path}/../train.py \ + --dataset=${data_path}/dataset \ + --output=${output_path} \ + --chip=npu \ + --platform=linux \ + --num_classes=10 \ + --img_h=32 \ + --img_w=32 \ + --train_img_size=32 \ + --train_itr=100000 \ + --batch_size=${batch_size} > ${print_log} 2>&1 + python3.7 ./generate_fake_img.py --chip=cpu --output=${output_path} >> ${print_log} 2>&1 + python3.7 ./calc_IS_FID.py --gpu="" --fake_img_path=${output_path}/test/fake/32 >> ${print_log} 2>&1 +fi + +# 性能相关数据计算 +StepTime=`grep "Iteration" ${print_log} | tail -n 10 | awk '{print $8,$10,$NF}' | awk '{sum+=$1+$2+$3} END {print sum/NR}'` +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` + +# 精度相关数据计算 +#train_accuracy=`grep "Final Accuracy accuracy" ${print_log} | awk '{print $NF}' +train_accuracy=`grep "FID :" ${print_log} | awk '{print $NF}'` +# 提取所有loss打印信息 +grep "Iteration" ${print_log} | awk '{print $3,$4,$5,$6}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + + +########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." 
+ echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test/train_full_1p_modelarts.sh b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test/train_full_1p_modelarts.sh new file mode 100644 index 0000000000000000000000000000000000000000..54d896229a4c8e04f27d52af57d91e97017bbdb0 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test/train_full_1p_modelarts.sh @@ -0,0 +1,214 @@ +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" +obs_url="" + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --obs_url # output path in OBS + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --obs_url* ]];then + obs_url=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo 
${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +modelarts_flag=${MODELARTS_MODEL_PATH} +if [ x"${modelarts_flag}" != x ]; +then + echo "running without etp..." + print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` + print_log="/home/ma-user/modelarts/log/${print_log_name}" +fi +echo "### get your log here : ${print_log}" + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 基础参数,需要模型审视修改 +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +batch_size=64 + +if [ x"${modelarts_flag}" != x ]; +then + python3.7 ${cur_path}/../train.py \ + --dataset=${data_path} \ + --output=${output_path} \ + --obs_dir=${obs_url} \ + --chip=npu \ + --platform=modelarts \ + --num_classes=10 \ + --img_h=32 \ + --img_w=32 \ + --train_img_size=32 \ + --train_itr=100000 \ + --batch_size=${batch_size} \ +# --use_fp16 +else + python3.7 ${cur_path}/../train.py \ + --dataset=${data_path} \ + --output=${output_path} \ + --obs_dir=${obs_url} \ + --chip=npu \ + --platform=modelarts \ + --num_classes=10 \ + --img_h=32 \ + --img_w=32 \ + --train_img_size=32 \ + --train_itr=100000 \ + --batch_size=${batch_size} \ +# --use_fp16 + 1>${print_log} 2>&1 +fi + +# 性能相关数据计算 +StepTime=`grep "Iteration" ${print_log} | tail -n 10 | awk '{print $8,$10,$NF}' | awk '{sum+=$1+$2+$3} END {print sum/NR}'` +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` + +# 精度相关数据计算 +#train_accuracy=`grep "Final Accuracy accuracy" ${print_log} | awk '{print $NF}' +train_accuracy='No Acc' +# 提取所有loss打印信息 +grep "Iteration" ${print_log} | awk '{print $3,$4,$5,$6}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + + +########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." 
+    echo "------------------ ERROR NOTICE END ------------------"
+else
+    echo "------------------ INFO NOTICE START ------------------"
+    echo "INFO, your task has used the Ascend NPU, please check your result."
+    echo "------------------ INFO NOTICE END ------------------"
+fi
+
+# Get the final casename; keep this, the case file name is ${CaseName}
+get_casename
+
+# Rename the loss file
+if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ];
+then
+    mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt
+fi
+
+# End-to-end training time
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+echo "------------------ Final result ------------------"
+# Print performance: FPS / time per step / end-to-end duration
+echo "Final Performance images/sec : $FPS"
+echo "Final Performance sec/step : $StepTime"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Print training accuracy
+echo "Final Train Accuracy : ${train_accuracy}"
+
+# Loss value of the last iteration; no need to modify
+ActualLoss=`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`
+
+# Print the key information to ${CaseName}.log; no need to modify
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test/train_performance_1p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..e1804482eb45e4f9bdf19f360bb00486b68c70f3
--- /dev/null
+++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test/train_performance_1p.sh
@@ -0,0 +1,215 @@
+#!/bin/bash
+
+##########################################################
+######### Do NOT modify lines 3 to 100 of this file ######
+######### Do NOT modify lines 3 to 100 of this file ######
+######### Do NOT modify lines 3 to 100 of this file ######
+##########################################################
+# Path of this shell script
+cur_path=`echo $(cd $(dirname $0);pwd)`
+
+# Check whether the current script is the performance one
+perf_flag=`echo $0 | grep performance | wc -l`
+
+# Name of the network currently being executed
+Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'`
+
+export RANK_SIZE=1
+export RANK_ID=0
+export JOB_ID=10087
+
+# Initialize path parameters
+data_path=""
+output_path=""
+obs_url=""
+
+# Help message; no need to modify
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_performance_1P.sh <args>"
+    echo " "
+    echo "parameter explain:
+    --data_path            # dataset of training
+    --output_path          # output of training
+    --obs_url              # output path in OBS
+    --train_steps          # max_step for training
+    --train_epochs         # max_epoch for training
+    --batch_size           # batch size
+    -h/--help              show help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no need to modify
+for para in $*
+do
+    if [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --output_path* ]];then
+        output_path=`echo ${para#*=}`
+    elif [[ $para == --obs_url* ]];then
+        obs_url=`echo ${para#*=}`
+    elif [[ $para == --train_steps* ]];then
+        train_steps=`echo ${para#*=}`
+    elif [[ $para == --train_epochs* ]];then
+        train_epochs=`echo ${para#*=}`
+    elif [[ $para == --batch_size* ]];then
+        batch_size=`echo ${para#*=}`
+    fi
+done
+
+# Check that data_path was passed in; no need to modify
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be configured"
+    exit 1
+fi
+
+# Check that output_path was passed in; no need to modify
+if [[ $output_path == "" ]];then
+    output_path="./test/output/${ASCEND_DEVICE_ID}"
+fi
+
+# Set the console log file name; keep this, the file name is ${print_log}
+print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log"
+modelarts_flag=${MODELARTS_MODEL_PATH}
+if [ x"${modelarts_flag}" != x ];
+then
+    echo "running on modelarts..."
+    print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank`
+    print_log="/home/ma-user/modelarts/log/${print_log_name}"
+fi
+echo "### get your log here : ${print_log}"
+
+CaseName=""
+function get_casename()
+{
+    if [ x"${perf_flag}" = x1 ];
+    then
+        CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf'
+    else
+        CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc'
+    fi
+}
+
+# Change to the code directory
+cd ${cur_path}/../
+rm -rf ./test/output/${ASCEND_DEVICE_ID}
+mkdir -p ./test/output/${ASCEND_DEVICE_ID}
+cp -r ${data_path}/metrics ./
+
+# Record the training start time; no need to modify
+start_time=$(date +%s)
+##########################################################
+######### Do NOT modify lines 3 to 100 of this file ######
+######### Do NOT modify lines 3 to 100 of this file ######
+######### Do NOT modify lines 3 to 100 of this file ######
+##########################################################
+
+#=========================================================
+#=========================================================
+#======== Training command; adapt it to your network =====
+#=========================================================
+#=========================================================
+# Basic parameters; review and modify them for your model
+# Your training dataset is under ${data_path}; use this variable directly
+# Your training output directory is ${output_path}; use this variable directly
+# Other basic parameters may be added as needed, but keep batch_size and set it to the correct value
+#train_epochs=2
+train_steps=100
+batch_size=64
+
+if [ x"${modelarts_flag}" != x ];
+then
+    python3.7 ${cur_path}/../train.py \
+        --dataset=${data_path}/dataset \
+        --output=${output_path} \
+        --chip=npu \
+        --platform=linux \
+        --num_classes=10 \
+        --img_h=32 \
+        --img_w=32 \
+        --train_img_size=32 \
+        --train_itr=${train_steps} \
+        --batch_size=${batch_size}
+        # --use_fp16 (optional mixed-precision flag, currently disabled)
+else
+    python3.7 ${cur_path}/../train.py \
+        --dataset=${data_path}/dataset \
+        --output=${output_path} \
+        --chip=npu \
+        --platform=linux \
+        --num_classes=10 \
+        --img_h=32 \
+        --img_w=32 \
+        --train_img_size=32 \
+        --train_itr=${train_steps} \
+        --batch_size=${batch_size} > ${print_log} 2>&1
+    # python3.7 ./generate_fake_img.py --chip=cpu --output=${output_path} >> ${print_log} 2>&1
+    # python3.7 ./calc_IS_FID.py --gpu="" --fake_img_path=${output_path}/test/fake/32 >> ${print_log} 2>&1
+
+fi
+
+# Performance metrics
+StepTime=`grep "Iteration" ${print_log} | tail -n 10 | awk '{print $8,$10,$NF}' | awk '{sum+=$1+$2+$3} END {print sum/NR}'`
+FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'`
+
+# Accuracy metrics
+#train_accuracy=`grep "Final Accuracy accuracy" ${print_log} | awk '{print $NF}'`
+train_accuracy='No Acc'
+# Extract all loss log lines
+grep "Iteration" ${print_log} | awk '{print $3,$4,$5,$6}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt
+
+
+###########################################################
+######### Do NOT modify anything below this line ##########
+######### Do NOT modify anything below this line ##########
+######### Do NOT modify anything below this line ##########
+###########################################################
+
+# Check whether this run actually used the Ascend NPU
+use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l`
+if [ x"${use_npu_flag}" == x0 ];
+then
+    echo "------------------ ERROR NOTICE START ------------------"
+    echo "ERROR, your task hasn't used the Ascend NPU, please check your NPU migration."
+    echo "------------------ ERROR NOTICE END ------------------"
+else
+    echo "------------------ INFO NOTICE START ------------------"
+    echo "INFO, your task has used the Ascend NPU, please check your result."
+    echo "------------------ INFO NOTICE END ------------------"
+fi
+
+# Get the final casename; keep this, the case file name is ${CaseName}
+get_casename
+
+# Rename the loss file
+if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ];
+then
+    mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt
+fi
+
+# End-to-end training time
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+echo "------------------ Final result ------------------"
+# Print performance: FPS / time per step / end-to-end duration
+echo "Final Performance images/sec : $FPS"
+echo "Final Performance sec/step : $StepTime"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Print training accuracy
+echo "Final Train Accuracy : ${train_accuracy}"
+
+# Loss value of the last iteration; no need to modify
+ActualLoss=`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`
+
+# Print the key information to ${CaseName}.log; no need to modify
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test_om.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test_om.py
new file mode 100644
index 0000000000000000000000000000000000000000..64ebbf69723d99b88e9a33330b66bd06ac055e78
--- /dev/null
+++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test_om.py
@@ -0,0 +1,72 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import numpy as np +import argparse +import os +from PIL import Image +from utils import restore_img, check_dir, read_images +from calc_IS_FID import get_FID, get_IS +from tqdm import tqdm + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--output", type=str, default="../output", help="output path") + parser.add_argument("--train_img_size", type=int, default=32, + help="image will be resized to this size when training") + parser.add_argument("--chip", type=str, default="gpu", help="run on which chip, cpu or gpu or npu") + parser.add_argument("--gpu", type=str, default="0", help="GPU to use (leave blank for CPU only)") + parser.add_argument("--batch_size", type=int, default=100, help="batch size") + parser.add_argument("--precalculated_path", type=str, default="./metrics/res/stats_tf/fid_stats_cifar10_train.npz", + help="precalculated statistics for datasets, used in FID") + args = parser.parse_args() + + bin_path = os.path.join(args.output, "inference", str(args.train_img_size), "bin") + image_path = os.path.join(args.output, "inference", str(args.train_img_size), "image") + check_dir(image_path) + + # recover image from bin + print("Recovering image from bin...") + files = os.listdir(bin_path) + output_num = 0 + for file_name in tqdm(files): + if file_name.endswith(".bin"): + output_num += 1 + file_bin_path = os.path.join(bin_path, file_name) + file_image_path = os.path.join(image_path, file_name.replace(".bin", ".jpg")) + image = np.fromfile(file_bin_path, dtype='float32').reshape(args.train_img_size, args.train_img_size, 3) + Image.fromarray(np.uint8(restore_img(image))).save(file_image_path) + + # calc FID and IS + print("Calculating FID and IS...") + images_list = read_images(image_path) + images = np.array(images_list).astype(np.float32) + fid_score = get_FID(images, args) + is_mean, is_std = get_IS(images_list, args, splits=10) + print("IS : (%f, %f)" % (is_mean, is_std)) + print("FID : %f" % fid_score) diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test_pb.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test_pb.py new file mode 100644 index 0000000000000000000000000000000000000000..8d8501dff3366f120acb401a14174bfcc949a495 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/test_pb.py @@ -0,0 +1,84 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import tensorflow as tf +from tensorflow.python.framework import graph_util +from google.protobuf import text_format +import os +import argparse +from utils import session_config, check_dir +import numpy as np +from generate_fake_img import generate_img_of_one_class + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + # platform arguments (Huawei Ascend) + parser.add_argument("--chip", type=str, default="gpu", help="run on which chip, cpu or gpu or npu") + # data arguments + parser.add_argument("--output", type=str, default="../output", help="output path") + parser.add_argument("-b", "--batch_size", type=int, default=64, help="batch size") + parser.add_argument("-c", "--num_classes", type=int, default=10, help="number of classes") + parser.add_argument("--img_h", type=int, default=32, help="image height") + parser.add_argument("--img_w", type=int, default=32, help="image width") + parser.add_argument("--train_img_size", type=int, default=32, + help="image will be resized to this size when training") + # model arguments + parser.add_argument("--base_channel", type=int, default=96, help="base channel number for G and D") + parser.add_argument("--z_dim", type=int, default=120, help="latent space dimensionality") + parser.add_argument("--truncation", type=float, default=2.0, help="truncation threshold") + parser.add_argument("--ema", type=bool, default=True, help="use exponential moving average for G") + parser.add_argument("--shared_dim", type=int, default=128, help="shared embedding dimensionality") + args = parser.parse_args() + + # get output dir + inference_path = os.path.join(args.output, "inference", str(args.train_img_size)) + check_dir(inference_path) + # pb path + pb_path = os.path.join(args.output, "pb_model", str(args.train_img_size)) + graph_pb_path = os.path.join(pb_path, "tmp_model.pb") + model_pb_path = os.path.join(pb_path, "model.pb") + final_pb_path = os.path.join(pb_path, "final_model.pb") + + tf.reset_default_graph() + with tf.gfile.FastGFile(final_pb_path, "rb") as f: + graph_def = tf.GraphDef() + graph_def.ParseFromString(f.read()) + # text_format.Merge(f.read(), graph_def) + + _ = tf.import_graph_def(graph_def, name="") + + config = session_config(args) + with tf.Session(config=config) as sess: + sess.run(tf.global_variables_initializer()) + + z = sess.graph.get_tensor_by_name("z:0") + y = sess.graph.get_tensor_by_name("y:0") + fake_img = sess.graph.get_tensor_by_name("output:0") + + class_labels = np.random.randint(0, 11, size=(args.batch_size, 1)) + generate_img_of_one_class(args, class_labels, "inference.jpg", inference_path, sess, fake_img, z, y) diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/train.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/train.py new file mode 100644 index 
0000000000000000000000000000000000000000..3f4e7ab047dad10f3c5780141de0b0ffdb3e3a90 --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/train.py @@ -0,0 +1,341 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from ops import Hinge_loss, ortho_reg +import tensorflow as tf +import numpy as np +from utils import truncated_noise_sample, get_one_batch, session_config, read_images, check_dir +import cv2 +import datetime +import scipy.io as sio +import argparse +import os +from generate_fake_img import generate_img, generate_img_by_class +from calc_IS_FID import get_IS, get_FID + +parser = argparse.ArgumentParser() +# platform arguments (Huawei Ascend) +parser.add_argument("--chip", type=str, default="gpu", help="run on which chip, cpu or gpu or npu") +parser.add_argument("--gpu", type=str, default="0", help="GPU to use (leave blank for CPU only)") +parser.add_argument("--platform", type=str, default="linux", help="Run on linux/apulis/modelarts platform. 
Modelarts " + "Platform has some extra data copy operations") +parser.add_argument("--obs_dir", type=str, default="obs://lianlio/log", help="obs result path, not need on gpu and apulis platform") +parser.add_argument("--profiling", action="store_true", help="profiling for performance or not") +# data arguments +parser.add_argument("--dataset", type=str, default="../dataset", help="dataset path") +parser.add_argument("--output", type=str, default="../output", help="output path") +parser.add_argument("-c", "--num_classes", type=int, default=10, help="number of classes") +parser.add_argument("--img_h", type=int, default=32, help="image height") +parser.add_argument("--img_w", type=int, default=32, help="image width") +parser.add_argument("--train_img_size", type=int, default=32, help="image will be resized to this size when training") +parser.add_argument("--data", type=str, default="cifar10", help="which dataset to use (cifar10 / imagenet64)") +# metrics arguments +parser.add_argument("--metrics", type=str, default="fid", help="use FID or IS as metrics (fid / is)") +parser.add_argument("--precalculated_path", type=str, default="./metrics/res/stats_tf/fid_stats_cifar10_train.npz", + help="precalculated statistics for datasets, used in FID") +parser.add_argument("--gen_num", type=int, default=5000, help="number of generated images to calc IS or FID " + "(at least 2048 for FID)") +# training arguments +parser.add_argument('--use_fp16', action="store_true", help='enable mixed precision training') +parser.add_argument("--load_model", action="store_true", help="load model and continue to train") +parser.add_argument("--save_freq", type=int, default=1000, help="frequency of saving model") +parser.add_argument("--log_freq", type=int, default=50, help="frequency of logging") +parser.add_argument("-b", "--batch_size", type=int, default=64, help="batch size (larger batch size may have better performance)") +parser.add_argument("-i", "--train_itr", type=int, default=100000, help="number of training iterations") +parser.add_argument("--d_lr", type=float, default=4e-4, help="learning rate for discriminator") +parser.add_argument("--g_lr", type=float, default=1e-4, help="learning rate for generator") +parser.add_argument("--d_train_step", type=int, default=2, help="number of D training steps per G training step") +parser.add_argument('--beta1', type=float, default=0.0, help='beta1 for Adam optimizer') +parser.add_argument('--beta2', type=float, default=0.9, help='beta2 for Adam optimizer') +# model arguments +parser.add_argument("--base_channel", type=int, default=96, help="base channel number for G and D") +parser.add_argument("--z_dim", type=int, default=120, help="latent space dimensionality") +parser.add_argument("--shared_dim", type=int, default=128, help="shared embedding dimensionality") +parser.add_argument("--beta", type=float, default=1e-4, help="orthogonal regularization strength") +parser.add_argument("--truncation", type=float, default=2.0, help="truncation threshold") +parser.add_argument("--ema_decay", type=float, default=0.9999, help="decay rate of exponential moving average for the weights of G") +# other arguments +parser.add_argument("--debug", action="store_true", help="debug or not") +args = parser.parse_args() + +if args.chip == "npu": + from npu_bridge.npu_init import * +if args.debug is True: + from tensorflow.python import debug as tf_dbg + +os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu +args.metrics = args.metrics.upper() + +# use different architectures for different image sizes 
+if args.train_img_size == 128: + from networks_128 import Generator, Discriminator +elif args.train_img_size == 64: + from networks_64 import Generator, Discriminator +elif args.train_img_size == 32: + from networks_32 import Generator, Discriminator + +# get current time +now = datetime.datetime.now() +now_str = now.strftime('%Y_%m_%d_%H_%M_%S') + +# check output dir +model_path = os.path.join(args.output, "model", str(args.train_img_size)) +resume_path = os.path.join(model_path, "model.ckpt") +ema_model_path = os.path.join(model_path, "ema.ckpt") +log_path = os.path.join(args.output, "log", str(args.train_img_size)) +test_path = os.path.join(args.output, "gen_img") +fake_img_path = os.path.join(test_path, "fake", str(args.train_img_size)) +image_of_each_class_path = os.path.join(test_path, "image_of_each_class", str(args.train_img_size)) +check_dir(model_path) +check_dir(log_path) +if args.profiling is True: + args.profiling_dir = "/tmp/profiling" + check_dir(args.profiling_dir) + + +def train(): + train_phase = tf.Variable(tf.constant(True, dtype=tf.bool), name="train_phase") + # train_phase = tf.placeholder(tf.bool) # is training or not + x = tf.placeholder(tf.float32, [None, args.train_img_size, args.train_img_size, 3]) # input image(, which will be resized to 128x128) + z = tf.placeholder(tf.float32, [None, args.z_dim]) # latent vector + y = tf.placeholder(tf.int32, [None]) # class info + + with tf.variable_scope("generator"): + embed_w = tf.get_variable("embed_w", [args.num_classes, args.shared_dim], initializer=tf.orthogonal_initializer()) # weight for shared embedding + + global_step = tf.Variable(0, trainable=False) # global training step + add_step = global_step.assign(global_step + 1) + + set_train_phase_true = tf.assign(train_phase, True) + set_train_phase_false = tf.assign(train_phase, False) + + G = Generator('generator', args.base_channel) + D = Discriminator('discriminator', args.base_channel) + fake_img = G(z, train_phase, y, embed_w, args.num_classes) # generate fake img + fake_logits = D(fake_img, train_phase, y, args.num_classes, None) # D(G(z), y) + real_logits = D(x, train_phase, y, args.num_classes, 'NO_OPS') # D(x, y) + + D_loss, G_loss = Hinge_loss(real_logits, fake_logits) + G_ortho = args.beta * ortho_reg(G.var_list()) # Orthogonal Regularization + G_loss += G_ortho # get total loss + + D_opt = tf.train.AdamOptimizer(args.d_lr, beta1=args.beta1, beta2=args.beta2).minimize(D_loss, var_list=D.var_list()) + G_opt = tf.train.AdamOptimizer(args.g_lr, beta1=args.beta1, beta2=args.beta2).minimize(G_loss, var_list=G.var_list()) + + # loss scale for mixed precision training + # if args.use_fp16 is True and args.chip == "npu": + # loss_scale_manager = ExponentialUpdateLossScaleManager(init_loss_scale=2 ** 32, incr_every_n_steps=1000, + # decr_every_n_nan_or_inf=2, decr_ratio=0.5) + # D_opt = NPULossScaleOptimizer(tf.train.AdamOptimizer(args.d_lr, beta1=args.beta1, beta2=args.beta2), loss_scale_manager).minimize(D_loss, var_list=D.var_list()) + # G_opt = NPULossScaleOptimizer(tf.train.AdamOptimizer(args.g_lr, beta1=args.beta1, beta2=args.beta2), loss_scale_manager).minimize(G_loss, var_list=G.var_list()) + + # add exponential moving average for G's weights + with tf.variable_scope("ema_weights"): + var_ema = tf.train.ExponentialMovingAverage(args.ema_decay, global_step) + with tf.control_dependencies([G_opt]): + G_opt_ema = var_ema.apply(tf.trainable_variables(scope='generator')) + # assign ema weights + assign_vars = [] + for var in 
tf.trainable_variables(scope='generator'): + v = var_ema.average(var) + if v is not None: + assign_vars.append(tf.assign(var, v)) + + with tf.variable_scope("metrics", reuse=tf.AUTO_REUSE): + FID_now = tf.get_variable("FID_now", shape=[], initializer=tf.constant_initializer(1e3), trainable=False) + IS_now = tf.get_variable("IS_now", shape=[], initializer=tf.constant_initializer(0.0), trainable=False) + FID_best = tf.get_variable("FID_best", shape=[], initializer=tf.constant_initializer(1e3), trainable=False) + IS_best = tf.get_variable("IS_best", shape=[], initializer=tf.constant_initializer(0.0), trainable=False) + + # log loss, FID, IS + log_suffix = "_" + str(args.train_img_size) + "_bs_" + str(args.batch_size) + "_ch_" + str(args.base_channel) + tf.summary.scalar(now_str + '/d_loss' + log_suffix, D_loss) + tf.summary.scalar(now_str + '/g_loss' + log_suffix, G_loss) + # tf.summary.scalar(now_str + '/IS' + log_suffix, IS_now) + # tf.summary.scalar(now_str + '/FID' + log_suffix, FID_now) + summary_op = tf.summary.merge_all() + + config = session_config(args) + + print("Using", args.chip, "!") + + if args.data == "cifar10": + # get cifar-10 training data + data_path = os.path.join(args.dataset, "data_batch_") + test_data_path = os.path.join(args.dataset, "test_batch.mat") + raw_data = np.concatenate((sio.loadmat(data_path + "1.mat")["data"], + sio.loadmat(data_path + "2.mat")["data"], + sio.loadmat(data_path + "3.mat")["data"], + sio.loadmat(data_path + "4.mat")["data"], + sio.loadmat(data_path + "5.mat")["data"], + sio.loadmat(test_data_path)["data"] + ), + axis=0) + raw_data = np.reshape(raw_data, [-1, 3, args.img_h, args.img_w]) + raw_data = np.transpose(raw_data, axes=[0, 2, 3, 1]) # (N, H, W, C) + labels = np.concatenate((sio.loadmat(data_path + "1.mat")["labels"], + sio.loadmat(data_path + "2.mat")["labels"], + sio.loadmat(data_path + "3.mat")["labels"], + sio.loadmat(data_path + "4.mat")["labels"], + sio.loadmat(data_path + "5.mat")["labels"], + sio.loadmat(test_data_path)["labels"] + ), + axis=0)[:, 0] + elif args.data == "imagenet64": + # get imagenet64 training data + data_path = os.path.join(args.dataset, "imagenet64.mat") + data_and_label = sio.loadmat(data_path) + labels = data_and_label["labels"][0, :] + raw_data = data_and_label["data"] + else: + pass + + # resize images to training size + start = datetime.datetime.now() + data = np.zeros(shape=[raw_data.shape[0], args.train_img_size, args.train_img_size, 3], dtype=raw_data.dtype) + for i, img in enumerate(raw_data): + data[i] = cv2.resize(img, dsize=(args.train_img_size, args.train_img_size), interpolation=cv2.INTER_LINEAR) + end = datetime.datetime.now() + print("data preprocess time:", (end - start).total_seconds()) + + with tf.Session(config=config) as sess: + summary_writer = tf.summary.FileWriter(logdir=log_path, graph=sess.graph) + sess.run(tf.global_variables_initializer()) + + if args.debug is True: + sess = tf_dbg.LocalCLIDebugWrapperSession(sess) + + # load model + saver = tf.train.Saver() + if args.load_model is True: + print('Loading checkpoint from {}...'.format(resume_path)) + saver.restore(sess, save_path=resume_path) + + for itr in range(args.train_itr): + d_update_time = 0 # discriminator update time + g_update_time = 0 # generator update time + data_preprocess_time = 0 + + # Train Discriminator + for d in range(args.d_train_step): + # read one mini-batch + start = datetime.datetime.now() + batch, Y = get_one_batch(data, labels, args.batch_size) # get one batch + end = datetime.datetime.now() + 
data_preprocess_time += (end - start).total_seconds() + + # truncation trick + Z = truncated_noise_sample(args.batch_size, args.z_dim, args.truncation) + + start = datetime.datetime.now() + sess.run(set_train_phase_true) + sess.run(D_opt, feed_dict={z: Z, x: batch, y: Y}) + end = datetime.datetime.now() + d_update_time += (end - start).total_seconds() + + # Train Generator + Z = truncated_noise_sample(args.batch_size, args.z_dim, args.truncation) + start = datetime.datetime.now() + sess.run(set_train_phase_true) + sess.run([G_opt_ema, add_step, global_step], feed_dict={z: Z, y: Y}) + end = datetime.datetime.now() + g_update_time += (end - start).total_seconds() + + if itr % args.log_freq == 0: + sess.run(set_train_phase_false) + summary, d_loss, g_loss, is_now, is_best, fid_now, fid_best = sess.run([summary_op, D_loss, G_loss, IS_now, IS_best, FID_now, FID_best], + feed_dict={z: Z, x: batch, y: Y}) + summary_writer.add_summary(summary, itr) + metrics_best = fid_best if args.metrics == "FID" else is_best + # print("Iteration: %d, D_loss: %f, G_loss: %f, IS: %f, FID: %f, best %s: %f, " + # "D_updata_time: %f(s), G_updata_time: %f(s), data preprocess time: %f(s)" + # % (itr, d_loss, g_loss, is_now, fid_now, args.metrics, metrics_best, + # d_update_time, g_update_time, data_preprocess_time)) + print("Iteration: %d, D_loss: %f, G_loss: %f, " + "D_updata_time: %f(s), G_updata_time: %f(s), data preprocess time: %f(s)" + % (itr, d_loss, g_loss, d_update_time, g_update_time, data_preprocess_time)) + # generate fake images for each class + generate_img_by_class(args, image_of_each_class_path, sess, fake_img, z, y) + + # print loss scale value + if args.use_fp16 is True and args.chip == "npu": + lossScale = tf.get_default_graph().get_tensor_by_name("loss_scale:0") + overflow_status_reduce_all = tf.get_default_graph().get_tensor_by_name( + "overflow_status_reduce_all:0") + l_s, overflow_status_reduce_all = sess.run([lossScale, overflow_status_reduce_all]) + print('loss_scale is: ', l_s) + print("overflow_status_reduce_all:", overflow_status_reduce_all) + if itr % args.save_freq == 0: + saver.save(sess, save_path=resume_path) # save current model + print("Model saved in", resume_path) + sess.run(set_train_phase_false) + sess.run(assign_vars, feed_dict={z: Z, y: Y}) # get ema model + + # calc FID and IS + # generate_img(args, fake_img_path, sess, fake_img, z, y) # generate fake images + # images_list = read_images(fake_img_path) + # images = np.array(images_list).astype(np.float32) + + # fid_now = get_FID(images, args) + # is_now, _ = get_IS(images_list, args, splits=10) + # + # if args.metrics == "FID": + # fid_best = sess.run(FID_best) + # if fid_now < fid_best: + # fid_best = fid_now + # saver.save(sess, save_path=ema_model_path) # save ema model + # print("New best model!\nBest FID:", fid_best) + # else: + # is_best = sess.run(IS_best) + # if is_now > is_best: + # is_best = is_now + # saver.save(sess, save_path=ema_model_path) # save ema model + # print("New best model!\nBest IS:", is_best) + saver.save(sess, save_path=ema_model_path) # save ema model + print("EMA Model saved in", ema_model_path) + saver.restore(sess, save_path=resume_path) # restore current model + + # if args.metrics == "FID": + # sess.run(tf.assign(FID_best, tf.cast(tf.constant(fid_best), tf.float32))) # update best FID / IS + # else: + # sess.run(tf.assign(IS_best, tf.cast(tf.constant(is_best), tf.float32))) + # + # sess.run(tf.assign(IS_now, tf.cast(tf.constant(is_now), tf.float32))) # update FID and IS + # 
sess.run(tf.assign(FID_now, tf.cast(tf.constant(fid_now), tf.float32))) + + summary_writer.close() + + if args.platform.lower() == 'modelarts': + from help_modelarts import modelarts_result2obs + modelarts_result2obs(args) + print("Data transferred to OBS!") + + print("Training finished!") + + +if __name__ == "__main__": + train() diff --git a/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/utils.py b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..2b83d70120dd59d303185a85fd3385595d517a6f --- /dev/null +++ b/TensorFlow/contrib/cv/AnimeFaceGAN_ID1062_for_Tensorflow/utils.py @@ -0,0 +1,116 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import numpy as np +from scipy.stats import truncnorm +import tensorflow as tf +import imageio +from PIL import Image +import os +from glob import glob + + +def truncated_noise_sample(batch_size=1, dim_z=128, trunc=1., seed=None): + """truncation trick""" + state = None if seed is None else np.random.RandomState(seed) + if trunc <= 0: + return np.random.normal(size=(batch_size, dim_z)) # do not use truncation + else: + return truncnorm.rvs(-trunc, trunc, size=(batch_size, dim_z), random_state=state).astype(np.float32) + + +def read_image(filename): + x = imageio.imread(filename) + return np.array(Image.fromarray(x)) + + +def read_images(img_path): + filenames = glob(os.path.join(img_path, '*.*')) + images_list = [read_image(filename) for filename in filenames] + return images_list + + +def normalize_img(img): + return img / 127.5 - 1 + + +def restore_img(img): + return (img + 1) * 127.5 + + +def get_one_batch(data, labels, batch_size): + rand_select = np.random.randint(0, data.shape[0], batch_size) + batch_labels = labels[rand_select] + batch = data[rand_select] + + return normalize_img(batch), batch_labels + + +def session_config(args): + if args.chip == "npu": + from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig + config = tf.ConfigProto() + custom_op = config.graph_options.rewrite_options.custom_optimizers.add() + custom_op.name = "NpuOptimizer" + if args.use_fp16 is True: + custom_op.parameter_map['precision_mode'].s = tf.compat.as_bytes('allow_mix_precision') + config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF + fusion_cfg_path = os.path.join(os.path.dirname(__file__), "fusion_switch.cfg") + custom_op.parameter_map["fusion_switch_file"].s = tf.compat.as_bytes(fusion_cfg_path) + # custom_op.parameter_map["auto_tune_mode"].s = tf.compat.as_bytes("RL,GA") + if args.profiling is True: + custom_op.parameter_map["use_off_line"].b = True + custom_op.parameter_map["profiling_mode"].b = True + custom_op.parameter_map["profiling_options"].s = tf.compat.as_bytes( + '{"output":"/tmp/profiling","task_trace":"on","aicpu":"on"}') + elif args.chip == "gpu": + config = tf.ConfigProto(allow_soft_placement=True) + config.gpu_options.allow_growth = True + elif args.chip == 'cpu': + config = tf.ConfigProto() + return config + + +def check_dir(path): + if not os.path.exists(path): + os.makedirs(path) + + +if __name__ == '__main__': + import matplotlib.pyplot as plt + import scipy + + truncation = 3.0 + N = scipy.stats.norm(loc=0., scale=1.) + + fig = plt.figure() + ax1 = fig.add_subplot(2, 1, 1) + ax1.hist(truncated_noise_sample(dim_z=10000, trunc=truncation).squeeze(), normed=True, bins=30) # histogram of truncated normal distribution + ax2 = fig.add_subplot(2, 1, 2) + ax2.hist(N.rvs(10000), normed=True, bins=30) # histogram of standard normal distribution + plt.show() diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/README.md b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/README.md index a6c4f49874e549c640a839502ad6b2004512370d..68fdd45c69f8fc6458cec5845ee4cb97e25296fe 100644 --- a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/README.md +++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/README.md @@ -1,65 +1,239 @@ -### **BicycleGAN** +
+<h2 id="基本信息.md">Basic Information</h2>
+
+**Publisher: Huawei**
+
+**Application Domain: Image Generation**
+
+**Version: 1.1**
+
+**Modified: 2022.04.21**
+
+**Size:**
+
+**Framework: TensorFlow 1.15.0**
+
+**Model Format:**
+
+**Precision:**
+
+**Processor: Ascend 910**
+
+**Categories: Research**
+
+**Description: Training code for the BicycleGAN image-generation network, based on the TensorFlow framework**
+
+<h2 id="概述.md">Overview</h2>
+
+    BicycleGAN is a TensorFlow implementation of the paper "Toward Multimodal Image-to-Image Translation". The core idea of the paper is to enforce a consistent bidirectional mapping between the input noise vector and the output image. BicycleGAN combines the cVAE-GAN and cLR-GAN approaches to jointly connect the latent vector and the output image in both directions. Images generated by BicycleGAN show better diversity while remaining visually realistic.
+- Reference paper:
+
+    [Zhu, Jun-Yan, et al. "Toward multimodal image-to-image translation." Advances in Neural Information Processing Systems 30 (2017).]
+    - arXiv:1711.11586 (https://arxiv.org/pdf/1711.11586.pdf)
+
+- Reference implementation:
+
+    https://github.com/prakashpandey9/BicycleGAN
+
+- Implementation adapted for the Ascend AI processor:
+
+    https://gitee.com/ascend/ModelZoo-TensorFlow/tree/master/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow
+
+- To fetch the code at a given commit_id via Git:
+
+    ```
+    git clone {repository_url}    # clone the repository
+    cd {repository_name}          # enter the model's code directory
+    git checkout {branch}         # switch to the corresponding branch
+    git reset --hard {commit_id}  # reset the code to the corresponding commit_id
+    cd {code_path}                # switch to the model code path; skip this if the repo only contains this model
+    ```
+
+- Accuracy
+
+| | GPU | NPU |
+|-------|-------|-------|
+| LPIPS | 0.412 | 0.392 |
+
+- Performance
+
+| batchsize | image_size | GPU (v100) | NPU |
+|-----------|------------|---|---|
+| 1 | 256 x 256 | | |
+
+
+## Default Configuration
+
+- Training dataset preprocessing (using the maps training set of the original paper as an example, for reference only):
+
+  - Input image size: 256*256
+  - Input image format: JPEG
+  - Input images are normalized to [-1,1]
+
+- Test dataset preprocessing (using the maps validation set of the original paper as an example, for reference only):
+
+  - Input image size: 256*256
+  - Input image format: JPEG
+  - Input images are normalized to [-1,1]
+
+- Training hyperparameters
+
+  - Batch size: 1
+  - Learning rate (LR): 0.0002
+  - Optimizer: Adam
+  - Train epochs: 20
+
+## Supported Features
+
+| Feature | Supported |
+|-------|------|
+| Distributed training | No |
+| Mixed precision | No |
+| Data parallelism | No |
+
+
-### **概述**
+
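+
+As a reading aid for the overview above, the way BicycleGAN combines these objectives can be sketched in a few lines of TensorFlow 1.15. This is a minimal illustration only, not the code of model.py: the placeholder tensors stand in for outputs of the real encoder/generator, the adversarial (GAN) terms are omitted, and the coefficients are the script defaults listed under "Script Parameters" below (reconst_coeff=10, latent_coeff=0.5, kl_coeff=0.01, Z_dim=8).
+
+```
+import tensorflow as tf
+
+# Stand-ins for tensors produced by the real model:
+real_B = tf.placeholder(tf.float32, [None, 256, 256, 3])      # ground-truth image B
+fake_B_vae = tf.placeholder(tf.float32, [None, 256, 256, 3])  # G(A, E(B)), cVAE-GAN branch
+mu = tf.placeholder(tf.float32, [None, 8])                    # mean of E(B)
+log_sigma = tf.placeholder(tf.float32, [None, 8])             # log-std of E(B)
+z_sampled = tf.placeholder(tf.float32, [None, 8])             # noise fed to the cLR-GAN branch
+mu_recovered = tf.placeholder(tf.float32, [None, 8])          # mean of E(G(A, z))
+
+# cVAE-GAN direction (B -> z -> B_hat): the ground truth must be reconstructed
+reconst_loss = tf.reduce_mean(tf.abs(real_B - fake_B_vae))
+# cLR-GAN direction (z -> B_hat -> z_hat): the sampled code must be recovered
+latent_loss = tf.reduce_mean(tf.abs(z_sampled - mu_recovered))
+# keep the encoded distribution close to the standard normal prior
+kl_loss = 0.5 * tf.reduce_mean(tf.reduce_sum(
+    tf.square(mu) + tf.exp(2.0 * log_sigma) - 2.0 * log_sigma - 1.0, axis=1))
+
+# generator-side objective on top of the adversarial losses
+g_extra_loss = 10 * reconst_loss + 0.5 * latent_loss + 0.01 * kl_loss
+```
+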
+<h2 id="训练环境准备.md">Training Environment Preparation</h2>
-迁移NIMA到ascend910平台
-将结果与原论文进行比较
+1. For hardware environment preparation, see the "[Driver and Firmware Installation and Upgrade Guide](https://support.huawei.com/enterprise/zh/category/ai-computing-platform-pid-1557196528909)" for each hardware product. The firmware and driver matching the CANN version must be installed on the device.
+2. Install Docker on the host and log in to [Ascend Hub](https://ascendhub.huawei.com/#/detail?name=ascend-tensorflow-arm) to obtain the container image.
-
- | | 论文 | ascend |
-|----------------|------|--------|
-| LIPIS Distance | 0.110±0.002 | 待测 |
+
+    The images supported by the current model are listed in [Table 1](#zh-cn_topic_0000001074498056_table1519011227314).
+
+    **Table 1** Image list
-### Requirements
+
+    | Image Name            | Image Version | Compatible CANN Version |
+    |-----------------------|---------------|-------------------------|
+    | ascend-tensorflow-arm | 20.2.0        | 20.2                    |
+
-1. Tensorflow 1.15 -### **代码及路径解释** +
+<h2 id="快速上手.md">Quick Start</h2>
+- Dataset preparation
+1. Training uses the maps dataset from the original paper; please obtain the dataset yourself.
+2. After obtaining the dataset, put it under the model directory and set the dataset path in the training script; it can then be used as is.
+
+
+## Model Training
+
+- Click "Download Now" and choose a suitable way to download the source code package.
+
+- Before launching training, configure the environment variables required by the program.
+
+    For environment variable configuration, see:
+
+    [Environment variable setup for the Ascend 910 training platform](https://gitee.com/ascend/modelzoo/wikis/Ascend%20910%E8%AE%AD%E7%BB%83%E5%B9%B3%E5%8F%B0%E7%8E%AF%E5%A2%83%E5%8F%98%E9%87%8F%E8%AE%BE%E7%BD%AE?sort_id=3148819)
+
+- Single-device training
+
+    1. Configure the training parameters.
+
+       First, in the script test/train_full_1p.sh, configure the training dataset path according to your actual path; the dataset parameter is shown below:
+
+       ```
+       --data_path ./dataset
+       ```
+
+    2. Launch training.
+
+       Launch single-device training (the entry script is modelarts_entry_acc.py; a rough sketch of such an entry follows below):
+
+       ```
+       python3 modelarts_entry_acc.py
+       ```
+
+
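+
+modelarts_entry_acc.py itself is not shown in this change; entry scripts of this kind typically do nothing more than map the ModelArts data_url/train_url arguments onto the shell script configured above. A rough, hypothetical sketch (argument names and paths assumed; this is not the actual file):
+
+```
+import argparse
+import os
+
+# Hypothetical ModelArts entry: forward the OBS-style arguments that
+# ModelArts passes in to the full-accuracy training script under ./test.
+parser = argparse.ArgumentParser()
+parser.add_argument("--data_url", type=str, default="")   # dataset path provided by ModelArts
+parser.add_argument("--train_url", type=str, default="")  # output path provided by ModelArts
+args, _ = parser.parse_known_args()
+
+os.system("bash ./test/train_full_1p.sh --data_path=%s --output_path=%s"
+          % (args.data_url, args.train_url))
+```
+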
+<h2 id="迁移学习指导.md">Transfer Learning Guide</h2>
+
+- Dataset preparation.
+
+    Dataset requirements are as follows:
+
+    1. Obtain the data.
+
+       To use your own dataset, place it under the directory given by the script parameter data_path. The dataset layout used by the reference code is as follows (see the loader snippet at the end of this section):
+
+       - Training set: ./dataset/train
+       - Test set: ./dataset/val
+
+       The dataset may also be placed elsewhere; in that case, simply change the corresponding script argument data_path.
+
+
+- Model training.
+
+    Follow the training steps in "Model Training".
+
+- Model evaluation.
+
+    Follow the validation steps in "Model Training".
+
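+
+As a reference for the layout above: load_data.py simply globs train/*.jpg and val/*.jpg under data_path, so a custom dataset can be sanity-checked with a few lines of Python (illustrative snippet; ./dataset is an assumed location):
+
+```
+import glob
+import os
+
+data_path = "./dataset"  # the value passed via --data_path
+
+# load_data.py resolves the two splits exactly like this:
+train_files = sorted(glob.glob(os.path.join(data_path, "train/*.jpg")))
+val_files = sorted(glob.glob(os.path.join(data_path, "val/*.jpg")))
+
+print("train images:", len(train_files))
+print("val images:", len(val_files))
+```
+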
+<h2 id="高级参考.md">Advanced Reference</h2>
+
+## Scripts and Sample Code
+
+```
 BicycleGAN
 └─
   ├─README.md
-  ├─folder_npu.py 用于检查文件夹结构
-  ├─layers.py 用于创建基础的神经层
-  ├─load_data_npu.py 用于创建数据流
-  ├─log.py 用于创建训练日志
-  ├─model_npu_tmp.py 用于定义模型结构
-  ├─main_npu.py 用于启动训练和测试过程
-  ├─maps 用于存放训练数据集 obs://bicyclegan/BicycleGAN2/maps/
+  ├─dataset      directory for the training dataset
   ├─train
-  └─...
-  ├─val
-  └─...
-  ├─checkpoints 用于存放训练好的模型文件
-  ├─logs 用于存放训练日志
-  ├─results 用于存放训练集和测试集的测试的结果
-  ├─train_1p.sh 模型的启动脚本,
-  ├─test_1p.sh 模型的启动测试脚本
+  └─val
+  ├─logs         directory for log files
+  ├─...
+  └─...
+  ├─weights      directory for pretrained models
+  ├─net-lin_alex_v0.1.pb
+  └─...
+  ├─results      directory for images generated by the model
+  └─...
+```
+
+## Script Parameters
+
+```
+
+--Z_dim            Dimensionality of the latent vector, default 8
+--reconst_coeff    Reconstruction coefficient, default 10
+--latent_coeff     Latent coefficient, default 0.5
+--kl_coeff         KL coefficient, default 0.01
+--learning_rate    Learning rate, default 0.0002
+--image_size       Input image size, default 256
+--batch_size       Training batch size, default 1
+--epoch            Number of training epochs, default 20
+--data_path        Path of the training dataset
+--output_path      Path for logs, model files, etc.
+```
-### **数据集和模型**
-BicycleGAN模型所使用的数据集为Google maps-satellites,是一个pixel to pixel的风格迁移数据集,其中包括1096张实际街景图片和与之对应的地图标签。
+
+## Training Process
-### 训练过程及结果
-epoch=200 \
-batch_size=1 \
-lr=0.0002 \
-耗费近1小时
+
+1. Launch single-device training with the training command described in "Model Training".
-### 数据集百度云链接及提取码
-链接:https://pan.baidu.com/s/17rKdfkp_8_pvn89nII13fg
-提取码:zdx1
+
+2. The reference script stores models under ./checkpoint.
+
+
+
+## Inference/Validation Process
+
- **启动训练和测试过程**
-执行shell脚本:
-```
-bash train_1p.sh
-```
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/eval.py b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/eval.py
new file mode 100644
index 0000000000000000000000000000000000000000..73f2c567162854e2e12bdd8501861ddf529f7061
--- /dev/null
+++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/eval.py
@@ -0,0 +1,77 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +import os +import numpy as np +from tqdm import tqdm +import tensorflow as tf +import lpips_tf +from PIL import Image +from npu_bridge.npu_init import * +from tensorflow_core.core.protobuf.rewriter_config_pb2 import RewriterConfig + + +def eval_tf(basedir): + # NPU config + config = tf.ConfigProto() + custom_op = config.graph_options.rewrite_options.custom_optimizers.add() + custom_op.name = "NpuOptimizer" + config.graph_options.rewrite_options.remapping = RewriterConfig.OFF # 必须显式关闭 + config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF # 必须显式关闭 + + with tf.Session(config=config) as sess: + image_dirs = [] + for root, dirs, files in os.walk(basedir): # find all dirs + if dirs != []: + for dirname in dirs: + full_dirname = os.path.join(root, dirname) + image_dirs.append(full_dirname) + + dist_consecutive = [] + image0_ph = tf.placeholder(tf.float32) + image1_ph = tf.placeholder(tf.float32) + dist_t = lpips_tf.lpips(image0_ph, image1_ph) + + for dir in tqdm(image_dirs): # find all pictures of the dir + lpips_pairs = [] + files = os.listdir(dir) + for file in files: + if file.startswith('random'): + path = os.path.join(dir, file) + image = Image.open(path) + image = np.asarray(image.resize((256, 256), Image.BICUBIC)) + # when evaluating,the image is normalized to [0,1], + # because the lpips will do the work that transforms [0,1] to [-1,1] + image = image.astype(np.float32) / 255.0 + lpips_pairs.append(image) + + for i in range(0, len(lpips_pairs) - 1): # consecutive test,computing (N-1) pairs + dist = sess.run(dist_t, feed_dict={image0_ph: lpips_pairs[i], image1_ph: lpips_pairs[i + 1]}) + dist_consecutive.append(dist) + + print('Final Average Distances : {}'.format(sum(dist_consecutive) / len(dist_consecutive))) diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/folder_npu.py b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/folder.py similarity index 96% rename from TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/folder_npu.py rename to TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/folder.py index 5c68ac5282cf60884ffee6af5d5d9f16cb94f618..8ebbc72083636f07eeecc0b144ff1678e57bcd7e 100644 --- a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/folder_npu.py +++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/folder.py @@ -1,4 +1,3 @@ - # Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -28,11 +27,10 @@ # limitations under the License. import os -# import moxing as mox def check_folder(log_dir): if not os.path.exists(log_dir): - os.mkdir(log_dir) + os.makedirs(log_dir) # print (log_dir) return log_dir diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/layers.py b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/layers.py index 5fa36fb343798195e158ee050f9dea9b905e6153..40e780535ca73d35229f78cb1f5adb1cfa7859a8 100644 --- a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/layers.py +++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/layers.py @@ -1,4 +1,3 @@ - # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -104,8 +103,8 @@ def deconv2d_layer(x, out_channel, filter_height, filter_width, stride_height, s
 # Function for Residual Blocks
-def residual_block1(input, num_filters, filter_size, is_training, name="res_block"):
+def residual_block1(x, num_filters, filter_size, is_training, name="res_block"):
     with tf.variable_scope(name):
-        x_shortcut = input
-        x = lrelu_layer(bn_layer(conv2d_layer(input, num_filters, filter_size, filter_size, 2, 2, name='res_convd1'),
+        x_shortcut = x
+        x = lrelu_layer(bn_layer(conv2d_layer(x, num_filters, filter_size, filter_size, 2, 2, name='res_convd1'),
                                  is_training=is_training, scope='ebn_1'))
         x = bn_layer(conv2d_layer(x, num_filters, 1, 1, 1, 1, name='res_convd2'), is_training=is_training, scope='ebn_2')
diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/load_data.py b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/load_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..12d148f10c189dd0fb26fef5246c1059656f4b7b
--- /dev/null
+++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/load_data.py
@@ -0,0 +1,62 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import numpy as np
+import glob
+import os
+from PIL import Image
+
+
+def load_images(path, image_size):
+    train_all = sorted(glob.glob(os.path.join(path, "train/*.jpg")))
+    test_all = sorted(glob.glob(os.path.join(path, "val/*.jpg")))
+
+    train_input = []
+    test_input = []
+    train_output = []
+    test_output = []
+
+    for img in train_all:
+        full_image = Image.open(img)
+        full_image = np.asarray(full_image.resize((2 * image_size, image_size), Image.BICUBIC))
+
+        # In the maps dataset, the input and the output are merged into one image,
+        # and the output is the left half.
+        train_output.append(full_image[:, :full_image.shape[1] // 2, :] / 255.)
+        train_input.append(full_image[:, full_image.shape[1] // 2:, :] / 255.)
+
+    for img in test_all:
+        full_image = Image.open(img)
+        full_image = np.asarray(full_image.resize((2 * image_size, image_size), Image.BICUBIC))
+
+        test_output.append(full_image[:, :full_image.shape[1] // 2, :] / 255.)
+        test_input.append(full_image[:, full_image.shape[1] // 2:, :] / 255.)
+ + # need to normalize to [-1,1] + return np.asarray(train_input) * 2 - 1, np.asarray(train_output) * 2 - 1, \ + np.asarray(test_input) * 2 - 1, np.asarray(test_output) * 2 - 1 diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/log.py b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/log.py index c3c36238be376cde2ea80305d7d7048bd6e62e30..c24ddf6c139127fe2d292dc0c40f0b90da045015 100644 --- a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/log.py +++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/log.py @@ -1,4 +1,3 @@ - # Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -35,6 +34,7 @@ logging.info("Start BicycleGAN") logger = logging.getLogger('BicycleGAN') logger.setLevel(logging.INFO) + def makedirs(path): if not os.path.exists(path): - os.makedirs(path) + os.makedirs(path) diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/lpips_tf.py b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/lpips_tf.py new file mode 100644 index 0000000000000000000000000000000000000000..98d5d8991cd8d30e0840eeeb2acc1185dc030f3f --- /dev/null +++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/lpips_tf.py @@ -0,0 +1,118 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys + +import tensorflow as tf +from six.moves import urllib + +_URL = 'http://rail.eecs.berkeley.edu/models/lpips' + + +def _download(url, output_dir): + """Downloads the `url` file into `output_dir`. + + Modified from https://github.com/tensorflow/models/blob/master/research/slim/datasets/dataset_utils.py + """ + filename = url.split('/')[-1] + filepath = os.path.join(output_dir, filename) + + def _progress(count, block_size, total_size): + sys.stdout.write('\r>> Downloading %s %.1f%%' % ( + filename, float(count * block_size) / float(total_size) * 100.0)) + sys.stdout.flush() + + filepath, _ = urllib.request.urlretrieve(url, filepath, _progress) + print() + statinfo = os.stat(filepath) + print('Successfully downloaded', filename, statinfo.st_size, 'bytes.') + + +def lpips(input0, input1, model='net-lin', net='alex', version=0.1): + """ + Learned Perceptual Image Patch Similarity (LPIPS) metric. 
+ + Args: + input0: An image tensor of shape `[..., height, width, channels]`, + with values in [0, 1]. + input1: An image tensor of shape `[..., height, width, channels]`, + with values in [0, 1]. + + Returns: + The Learned Perceptual Image Patch Similarity (LPIPS) distance. + + Reference: + Richard Zhang, Phillip Isola, Alexei A. Efros, Eli Shechtman, Oliver Wang. + The Unreasonable Effectiveness of Deep Features as a Perceptual Metric. + In CVPR, 2018. + """ + # flatten the leading dimensions + batch_shape = tf.shape(input0)[:-3] + input0 = tf.reshape(input0, tf.concat([[-1], tf.shape(input0)[-3:]], axis=0)) + input1 = tf.reshape(input1, tf.concat([[-1], tf.shape(input1)[-3:]], axis=0)) + # NHWC to NCHW + input0 = tf.transpose(input0, [0, 3, 1, 2]) + input1 = tf.transpose(input1, [0, 3, 1, 2]) + # normalize to [-1, 1] + input0 = input0 * 2.0 - 1.0 + input1 = input1 * 2.0 - 1.0 + + input0_name, input1_name = '0:0', '1:0' + + default_graph = tf.get_default_graph() + # producer_version = default_graph.graph_def_versions.producer + producer_version = 27 + cache_dir = './weights' + os.makedirs(cache_dir, exist_ok=True) + # files to try. try a specific producer version, but fallback to the version-less version (latest). + pb_fnames = [ + '%s_%s_v%s.pb' % (model, net, version), + '%s_%s_v%s_%d.pb' % (model, net, version, producer_version) + ] + for pb_fname in pb_fnames: + if not os.path.isfile(os.path.join(cache_dir, pb_fname)): + try: + _download(os.path.join(_URL, pb_fname), cache_dir) + except urllib.error.HTTPError: + pass + if os.path.isfile(os.path.join(cache_dir, pb_fname)): + break + + with open(os.path.join(cache_dir, pb_fname), 'rb') as f: + graph_def = tf.GraphDef() + graph_def.ParseFromString(f.read()) + _ = tf.import_graph_def(graph_def, + input_map={input0_name: input0, input1_name: input1}) + distance, = default_graph.get_operations()[-1].outputs + + if distance.shape.ndims == 4: + distance = tf.squeeze(distance, axis=[-3, -2, -1]) + # reshape the leading dimensions + distance = tf.reshape(distance, batch_shape) + return distance diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/main_npu.py b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/main.py similarity index 43% rename from TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/main_npu.py rename to TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/main.py index 9e34fa655591d191a7080d9a8c11278069bd3cab..6216ef54a7d66ea2e863f33cb07ce51da6bd55c4 100644 --- a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/main_npu.py +++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/main.py @@ -1,4 +1,3 @@ - # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -30,63 +29,41 @@ from __future__ import division from __future__ import print_function from __future__ import absolute_import -import sys import argparse -import numpy as np -import tensorflow as tf -from model_npu_tmp import BicycleGAN -from folder_npu import check_folder -# from load_data import load_images import os -# import moxing as mox -from npu_bridge.estimator import npu_ops -from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig -from tensorflow.python.framework import graph_util -from tensorflow.python import pywrap_tensorflow - -os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' -os.environ['ASCEND_SLOG_PRINT_TO_STDOUT'] = "3" +import tensorflow as tf +from load_data import load_images +from model import BicycleGAN +from folder import check_folder +from eval import eval_tf +from npu_bridge.npu_init import * +from tensorflow_core.core.protobuf.rewriter_config_pb2 import RewriterConfig def parse_args(): - desc = "Tensorflow implementation of BicycleGAN" - parser = argparse.ArgumentParser(description=desc) + parser = argparse.ArgumentParser() parser.add_argument('--Z_dim', type=int, default=8, help='Size of latent vector') parser.add_argument('--reconst_coeff', type=float, default=10, help='Reconstruction Coefficient') parser.add_argument('--latent_coeff', type=float, default=0.5, help='Latent Coefficient') parser.add_argument('--kl_coeff', type=float, default=0.01, help='KL Coefficient') parser.add_argument('--learning_rate', type=float, default=0.0002, help='Learning Rate') parser.add_argument('--image_size', type=int, default=256, help='Image Size') - parser.add_argument('--batch_size', type=int, default=1, help='Size of the minibatch') - parser.add_argument('--gan_type', type=str, default='BicycleGAN', help='Type of GAN') - parser.add_argument('--dataset', type=str, default='./maps', help='The name of dataset') - parser.add_argument('--epoch', type=int, default=200, help='The number of epochs to run') - parser.add_argument('--checkpoint_dir', type=str, default='./checkpoints', - help='Directory name to save the checkpoints') - parser.add_argument('--train_url', type=str, default=None, help='train_url') - parser.add_argument('--data_url', type=str, default=None, help='data_url') - parser.add_argument('--result_dir', type=str, default='./results', help='Directory name to save the generated images') - parser.add_argument('--log_dir', type=str, default='./logs', help='Directory name to save training logs') + parser.add_argument('--batch_size', type=int, default=1, help='number of images in one minibatch') + parser.add_argument('--epoch', type=int, default=20, help='The number of epochs to run') + parser.add_argument('--data_path', type=str, default='', help='Datasets location') + parser.add_argument('--output_path', type=str, default='', help='Output location') return check_args(parser.parse_args()) -"""checking arguments""" - - def check_args(args): - # --checkpoint_dir - check_folder(args.checkpoint_dir) - - # --result_dir - check_folder(args.result_dir) - # --result_dir - check_folder(args.log_dir) + check_folder(args.output_path) # --epoch assert args.epoch > 0, 'Total number of epochs must be greater than zero' # --batch_size + # Due to a limitation of the network, the batch_size must currently be set to 1 assert args.batch_size > 0, 'Batch size must be greater than zero' # --z_dim @@ -95,76 +72,36 @@ def check_args(args): return args -"""main function""" - - def main(): # parse arguments args =
parse_args() if args is None: exit() - # Open New Tensorflow Session - model = BicycleGAN - #add - # TMP_DATA_PATH = './' + args.dataset - # TMP_RESULTS_PATH = '.' + args.result_dir - # TMP_CHECKPOINT_PATH = './' + args.checkpoint_dir - # TMP_LOGS_PATH = './' + args.log_dir - - # OBS_DATA_PATH = 'obs://bicyclegan/BicycleGAN2/' + args.dataset - # OBS_RESULTS_PATH = 'obs://bicyclegan/BicycleGAN2/' + args.result_dir - # OBS_CHECKPOINT_DIR = 'obs://bicyclegan/BicycleGAN2/' + args.checkpoint_dir - # OBS_LOG_PATH = 'obs://bicyclegan/BicycleGAN2/' + args.log_dir - # mox.file.make_dirs(TMP_DATA_PATH) - # mox.file.make_dirs(TMP_RESULTS_PATH) - # mox.file.make_dirs(TMP_CHECKPOINT_PATH) - # mox.file.make_dirs(TMP_LOGS_PATH) - # mox.file.copy_parallel(OBS_RESULTS_PATH, TMP_RESULTS_PATH) - # mox.file.copy_parallel(OBS_DATA_PATH, TMP_DATA_PATH) - # mox.file.copy_parallel(OBS_LOG_PATH, TMP_LOGS_PATH) - # mox.file.copy_parallel(OBS_CHECKPOINT_DIR, TMP_CHECKPOINT_PATH) - config = tf.ConfigProto(allow_soft_placement=True) + # NPU config + config = tf.ConfigProto() custom_op = config.graph_options.rewrite_options.custom_optimizers.add() custom_op.name = "NpuOptimizer" - custom_op.parameter_map["use_off_line"].b = True - config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + config.graph_options.rewrite_options.remapping = RewriterConfig.OFF # must be disabled explicitly + config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF # must be disabled explicitly - config.gpu_options.allow_growth = True + # Open New Tensorflow Session + model = BicycleGAN with tf.Session(config=config) as sess: - # Declare instance for GAN - - gan = None - if args.gan_type == model.model_name: - gan = model(sess, - epoch=args.epoch, - batch_size=args.batch_size, - Z_dim=args.Z_dim, - image_size=args.image_size, - dataset_name=args.dataset, - checkpoint_dir=args.checkpoint_dir, - result_dir=args.result_dir, - log_dir=args.log_dir) - if gan is None: - raise Exception("[!] There is no option for " + args.gan_type) - - # Build Tesnorflow Graph - gan.build_model() - - # show network architecture - # show_all_variables() - - # Launch the graph in a session - gan.train() + gan = model(sess=sess, args=args) + + train_A, train_B, test_A, test_B = load_images(args.data_path, args.image_size) + assert len(test_A) == len(test_B) + assert len(train_A) == len(train_B) + + gan.train(train_A=train_A, train_B=train_B) print(" [*] Training finished!") - # visualize learned generator - gan.test() + gan.test(test_A=test_A, test_B=test_B) print(" [*] Testing finished!") - # mox.file.copy_parallel(TMP_RESULTS_PATH, OBS_RESULTS_PATH) - # mox.file.copy_parallel(TMP_DATA_PATH, OBS_DATA_PATH) - # mox.file.copy_parallel(TMP_LOGS_PATH, OBS_LOG_PATH) - # mox.file.copy_parallel(TMP_CHECKPOINT_PATH, OBS_CHECKPOINT_DIR) + + path = os.path.join(args.output_path, "results", "test_results") + eval_tf(path) if __name__ == '__main__': diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/model_npu_tmp.py b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/model.py similarity index 69% rename from TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/model_npu_tmp.py rename to TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/model.py index dbbc737a1489382d43f32372fa77ea3ecbbd953d..e11930396c568016218246d68bf286f9c407ceb1 100644 --- a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/model_npu_tmp.py +++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/model.py @@ -1,4 +1,3 @@ - # Copyright 2017 The TensorFlow Authors.
All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -27,69 +26,106 @@ # See the License for the specific language governing permissions and # limitations under the License. -# -*- coding: utf-8 -*- from __future__ import division import os import time -import glob -import tensorflow as tf -import numpy as np -# import scipy.misc -from PIL import Image +from tqdm import trange from layers import * -# from tensorflow.contrib import layers -from folder_npu import check_folder -from load_data_npu import load_images, save_images, imsave, load_batch_image, load_test_image -import matplotlib - -matplotlib.use('Tkagg') -#import matplotlib.pyplot as plt -#import moxing as mox - -os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3" +from folder import check_folder +from imageio import imwrite +import random class BicycleGAN(object): - model_name = "BicycleGAN" - def __init__(self, sess, epoch, batch_size, Z_dim, image_size, dataset_name, checkpoint_dir, result_dir, log_dir): - self.TMP_DATA_PATH = dataset_name - self.TMP_RESULTS_PATH = result_dir - self.TMP_CHECKPOINT_PATH = checkpoint_dir - self.TMP_LOGS_PATH = log_dir + def __init__(self, sess, args): self.sess = sess - self.dataset_name = dataset_name - self.checkpoint_dir = checkpoint_dir - self.result_dir = result_dir - self.log_dir = log_dir - self.epoch = epoch - self.batch_size = batch_size - self.image_size = image_size - - self.input_width = 256 - self.input_height = 256 - self.output_width = 256 - self.output_height = 256 - self.channels = 3 - - self.Z_dim = Z_dim + self.data_path = args.data_path + self.checkpoint_dir = os.path.join(args.output_path, 'checkpoints') + self.result_dir = os.path.join(args.output_path, 'results') + self.log_dir = os.path.join(args.output_path, 'logs') + self.epoch = args.epoch + self.batch_size = args.batch_size + self.image_size = args.image_size # train - self.learning_rate = 0.0002 - self.beta1 = 0.5 - self.beta2 = 0.999 - self.reconst_coeff = 10 - self.latent_coeff = 0.5 - self.kl_coeff = 0.01 + self.Z_dim = args.Z_dim + self.learning_rate = args.learning_rate + self.reconst_coeff = args.reconst_coeff + self.latent_coeff = args.latent_coeff + self.kl_coeff = args.kl_coeff # test - self.sample_num = 64 + self.sample_num = 20 # how many images the model generates for one input + + # Input Image A + self.image_A = tf.placeholder(tf.float32, [self.batch_size] + [self.image_size, self.image_size, 3], + name='input_images') + + # Output Image B + self.image_B = tf.placeholder(tf.float32, [self.batch_size] + [self.image_size, self.image_size, 3], + name='output_images') + + # Noise z + self.z = tf.placeholder(tf.float32, [self.batch_size, self.Z_dim], name='latent_vector') - # load data - # self.train_A, self.train_B, self.test_A, self.test_B = load_images() - #self.train_A = mox.file.glob(dataset_name + "/train/*.jpg") - self.train_A = glob.glob(dataset_name + "/train/*.jpg") - self.num_batches = len(self.train_A) // self.batch_size + ''' Implementation of cVAE-GAN: B -> z -> B' ''' + # Encoder is fed the correct output image B for encoding it to the latent representation z to learn the distribution of z + # It outputs 3 things: Encoded value z as Q(z|B), mu of Q(z|B), log_sigma of Q(z|B) + self.encoded_true_img, self.encoded_mu, self.encoded_log_sigma = self.Encoder(self.image_B) + + # This encoded representation z along with the input image A is then fed to the Generator to output the image B' + self.desired_gen_img = self.Generator(self.image_A, self.encoded_true_img) # Image
B_cap + + ''' Implementation of cLR-GAN: z -> B' -> z' ''' + # Now, z is sampled from a normal distribution N(z) which in addition to the input image A is fed to the Generator to output B' + self.LR_desired_img = self.Generator(self.image_A, self.z) # Generated Image B' + + # B' is then fed to the Encoder to output z' which we try to keep close to N(z). + self.reconst_z, self.reconst_mu, self.reconst_log_sigma = self.Encoder(self.LR_desired_img) # Encoded z' + + self.P_real = self.Discriminator(self.image_B) # Probability of ground_truth/real image (B) as real/fake + self.P_fake = self.Discriminator( + self.LR_desired_img) # Probability of generated output images (G(A, N(z))) as real/fake + self.P_fake_encoded = self.Discriminator( + self.desired_gen_img) # Probability of generated output images (G(A, Q(z|B))) as real/fake + + self.loss_vae_gan_D = (tf.reduce_mean(tf.squared_difference(self.P_real, 0.9)) + tf.reduce_mean( + tf.square(self.P_fake_encoded))) + + self.loss_lr_gan_D = ( + tf.reduce_mean(tf.squared_difference(self.P_real, 0.9)) + tf.reduce_mean(tf.square(self.P_fake))) + + self.loss_vae_gan_GE = tf.reduce_mean(tf.squared_difference(self.P_fake_encoded, 0.9)) # G + + self.loss_gan_G = tf.reduce_mean(tf.squared_difference(self.P_fake, 0.9)) + + self.loss_vae_GE = tf.reduce_mean(tf.abs(self.image_B - self.desired_gen_img)) # G + + self.loss_latent_GE = tf.reduce_mean(tf.abs(self.z - self.reconst_z)) # G + + self.loss_kl_E = 0.5 * tf.reduce_mean( + -1 - self.encoded_log_sigma + self.encoded_mu ** 2 + tf.exp(self.encoded_log_sigma)) + + self.loss_D = self.loss_vae_gan_D + self.loss_lr_gan_D - tf.reduce_mean(tf.squared_difference(self.P_real, 0.9)) + self.loss_G = self.loss_vae_gan_GE + self.loss_gan_G + self.reconst_coeff * self.loss_vae_GE + self.latent_coeff * self.loss_latent_GE + self.loss_E = self.loss_vae_gan_GE + self.reconst_coeff * self.loss_vae_GE + self.latent_coeff * self.loss_latent_GE + self.kl_coeff * self.loss_kl_E + + # Optimizer + self.dis_var = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="Discriminator") + self.gen_var = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="Generator") + self.enc_var = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="Encoder") + opt = tf.train.AdamOptimizer(self.learning_rate, beta1=0.5) + + with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)): + self.D_solver = opt.minimize(self.loss_D, var_list=self.dis_var) + self.G_solver = opt.minimize(self.loss_G, var_list=self.gen_var) + self.E_solver = opt.minimize(self.loss_E, var_list=self.enc_var) + + """ Summary """ + self.d_loss_sum = tf.summary.scalar("d_loss", self.loss_D) + self.g_loss_sum = tf.summary.scalar("g_loss", self.loss_G) + self.e_loss_sum = tf.summary.scalar("e_loss", self.loss_E) def Discriminator(self, x, is_training=True, reuse=True): with tf.variable_scope("Discriminator", reuse=tf.AUTO_REUSE): @@ -197,201 +233,108 @@ class BicycleGAN(object): return z, mu, log_sigma - def build_model(self): - image_dims = [self.input_width, self.input_height, self.channels] - - ''' Graph input ''' - # Input Image A - self.image_A = tf.placeholder(tf.float32, [self.batch_size] + image_dims, name='input_images') - - # Output Image B - self.image_B = tf.placeholder(tf.float32, [self.batch_size] + image_dims, name='output_images') - - # Noise z - self.z = tf.placeholder(tf.float32, [self.batch_size, self.Z_dim], name='latent_vector') - - ''' Implementation of cVAE-GAN: B -> z -> B' ''' - # Encoder is fed the correct output image B for encding
it to the latent representation z to learn the distribution of z - # It outputs 3 things: Enocded value z as Q(z|B), mu of Q(z|B), log_sigma of Q(z|B) - self.encoded_true_img, self.encoded_mu, self.encoded_log_sigma = self.Encoder(self.image_B) - - # This encoded representation z along with the input image A is then fed to the Generator to output the image B' - self.desired_gen_img = self.Generator(self.image_A, self.encoded_true_img) # Image B_cap - - ''' Implementation of cLR-GAN: z -> B' -> z' ''' - # Now, z is sampled from a normal distribution N(z) which in addition to the input image A is fed to the Generator to output B' - self.LR_desired_img = self.Generator(self.image_A, self.z) # Generated Image B' - - # B' is then fed to the Encoder to output z' which we try to be close to N(z). - self.reconst_z, self.reconst_mu, self.reconst_log_sigma = self.Encoder(self.LR_desired_img) # Encoded z' - - self.P_real = self.Discriminator(self.image_B) # Probability of ground_truth/real image (B) as real/fake - self.P_fake = self.Discriminator( - self.LR_desired_img) # Probability of generated output images (G(A, N(z)) as real/fake - self.P_fake_encoded = self.Discriminator( - self.desired_gen_img) # Probability of generated output images (G(A, Q(z|B)) as real/fake - - self.loss_vae_gan_D = (tf.reduce_mean(tf.squared_difference(self.P_real, 0.9)) + tf.reduce_mean( - tf.square(self.P_fake_encoded))) - - self.loss_lr_gan_D = ( - tf.reduce_mean(tf.squared_difference(self.P_real, 0.9)) + tf.reduce_mean(tf.square(self.P_fake))) - - self.loss_vae_gan_GE = tf.reduce_mean(tf.squared_difference(self.P_fake_encoded, 0.9)) # G - - self.loss_gan_G = tf.reduce_mean(tf.squared_difference(self.P_fake, 0.9)) - - self.loss_vae_GE = tf.reduce_mean(tf.abs(self.image_B - self.desired_gen_img)) # G - - self.loss_latent_GE = tf.reduce_mean(tf.abs(self.z - self.reconst_z)) # G - - self.loss_kl_E = 0.5 * tf.reduce_mean( - -1 - self.encoded_log_sigma + self.encoded_mu ** 2 + tf.exp(self.encoded_log_sigma)) - - self.loss_D = self.loss_vae_gan_D + self.loss_lr_gan_D - tf.reduce_mean(tf.squared_difference(self.P_real, 0.9)) - self.loss_G = self.loss_vae_gan_GE + self.loss_gan_G + self.reconst_coeff * self.loss_vae_GE + self.latent_coeff * self.loss_latent_GE - self.loss_E = self.loss_vae_gan_GE + self.reconst_coeff * self.loss_vae_GE + self.latent_coeff * self.loss_latent_GE + self.kl_coeff * self.loss_kl_E - - # Optimizer - self.dis_var = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="Discriminator") - self.gen_var = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="Generator") - self.enc_var = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="Encoder") - opt = tf.train.AdamOptimizer(self.learning_rate, beta1=0.5) - - with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)): - self.D_solver = opt.minimize(self.loss_D, var_list=self.dis_var) - self.G_solver = opt.minimize(self.loss_G, var_list=self.gen_var) - self.E_solver = opt.minimize(self.loss_E, var_list=self.enc_var) - - """ Testing """ - # self.fake_images = self.Generator(self.image_A, self.z, is_training=False, reuse=True) - - """ Summary """ - - self.d_loss_sum = tf.summary.scalar("d_loss", self.loss_D) - self.g_loss_sum = tf.summary.scalar("g_loss", self.loss_G) - self.e_loss_sum = tf.summary.scalar("e_loss", self.loss_E) - - # final summary operations - # self.g_sum = tf.summary.merge([d_loss_fake_sum, g_loss_sum]) - # self.d_sum = tf.summary.merge([d_loss_real_sum, d_loss_sum]) - # self.q_sum = 
tf.summary.merge([q_loss_sum, q_disc_sum, q_cont_sum]) - - def train(self): - - # include code for logger.info() - + def train(self, train_A, train_B): # First initialize all variables tf.global_variables_initializer().run() - # Input to graph from training data - self.z_sample = np.random.normal(size=(self.batch_size, self.Z_dim)) - input_img1, batch_imagesB = load_batch_image(0, self.dataset_name) - self.input_img1 = np.expand_dims(input_img1, axis=0) - # self.input_img1 = self.train_A[0:self.batch_size] # training results for a single image # saving the model self.saver = tf.train.Saver() # summary writer - self.writer = tf.summary.FileWriter(self.log_dir + '/' + self.model_name, self.sess.graph) + self.writer = tf.summary.FileWriter(self.log_dir, self.sess.graph) + + self.num_batches = len(train_A) // self.batch_size + # restore checkpoint if it exists could_load, checkpoint_counter = self.load(self.checkpoint_dir) if could_load: start_epoch = (int)(checkpoint_counter / self.num_batches) - start_batch_id = checkpoint_counter - start_epoch * self.num_batches counter = checkpoint_counter print(" [*] Load SUCCESS") else: start_epoch = 0 - start_batch_id = 0 counter = 1 print(" [!] Load failed...") + # for generating temporary images during training + self.img_sample = np.expand_dims(train_A[0], axis=0) + # loop for epoch for epoch in range(start_epoch, self.epoch): - - # get batch data - for idx in range(len(self.train_A)): + for idx in range(len(train_A)): start_time = time.time() - batch_imagesA, batch_imagesB = load_batch_image(idx, self.dataset_name) - batch_imagesA = np.expand_dims(batch_imagesA, axis=0) - batch_imagesB = np.expand_dims(batch_imagesB, axis=0) - batch_z = np.random.normal(size=(self.batch_size, self.Z_dim)) + + # get data + image_A = np.expand_dims(train_A[idx], axis=0) + image_B = np.expand_dims(train_B[idx], axis=0) + random_z = np.random.normal(size=(self.batch_size, self.Z_dim)) _, summary_str_d, D_loss_curr = self.sess.run([self.D_solver, self.d_loss_sum, self.loss_D], - feed_dict={self.image_A: batch_imagesA, - self.image_B: batch_imagesB, self.z: batch_z}) + feed_dict={self.image_A: image_A, self.image_B: image_B, + self.z: random_z}) self.writer.add_summary(summary_str_d, counter) _, summary_str_g, G_loss_curr = self.sess.run([self.G_solver, self.g_loss_sum, self.loss_G], - feed_dict={self.image_A: batch_imagesA, - self.image_B: batch_imagesB, self.z: batch_z}) + feed_dict={self.image_A: image_A, self.image_B: image_B, + self.z: random_z}) self.writer.add_summary(summary_str_g, counter) _, summary_str_e, E_loss_curr = self.sess.run([self.E_solver, self.e_loss_sum, self.loss_E], - feed_dict={self.image_A: batch_imagesA, - self.image_B: batch_imagesB, self.z: batch_z}) + feed_dict={self.image_A: image_A, self.image_B: image_B, + self.z: random_z}) self.writer.add_summary(summary_str_e, counter) - # display training status - counter += 1 - print("Epoch: [%2d] [%4d/%4d] time: %4.4f d_loss: %.8f g_loss: %.8f e_loss: %.8f" % ( - epoch, idx, self.num_batches, time.time() - start_time, D_loss_curr, G_loss_curr, E_loss_curr)) - # Saving training results for every 100 examples + temp_dir = check_folder(os.path.join(self.result_dir, 'temps')) if counter % 100 == 0: + z_sample = np.random.normal(size=(1, self.Z_dim)) samples = self.sess.run(self.LR_desired_img, - feed_dict={self.image_A: self.input_img1, self.z: self.z_sample}) - tot_num_samples = min(self.sample_num, self.batch_size) - manifold_h = int(np.floor(np.sqrt(tot_num_samples))) - manifold_w =
int(np.floor(np.sqrt(tot_num_samples))) - save_images(samples[:manifold_h * manifold_w, :, :, :], [manifold_h, manifold_w], check_folder( - self.result_dir + '/' + self.model_dir) + '/' + self.model_name + '_train_{:02d}_{:04d}.png'.format( - epoch, idx)) - # mox.file.copy_parallel(self.TMP_RESULTS_PATH, self.OBS_RESULTS_PATH) - # mox.file.copy_parallel(self.TMP_DATA_PATH, self.OBS_DATA_PATH) - # mox.file.copy_parallel(self.TMP_LOGS_PATH, self.OBS_LOG_PATH) - # mox.file.copy_parallel(self.TMP_CHECKPOINT_PATH, self.OBS_CHECKPOINT_DIR) - # After an epoch, start_batch_id is set to zero - start_batch_id = 0 - # non-zero value is only for the first epoch after loading pre-trained model + feed_dict={self.image_A: self.img_sample, self.z: z_sample}) + # transform from [-1,1] to [0,255] + samples = (np.asarray(samples + 1) / 2 * 255).astype(np.uint8) + imwrite(os.path.join(temp_dir, f'train_{epoch}_{idx}.jpg'), + np.squeeze(samples)) + + # display training status + counter += 1 + cost_time = time.time() - start_time + print("epoch : {}----step : {}----|d_loss : {}----g_loss : {}----e_loss : {}|----sec/step : {}" + .format(epoch, counter, D_loss_curr, G_loss_curr, E_loss_curr, cost_time)) + # save model self.save(self.checkpoint_dir, counter) # save model for final step self.save(self.checkpoint_dir, counter) - def test(self): + def test(self, test_A, test_B): # generate images self.step = 0 - base_dir = os.path.join('test_results') - check_folder(os.path.join(self.result_dir, base_dir)) - #test_all = mox.file.glob(self.dataset_name + "/val/*.jpg") - test_all = glob.glob(self.dataset_name + "/val/*.jpg") - for idx in range(len(test_all)): - self.step += 1 - img_A = load_test_image(idx, self.dataset_name) - input_img = np.expand_dims(img_A, axis=0) - z = np.random.normal(size=(self.batch_size, self.Z_dim)) - LR_desired_img = self.sess.run(self.LR_desired_img, feed_dict={self.image_A: input_img, self.z: z}) - image = LR_desired_img[0] - image = Image.fromarray(np.uint8((np.concatenate((img_A * 255., image * 255.), axis=1)))) - image.save(os.path.join(self.result_dir, base_dir, 'random_{}.jpg'.format(self.step))) - - @property - def model_dir(self): - return "{}_{}_{}_{}".format(self.model_name, (self.dataset_name).split("/")[-1], self.batch_size, self.Z_dim) - - def save(self, checkpoint_dir, step): - checkpoint_dir = os.path.join(checkpoint_dir, self.model_dir) + for idx in trange(len(test_A)): + self.step += 1 + save_dir = check_folder(os.path.join(self.result_dir, "test_results", str(self.step))) + + # get input and save groundtruth + image_A = np.expand_dims(test_A[idx], axis=0) + imwrite(os.path.join(save_dir, f'ground_truth.jpg'), + (np.asarray(test_B[idx] + 1) / 2 * 255).astype(np.uint8)) + + # generate images + for i in range(0, self.sample_num): + z = np.random.normal(size=(1, self.Z_dim)) + LR_desired_img = self.sess.run(self.LR_desired_img, + feed_dict={self.image_A: image_A, self.z: z}) + # transform from [-1,1] to [0,255] + LR_desired_img = (np.asarray(LR_desired_img + 1) / 2 * 255).astype(np.uint8) + imwrite(os.path.join(save_dir, f'random_{i + 1}.jpg'), + np.squeeze(LR_desired_img)) + + def save(self, checkpoint_dir, step): # save checkpoints if not os.path.exists(checkpoint_dir): - #mox.file.make_dirs(checkpoint_dir) - os.mkdir(checkpoint_dir) - self.saver.save(self.sess, os.path.join(checkpoint_dir, self.model_name + '.model'), global_step=step) + os.makedirs(checkpoint_dir) + self.saver.save(self.sess, os.path.join(checkpoint_dir, 'BicycleGAN.model'), global_step=step) - def 
load(self, checkpoint_dir): + def load(self, checkpoint_dir): # load checkpoint if it exists import re print(" [*] Reading checkpoints...") - checkpoint_dir = os.path.join(checkpoint_dir, self.model_dir) ckpt = tf.train.get_checkpoint_state(checkpoint_dir) if ckpt and ckpt.model_checkpoint_path: diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/modelarts_entry_acc.py b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/modelarts_entry_acc.py new file mode 100644 index 0000000000000000000000000000000000000000..13077b10e660de32d6f7861257a50e1a01ede9ba --- /dev/null +++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/modelarts_entry_acc.py @@ -0,0 +1,63 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
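The tail of `load()` above is cut off in this hunk. For reference, a minimal sketch of the TF1 restore pattern it follows; the regex-based step recovery is an assumption on my part, inferred from the `'BicycleGAN.model'` name plus `global_step` used in `save()` above:

```
# Sketch only -- not part of this diff. Assumes checkpoints are written as
# 'BicycleGAN.model-<step>' by save() above, so the trailing digits encode
# the global step at which the checkpoint was taken.
import os
import re
import tensorflow as tf

def load_checkpoint(saver, sess, checkpoint_dir):
    ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
    if ckpt and ckpt.model_checkpoint_path:
        ckpt_name = os.path.basename(ckpt.model_checkpoint_path)
        saver.restore(sess, os.path.join(checkpoint_dir, ckpt_name))
        counter = int(re.search(r"(\d+)$", ckpt_name).group(1))  # saved step
        return True, counter
    return False, 0
```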
+ +import os +import argparse +import sys + +# 解析输入参数data_url +parser = argparse.ArgumentParser() +parser.add_argument("--data_url", type=str, default="/home/ma-user/modelarts/inputs/data_url_0") +parser.add_argument("--train_url", type=str, default="/home/ma-user/modelarts/outputs/train_url_0/") +config = parser.parse_args() + +print("[CANN-Modelzoo] code_dir path is [%s]" % (sys.path[0])) +code_dir = sys.path[0] +os.chdir(code_dir) +print("[CANN-Modelzoo] work_dir path is [%s]" % (os.getcwd())) + +print("[CANN-Modelzoo] before train - list my run files:") +os.system("ls -al /usr/local/Ascend/ascend-toolkit/") + +print("[CANN-Modelzoo] before train - list my dataset files:") +os.system("ls -al %s" % config.data_url) + +print("[CANN-Modelzoo] start run train shell") +# 设置sh文件格式为linux可执行 +os.system("dos2unix ./test/*") + +# 执行train_full_1p.sh或者train_performance_1p.sh,需要用户自己指定 +# full和performance的差异,performance只需要执行很少的step,控制在15分钟以内,主要关注性能FPS +os.system("bash ./test/train_full_1p.sh --data_path=%s --output_path=%s " % (config.data_url, config.train_url)) + +print("[CANN-Modelzoo] finish run train shell") + +# 将当前执行目录所有文件拷贝到obs的output进行备份 +print("[CANN-Modelzoo] after train - list my output files:") +os.system("cp -r %s %s " % (code_dir, config.train_url)) +os.system("ls -al %s" % config.train_url) diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/modelarts_entry_perf.py b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/modelarts_entry_perf.py new file mode 100644 index 0000000000000000000000000000000000000000..14384e227a0fa90a514254590aef5078c62ff700 --- /dev/null +++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/modelarts_entry_perf.py @@ -0,0 +1,63 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
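Both ModelArts entry scripts (the accuracy variant above, the performance variant below) drive training through `os.system`, which discards a non-zero exit status from the launched shell, so a failed run still proceeds to the OBS copy step. Where that matters, the same flow can be written with `subprocess` -- a sketch only, not part of this PR; paths mirror the argparse defaults above:

```
# Sketch: fail fast if the training shell exits non-zero instead of
# silently copying partial outputs back to OBS.
import glob
import subprocess

def run_training(data_url, train_url):
    # normalize line endings of the test scripts (mirrors `dos2unix ./test/*`)
    subprocess.run(["dos2unix"] + glob.glob("./test/*"), check=False)
    # raises CalledProcessError on failure
    subprocess.run(["bash", "./test/train_full_1p.sh",
                    "--data_path=%s" % data_url,
                    "--output_path=%s" % train_url],
                   check=True)
```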
+ +import os +import argparse +import sys + +# 解析输入参数data_url +parser = argparse.ArgumentParser() +parser.add_argument("--data_url", type=str, default="/home/ma-user/modelarts/inputs/data_url_0") +parser.add_argument("--train_url", type=str, default="/home/ma-user/modelarts/outputs/train_url_0/") +config = parser.parse_args() + +print("[CANN-Modelzoo] code_dir path is [%s]" % (sys.path[0])) +code_dir = sys.path[0] +os.chdir(code_dir) +print("[CANN-Modelzoo] work_dir path is [%s]" % (os.getcwd())) + +print("[CANN-Modelzoo] before train - list my run files:") +os.system("ls -al /usr/local/Ascend/ascend-toolkit/") + +print("[CANN-Modelzoo] before train - list my dataset files:") +os.system("ls -al %s" % config.data_url) + +print("[CANN-Modelzoo] start run train shell") +# 设置sh文件格式为linux可执行 +os.system("dos2unix ./test/*") + +# 执行train_full_1p.sh或者train_performance_1p.sh,需要用户自己指定 +# full和performance的差异,performance只需要执行很少的step,控制在15分钟以内,主要关注性能FPS +os.system("bash ./test/train_performance_1p.sh --data_path=%s --output_path=%s " % (config.data_url, config.train_url)) + +print("[CANN-Modelzoo] finish run train shell") + +# 将当前执行目录所有文件拷贝到obs的output进行备份 +print("[CANN-Modelzoo] after train - list my output files:") +os.system("cp -r %s %s " % (code_dir, config.train_url)) +os.system("ls -al %s" % config.train_url) diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/test/train_full_1p.sh index a6b1b1f81516e54b703287701c163c04f59c44dd..60e2f4fd0414e5512f33377f1f5397b5de2e089a 100644 --- a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/test/train_full_1p.sh +++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/test/train_full_1p.sh @@ -1,203 +1,184 @@ -Copyright 2019 The TensorFlow Authors. All rights reserved. - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
\ No newline at end of file +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +modelarts_flag=${MODELARTS_MODEL_PATH} +if [ x"${modelarts_flag}" != x ]; +then + echo "running without etp..." 
+ print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` + print_log="/home/ma-user/modelarts/log/${print_log_name}" +fi +echo "### get your log here : ${print_log}" + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 基础参数,需要模型审视修改 +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +batch_size=1 + +if [ x"${modelarts_flag}" != x ]; +then + python3.7 ./main.py --data_path=${data_path}/maps --output_path=${output_path} +else + python3.7 ./main.py --data_path=${data_path}/maps --output_path=${output_path} > ${print_log} 2>&1 +fi + +# 性能相关数据计算 +StepTime=`grep "sec/step :" ${print_log} | tail -n 7 | awk '{print $NF}' | awk '{sum+=$1} END {print sum/NR}'` +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` + +# 精度相关数据计算 +train_accuracy=`grep "Final Average Distances :" ${print_log} | awk '{print $NF}'` +# 提取所有loss打印信息 +#grep "loss :" ${print_log} | awk -F ":" '{print $4}' | awk -F "-" '{print $1}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt +grep "d_loss :" ${print_log} | awk -F "|" '{print $2}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + +########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." 
+ echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/test/train_performance_1p.sh index 613238bc25aa2b5c68efb2b7de41c8fd08b04445..e7b4b0ec33976f3904379eba15a771389d21eec7 100644 --- a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/test/train_performance_1p.sh +++ b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/test/train_performance_1p.sh @@ -1,129 +1,184 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# !/bin/bash -cur_path=`pwd`/.. 
-echo $cur_path +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + export RANK_SIZE=1 +export RANK_ID=0 export JOB_ID=10087 -#export ASCEND_DEVICE_ID=0 -#模型训练参数 -Network="BicycleGAN_ID1287_for_TensorFlow" -data_path='' -result_path=${cur_path}/test/output/$ASCEND_DEVICE_ID/ckpt/ -batch_size=1 -epochs=2 +# 路径参数初始化 +data_path="" +output_path="" -# 帮助信息, +# 帮助信息,不需要修改 if [[ $1 == --help || $1 == -h ]];then echo"usage:./train_performance_1P.sh " echo " " echo "parameter explain: - --Network name of the network will be trained - --data_path source data of training , default is ${cur_path}/MNIST_data/ - --result_path output path, default is ${cur_path}/test/output/$ASCEND_DEVICE_ID/ckpt/ - --batch_size batchsize of input per step, default is 256 - --epochs num of epochs, default is 1 - -h/--help show help message + --data_path # dataset of training + --output_path # output of training + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message " exit 1 fi -#参数校验,不需要修改 +# 参数校验,不需要修改 for para in $* do if [[ $para == --data_path* ]];then data_path=`echo ${para#*=}` - elif [[ $para == --result_path* ]];then - result_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` elif [[ $para == --batch_size* ]];then batch_size=`echo ${para#*=}` - elif [[ $para == --epochs* ]];then - epochs=`echo ${para#*=}` fi done -#检查data_path -if [[ $data_path == "" ]];then - echo "[Error] para \"data_path \" must be config" - exit 1 +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +modelarts_flag=${MODELARTS_MODEL_PATH} +if [ x"${modelarts_flag}" != x ]; +then + echo "running with modelarts..." 
+ print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` + print_log="/home/ma-user/modelarts/log/${print_log_name}" fi +echo "### get your log here : ${print_log}" -#训练过程 -cd ${cur_path} +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} -start=$(date +%s) +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 基础参数,需要模型审视修改 +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +batch_size=1 +epoch=1 -if [ -d ${cur_path}/test/output/${ASCEND_DEVICE_ID} ];then - rm -rf ${cur_path}/test/output/${ASCEND_DEVICE_ID} - mkdir -p ${cur_path}/test/output/$ASCEND_DEVICE_ID/ckpt - else - mkdir -p ${cur_path}/test/output/$ASCEND_DEVICE_ID/ckpt +if [ x"${modelarts_flag}" != x ]; +then + python3.7 ./main.py --data_path=${data_path}/maps --output_path=${output_path} --epoch=${epoch} +else + python3.7 ./main.py --data_path=${data_path}/maps --output_path=${output_path} --epoch=${epoch} > ${print_log} 2>&1 fi -python3 main_npu.py \ - --dataset=${data_path}/maps/ \ - --checkpoint_dir=${result_path} \ - --epoch=${epochs} \ - --batch_size=${batch_size} > ${cur_path}/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & -wait - -end=$(date +%s) -e2e_time=$(( $end - $start )) -echo "Final Training Duration sec : $e2e_time" - -#结果打印 + +# 性能相关数据计算 +StepTime=`grep "sec/step :" ${print_log} | tail -n 7 | awk '{print $NF}' | awk '{sum+=$1} END {print sum/NR}'` +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` + +# 精度相关数据计算 +train_accuracy=`grep "Final Average Distances :" ${print_log} | awk '{print $NF}'` +# 提取所有loss打印信息 +#grep "loss :" ${print_log} | awk -F ":" '{print $4}' | awk -F "-" '{print $1}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt +grep "d_loss :" ${print_log} | awk -F "|" '{print $2}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + +########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." 
+ echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + echo "------------------ Final result ------------------" -#输出性能 -TrainingTime=`grep "time" ${cur_path}/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $6*1000}'` -FPS=`awk 'BEGIN{printf "%.2f\n",'${batch_size}'*1000/'${TrainingTime}'}'` -#accuracy=`grep "accuracy" ${cur_path}/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $2}'| sed 's/,//g'` -##打印 -echo "Final Performance TrainingTime : $TrainingTime" +# 输出性能FPS/单step耗时/端到端耗时 echo "Final Performance images/sec : $FPS" -#echo "Final Accuracy : ${accuracy}" - -BatchSize=${batch_size} -DeviceType=`uname -m` -CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' - -#train_loss -grep "d_loss:" ${cur_path}/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk '{print $8}' >> $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt -#最后一个迭代loss值,不需要修改 -ActualLoss=`awk 'END {print $1}' $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` - -echo "Network = ${Network}" > $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "RankSize = ${RANK_SIZE}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "BatchSize = ${BatchSize}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "DeviceType = ${DeviceType}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "CaseName = ${CaseName}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "ActualFPS = ${FPS}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainingTime = ${TrainingTime}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log -#echo "Accuracy = ${accuracy}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "ActualLoss = ${ActualLoss}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/train_1p.sh 
b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/train_1p.sh deleted file mode 100644 index 3f71b5f474747f973a4e45862c9e1fb816aad7d5..0000000000000000000000000000000000000000 --- a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/train_1p.sh +++ /dev/null @@ -1 +0,0 @@ -python main_npu.py \ No newline at end of file diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/weights/net-lin_alex_v0.1.pb b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/weights/net-lin_alex_v0.1.pb new file mode 100644 index 0000000000000000000000000000000000000000..c8ec4cc41e4090f3957f157a2d7fb236f7e21f2d Binary files /dev/null and b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/weights/net-lin_alex_v0.1.pb differ diff --git a/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/weights/net-lin_alex_v0.1_27.pb b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/weights/net-lin_alex_v0.1_27.pb new file mode 100644 index 0000000000000000000000000000000000000000..c8ec4cc41e4090f3957f157a2d7fb236f7e21f2d Binary files /dev/null and b/TensorFlow/contrib/cv/BicycleGAN_ID1287_for_TensorFlow/weights/net-lin_alex_v0.1_27.pb differ diff --git a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/README.md b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/README.md index 4e3f0e500eece353e7187effff822b7b5a921534..81df7f3663fe0bfccb554e12a7f181a7a035193a 100644 --- a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/README.md +++ b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/README.md @@ -118,10 +118,11 @@ BlitzNet在一次前向传递中联合执行对象检测和语义分割,从而

快速上手

-- 数据集准备
-
+- 训练数据集准备
+
 OBS下载地址:(下载的数据集为处理完的tf数据集)
 https://blitznets.obs.myhuaweicloud.com:443/Datasets/voc12-train-seg?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1661686224&Signature=QkWct66ZOwIUfNOYeoWFFZ/FTsk%3D
+
- ResNet预训练模型准备
  OBS下载地址:(将下载的resnet50_full.ckpt文件置于Weights_imagenet中)
@@ -140,26 +141,41 @@ BlitzNet在一次前向传递中联合执行对象检测和语义分割,从而
- 单卡训练
-  1. 配置训练参数。
+  1. 单卡性能训练。
-     首先在脚本run_1p.sh中,配置训练数据集路径,请用户根据实际路径配置,数据集参数如下所示:
+     用户可以执行test/train_performance_1p.sh脚本执行少量step获取性能信息:
     ```
-    - python3 ${code_dir}/train_1p.py --obs_dir=${obs_url} --run_name=BlitzNet300_x4_VOC12_detsegaug --dataset=voc12-train --trunk=resnet50 --x4 --batch_size=32 --optimizer=adam --detect --segment --max_iterations=40000 --lr_decay 25000 35000
+    cd test
+    bash train_performance_1p.sh --data_path=数据集路径
+
+    train_performance_1p.sh中调用的训练命令示例如下:
+    python3 train_1p.py --obs_dir=${obs_url} --run_name=BlitzNet300_x4_VOC12_detsegaug --dataset=voc12-train --trunk=resnet50 --x4 --batch_size=16 --optimizer=adam --detect --segment --max_iterations=10 --lr_decay 25000 35000
     ```
-  2. 启动训练。
+  2. 单卡精度训练。
-     启动单卡精度训练 (脚本为BlitzNet_ID0948_for_Tensorflow/train_testcase.sh)
+     用户可以执行test/train_full_1p.sh脚本进行全量训练获取精度信息:
     ```
-    bash train_testcase.sh --code_url=/npu/traindata/cnews --data_url=/npu/traindata/cnews --result_url=/npu/traindata/cnews
-    ```
+    cd test
+    bash train_full_1p.sh --data_path=数据集路径
-

高级参考

+     train_full_1p.sh中调用的训练命令示例如下:
+     python3 train_1p.py --obs_dir=${obs_url} --run_name=BlitzNet300_x4_VOC12_detsegaug --dataset=voc12-train --trunk=resnet50 --x4 --batch_size=16 --optimizer=adam --detect --segment --max_iterations=40000 --lr_decay 25000 35000
+     ```
+
+  3. 执行结果。
+
+     |精度指标项|论文发布|GPU实测|NPU实测|
+     |---|---|---|---|
+     |ACC|xxx|0.88|0.88|
+
+     |性能指标项|论文发布|GPU实测|NPU实测|
+     |---|---|---|---|
+     |单step耗时|XXX|0.35 sec/batch|0.23 sec/batch|
+
+
+
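+     上表中的NPU实测精度并非由单条日志直接打印:train_full_1p.sh会对训练日志中最后10条"acc ="记录取平均作为最终精度。下面给出一个可单独执行的提取示例(日志路径为假设值,仅供参考):
+
+     ```
+     log=test/output/0/train_0.log
+     # 取每行第12个字段,去掉逗号后对最后10条求均值
+     grep "acc =" "$log" | tail -n 10 | awk '{print $12}' | awk -F"," '{print $1}' | awk '{sum+=$1} END {if (NR) print sum/NR}'
+     ```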

高级参考

## 脚本和示例代码 @@ -174,39 +190,3 @@ BlitzNet在一次前向传递中联合执行对象检测和语义分割,从而 │ ├──train_testcase.sh //自测试用例脚本 ``` -## 脚本参数 - -``` -data_input_test.py ---obs_dir=${obs_url} ---run_name=BlitzNet300_x4_VOC12_detsegaug ---dataset=voc12-train ---trunk=resnet50 ---x4 ---batch_size=32 ---optimizer=adam ---detect ---segment ---max_iterations=40000 ---lr_decay 25000 35000 -``` - - -## 训练过程 - -1. 通过“模型训练”中的训练指令启动性能或者精度训练。性能和精度通过运行不同脚本,支持性能、精度网络训练。 - -2. 参考脚本的模型存储路径为test/output/*,训练脚本train_*.log中可查看性能、精度的相关运行状态。 - - -

精度测试

- -训练集:VOC12 train-seg-aug - -测试集:VOC12 val - -| | mIoU | mAP | -| ---------- | -------- | -------- | -| 论文精度 | 72.8 | 80.0 | -| GPU精度 | 72.8 | 80.0 | -| NPU精度 | 待测 | 待测 | \ No newline at end of file diff --git a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/freeze_graph.py b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/freeze_graph.py new file mode 100644 index 0000000000000000000000000000000000000000..775d0e09f523b9fd28f7e965d696ff8d98d6fc62 --- /dev/null +++ b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/freeze_graph.py @@ -0,0 +1,90 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from npu_bridge.npu_init import *
+import tensorflow as tf
+from tensorflow.python.tools import freeze_graph
+import os
+from Train.config import args
+from help_modelarts import modelarts_result2obs
+
+from Train.resnet import ResNet
+from Train.config import config as net_config
+
+INIT_CKPT_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'checkpoint65')
+ckpt_path = os.path.join(INIT_CKPT_DIR, 'model.ckpt-65000')
+
+def main():
+    print("start ckpt to pb")
+    print("ckpt_path = ", ckpt_path)
+    tf.reset_default_graph()
+    # 构建与训练时一致的推理图,输入为固定shape的placeholder
+    img_ph = tf.placeholder(tf.float32, shape=[1, 300, 300, 3], name="input")
+    dataset_num_classes = 21
+
+    net = ResNet
+    depth = 50
+    net = net(config=net_config, depth=depth, training=False)
+
+    net.create_trunk(img_ph)
+
+    if args.detect:
+        net.create_multibox_head(dataset_num_classes)
+        confidence = net.outputs['confidence']
+        location = net.outputs['location']
+    else:
+        location, confidence = None, None
+
+    if args.segment:
+        net.create_segmentation_head(dataset_num_classes)
+        seg_logits = net.outputs['segmentation']
+    else:
+        seg_logits = None
+
+    print("confidence = ", confidence)
+    print("location = ", location)
+    print("seg_logits = ", seg_logits)
+
+    with tf.Session() as sess:
+        # 先导出GraphDef,再结合checkpoint将变量冻结为常量,生成推理用pb
+        tf.train.write_graph(sess.graph_def, args.result_dir, 'model.pb')
+        modelarts_result2obs(args)
+        freeze_graph.freeze_graph(
+            input_graph=os.path.join(args.result_dir, 'model.pb'),
+            input_saver='',
+            input_binary=False,
+            input_checkpoint=ckpt_path,
+            output_node_names="concat_1,concat_2,ssd_2/Conv_7/BiasAdd",  # graph outputs node
+            restore_op_name='save/restore_all',
+            filename_tensor_name='save/Const:0',
+            output_graph=os.path.join(args.result_dir, 'bliznet_tf_310.pb'),  # graph outputs name
+            clear_devices=False,
+            initializer_nodes="")
+    print("done")
+
+    modelarts_result2obs(args)
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/test/train_full_1p.sh
index 5c7f983fdf736ff28c7884434e1d81ca88f5e22d..dae392c7169767e77e2b1b4f41cf561762367928 100644
--- a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/test/train_full_1p.sh
+++ b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/test/train_full_1p.sh
@@ -24,9 +24,9 @@ Network="BlitzNet_ID0948_for_TensorFlow"
 #训练epoch
 train_epochs=
 #训练batch_size
-batch_size=32
+batch_size=16
 #训练step
-train_steps=1000
+train_steps=40000
 #学习率
 learning_rate=
@@ -129,7 +129,7 @@ do
         --dataset=voc12-train \
         --trunk=resnet50 \
         --x4 \
-        --batch_size=32 \
+        --batch_size=16 \
         --optimizer=adam \
         --detect \
         --segment \
@@ -161,7 +161,7 @@ CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc'
 ActualFPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${TrainingTime}'}'`
 #获取模型精度
-train_accuracy=`grep "acc =" $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log |awk 'END {print $12}'|sed 's/,//g'`
+train_accuracy=`grep "acc =" $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | tail -n 10 | awk '{print $12}' | awk -F"," '{print $1}' | awk '{sum+=$1} END {print sum/NR}'`
 #从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视
 grep 'loss =' $cur_path/test/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print $9}'|sed 's/,//g' > $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
diff --git a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/test/train_performance_1p.sh
b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/test/train_performance_1p.sh index fde86eb992fe0340df09bf596a2b2405d71bcd01..75cb8fede09eab7520ca6fc15daf13f5007f9246 100644 --- a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/test/train_performance_1p.sh +++ b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/test/train_performance_1p.sh @@ -24,7 +24,7 @@ Network="BlitzNet_ID0948_for_TensorFlow" #训练epoch train_epochs= #训练batch_size -batch_size=32 +batch_size=16 #训练step train_steps=10 #学习率 @@ -130,7 +130,7 @@ do --dataset=voc12-train \ --trunk=resnet50 \ --x4 \ - --batch_size=32 \ + --batch_size=16 \ --optimizer=adam \ --detect \ --segment \ diff --git a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/testBliznetPb_OM_Data.py b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/testBliznetPb_OM_Data.py new file mode 100644 index 0000000000000000000000000000000000000000..9b9564af69a2a3d5cb8a83dc74fe53a79d99a885 --- /dev/null +++ b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/testBliznetPb_OM_Data.py @@ -0,0 +1,233 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import tensorflow as tf +from config import args +from getData.voc_loader import VOCLoader + +from tensorflow.python.ops.metrics_impl import mean_iou as streaming_mean_iou +from utils import decode_bboxes +from getData.boxer import PriorBoxGrid +from config import config as net_config +from detector import Detector +from tabulate import tabulate +import progressbar +import numpy as np +import logging +log = logging.getLogger() + +def eval_category(gt, dets, cid): + """Computes average precision for one category""" + cgt = gt[cid] + cdets = np.array(dets[cid]) + if (cdets.shape == (0, )): + return None, None + scores = cdets[:, 1] + sorted_inds = np.argsort(-scores) + image_ids = cdets[sorted_inds, 0].astype(int) + BB = cdets[sorted_inds] + + npos = 0 + for img_gt in cgt.values(): + img_gt['ignored'] = np.array(img_gt['difficult']) + img_gt['det'] = np.zeros(len(img_gt['difficult']), dtype=np.bool) + npos += np.sum(~img_gt['ignored']) + + nd = len(image_ids) + tp = np.zeros(nd) + fp = np.zeros(nd) + for d in range(nd): + ovmax = -np.inf + if image_ids[d] in cgt: + R = cgt[image_ids[d]] + bb = BB[d, 2:].astype(float) + + BBGT = R['bbox'].astype(float) + + # compute overlaps + # intersection + ixmin = np.maximum(BBGT[:, 0], bb[0]) + iymin = np.maximum(BBGT[:, 1], bb[1]) + ixmax = np.minimum(BBGT[:, 0] + BBGT[:, 2], bb[0] + bb[2]) + iymax = np.minimum(BBGT[:, 1] + BBGT[:, 3], bb[1] + bb[3]) + iw = np.maximum(ixmax - ixmin, 0.) + ih = np.maximum(iymax - iymin, 0.) + inters = iw * ih + + # union + uni = (bb[2] * bb[3] + BBGT[:, 2] * BBGT[:, 3] - inters) + + overlaps = inters / uni + ovmax = np.max(overlaps) + jmax = np.argmax(overlaps) + + if ovmax > args.voc_iou_thresh: + if not R['ignored'][jmax]: + if not R['det'][jmax]: + tp[d] = 1. + R['det'][jmax] = True + else: + fp[d] = 1. + else: + fp[d] = 1. + + # compute precision recall + fp = np.cumsum(fp) + tp = np.cumsum(tp) + rec = tp / float(npos) + N = float(npos) + # avoid divide by zero in case the first detection matches a difficult + # ground truth + prec = rec * N / np.maximum(rec * N + fp, np.finfo(np.float32).eps) + return rec, prec + +def voc_ap(rec, prec, use_07_metric=False): + """ ap = voc_ap(rec, prec, [use_07_metric]) + Compute VOC AP given precision and recall. + If use_07_metric is true, uses the + VOC 07 11 point method (default:False). + """ + if use_07_metric: + # 11 point metric + ap = 0. + for t in np.arange(0., 1.1, 0.1): + p = 0 if np.sum(rec >= t) == 0 else np.max(prec[rec >= t]) + ap = ap + p / 11. 
+ else: + # correct AP calculation + # first append sentinel values at the end + mrec = np.concatenate(([0.], rec, [1.])) + mpre = np.concatenate(([0.], prec, [0.])) + + # compute the precision envelope + for i in range(mpre.size - 1, 0, -1): + mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i]) + + # to calculate area under PR curve, look for points + # where X axis (recall) changes value + i = np.where(mrec[1:] != mrec[:-1])[0] + + # and sum (\Delta recall) * prec + ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1]) + return ap + +def compute_ap(gt, dets, loader): + """computes average precision for all categories""" + aps = {} + for cid in range(1, loader.num_classes): + cat_name = loader.ids_to_cats[cid] + rec, prec = eval_category(gt, dets, cid) + ap = voc_ap(rec, prec, loader.year == '07') + aps[loader.ids_to_cats[cid]] = ap + return aps + +def make_detection_table(gt, dets, loader): + """creates a table with AP per category and mean AP""" + aps = compute_ap(gt, dets, loader) + print("ap = ", aps) + eval_cache = [aps] + + table = [] + for cid in range(1, loader.num_classes): + cat_name = loader.ids_to_cats[cid] + table.append((cat_name, ) + tuple(aps.get(cat_name, 'N/A') for aps in eval_cache)) + mean_ap = np.mean([a for a in list(aps.values()) if a >= 0]) + table.append(("AVERAGE", ) + tuple(np.mean(list(aps.values())) for aps in eval_cache)) + x = tabulate(table, headers=(["Category", "mAP (all)"]), + tablefmt='orgtbl', floatfmt=".3f") + log.info("Eval results:\n%s", x) + return table + +def compute_mean_iou(detector): + iou = detector.get_mean_iou() + print(iou) + log.info("\n Mean IoU is %f", iou) + return iou + +def main(argv=None): + if args.dataset == 'voc07' or args.dataset == 'voc07+12': + loader = VOCLoader('07', 'test') + if args.dataset == 'voc12-val': + loader = VOCLoader('12', 'val', segmentation=args.segment) + + with tf.Session(config=tf.ConfigProto(allow_soft_placement=True, + log_device_placement=False)) as sess: + detector = Detector(sess, loader, net_config, no_gt=args.no_seg_gt) + + filenames = loader.get_filenames() + gt = {cid: {} for cid in range(1, loader.num_classes)} + dets = {cid: [] for cid in range(1, loader.num_classes)} + + bar = progressbar.ProgressBar()# 显示进度条 + # print("filenames = ", filenames) + + init_op = tf.group(tf.local_variables_initializer(), tf.global_variables_initializer()) + sess.run(init_op) + for i in bar(range(len(filenames))): + name = filenames[i] + # print("name = ", name) + img_id = i + img = loader.load_image(name) # 获取图片 + # img = np.fromfile("./binFile/img/{0:05d}.bin".format(i), dtype=np.float32) + # img.shape = 1, 300, 300, 3 + gt_bboxes, seg_gt, gt_cats, w, h, difficulty = loader.read_annotations(name) # 获取图片信息 + + confidence = np.loadtxt("./binFile/test/2021118_18_51_25_234650/{0:05d}_output_0.txt".format(i)) + location = np.loadtxt("./binFile/test/2021118_18_51_25_234650/{0:05d}_output_1.txt".format(i)) + seg_logits = np.loadtxt("./binFile/test/2021118_18_51_25_234650/{0:05d}_output_2.txt".format(i)) + confidence.shape = 1, 45390, 21 + location.shape = 1, 45390, 4 + seg_logits.shape = 1, 75, 75, 21 + + for cid in np.unique(gt_cats): + mask = (gt_cats == cid) + bbox = gt_bboxes[mask] + diff = difficulty[mask] + det = np.zeros(len(diff), dtype=np.bool) + gt[cid][img_id] = {'bbox': bbox, 'difficult': diff, 'det': det} + + confidence1 = confidence + location1 = location + seg_logits1 = seg_logits + output = detector.feed_forward(img, seg_gt, confidence1, location1, seg_logits1, + w, h, name, gt_bboxes, gt_cats) # result + + if 
args.detect: + det_bboxes, det_probs, det_cats = output[:3] + for i in range(len(det_cats)): + dets[det_cats[i]].append((img_id, det_probs[i]) + tuple(det_bboxes[i])) + + # print("gt = ", gt) + # print("dets = ", dets) + print("table result:") + table = make_detection_table(gt, dets, loader) if args.detect else None + print("iou result:") + iou = compute_mean_iou(detector) if args.segment else None + + +if __name__ == '__main__': + tf.app.run() \ No newline at end of file diff --git a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/train_1p.py b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/train_1p.py index 3b46c37d3452f92268f832bd7727b14aa8a0f149..a44a8bba02b1ec2fea648409bfde09e7cd39b629 100644 --- a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/train_1p.py +++ b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/train_1p.py @@ -54,9 +54,9 @@ log = logging.getLogger() dataset_num_classes = len(VOC_CATS) -def npu_tf_optimizer(opt): - npu_opt = NPUDistributedOptimizer(opt) - return npu_opt +#def npu_tf_optimizer(opt): +# npu_opt = NPUDistributedOptimizer(opt) +# return npu_opt def objective(location, confidence, refine_ph, classes_ph, @@ -271,9 +271,11 @@ def train(net, config): learning_rate = cosine_decay(tf.to_int32(global_step), steps, learning_rates) if args.optimizer == 'adam': - opt = npu_tf_optimizer(tf.train.AdamOptimizer(learning_rate=learning_rate)) + # opt = npu_tf_optimizer(tf.train.AdamOptimizer(learning_rate=learning_rate)) + opt = tf.train.AdamOptimizer(learning_rate=learning_rate) elif args.optimizer == 'nesterov': - opt = npu_tf_optimizer(tf.train.MomentumOptimizer(learning_rate=learning_rate, momentum=0.9, use_nesterov=True)) + # opt = npu_tf_optimizer(tf.train.MomentumOptimizer(learning_rate=learning_rate, momentum=0.9, use_nesterov=True)) + opt = tf.train.MomentumOptimizer(learning_rate=learning_rate, momentum=0.9, use_nesterov=True) else: raise ValueError @@ -292,7 +294,7 @@ def train(net, config): custom_op = config_npu.graph_options.rewrite_options.custom_optimizers.add() custom_op.name = "NpuOptimizer" custom_op.parameter_map["use_off_line"].b = True - # custom_op.parameter_map["mix_compile_mode"].b = True + custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") config_npu.graph_options.rewrite_options.remapping = RewriterConfig.OFF with tf.Session(config=config_npu) as sess: diff --git a/TensorFlow/contrib/cv/CFL_ID1230_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/CFL_ID1230_for_TensorFlow/test/train_full_1p.sh index f85b93576acf9888c68abfe822c316ca49d2930a..898fc05da046dfb148bc057ad3952d00f907e6f2 100644 --- a/TensorFlow/contrib/cv/CFL_ID1230_for_TensorFlow/test/train_full_1p.sh +++ b/TensorFlow/contrib/cv/CFL_ID1230_for_TensorFlow/test/train_full_1p.sh @@ -130,7 +130,7 @@ StepTime=`grep "sec/step :" ${print_log} | tail -n 10 | awk '{print $NF}' | awk FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` # 精度相关数据计算 -train_accuracy=`grep "Final Accuracy accuracy" ${print_log} | awk '{print $NF}' | awk -F ";" '{print $2}' | awk -F ":" '{print $2}'` +train_accuracy=`grep "Final Accuracy accuracy" ${print_log} | awk '{print $NF}' | tr -d ":"` # 提取所有loss打印信息 #grep "loss :" ${print_log} | awk -F ":" '{print $4}' | awk -F "-" '{print $1}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt diff --git a/TensorFlow/contrib/cv/CSGM_ID2109_for_TensorFlow/README.md b/TensorFlow/contrib/cv/CSGM_ID2109_for_TensorFlow/README.md index 
e8bd391485e72929096a71c755892b3b3009e442..12f47ff8f2b9aa280f3db2d0d02112066ee89307 100644 --- a/TensorFlow/contrib/cv/CSGM_ID2109_for_TensorFlow/README.md +++ b/TensorFlow/contrib/cv/CSGM_ID2109_for_TensorFlow/README.md @@ -112,6 +112,7 @@ To train: a.请用户自行准备好数据集,包含训练集和验证集两部分,数据集包括Mnist等,包含train和 val两部分。以Mnist数据集为例。 b.上传数据压缩包到训练环境上,无需解压 +``` ``` ├── /datasets/mnist @@ -166,43 +167,72 @@ Extracting cache/dataset/t10k-images-idx3-ubyte.gz Extracting cache/dataset/t10k-labels-idx1-ubyte.gz start training 2021-12-08 13:13:49.596612: I /home/jenkins/agent/workspace/Compile_GraphEngine_Centos_ARM/tensorflow/tf_adapter/kernels/geop_npu.cc:765] The model has been compiled on the Ascend AI processor, current graph id is:11 -step 0, loss = 621.13 (3.9 examples/sec; 25.653 sec/batch) -step 0, loss = 473.88 (15663.8 examples/sec; 0.006 sec/batch) -step 0, loss = 393.31 (22854.8 examples/sec; 0.004 sec/batch) -step 0, loss = 329.65 (23974.3 examples/sec; 0.004 sec/batch) -step 0, loss = 269.96 (24415.3 examples/sec; 0.004 sec/batch) -step 0, loss = 244.54 (24506.6 examples/sec; 0.004 sec/batch) -step 0, loss = 240.48 (23996.2 examples/sec; 0.004 sec/batch) -step 0, loss = 232.23 (24784.6 examples/sec; 0.004 sec/batch) -step 0, loss = 231.09 (25579.7 examples/sec; 0.004 sec/batch) -step 0, loss = 227.22 (26723.8 examples/sec; 0.004 sec/batch) -step 0, loss = 234.37 (24330.3 examples/sec; 0.004 sec/batch) -step 0, loss = 228.46 (23971.6 examples/sec; 0.004 sec/batch) -step 0, loss = 224.78 (25015.2 examples/sec; 0.004 sec/batch) -step 0, loss = 237.21 (24771.5 examples/sec; 0.004 sec/batch) -step 0, loss = 228.91 (24708.7 examples/sec; 0.004 sec/batch) -step 0, loss = 218.84 (24153.8 examples/sec; 0.004 sec/batch) -step 0, loss = 213.00 (24834.5 examples/sec; 0.004 sec/batch) -step 0, loss = 214.56 (25046.6 examples/sec; 0.004 sec/batch) -step 0, loss = 217.74 (25662.7 examples/sec; 0.004 sec/batch) -step 0, loss = 217.85 (24633.3 examples/sec; 0.004 sec/batch) -step 0, loss = 210.44 (24662.2 examples/sec; 0.004 sec/batch) -step 0, loss = 208.78 (24595.7 examples/sec; 0.004 sec/batch) -step 0, loss = 208.42 (24978.0 examples/sec; 0.004 sec/batch) -step 0, loss = 210.64 (24394.0 examples/sec; 0.004 sec/batch) -step 0, loss = 210.62 (24500.9 examples/sec; 0.004 sec/batch) -step 0, loss = 214.60 (25016.7 examples/sec; 0.004 sec/batch) -step 0, loss = 207.89 (24577.0 examples/sec; 0.004 sec/batch) -step 0, loss = 202.35 (24890.5 examples/sec; 0.004 sec/batch) -step 0, loss = 205.51 (24786.1 examples/sec; 0.004 sec/batch) -step 0, loss = 211.48 (23782.6 examples/sec; 0.004 sec/batch) -step 0, loss = 216.67 (24902.4 examples/sec; 0.004 sec/batch) -step 0, loss = 212.29 (24771.5 examples/sec; 0.004 sec/batch) -step 0, loss = 204.17 (24849.2 examples/sec; 0.004 sec/batch) -step 0, loss = 210.40 (24969.1 examples/sec; 0.004 sec/batch) -step 0, loss = 210.41 (29371.9 examples/sec; 0.003 sec/batch) -step 0, loss = 203.19 (28197.0 examples/sec; 0.004 sec/batch) -step 0, loss = 210.02 (28771.5 examples/sec; 0.003 sec/batch) +step 12, loss = 103.19 (51654.0 examples/sec; 0.002 sec/batch) +step 12, loss = 103.04 (53065.6 examples/sec; 0.002 sec/batch) +step 12, loss = 98.33 (51807.1 examples/sec; 0.002 sec/batch) +step 12, loss = 100.44 (52187.4 examples/sec; 0.002 sec/batch) +step 12, loss = 98.23 (52911.6 examples/sec; 0.002 sec/batch) +step 12, loss = 103.22 (52083.7 examples/sec; 0.002 sec/batch) +step 12, loss = 102.44 (51974.0 examples/sec; 0.002 sec/batch) +step 12, loss = 103.07 (52389.5 examples/sec; 0.002 
sec/batch) +step 12, loss = 99.82 (52435.4 examples/sec; 0.002 sec/batch) +step 12, loss = 100.67 (52402.6 examples/sec; 0.002 sec/batch) +step 12, loss = 103.34 (52851.6 examples/sec; 0.002 sec/batch) +step 12, loss = 104.92 (52632.8 examples/sec; 0.002 sec/batch) +step 12, loss = 99.91 (51539.7 examples/sec; 0.002 sec/batch) +step 12, loss = 101.96 (53410.2 examples/sec; 0.002 sec/batch) +step 12, loss = 103.25 (53166.5 examples/sec; 0.002 sec/batch) +step 12, loss = 107.41 (53335.5 examples/sec; 0.002 sec/batch) +step 12, loss = 106.33 (53546.6 examples/sec; 0.002 sec/batch) +step 12, loss = 104.44 (52291.5 examples/sec; 0.002 sec/batch) +step 12, loss = 97.90 (51609.5 examples/sec; 0.002 sec/batch) +step 12, loss = 101.81 (52298.1 examples/sec; 0.002 sec/batch) +step 12, loss = 104.01 (51590.5 examples/sec; 0.002 sec/batch) +step 12, loss = 98.72 (52032.1 examples/sec; 0.002 sec/batch) +step 12, loss = 98.53 (53200.2 examples/sec; 0.002 sec/batch) +step 12, loss = 98.74 (52317.6 examples/sec; 0.002 sec/batch) +step 12, loss = 105.42 (52232.9 examples/sec; 0.002 sec/batch) +step 12, loss = 103.77 (52665.8 examples/sec; 0.002 sec/batch) +step 12, loss = 102.57 (52396.1 examples/sec; 0.002 sec/batch) +step 12, loss = 99.36 (53234.0 examples/sec; 0.002 sec/batch) +step 12, loss = 101.95 (53615.0 examples/sec; 0.002 sec/batch) +step 12, loss = 105.18 (52422.2 examples/sec; 0.002 sec/batch) +step 12, loss = 102.93 (51704.9 examples/sec; 0.002 sec/batch) +step 12, loss = 100.61 (52369.9 examples/sec; 0.002 sec/batch) +step 12, loss = 106.17 (51225.0 examples/sec; 0.002 sec/batch) +step 12, loss = 102.04 (51813.5 examples/sec; 0.002 sec/batch) +step 12, loss = 107.66 (52369.9 examples/sec; 0.002 sec/batch) +step 12, loss = 109.57 (51673.1 examples/sec; 0.002 sec/batch) +step 12, loss = 104.66 (51388.2 examples/sec; 0.002 sec/batch) +step 12, loss = 101.40 (52514.1 examples/sec; 0.002 sec/batch) +step 12, loss = 99.98 (51337.9 examples/sec; 0.002 sec/batch) +step 12, loss = 103.62 (51916.1 examples/sec; 0.002 sec/batch) +step 12, loss = 101.46 (53105.9 examples/sec; 0.002 sec/batch) +step 12, loss = 104.52 (52599.7 examples/sec; 0.002 sec/batch) +step 12, loss = 99.36 (52396.1 examples/sec; 0.002 sec/batch) +step 12, loss = 95.70 (52884.9 examples/sec; 0.002 sec/batch) +step 12, loss = 103.42 (47148.2 examples/sec; 0.002 sec/batch) +step 12, loss = 102.05 (50889.4 examples/sec; 0.002 sec/batch) +step 12, loss = 104.18 (52441.9 examples/sec; 0.002 sec/batch) +step 12, loss = 102.44 (52109.6 examples/sec; 0.002 sec/batch) +step 12, loss = 101.26 (51546.1 examples/sec; 0.002 sec/batch) +step 12, loss = 102.05 (52468.2 examples/sec; 0.002 sec/batch) +step 12, loss = 102.58 (52051.4 examples/sec; 0.002 sec/batch) +step 12, loss = 99.98 (52540.4 examples/sec; 0.002 sec/batch) +step 12, loss = 103.54 (53553.4 examples/sec; 0.002 sec/batch) +step 12, loss = 102.53 (53342.3 examples/sec; 0.002 sec/batch) +step 12, loss = 104.03 (52402.6 examples/sec; 0.002 sec/batch) +step 12, loss = 99.35 (52805.0 examples/sec; 0.002 sec/batch) +step 12, loss = 104.60 (51903.3 examples/sec; 0.002 sec/batch) +step 12, loss = 104.71 (51896.9 examples/sec; 0.002 sec/batch) +step 12, loss = 103.18 (52945.0 examples/sec; 0.002 sec/batch) +step 12, loss = 98.93 (52019.1 examples/sec; 0.002 sec/batch) +step 12, loss = 101.61 (51858.4 examples/sec; 0.002 sec/batch) +step 12, loss = 105.64 (52187.4 examples/sec; 0.002 sec/batch) +step 12, loss = 98.17 (52070.8 examples/sec; 0.002 sec/batch) +step 12, loss = 101.07 (51698.6 
examples/sec; 0.002 sec/batch) +step 12, loss = 96.97 (52858.3 examples/sec; 0.002 sec/batch) +step 12, loss = 101.28 (52045.0 examples/sec; 0.002 sec/batch) ```
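
上述打屏信息中,吞吐率以"examples/sec"的形式逐条打印。若需对整段日志求平均吞吐,可参考如下示例命令(其中日志文件名train.log为假设值):

```
# 第6个字段形如"(52045.0",去掉左括号后即为examples/sec数值
grep "examples/sec" train.log | awk '{gsub(/\(/,"",$6); sum+=$6; n++} END {if (n) printf "%.1f examples/sec\n", sum/n}'
```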

精度指标

diff --git a/TensorFlow/contrib/cv/CSGM_ID2109_for_TensorFlow/mnist_vae/src/main.py b/TensorFlow/contrib/cv/CSGM_ID2109_for_TensorFlow/mnist_vae/src/main.py index 57e1d382f566ced1778e45e2050a190b3cd59590..0bad2bed658f0d11168960500c773f48cf2c74f1 100644 --- a/TensorFlow/contrib/cv/CSGM_ID2109_for_TensorFlow/mnist_vae/src/main.py +++ b/TensorFlow/contrib/cv/CSGM_ID2109_for_TensorFlow/mnist_vae/src/main.py @@ -43,6 +43,13 @@ import time from npu_bridge.npu_init import * flags = tf.flags FLAGS = flags.FLAGS +config = tf.ConfigProto() +custom_op = config.graph_options.rewrite_options.custom_optimizers.add() +custom_op.name = "NpuOptimizer" +custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") +#关闭日志级别 +#os.system('export ASCEND_SLOG_PRINT_TO_STDOUT=0') +#os.system('export ASCEND_GLOBAL_LOG_LEVEL=3') #import argparse #import moxing as mox @@ -98,11 +105,6 @@ def main(hparams): print(var.op.name) print ('') - #npu迁移代码 - config = tf.ConfigProto() - custom_op = config.graph_options.rewrite_options.custom_optimizers.add() - custom_op.name = "NpuOptimizer" - config.graph_options.rewrite_options.remapping = RewriterConfig.OFF # Get a new session sess = tf.Session(config=config) diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/.keep b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/LICENSE b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..4246e35a2d0b6c4d6fa2939d57cb4a689f62e336 --- /dev/null +++ b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/LICENSE @@ -0,0 +1,251 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +## Some of TensorFlow's code is derived from Caffe, which is subject to the following copyright notice: + +COPYRIGHT + +All contributions by the University of California: + +Copyright (c) 2014, The Regents of the University of California (Regents) +All rights reserved. + +All other contributions: + +Copyright (c) 2014, the respective contributors +All rights reserved. + +Caffe uses a shared copyright model: each contributor holds copyright over +their contributions to Caffe. The project versioning records all such +contribution and copyright details. If a contributor wants to further mark +their specific copyright on a particular contribution, they should indicate +their copyright solely in the commit message of the change when it is +committed. + +LICENSE + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. 
Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+CONTRIBUTION AGREEMENT
+
+By contributing to the BVLC/caffe repository through pull-request, comment,
+or otherwise, the contributor releases their content to the
+license and copyright terms herein.
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/README.MD b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/README.MD
new file mode 100644
index 0000000000000000000000000000000000000000..886453bfadb30d6cdd88b7387256b06257dd9aca
--- /dev/null
+++ b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/README.MD
@@ -0,0 +1,149 @@
+## 基本信息
+
+**发布者(Publisher):Huawei**
+
+**应用领域(Application Domain):Computer Vision**
+
+**框架(Framework):TensorFlow 1.15.0**
+
+**模型格式(Model Format):ckpt**
+
+**精度(Precision):Mixed**
+
+**处理器(Processor):昇腾910**
+
+**应用级别(Categories):Research**
+
+**描述(Description):基于TensorFlow框架对CIFAR-10数据集进行分类的训练代码**
+
+## 概述
+
+    CSPNet
+
+- 参考论文:
+
+    https://openaccess.thecvf.com/content_CVPRW_2020/papers/w28/Wang_CSPNet_A_New_Backbone_That_Can_Enhance_Learning_Capability_of_CVPRW_2020_paper.pdf
+
+- 参考实现:
+
+    https://github.com/WongKinYiu/CrossStagePartialNetworks
+
+
+## 默认配置
+
+- 训练数据集预处理:
+
+  - 解压CIFAR-10数据集
+
+- 训练超参
+
+  - EPOCHS 50
+  - BATCH_SIZE 64
+  - LEARNING_RATE 1e-03
+  - MOMENTUM 0.9
+  - LAMBDA 5e-04
+
+## 快速上手
+
+数据集准备
+模型训练使用CIFAR-10数据集,数据集请用户自行获取.
+obs桶地址:
+>obs://cann-id0840/npu/
+
+## 模型训练
+单卡训练
+
+1. 配置训练参数
+2. 启动训练
+```
+bash train_full_1p.sh \
+    --data_path="./dataset" \
+    --output_path="./checkpoint"
+```
+
+
+## 训练结果
+
+- 精度结果比对
+
+|精度指标项|GPU实测|NPU实测|
+|---|---|---|
+|Top-1 Acc|0.6605|0.6645|
+
+- 性能结果比对
+
+|性能指标项|GPU实测|NPU实测|
+|---|---|---|
+|FPS|1126.45|1245.03|
+
+
+## 文件夹结构
+
+```
+├── README.md              //代码说明文档
+├── utils.py               //工具文件
+├── train.py               //网络训练
+├── test.py                //用于衡量模型在数据集上的精度
+├── requirements.txt       //依赖列表
+├── LICENSE
+├── checkpoint             //checkpoint模型保存地址
+├── models                 //模型定义
+│   ├──cspdarknet53.py
+├── dataset                //cifar-10数据集文件夹
+│   ├──data_batch_1
+│   ├──data_batch_2
+│   ├──data_batch_3
+│   ├──data_batch_4
+│   ├──data_batch_5
+│   ├──test_batch
+│   ├──batches.meta
+│   ├──readme.html
+├── test
+│   ├──train_performance_1p.sh    //单卡训练验证性能启动脚本
+│   ├──train_full_1p.sh           //单卡全量训练启动脚本
+
+```
+
+
+## 启动脚本说明
+在test文件夹下, 有train_performance_1p.sh和train_full_1p.sh脚本,
+可分别用于检测训练性能与训练精度.
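+
+除上述两个启动脚本外,也可以直接调用test.py,基于已保存的checkpoint单独衡量精度。下面是一个基于test.py中argparse默认值的调用示例(路径仅为示意,请按实际环境调整):
+
+```
+python3 test.py \
+    --CKPT_PATH="./checkpoint/cspdarknet53.ckpt" \
+    --CIFAR_PATH="./dataset"
+```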
+
+### 检测性能
+命令:
+```
+bash train_performance_1p.sh \
+    --data_path="./dataset" \
+    --output_path="./checkpoint"
+```
+打屏信息:
+
+>------------------ INFO NOTICE START------------------
+>INFO, your task has used the Ascend NPU, please check your result.
+>------------------ INFO NOTICE END------------------
+>------------------ Final result ------------------
+>Final Performance images/sec : 1245.0326379815197
+>Final Performance sec/step : 0.05140427491423721
+>E2E Training Duration sec : 536
+>Final Train Accuracy : 0.5850
+
+
+### 检测精度
+命令:
+```
+bash train_full_1p.sh \
+    --data_path="./dataset" \
+    --output_path="./checkpoint"
+```
+
+打屏信息:
+>------------------ INFO NOTICE START------------------
+>INFO, your task has used the Ascend NPU, please check your result.
+>------------------ INFO NOTICE END------------------
+>------------------ Final result ------------------
+>Final Performance images/sec : 1785.9793972748832
+>Final Performance sec/step : 0.03583467989477017
+>E2E Training Duration sec : 1993
+>Final Train Accuracy : 0.6645
+
+
diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/checkpoint/.keep b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/checkpoint/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/dataset/.keep b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/dataset/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/models/.keep b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/models/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/models/__init__.py b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/models/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e54cabd2df3bca9b25ef885244a7f3255e5e4df
--- /dev/null
+++ b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/models/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/models/cspdarknet53.py b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/models/cspdarknet53.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f8daeea2066b47728a95427878786af38114a95
--- /dev/null
+++ b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/models/cspdarknet53.py
@@ -0,0 +1,181 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from npu_bridge.npu_init import *
+import tensorflow as tf
+import utils as tu
+
+
+def Conv2dBatchLeaky(x, kernel_size, in_channels, out_channels, conv_stride, stage_name):
+    w = tu.weight([kernel_size, kernel_size, in_channels, out_channels], name='w_'+stage_name)
+    b = tu.bias(0.0, [out_channels], name='b_'+stage_name)
+    x = tf.add(tu.conv2d(x, w, stride=(conv_stride, conv_stride), padding='SAME'), b)
+    x = tu.batch_norm(x)
+    # 注意:此处实际使用的是Mish激活(x * tanh(softplus(x))),函数名中的Leaky为沿用的旧命名
+    x = x * tf.math.tanh(tf.math.softplus(x))
+    return x
+
+
+def SmallBlock(x, channels, stage_name):
+    c1 = Conv2dBatchLeaky(x, 1, channels, channels, 1, stage_name+'c1')
+    c2 = Conv2dBatchLeaky(c1, 3, channels, channels, 1, stage_name+'c2')
+    return c2
+
+
+def classifier(x):
+    with tf.name_scope('stage1') as scope:
+        with tf.name_scope('stage1_Conv2dBatchLeaky') as inner_scope:
+            s1c1 = Conv2dBatchLeaky(x, 3, 3, 32, 1, 's1c1')
+
+    with tf.name_scope('stage2') as scope:
+        with tf.name_scope('stage2_Conv2dBatchLeaky_1') as inner_scope:
+            s2c1 = Conv2dBatchLeaky(s1c1, 3, 32, 64, 2, 's2c1')
+        with tf.name_scope('stage2_Split_0') as inner_scope:
+            s2s0 = Conv2dBatchLeaky(s2c1, 1, 64, 64, 1, 's2s0')
+        with tf.name_scope('stage2_Split_1') as inner_scope:
+            s2s1 = Conv2dBatchLeaky(s2c1, 1, 64, 64, 1, 's2s1')
+        with tf.name_scope('stage2_Conv2dBatchLeaky_2') as inner_scope:
+            s2c2 = Conv2dBatchLeaky(s2s1, 1, 64, 32, 1, 's2c2')
+        with tf.name_scope('stage2_Conv2dBatchLeaky_3') as inner_scope:
+            s2c3 = Conv2dBatchLeaky(s2c2, 3, 32, 64, 1, 's2c3')
+        shortcut2 = tf.add(s2c3, s2s1)
+        with tf.name_scope('stage2_Conv2dBatchLeaky_4') as inner_scope:
+            s2c4 = Conv2dBatchLeaky(shortcut2, 1, 64, 64, 1, 's2c4')
+        route2 = tf.concat([s2s0, s2c4], axis=3)
+
+    with tf.name_scope('stage3') as scope:
+        with tf.name_scope('stage3_Conv2dBatchLeaky_1') as inner_scope:
+            s3c1 = Conv2dBatchLeaky(route2, 1, 128, 64, 1, 's3c1')
+        with tf.name_scope('stage3_Conv2dBatchLeaky_2') as inner_scope:
+            s3c2 = Conv2dBatchLeaky(s3c1, 3, 64, 128, 2, 's3c2')
+        with tf.name_scope('stage3_Split_0') as inner_scope:
+            s3s0 = Conv2dBatchLeaky(s3c2, 1, 128, 64, 1, 's3s0')
+        with tf.name_scope('stage3_Split_1') as inner_scope:
+            s3s1 = Conv2dBatchLeaky(s3c2, 1, 128, 64, 1, 's3s1')
+        with tf.name_scope('stage3_Block_1') as inner_scope:
+            s3b1 = SmallBlock(s3s1, 64, 's3b1')
+        with tf.name_scope('stage3_Block_2') as inner_scope:
+            s3b2 = SmallBlock(s3b1, 64, 's3b2')
+        with tf.name_scope('stage3_Conv2dBatchLeaky_3') as inner_scope:
+            s3c3 = Conv2dBatchLeaky(s3b2, 1, 64, 64, 1, 's3c3')
+        route3 = tf.concat([s3s0, s3c3], axis=3)
+
+    with tf.name_scope('stage4') as scope:
+        with tf.name_scope('stage4_Conv2dBatchLeaky_1') as inner_scope:
+            s4c1 = Conv2dBatchLeaky(route3, 1, 128, 128, 1, 's4c1')
+        with tf.name_scope('stage4_Conv2dBatchLeaky_2') as inner_scope:
+            s4c2 = Conv2dBatchLeaky(s4c1, 3, 128, 256, 2, 's4c2')
+        with tf.name_scope('stage4_Split_0') as inner_scope:
+            s4s0 = Conv2dBatchLeaky(s4c2, 1, 256, 128, 1, 's4s0')
+        with tf.name_scope('stage4_Split_1') as inner_scope:
+            s4s1 = Conv2dBatchLeaky(s4c2, 1, 256, 128, 1, 's4s1')
+
+        with tf.name_scope('stage4_Block_1') as inner_scope:
+            s4b1 = SmallBlock(s4s1, 128, 's4b1')
+        with tf.name_scope('stage4_Block_2') as inner_scope:
+            s4b2 = SmallBlock(s4b1, 128, 's4b2')
+        with tf.name_scope('stage4_Block_3') as inner_scope:
+            s4b3 = SmallBlock(s4b2, 128, 's4b3')
+        with tf.name_scope('stage4_Block_4') as inner_scope:
+            s4b4 = SmallBlock(s4b3, 128, 's4b4')
+        with tf.name_scope('stage4_Block_5') as inner_scope:
+            s4b5 = SmallBlock(s4b4, 128, 's4b5')
+        with tf.name_scope('stage4_Block_6') as inner_scope:
+            s4b6 = SmallBlock(s4b5, 128, 's4b6')
+        with tf.name_scope('stage4_Block_7') as inner_scope:
+            s4b7 = SmallBlock(s4b6, 128, 's4b7')
+        with tf.name_scope('stage4_Block_8') as inner_scope:
+            s4b8 = SmallBlock(s4b7, 128, 's4b8')
+
+        with tf.name_scope('stage4_Conv2dBatchLeaky_3') as inner_scope:
+            s4c3 = Conv2dBatchLeaky(s4b8, 1, 128, 128, 1, 's4c3')
+        route4 = tf.concat([s4s0, s4c3], axis=3)
+
+    with tf.name_scope('stage5') as scope:
+        with tf.name_scope('stage5_Conv2dBatchLeaky_1') as inner_scope:
+            s5c1 = Conv2dBatchLeaky(route4, 1, 256, 256, 1, 's5c1')
+        with tf.name_scope('stage5_Conv2dBatchLeaky_2') as inner_scope:
+            s5c2 = Conv2dBatchLeaky(s5c1, 3, 256, 512, 2, 's5c2')
+        with tf.name_scope('stage5_Split_0') as inner_scope:
+            s5s0 = Conv2dBatchLeaky(s5c2, 1, 512, 256, 1, 's5s0')
+        with tf.name_scope('stage5_Split_1') as inner_scope:
+            s5s1 = Conv2dBatchLeaky(s5c2, 1, 512, 256, 1, 's5s1')
+
+        with tf.name_scope('stage5_Block_1') as inner_scope:
+            s5b1 = SmallBlock(s5s1, 256, 's5b1')
+        with tf.name_scope('stage5_Block_2') as inner_scope:
+            s5b2 = SmallBlock(s5b1, 256, 's5b2')
+        with tf.name_scope('stage5_Block_3') as inner_scope:
+            s5b3 = SmallBlock(s5b2, 256, 's5b3')
+        with tf.name_scope('stage5_Block_4') as inner_scope:
+            s5b4 = SmallBlock(s5b3, 256, 's5b4')
+        with tf.name_scope('stage5_Block_5') as inner_scope:
+            s5b5 = SmallBlock(s5b4, 256, 's5b5')
+        with tf.name_scope('stage5_Block_6') as inner_scope:
+            s5b6 = SmallBlock(s5b5, 256, 's5b6')
+        with tf.name_scope('stage5_Block_7') as inner_scope:
+            s5b7 = SmallBlock(s5b6, 256, 's5b7')
+        with tf.name_scope('stage5_Block_8') as inner_scope:
+            s5b8 = SmallBlock(s5b7, 256, 's5b8')
+
+        with tf.name_scope('stage5_Conv2dBatchLeaky_3') as inner_scope:
+            s5c3 = Conv2dBatchLeaky(s5b8, 1, 256, 256, 1, 's5c3')
+        route5 = tf.concat([s5s0, s5c3], axis=3)
+
+    with tf.name_scope('stage6') as scope:
+        with tf.name_scope('stage6_Conv2dBatchLeaky_1') as inner_scope:
+            s6c1 = Conv2dBatchLeaky(route5, 1, 512, 512, 1, 's6c1')
+        with tf.name_scope('stage6_Conv2dBatchLeaky_2') as inner_scope:
+            s6c2 = Conv2dBatchLeaky(s6c1, 3, 512, 1024, 2, 's6c2')
+        with tf.name_scope('stage6_Split_0') as inner_scope:
+            s6s0 = Conv2dBatchLeaky(s6c2, 1, 1024, 512, 1, 's6s0')
+        with tf.name_scope('stage6_Split_1') as inner_scope:
+            s6s1 = Conv2dBatchLeaky(s6c2, 1, 1024, 512, 1, 's6s1')
+
+        with tf.name_scope('stage6_Block_1') as inner_scope:
+            s6b1 = SmallBlock(s6s1, 512, 's6b1')
+        with tf.name_scope('stage6_Block_2') as inner_scope:
+            s6b2 = SmallBlock(s6b1, 512, 's6b2')
+        with tf.name_scope('stage6_Block_3') as inner_scope:
+            s6b3 = SmallBlock(s6b2, 512, 's6b3')
+        with tf.name_scope('stage6_Block_4') as inner_scope:
+            s6b4 = SmallBlock(s6b3, 512, 's6b4')
+
+
+        with tf.name_scope('stage6_Conv2dBatchLeaky_3') as inner_scope:
+            s6c3 = Conv2dBatchLeaky(s6b4, 1, 512, 512, 1, 's6c3')
+        route6 = tf.concat([s6s0, s6c3], axis=3)
+
+    conv = Conv2dBatchLeaky(route6, 1, 1024, 1024, 1, 'conv')
+    # 输入32x32经5次stride-2下采样后空间维度为1x1,此处squeeze等效于全局池化
+    avg_pool = tf.squeeze(conv)
+
+    wfc = tu.weight([1024, 10], name='wfc')
+    bfc = tu.bias(0.0, [10], name='bfc')
+    out = tf.add(tf.matmul(avg_pool, wfc), bfc)
+
+    return out
+
diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/modelzoo_level.txt b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/modelzoo_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..8df2e8d83260d53970a93d017b12d9d94e7934bc
--- /dev/null
+++ b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/modelzoo_level.txt
@@ -0,0 +1,5 @@
+GPUStatus:OK
+NPUMigrationStatus:OK
+FuncStatus:OK
+PrecisionStatus:OK
+PerfStatus:OK
diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/requirements.txt b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e6958b17ce504a3ad77331e1bc85931090d4ed1d
--- /dev/null
+++ b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/requirements.txt
@@ -0,0 +1,2 @@
+numpy==1.19.3
+tensorflow==1.15.0
diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/test.py b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..41bdb206a76c851f95413d3592f7987975431404
--- /dev/null
+++ b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/test.py
@@ -0,0 +1,92 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from npu_bridge.npu_init import * +import os.path +import tensorflow as tf +from utils import load_cifar, random_batch +from models import cspdarknet53 +import argparse + + +def test( + batch_size, + cifar_path, + ckpt_path): + + Xtr, Ytr, Xte, Yte = load_cifar(cifar_path) + Xte, Yte = random_batch(Xte, Yte, batch_size) + + x = tf.placeholder(tf.float32, [None, 32, 32, 3]) + y = tf.placeholder(tf.float32, [None, 10]) + + pred = cspdarknet53.classifier(x) + + # accuracy + top1_correct = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1)) + top1_accuracy = tf.reduce_mean(tf.cast(top1_correct, tf.float32)) + + saver = tf.train.Saver() + + config = tf.ConfigProto() + custom_op = config.graph_options.rewrite_options.custom_optimizers.add() + custom_op.name = "NpuOptimizer" + custom_op.parameter_map["use_off_line"].b = True + custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") + config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + + with tf.Session(config=config) as sess: + + saver.restore(sess, os.path.join(ckpt_path)) + total_top1_accuracy = 0. + + for i in range(len(Yte)): + top1_a = sess.run([top1_accuracy], feed_dict={x: Xte[i], y: Yte[i]}) + total_top1_accuracy += top1_a[0] + print ('---- Final accuracy ----') + print ('Top-1: {:.4f}'.format(total_top1_accuracy / len(Yte))) + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--CKPT_PATH', default='./checkpoint/cspdarknet53.ckpt', type=str) + parser.add_argument('--CIFAR_PATH', default='./dataset', type=str) + parser.add_argument('--DISPLAY_STEP', default=10, type=int) + args = parser.parse_args() + + BATCH_SIZE = 64 + DISPLAY_STEP = args.DISPLAY_STEP + CIFAR_PATH = args.CIFAR_PATH + CKPT_PATH = args.CKPT_PATH + + test( + BATCH_SIZE, + CIFAR_PATH, + CKPT_PATH) + + + diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/test/.keep b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/test/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/test/train_full_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..f68ce72b2d3aee92c12431822b5071e59c3c331b --- /dev/null +++ b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/test/train_full_1p.sh @@ -0,0 +1,193 @@ +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + 
echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +modelarts_flag=${MODELARTS_MODEL_PATH} +if [ x"${modelarts_flag}" != x ]; +then + echo "running with modelarts..." + print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` + print_log="/home/ma-user/modelarts/log/${print_log_name}" +fi +echo "### get your log here : ${print_log}" + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 基础参数,需要模型审视修改 +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +train_epochs=50 +batch_size=64 + +if [ x"${modelarts_flag}" != x ]; +then + python3.7 ./train.py --data_path=${data_path}/dataset/cifar-10-batches-py --output_path=${output_path} \ + --epochs=${train_epochs} --batch_size=${batch_size} +else + python3.7 ./train.py --data_path=${data_path}/dataset/cifar-10-batches-py --output_path=${output_path} \ + --epochs=${train_epochs} --batch_size=${batch_size} 1>${print_log} 2>&1 +fi + +# 性能相关数据计算 + +#读取iteration/s数据(ITPS),再计算StepTime = 1/ITPS; FPS=BATCH_SIZE * ITPS +#ITPS=`grep "100%" ${print_log} | awk '{print $NF}'| cut -d "i" -f 1 | awk '{sum+=$1} END {print sum/NR}'` +#StepTime=`awk 'BEGIN{printf "%.2f", '1'/'${ITPS}'}'` +#FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'*'${ITPS}'}'` +StepTime=`grep "StepTime" ${print_log} | cut -c 12-` +FPS=`grep "FPS" ${print_log} | cut -c 7-` + +# 精度相关数据计算 +train_accuracy=`grep "Validation accuracy:" ${print_log} | tail -n 1|awk '{print $3}'` + +# 提取所有loss打印信息 +grep "Loss:" ${print_log} | awk -F " " '{print $7}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + + 
+########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." + echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/test/train_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..2bed4fc5b7c1ff62a1f9f067ef3ed45e2c35135e --- /dev/null +++ b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/test/train_performance_1p.sh @@ -0,0 +1,193 @@ +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" + +# 帮助信息,不需要修改 +if [[ $1 
== --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +modelarts_flag=${MODELARTS_MODEL_PATH} +if [ x"${modelarts_flag}" != x ]; +then + echo "running with modelarts..." + print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` + print_log="/home/ma-user/modelarts/log/${print_log_name}" +fi +echo "### get your log here : ${print_log}" + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 基础参数,需要模型审视修改 +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +train_epochs=5 +batch_size=64 + +if [ x"${modelarts_flag}" != x ]; +then + python3.7 ./train.py --data_path=${data_path}/dataset/cifar-10-batches-py --output_path=${output_path} \ + --epochs=${train_epochs} --batch_size=${batch_size} +else + python3.7 ./train.py --data_path=${data_path}/dataset/cifar-10-batches-py --output_path=${output_path} \ + --epochs=${train_epochs} --batch_size=${batch_size} 1>${print_log} 2>&1 +fi + +# 性能相关数据计算 + +#读取iteration/s数据(ITPS),再计算StepTime = 1/ITPS; FPS=BATCH_SIZE * ITPS +#ITPS=`grep "100%" ${print_log} | awk '{print $NF}'| cut -d "i" -f 1 | awk '{sum+=$1} END {print sum/NR}'` +#StepTime=`awk 'BEGIN{printf "%.2f", '1'/'${ITPS}'}'` +#FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'*'${ITPS}'}'` +StepTime=`grep "StepTime" ${print_log} | cut -c 12-` +FPS=`grep "FPS" ${print_log} | cut -c 7-` + +# 精度相关数据计算 +train_accuracy=`grep "Validation accuracy:" ${print_log} | tail -n 1|awk '{print $3}'` + +# 提取所有loss打印信息 +grep "Loss:" ${print_log} | awk -F " " '{print $7}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + + 
+########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." + echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/train.py b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/train.py new file mode 100644 index 0000000000000000000000000000000000000000..02cde5dae9b4e35d756df98718a7857efbfa81f8 --- /dev/null +++ b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/train.py @@ -0,0 +1,203 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+import time
+from models import cspdarknet53
+import tensorflow as tf
+from utils import load_cifar, random_batch, format_time
+import numpy as np
+import argparse
+
+def train(
+        epochs,
+        batch_size,
+        learning_rate,
+        momentum,
+        lmbda,
+        resume,
+        cifar_path,
+        display_step,
+        val_epoch,
+        ckpt_path,
+        summary_path):
+
+    Xtr, Ytr, Xte, Yte = load_cifar(cifar_path)
+    Xtr, Ytr = random_batch(Xtr, Ytr, batch_size)
+    Xte, Yte = random_batch(Xte, Yte, batch_size)
+    ts_size = len(Ytr)
+    num_batches = int(float(ts_size) / batch_size)
+    x = tf.placeholder(tf.float32, [None, 32, 32, 3])
+    y = tf.placeholder(tf.float32, [None, 10])
+
+    lr = tf.placeholder(tf.float32)
+
+    pred = cspdarknet53.classifier(x)
+
+    # cross-entropy and weight decay
+    with tf.name_scope('cross_entropy'):
+        cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y, name='cross-entropy'))
+
+    with tf.name_scope('l2_loss'):
+        l2_loss = tf.reduce_sum(lmbda * tf.stack([tf.nn.l2_loss(v) for v in tf.get_collection('weights')]))
+        tf.summary.scalar('l2_loss', l2_loss)
+
+    with tf.name_scope('loss'):
+        loss = cross_entropy + l2_loss
+        tf.summary.scalar('loss', loss)
+
+    # accuracy
+    with tf.name_scope('accuracy'):
+        correct = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
+        accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
+        tf.summary.scalar('accuracy', accuracy)
+
+    global_step = tf.Variable(0, trainable=False)
+
+    # momentum optimizer
+    with tf.name_scope('optimizer'):
+        optimizer = tf.train.MomentumOptimizer(learning_rate=lr, momentum=momentum).minimize(loss, global_step=global_step)
+
+    # merge summaries to write them to file
+    merged = tf.summary.merge_all()
+
+    # checkpoint saver
+    saver = tf.train.Saver()
+    coord = tf.train.Coordinator()
+    init = tf.global_variables_initializer()
+
+    config = tf.ConfigProto()
+    custom_op = config.graph_options.rewrite_options.custom_optimizers.add()
+    custom_op.name = "NpuOptimizer"
+    custom_op.parameter_map["use_off_line"].b = True
+    custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
+    # config.graph_options.rewrite_options.remapping = RewriterConfig.OFF
+
+    with tf.Session(config=config) as sess:
+        if resume:
+            saver.restore(sess, ckpt_path)
+        else:
+            sess.run(init)
+        train_writer = tf.summary.FileWriter(os.path.join(summary_path, 'train'), sess.graph)
+
+        start_time = time.time()
+        num_steps = 0
+        train_time = 0
+        warmup_time = 0
+
+        for e in range(0, epochs):
+            if e == 1:
+                warmup_time = train_time
+            for im, l in zip(Xtr, Ytr):
+                iter_start_time = time.time()
+                summary_str, _, step = sess.run([merged, optimizer, global_step], feed_dict={x: im, y: l, lr: learning_rate})
+                iter_end_time = time.time()
+                train_time += iter_end_time - iter_start_time
+
+                train_writer.add_summary(summary_str, step)
+                num_steps = step
+
+                # display current training information
+                if step % display_step == 0:
+                    c, a = sess.run([loss, accuracy], feed_dict={x: im, y: l, lr: learning_rate})
+                    print ('Epoch: {:03d} Step/Batch: {:09d} --- Loss: {:.7f} Training accuracy: {:.4f}'.format(e, step, c, a))
+                    int_time = time.time()
+                    print ('Elapsed time: {}'.format(format_time(int_time - start_time)))
+
+            # run validation and report accuracy
+            if ((e+1) % val_epoch == 0) or (e+1 == epochs):
+                print('Epoch {}, validating ....'.format(e))
+                v_a = []
+                for i in range(len(Yte)):
+                    v_a.append(sess.run(accuracy, feed_dict={x: Xte[i], y: Yte[i], lr: learning_rate}))
+                v_a = np.mean(v_a)
+                # intermediate time
+                int_time = time.time()
+                print ('Elapsed time: {}'.format(format_time(int_time - start_time)))
+                print ('Validation accuracy: {:.04f}'.format(v_a))
+
+                save_path = saver.save(sess, os.path.join(ckpt_path, 'cspdarknet53.ckpt'))
+                print('checkpoint saved in file: %s' % save_path)
+
+
+        end_time = time.time()
+        print ('Elapsed time: {}'.format(format_time(end_time - start_time)))
+        StepTime = (train_time - warmup_time)/(num_steps*(epochs-1)/epochs)
+        print('StepTime = {}'.format(StepTime))
+        FPS = batch_size / StepTime
+        print('FPS = {}'.format(FPS))
+        save_path = saver.save(sess, os.path.join(ckpt_path, 'cspdarknet53.ckpt'))
+        print('checkpoint saved in file: %s' % save_path)
+
+        coord.request_stop()
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--MOMENTUM', default=0.9, type=float)
+    parser.add_argument('--LAMBDA', default=5e-04, type=float)
+    parser.add_argument('--LEARNING_RATE', default=1e-03, type=float)
+    parser.add_argument('--epochs', default=5, type=int)
+    parser.add_argument('--batch_size', default=64, type=int)
+    parser.add_argument('--SUMMARY', default='logs', type=str)
+    parser.add_argument('--output_path', default='./checkpoint', type=str)
+    parser.add_argument('--data_path', default='./dataset', type=str)
+    parser.add_argument('--DISPLAY_STEP', default=1000, type=int)
+    parser.add_argument('--VAL_EPOCH', default=5, type=int)
+    parser.add_argument('--resume', default=False, type=bool)
+
+    args = parser.parse_args()
+    MOMENTUM = args.MOMENTUM
+    LAMBDA = args.LAMBDA # for weight decay
+    LEARNING_RATE = args.LEARNING_RATE
+    EPOCHS = args.epochs
+    BATCH_SIZE = args.batch_size
+    CKPT_PATH = args.output_path
+    if not os.path.exists(CKPT_PATH):
+        os.makedirs(CKPT_PATH)
+    SUMMARY = args.SUMMARY
+    if not os.path.exists(SUMMARY):
+        os.makedirs(SUMMARY)
+
+    CIFAR_PATH = args.data_path
+    DISPLAY_STEP = args.DISPLAY_STEP
+    VAL_EPOCH = args.VAL_EPOCH
+    resume = args.resume
+
+    train(
+        EPOCHS,
+        BATCH_SIZE,
+        LEARNING_RATE,
+        MOMENTUM,
+        LAMBDA,
+        resume,
+        CIFAR_PATH,
+        DISPLAY_STEP,
+        VAL_EPOCH,
+        CKPT_PATH,
+        SUMMARY)
+
diff --git a/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/utils.py b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..e09ad1bc99baaa9e45626358d59d8629c464504b
--- /dev/null
+++ b/TensorFlow/contrib/cv/CSPNet_ID0840_for_TensorFlow/utils.py
@@ -0,0 +1,119 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from npu_bridge.npu_init import * +import os +import tensorflow as tf +import numpy as np +import pickle + +def weight(shape, name): + initial = tf.truncated_normal(shape, stddev=0.01) + w = tf.Variable(initial, name=name) + tf.add_to_collection('weights', w) + return w + +def bias(value, shape, name): + initial = tf.constant(value, shape=shape) + return tf.Variable(initial, name=name) + +def conv2d(x, W, stride, padding): + return tf.nn.conv2d(x, W, strides=[1, stride[0], stride[1], 1], padding=padding) + +def max_pool2d(x, kernel, stride, padding): + return tf.nn.max_pool(x, ksize=kernel, strides=stride, padding=padding) + +def lrn(x, depth_radius, bias, alpha, beta): + return tf.nn.local_response_normalization(x, depth_radius, bias, alpha, beta) + +def relu(x): + return tf.nn.relu(x) + +def batch_norm(x): + epsilon = 1e-3 + batch_mean, batch_var = tf.nn.moments(x, [0]) + return tf.nn.batch_normalization(x, batch_mean, batch_var, None, None, epsilon) + +def onehot(index): + """ It creates a one-hot vector with a 1.0 in + position represented by index + """ + onehot = np.zeros(10) + onehot[index] = 1.0 + return onehot + +def unpickle(file): + with open(file, 'rb') as fo: + dict = pickle.load(fo, encoding='bytes') + X = dict[b'data'] + Y = dict[b'labels'] + X = X.reshape(10000, 3, 32, 32).transpose(0, 2, 3, 1).astype("float") + Y = np.array(Y) + return X,Y + +def load_cifar(ROOT): + xs = [] + ys = [] + for b in range(1,6): + f = os.path.join(ROOT,'data_batch_%d' % b) + X ,Y = unpickle(f) + xs.append(X) + ys.append(Y) + Xtr = np.concatenate(xs) + Ytr = np.concatenate(ys) + Xte,Yte = unpickle(os.path.join(ROOT,'test_batch')) + Yte = [onehot(x) for x in Yte] + Ytr = [onehot(x) for x in Ytr] + return Xtr,Ytr,Xte,Yte + + +def random_batch(x, y, batch_size): + sz = len(x) + idx = np.arange(0, sz) + np.random.shuffle(idx) + idx = idx[:sz-sz % batch_size] + x = np.split(x[idx], sz // batch_size) + y = np.split(np.array(y)[idx], sz // batch_size) + return x, y + +def format_time(time): + """ It formats a datetime to print it + + Args: + time: datetime + + Returns: + a formatted string representing time + """ + m, s = divmod(time, 60) + h, m = divmod(m, 60) + d, h = divmod(h, 24) + return ('{:02d}d {:02d}h {:02d}m {:02d}s').format(int(d), int(h), int(m), int(s)) + + + diff --git a/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/README.md b/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/README.md index 
6c6554da8ac19fee6a60fb59df4041c5e5747067..32b8c9a40d8be445ad37bb2e87a8047d7b2da33c 100644 --- a/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/README.md +++ b/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/README.md @@ -43,9 +43,8 @@ Cascade模型是论文“A Deep Cascade of Convolutional Neural Networks for Dyn # 训练性能 | | GPU | NPU | |----------------|------|--------| -| Epoch | 2 min | 6 min | -* 训练性能单个epoch耗时NPU约为GPU3倍,提交ISSUE,分析后主要原因为: 网络涉及大量FFT与IFFT操作,该算子NPU训练时不支持,训练时速度无法提高 -* 但离线推理时,推理性能不受影响,离线推理速度: 重建一组数据耗时0.52s +| 平均单Step耗时 | ~7.0E-4 s | ~7.7E-4 s | +* 训练性能平均单个Step耗时NPU略高于GPU,提交ISSUE,分析后主要原因为: 网络涉及大量FFT与IFFT操作,该算子NPU尚不支持,训练时速度无法提高 # 离线推理命令参考 * ./out/msame --model="cascade_om.om" --input="./feature/,./mask/" --output="./" --outfmt BIN # pb转om命令参考 diff --git a/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/modelzoo_level.txt b/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/modelzoo_level.txt index bdba79da80909f4dea1018c9d762586ae291acaf..0c3c740194ecb07da4ae0f63a6c29a50a8b67391 100644 --- a/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/modelzoo_level.txt +++ b/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/modelzoo_level.txt @@ -3,4 +3,4 @@ PerfStatus:POK PrecisionStatus:OK GPUStatus:OK NPUMigrationStatus:POK -AutoTune:POK \ No newline at end of file +AutoTune:OK \ No newline at end of file diff --git a/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/test/train_performance_1p.sh index 9d0ea3d1788eb06088aa53eb867458d760f087bd..007829f090319b5bad6af533da708dcba708d417 100644 --- a/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/test/train_performance_1p.sh +++ b/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/test/train_performance_1p.sh @@ -96,6 +96,7 @@ start_time=$(date +%s) #进入训练脚本目录,需要模型审视修改 cd $cur_path/ + for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); do #设置环境变量,不需要修改 @@ -128,16 +129,16 @@ do --data_url ${data_path} \ --batch_size 16 \ --image_size 256 \ - --num_epoch 4 \ + --num_epoch 2 \ --data_train_dir ${data_path}/chest_train_acc3.hdf5 \ --save_ckpt_Dir ./save_ckpt > ${cur_path}test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 - python3 test.py \ + python3 test.py \ --data_url ${data_path} \ --data_test_dir ${data_path}/chest_test_acc3.hdf5 \ --batch_size 16 \ --image_size 256 \ - --num_epoch 4 \ + --num_epoch 2 \ --save_ckpt_Dir ./save_ckpt/ > ${cur_path}test/output/${ASCEND_DEVICE_ID}/val_${ASCEND_DEVICE_ID}.log 2>&1 @@ -151,15 +152,12 @@ e2e_time=$(( $end_time - $start_time )) #结果打印,不需要修改 echo "------------------ Final result ------------------" #输出性能FPS,需要模型审视修改 -grep "steptime:" $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log |awk '{print $7}' > $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}_traintime.txt -cat $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}_traintime.txt |awk '{sum+=$1} END {print "Avg = ",sum/NR}' > $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}_traintime_avg.txt -TrainingTime=`grep 'Avg' $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}_traintime_avg.txt |awk '{print $3}'` - +StepTime=`grep "StepTime" ${cur_path}test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk '{print $7}' | tail -n +2 | awk '{sum+=$1} END {print sum/NR}'` #打印,不需要修改 #echo "Final Performance image/sec : $FPS" #输出训练精度,需要模型审视修改 -train_acc=`grep "Avg PSNR" 
$cur_path/test/output/${ASCEND_DEVICE_ID}/val_${ASCEND_DEVICE_ID}.log|awk '{print $3}'` +#train_acc=`grep "Avg PSNR" $cur_path/test/output/${ASCEND_DEVICE_ID}/val_${ASCEND_DEVICE_ID}.log|awk '{print $3}'` #打印,不需要修改 #echo "Final Train Accuracy : ${train_acc}" #echo "E2E Training Duration sec : $e2e_time" @@ -172,9 +170,11 @@ CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' ##获取性能数据,不需要修改 #吞吐量 -ActualFPS=`awk 'BEGIN{printf "%.3f\n", 16/'${TrainingTime}'}'` -#单迭代训练时长 +ActualFPS=`awk 'BEGIN{printf "%.3f\n", 16/'${StepTime}'}'` +echo "Final Performance images/sec : $ActualFPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" #从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 grep 'AvgLoss:' $cur_path/test/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print $5}' > $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt @@ -189,6 +189,6 @@ echo "BatchSize = ${BatchSize}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${Cas echo "DeviceType = ${DeviceType}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "CaseName = ${CaseName}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualFPS = ${ActualFPS}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainingTime = ${TrainingTime}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualLoss = ${ActualLoss}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/train.py b/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/train.py index d60a51242020fb9fcdaec90a3df21fc3696db649..f6adbc84f1e010f957535c2f2e567d49bf3e369b 100644 --- a/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/train.py +++ b/TensorFlow/contrib/cv/CascadeNet_ID2121_for_TensorFlow/train.py @@ -38,37 +38,53 @@ import os import time from models.pre_input import get_right_images import models.model_tf as mm -#import moxing as mx -from npu_bridge.npu_init import RewriterConfig +# import moxing as mx +from npu_bridge.npu_init import NPULossScaleOptimizer, npu_config_proto, RewriterConfig, \ + ExponentialUpdateLossScaleManager, FixedLossScaleManager + # if not work, please use import * # from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig - flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_integer('batch_size', 16, 'Number of samples per batch') flags.DEFINE_integer('image_size', 256, 'Image sample size in pixels') flags.DEFINE_integer('random_seed', 0, 'Seed used to initializer rng') -flags.DEFINE_integer('num_epoch', 500, 'number of epoch') +flags.DEFINE_integer('num_epoch', 5, 'number of epoch') flags.DEFINE_integer('checkpoint_period', 10, 'save the model every time') +flags.DEFINE_integer( + 'Dn', 11, ' the number of the convolution layers in one residual block') +flags.DEFINE_integer('Dc', 5, 'the number of the data consistency layers') +flags.DEFINE_string('model_name', 'dc', 'model name') +flags.DEFINE_string('data_url', 'obs://imagenet2012-lp/cascade_re/data/', + 'the path of train data in obs') +flags.DEFINE_string( + 'data_train_dir', '/home/ma-user/modelarts/inputs/data_url_0/chest_train_acc3.hdf5', + 
'the path of train data') flags.DEFINE_float('learning_rate', 1e-3, 'initial learning rate') flags.DEFINE_bool('continue_training', False, 'continue training') -flags.DEFINE_string('data_url', 'obs://imagenet2012-lp/cascade_re/data/','the path of train data in obs') -flags.DEFINE_string('data_train_dir', '/home/ma-user/modelarts/inputs/data_url_0/chest_train_acc3.hdf5', - 'the path of train data') -flags.DEFINE_string('save_ckpt_Dir','./', 'the path of train data') -flags.DEFINE_integer('Dn', 10, ' the number of the convolution layers in one residual block') -flags.DEFINE_integer('Dc', 7, 'the number of the data consistency layers') +flags.DEFINE_string( + 'train_url', 'obs://imagenet2012-lp/cascade_log/', 'the path of train log in obs') +flags.DEFINE_string('last_checkpoint_dir', + 'obs://imagenet2012-lp/cascade_log/MA-cascade_modelarts-10-19-15-26/output/V0018', + 'the path of train data') +flags.DEFINE_string('last_checkpoint_dir_name', + '/D11-C5-25-19/', 'the path of train data') +flags.DEFINE_string('save_ckpt_Dir', None, 'checkpoint save path') print('***************************************************') start_time = time.time() # creat checkpoint save path - -directory = FLAGS.save_ckpt_Dir +# saveDir = '/cache/saveModels' +saveDir = FLAGS.save_ckpt_Dir +cwd = os.getcwd() +directory = saveDir + '/' + 'D' + \ + str(FLAGS.Dn) + '-C' + str(FLAGS.Dc) + \ + '-' + datetime.now().strftime("%d-%H") if not os.path.exists(directory): os.makedirs(directory) - +sessFileName = directory + '/model' image_size = FLAGS.image_size # net architecture K = FLAGS.Dc @@ -78,9 +94,10 @@ tf.reset_default_graph() config = tf.ConfigProto() custom_op = config.graph_options.rewrite_options.custom_optimizers.add() custom_op.name = "NpuOptimizer" +custom_op.parameter_map["use_off_line"].b = True # set precision mode allow_fp32_to_fp16 allow_mix_precision custom_op.parameter_map['precision_mode'].s = tf.compat.as_bytes( - 'allow_fp32_to_fp16') + 'allow_mix_precision') # # dump path # custom_op.parameter_map['dump_path'].s = tf.compat.as_bytes(saveDir + '/') # # set dump debug @@ -102,14 +119,8 @@ feature = tf.placeholder(tf.float32, shape=( out = mm.makeModel(feature, mask, train=False, nLayers=numlayers, K=K) predTst = out['dc' + str(K)] predTst = tf.identity(predTst, name='predTst') +sessFileNameTst = directory + '/modelTst' -sessFileName = os.path.join(directory + '/model') -if not os.path.exists(sessFileName): - os.makedirs(sessFileName) -sessFileNameTst = os.path.join(directory + '/modelTst') -if not os.path.exists(sessFileNameTst): - os.makedirs(sessFileNameTst) - saver = tf.train.Saver() with tf.Session(config=config) as sess: sess.run(tf.global_variables_initializer()) @@ -121,9 +132,9 @@ print('testing model saved:' + saveFile) # mx.file.copy_parallel(FLAGS.data_url, '/cache/data/') # copy to modelarts path_train = FLAGS.data_train_dir feature_trn, label_trn, mask_trn = get_right_images(path_train) -#if FLAGS.continue_training: - #mx.file.copy_parallel(FLAGS.last_checkpoint_dir + FLAGS.last_checkpoint_dir_name, - #saveDir + FLAGS.last_checkpoint_dir_name) +# if FLAGS.continue_training: + # mx.file.copy_parallel(FLAGS.last_checkpoint_dir + FLAGS.last_checkpoint_dir_name, + # saveDir + FLAGS.last_checkpoint_dir_name) tf.reset_default_graph() rows = image_size @@ -163,11 +174,13 @@ global_step = tf.Variable( decayed_lr = tf.train.exponential_decay( FLAGS.learning_rate, global_step, 1000, 0.98, staircase=True) # opti = tf.train.AdamOptimizer(learning_rate=decayed_lr, name='optimizer') -opti = 
tf.train.GradientDescentOptimizer(decayed_lr, - name='optimizer') +loss_scale_manager = ExponentialUpdateLossScaleManager(init_loss_scale=2 ** 32, incr_every_n_steps=100, + decr_every_n_nan_or_inf=2, decr_ratio=0.5) +opti_tmp = tf.train.GradientDescentOptimizer(decayed_lr, + name='optimizer') # loss_scale_manager = ExponentialUpdateLossScaleManager(init_loss_scale=2**32, # incr_every_n_steps=1000, decr_every_n_nan_or_inf=2, decr_ratio=0.5) -# opti = NPULossScaleOptimizer(opt, loss_scale_manager) +opti = NPULossScaleOptimizer(opti_tmp, loss_scale_manager) update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) # with tf.control_dependencies(update_ops): @@ -213,14 +226,16 @@ with tf.Session(config=config) as sess: if np.remainder(step + 1, nBatch) == 0: ep += 1 avgTrnLoss = np.mean(totalLoss) / nTrn - start_time = time.time() + step_start_time = time.time() summary = sess.run(merged, feed_dict={lossT: avgTrnLoss}) writer.add_summary(summary, ep) saveLoss.append(avgTrnLoss) totalLoss = [] - step_time=time.time()-start_time + step_time = time.time() - step_start_time + scale_value = sess.run([loss_scale_manager.get_loss_scale()]) print(datetime.now().strftime("%H:%M"), - '---Epoch: ', ep, '---AvgLoss: ', avgTrnLoss, '---steptime:', step_time) + '---Epoch: ', ep, '---AvgLoss: ', avgTrnLoss, '---StepTime:', step_time, '---ScaleValue', + scale_value) # todo if np.remainder(ep, FLAGS.checkpoint_period) == 0: savedfile = saver.save( @@ -233,7 +248,7 @@ with tf.Session(config=config) as sess: end_time = time.time() print('Training completed in minutes', ((end_time - start_time) / 60)) print('training completed at', datetime.now().strftime('%d-%b-%Y %I:%M%p')) -print('****************************************************') +# print('****************************************************') # copy results to obs -#mx.file.copy_parallel('/cache/saveModels', FLAGS.train_url) -#print('copy saved model to obs.') +# mx.file.copy_parallel('/cache/saveModels', FLAGS.train_url) +# print('copy saved model to obs.') \ No newline at end of file diff --git a/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/config.py b/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/config.py index d22387a9eae741f6a1ee98f8d9af66431883335d..b35cfa513ac24da5067738f33bc2b38f77b1cd1b 100644 --- a/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/config.py +++ b/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/config.py @@ -53,6 +53,8 @@ def make_config(FLAGS): custom_op.name = "NpuOptimizer" # custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + custom_op.parameter_map["enable_data_pre_proc"].b = True + custom_op.parameter_map["iterations_per_loop"].i = 10 # Performance Profiling # refer to link:https://support.huaweicloud.com/Development-tg-cann202training1/atlasprofilingtrain_16_0003.html if FLAGS.profiling: diff --git a/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/model.py b/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/model.py index fa16011b8849348f9f19368ac0fce50bff696a9d..641faab519a72afc1bab7135e8da786c99fee4e2 100644 --- a/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/model.py +++ b/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/model.py @@ -31,7 +31,7 @@ import ops from reader import Reader from discriminator import Discriminator from generator import Generator - +from utils import ImagePool REAL_LABEL = 0.9 @@ -83,10 +83,10 @@ class CycleGAN: self.D_X = Discriminator('D_X', 
self.is_training, norm=norm, use_sigmoid=use_sigmoid)
- self.fake_x = tf.placeholder(tf.float32,
- shape=[batch_size, image_size, image_size, 3])
- self.fake_y = tf.placeholder(tf.float32,
- shape=[batch_size, image_size, image_size, 3])
+ #self.fake_x = tf.placeholder(tf.float32,
+ #shape=[batch_size, image_size, image_size, 3])
+ #self.fake_y = tf.placeholder(tf.float32,
+ #shape=[batch_size, image_size, image_size, 3])
 def model(self):
 X_reader = Reader(self.X_train_file, name='X',
@@ -94,8 +94,13 @@ class CycleGAN:
 Y_reader = Reader(self.Y_train_file, name='Y',
 image_size=self.image_size, batch_size=self.batch_size)
- x = X_reader.feed()
- y = Y_reader.feed()
+ #x = X_reader.feed()
+ #y = Y_reader.feed()
+ x, x_initializer = X_reader.feed()
+ y, y_initializer = Y_reader.feed()
+ FLAGS = tf.flags.FLAGS
+ fake_Y_pool = ImagePool(FLAGS.pool_size)
+ fake_X_pool = ImagePool(FLAGS.pool_size)
 cycle_loss = self.cycle_consistency_loss(self.G, self.F, x, y)
@@ -103,13 +108,13 @@
 fake_y = self.G(x)
 G_gan_loss = self.generator_loss(self.D_Y, fake_y, use_lsgan=self.use_lsgan)
 G_loss = G_gan_loss + cycle_loss
- D_Y_loss = self.discriminator_loss(self.D_Y, y, self.fake_y, use_lsgan=self.use_lsgan)
+ D_Y_loss = self.discriminator_loss(self.D_Y, y, fake_Y_pool.query(fake_y), use_lsgan=self.use_lsgan)
 # Y -> X
 fake_x = self.F(y)
 F_gan_loss = self.generator_loss(self.D_X, fake_x, use_lsgan=self.use_lsgan)
 F_loss = F_gan_loss + cycle_loss
- D_X_loss = self.discriminator_loss(self.D_X, x, self.fake_x, use_lsgan=self.use_lsgan)
+ D_X_loss = self.discriminator_loss(self.D_X, x, fake_X_pool.query(fake_x), use_lsgan=self.use_lsgan)
 # summary
 tf.summary.histogram('D_Y/true', self.D_Y(y))
@@ -123,7 +128,7 @@
 tf.summary.scalar('loss/D_X', D_X_loss)
 tf.summary.scalar('loss/cycle', cycle_loss)
- return G_loss, D_Y_loss, F_loss, D_X_loss, fake_y, fake_x
+ return G_loss, D_Y_loss, F_loss, D_X_loss, fake_y, fake_x, y_initializer, x_initializer
 def optimize(self, G_loss, D_Y_loss, F_loss, D_X_loss):
 def make_optimizer(loss, variables, name='Adam'):
diff --git a/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/reader.py b/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/reader.py
index fe6adb31cf43b35cbfa567334aa6648f520bb629..b985ecb18445112b30dfd4dafbc9cfc6a3cac3a3 100644
--- a/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/reader.py
+++ b/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/reader.py
@@ -56,9 +56,10 @@ class Reader():
 dataset = tf.data.TFRecordDataset(self.tfrecords_file, buffer_size=256 << 20)
 data_ = dataset.map(self.parse_function, num_parallel_calls=tf.data.experimental.AUTOTUNE).shuffle(
 self.min_queue_examples + 3 * self.batch_size).repeat().batch(self.batch_size, drop_remainder=True)
- data = data_.make_one_shot_iterator()
- return data.get_next()
-
+ #data = data_.make_one_shot_iterator()
+ #return data.get_next()
+ data = data_.make_initializable_iterator()
+ return data.get_next(), data.initializer
 def parse_function(self, example_proto):
 dics = { 'image/file_name': tf.FixedLenFeature([], tf.string),
diff --git a/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/train.py b/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/train.py
index d974f4faa53bf6c1fac3aba4ae3e9760bd51322c..f7234b15fb35346651c2e92abae78f45d90f42ed 100644
--- a/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/train.py
+++ b/TensorFlow/contrib/cv/CycleGAN_ID0716_for_TensorFlow/train.py
@@ -97,7 +97,7 @@ def train():
 beta1=FLAGS.beta1,
 ngf=FLAGS.ngf
 )
- G_loss, 
D_Y_loss, F_loss, D_X_loss, fake_y, fake_x = cycle_gan.model() + G_loss, D_Y_loss, F_loss, D_X_loss, fake_y, fake_x , y_initializer ,x_initializer= cycle_gan.model() optimizers = cycle_gan.optimize(G_loss, D_Y_loss, F_loss, D_X_loss) summary_op = tf.summary.merge_all() @@ -116,20 +116,24 @@ def train(): else: sess.run(tf.global_variables_initializer()) step = 0 + sess.run(y_initializer) + sess.run(x_initializer) fake_Y_pool = ImagePool(FLAGS.pool_size) fake_X_pool = ImagePool(FLAGS.pool_size) + train_op = util.set_iteration_per_loop(sess, optimizers, 10) + while step <= FLAGS.train_epochs * FLAGS.step_per_epoch: - fake_y_val, fake_x_val = sess.run([fake_y, fake_x]) + #fake_y_val, fake_x_val = sess.run([fake_y, fake_x]) # train start_time = time.time() _, G_loss_val, D_Y_loss_val, F_loss_val, D_X_loss_val, summary = ( sess.run( - [optimizers, G_loss, D_Y_loss, F_loss, D_X_loss, summary_op], - feed_dict={cycle_gan.fake_y: fake_Y_pool.query(fake_y_val), - cycle_gan.fake_x: fake_X_pool.query(fake_x_val)} + [optimizers, G_loss, D_Y_loss, F_loss, D_X_loss, summary_op] + #feed_dict={cycle_gan.fake_y: fake_Y_pool.query(fake_y_val), + #cycle_gan.fake_x: fake_X_pool.query(fake_x_val)} ) ) train_writer.add_summary(summary, step) @@ -141,7 +145,7 @@ def train(): logging.info(' D_Y_loss : {}'.format(D_Y_loss_val)) logging.info(' F_loss : {}'.format(F_loss_val)) logging.info(' D_X_loss : {}'.format(D_X_loss_val)) - logging.info(' Perf : {}'.format(time.time()-start_time)) + logging.info(' Perf : {}'.format((time.time()-start_time)/10)) if step % (10 * FLAGS.step_per_epoch) == 0: save_path = saver.save(sess, checkpoints_dir + "/model.ckpt", global_step=step) diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/README.md b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/README.md index e729540505203c94f4fb1b63c3e78231ad10de62..a641c904ff6d25cabf38fc62e305d485ed8387ff 100644 --- a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/README.md +++ b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/README.md @@ -110,14 +110,16 @@ To train: 1. 数据集准备。 - a.请用户自行准备好数据集,包含训练集和验证集两部分,数据集包括Mnist等,包含train和 val两部分。以Mnist数据集为例。 + a.请用户自行准备好数据集,包含训练集和测试集两部分,数据集包括BSDS300,包含train和test两部分 - b.上传数据压缩包到训练环境上,无需解压 + b.上传数据压缩包到训练环境上,解压 - ├── /datasets/imagenet - │ ├──imagenet - │ ├──Berkeley + ├── /src + │ ├──BSDS300 + │ │ ├──images + │ │ │ ├──train + │ │ │ ├──test ``` ``` @@ -126,6 +128,7 @@ To train: ``` ``` ├── src +│ ├──BSDS300/ //数据集 │ ├──config.py //训练定义 │ ├──DAE.py //模型定义 │ ├──DAE_model.py //重载模型 @@ -133,6 +136,8 @@ To train: │ ├──DMSPDeblur.py //先验去噪 │ ├──network.py //其他功能函数 │ ├──ops.py //算子定义 +├── scripts +│ ├──train_dmsp.sh //训练脚本 ``` 2. 模型训练。 @@ -151,86 +156,37 @@ $ python ./src/demo_DMSP.py 通过“模型训练”中的训练指令启动单卡训练 ``` -2022-02-23 22:32:03.855277: W tensorflow/core/platform/profile_utils/cpu_utils.cc:98] Failed to find bogomips in /proc/cpuinfo; cannot determine CPU frequency -2022-02-23 22:32:03.864021: I tensorflow/compiler/xla/service/service.cc:168] XLA service 0xaaaade38ad00 initialized for platform Host (this does not guarantee that XLA will be used). Devices: -2022-02-23 22:32:03.864068: I tensorflow/compiler/xla/service/service.cc:176] StreamExecutor device (0): Host, Default Version -============start non-blind deblurring on Berkeley segmentation dataset============== -WARNING:tensorflow:From /home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_model.py:37: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead. 
- -WARNING:tensorflow:From /home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_model.py:37: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead. - -WARNING:tensorflow:From /home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_model.py:38: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead. - -WARNING:tensorflow:From /home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_model.py:38: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead. - -WARNING:tensorflow:From /home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_model.py:42: The name tf.variable_scope is deprecated. Please use tf.compat.v1.variable_scope instead. - -WARNING:tensorflow:From /home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_model.py:42: The name tf.variable_scope is deprecated. Please use tf.compat.v1.variable_scope instead. - -WARNING:tensorflow:From /home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_model.py:7: calling Constant.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version. -Instructions for updating: -Call initializer instance with the dtype argument instead of passing it to the constructor -WARNING:tensorflow:From /home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_model.py:7: calling Constant.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version. -Instructions for updating: -Call initializer instance with the dtype argument instead of passing it to the constructor -WARNING:tensorflow:From /home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_model.py:8: The name tf.get_variable is deprecated. Please use tf.compat.v1.get_variable instead. - -WARNING:tensorflow:From /home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_model.py:8: The name tf.get_variable is deprecated. Please use tf.compat.v1.get_variable instead. - -====================dae================ -{'layer0': , 'layer1': , 'layer2': , 'layer3': , 'layer4': , 'layer5': , 'layer6': , 'layer7': , 'layer8': , 'layer9': , 'layer10': , 'layer11': , 'layer12': , 'layer13': , 'layer14': , 'layer15': , 'layer16': , 'layer17': , 'layer18': , 'layer19': , 'layer20': , 'layer21': , 'layer22': , 'layer23': , 'layer24': , 'layer25': , 'layer26': , 'layer27': , 'layer28': , 'layer29': , 'layer30': , 'layer31': , 'layer32': , 'layer33': , 'layer34': , 'layer35': , 'layer36': , 'layer37': , 'layer38': } -====================dae output========= -Tensor("strided_slice_1:0", shape=(?, ?, ?, 3), dtype=float32) -WARNING:tensorflow:From /home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_model.py:51: The name tf.global_variables_initializer is deprecated. Please use tf.compat.v1.global_variables_initializer instead. - -WARNING:tensorflow:From /home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_model.py:51: The name tf.global_variables_initializer is deprecated. Please use tf.compat.v1.global_variables_initializer instead. 
- -2022-02-23 22:32:14.897055: I /home/jenkins/agent/workspace/Compile_GraphEngine_Centos_ARM/tensorflow/tf_adapter/kernels/geop_npu.cc:694] The model has been compiled on the Ascend AI processor, current graph id is:1 -Initialized with PSNR: 17.78958876047073 -2022-02-23 22:32:40.320450: I /home/jenkins/agent/workspace/Compile_GraphEngine_Centos_ARM/tensorflow/tf_adapter/kernels/geop_npu.cc:694] The model has been compiled on the Ascend AI processor, current graph id is:11 -Finished psnr = 27.65 (1.5 examples/sec; 0.646 sec/batch) -Initialized with PSNR: 21.71935774044799 -Finished psnr = 29.31 (1.4 examples/sec; 0.697 sec/batch) -Initialized with PSNR: 12.418238314349477 -Finished psnr = 21.70 (1.4 examples/sec; 0.704 sec/batch) -Initialized with PSNR: 17.761670521195924 -Finished psnr = 27.69 (1.5 examples/sec; 0.672 sec/batch) -Initialized with PSNR: 23.028104067351563 -Finished psnr = 32.53 (1.4 examples/sec; 0.704 sec/batch) -Initialized with PSNR: 15.075084013742561 -Finished psnr = 27.08 (1.4 examples/sec; 0.703 sec/batch) -Initialized with PSNR: 17.302924438930848 -Finished psnr = 24.16 (1.2 examples/sec; 0.824 sec/batch) -Initialized with PSNR: 17.10059787725738 -Finished psnr = 25.20 (1.3 examples/sec; 0.751 sec/batch) -Initialized with PSNR: 16.07467978560146 -Finished psnr = 25.66 (1.4 examples/sec; 0.712 sec/batch) -Initialized with PSNR: 15.523285818788821 -Finished psnr = 25.79 (1.4 examples/sec; 0.718 sec/batch) -Initialized with PSNR: 20.173765682212093 -Finished psnr = 33.91 (1.5 examples/sec; 0.688 sec/batch) -Initialized with PSNR: 17.809478987327715 -Finished psnr = 29.48 (1.6 examples/sec; 0.640 sec/batch) -Initialized with PSNR: 18.0941733503732 -Finished psnr = 33.18 (1.4 examples/sec; 0.702 sec/batch) -Initialized with PSNR: 17.11170706335929 -Finished psnr = 24.92 (1.4 examples/sec; 0.705 sec/batch) -Initialized with PSNR: 16.409065638468267 -Finished psnr = 29.45 (1.4 examples/sec; 0.727 sec/batch) -Initialized with PSNR: 16.58872443970573 -Finished psnr = 26.77 (1.4 examples/sec; 0.702 sec/batch) -Initialized with PSNR: 16.632015946049982 -Finished psnr = 28.54 (1.2 examples/sec; 0.805 sec/batch) -Initialized with PSNR: 14.895557404412923 -Finished psnr = 25.84 (1.3 examples/sec; 0.741 sec/batch) -Initialized with PSNR: 17.557421710572992 -Finished psnr = 25.67 (1.4 examples/sec; 0.702 sec/batch) -Initialized with PSNR: 23.73822886222646 -Finished psnr = 31.20 (1.1 examples/sec; 0.895 sec/batch) -Initialized with PSNR: 14.288116614544533 -Finished psnr = 21.96 (1.4 examples/sec; 0.735 sec/batch) -Initialized with PSNR: 19.533104118880125 -Finished psnr = 28.99 (1.4 examples/sec; 0.710 sec/batch) +2022-03-21 23:33:35.866972: W /home/jenkins/agent/workspace/Compile_GraphEngine_Centos_X86/tensorflow/tf_adapter/util/ge_plugin.cc:124] [GePlugin] can not find Environment variable : JOB_ID +2022-03-21 23:33:39.807011: I /home/jenkins/agent/workspace/Compile_GraphEngine_Centos_X86/tensorflow/tf_adapter/kernels/geop_npu.cc:749] The model has been compiled on the Ascend AI processor, current graph id is:1 +Initialized with PSNR: 18.26756789065104 +2022-03-21 23:33:52.281454: I /home/jenkins/agent/workspace/Compile_GraphEngine_Centos_X86/tensorflow/tf_adapter/kernels/geop_npu.cc:749] The model has been compiled on the Ascend AI processor, current graph id is:11 +Finished psnr = 25.43 (20.0 examples/sec; 0.050 sec/batch) +Initialized with PSNR: 19.61013455418367 +Finished psnr = 29.58 (20.0 examples/sec; 0.050 sec/batch) +Initialized with PSNR: 16.046844525072277 +Finished 
psnr = 26.21 (19.3 examples/sec; 0.052 sec/batch) +Initialized with PSNR: 19.088294082853533 +Finished psnr = 24.01 (20.3 examples/sec; 0.049 sec/batch) +Initialized with PSNR: 27.903391840839276 +Finished psnr = 33.05 (19.9 examples/sec; 0.050 sec/batch) +Initialized with PSNR: 17.58393445793693 +Finished psnr = 25.87 (19.3 examples/sec; 0.052 sec/batch) +Initialized with PSNR: 21.496189549703043 +Finished psnr = 27.39 (20.3 examples/sec; 0.049 sec/batch) +Initialized with PSNR: 17.183577420828943 +Finished psnr = 24.84 (19.2 examples/sec; 0.052 sec/batch) +Initialized with PSNR: 18.31449854593027 +Finished psnr = 27.68 (20.2 examples/sec; 0.050 sec/batch) +Initialized with PSNR: 14.78985085202309 +Finished psnr = 22.40 (19.9 examples/sec; 0.050 sec/batch) +Initialized with PSNR: 18.795507564810553 +Finished psnr = 27.73 (19.6 examples/sec; 0.051 sec/batch) +Initialized with PSNR: 16.154563492696358 +Finished psnr = 24.16 (19.9 examples/sec; 0.050 sec/batch) +Initialized with PSNR: 19.207686742438906 +Finished psnr = 27.37 (19.9 examples/sec; 0.050 sec/batch) +Initialized with PSNR: 18.436603775139783 +Finished psnr = 27.64 (20.2 examples/sec; 0.050 sec/batch) ```
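
The log above reports two PSNR values per test image: the PSNR of the degraded input ("Initialized with PSNR") and the PSNR of the restored output ("Finished psnr"). For reference, the sketch below shows how such a PSNR value is typically computed from a reference image and a restored image; `psnr` is a hypothetical helper for illustration, not a function from this repository.

```python
import numpy as np

def psnr(reference, restored, peak=255.0):
    """Peak signal-to-noise ratio in dB, assuming 8-bit images (peak=255)."""
    ref = np.asarray(reference, dtype=np.float64)
    out = np.asarray(restored, dtype=np.float64)
    mse = np.mean((ref - out) ** 2)  # mean squared error over all pixels
    if mse == 0:
        return float('inf')  # identical images
    return 10.0 * np.log10(peak ** 2 / mse)
```

Higher is better: moving from roughly 15-28 dB at initialization to 22-33 dB after restoration, as in the log, indicates that the deblurred output is substantially closer to the reference image.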

Accuracy metrics

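Both runs report restoration quality as PSNR: "Initialized with PSNR" is the score of the degraded input, and "Finished psnr" the score of the restored image. For reference, a minimal sketch of the standard PSNR computation for 8-bit images (the function name and peak value are assumptions, not code from this repo):

```python
import numpy as np

def psnr(reference, estimate, peak=255.0):
    """PSNR in dB between a ground-truth image and a restored image."""
    diff = reference.astype(np.float64) - estimate.astype(np.float64)
    mse = np.mean(diff ** 2)  # mean squared error over all pixels
    if mse == 0:
        return float('inf')   # identical images
    return 10.0 * np.log10(peak ** 2 / mse)
```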
diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/scripts/.keep b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/scripts/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/scripts/train_dmsp.sh b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/scripts/train_dmsp.sh new file mode 100644 index 0000000000000000000000000000000000000000..bbe4d01359fbc81290b22892d481440e3be240ef --- /dev/null +++ b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/scripts/train_dmsp.sh @@ -0,0 +1 @@ +python3 ./src/demo_DMSP.py \ No newline at end of file diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/.keep b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DAE.py b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DAE.py index 81348a702b34c6610ff97df38a99c3845aa1bfe4..0598cdadd30f74c8d9e4889713f6a69d8a80dc43 100644 --- a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DAE.py +++ b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DAE.py @@ -1,83 +1,82 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import time - -from network import * -from PIL import Image -import tensorflow as tf -import scipy.misc as misc -import os - - -class DAE_MODEL: - def __init__(self): - self.clean_img = tf.placeholder(tf.float32, [None, None, None, IMG_C]) - self.noised_img = tf.placeholder(tf.float32, [None, None, None, IMG_C]) - self.train_phase = tf.placeholder(tf.bool) - dncnn = net("DnCNN") - self.res = dncnn(self.noised_img, self.train_phase) - self.denoised_img = self.noised_img - self.res - self.loss = tf.reduce_mean(tf.reduce_sum(tf.square(self.res - (self.noised_img - self.clean_img)), [1, 2, 3])) - self.Opt = tf.train.AdamOptimizer(1e-3).minimize(self.loss) - self.sess = tf.Session() - self.sess.run(tf.global_variables_initializer()) - - def train(self,train_dir): - filepath = train_dir - filenames = os.listdir(filepath) - print(filenames) - saver = tf.train.Saver() - for epoch in range(50): - for i in range(filenames.__len__()//BATCH_SIZE): - t = time.time() - cleaned_batch = np.zeros([BATCH_SIZE, IMG_H, IMG_W, IMG_C]) - for idx, filename in enumerate(filenames[i*BATCH_SIZE:i*BATCH_SIZE+BATCH_SIZE]): - cleaned_batch[idx, :, :, :] = np.array(Image.open(filepath+'/'+filename).resize((IMG_H,IMG_W))) - noised_batch = cleaned_batch + np.random.normal(0, SIGMA, cleaned_batch.shape) - self.sess.run(self.Opt, feed_dict={self.clean_img: cleaned_batch, self.noised_img: noised_batch, self.train_phase: True}) - if i % 1 == 0: - [loss, denoised_img] = self.sess.run([self.loss, self.denoised_img], feed_dict={self.clean_img: cleaned_batch, self.noised_img: noised_batch, self.train_phase: False}) - print("Epoch: %d, Step: %d, Loss: %g, Time %g"%(epoch, i, loss,time.time()-t)) - compared = np.concatenate((cleaned_batch[0, :, :, 0], noised_batch[0, :, :, 0], denoised_img[0, :, :, 0]), 1) - # Image.fromarray(np.uint8(compared)).save("./TrainingResults//"+str(epoch)+"_"+str(i)+".jpg") - if i % 500 == 0: - saver.save(self.sess, "./save_para//DnCNN.ckpt") - np.random.shuffle(filenames) - - def test(self, cleaned_path="./TestingSet//02.png"): - saver = tf.train.Saver() - saver.restore(self.sess, "./save_para/DnCNN.ckpt") - cleaned_img = np.reshape(np.array(misc.imresize(np.array(Image.open(cleaned_path)), [256, 256])), [1, 256, 256, 1]) - noised_img = cleaned_img + np.random.normal(0, SIGMA, cleaned_img.shape) - [denoised_img] = self.sess.run([self.denoised_img], feed_dict={self.clean_img: cleaned_img, self.noised_img: noised_img, self.train_phase: False}) - compared = np.concatenate((cleaned_img[0, :, :, 0], noised_img[0, :, :, 0], denoised_img[0, :, :, 0]), 1) - Image.fromarray(np.uint8(compared)).show() - - - +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import time + +from network import * +from PIL import Image +import tensorflow as tf +import scipy.misc as misc +import os + + +class DAE_MODEL: + def __init__(self): + self.clean_img = tf.placeholder(tf.float32, [None, None, None, IMG_C]) + self.noised_img = tf.placeholder(tf.float32, [None, None, None, IMG_C]) + self.train_phase = tf.placeholder(tf.bool) + dncnn = net("DnCNN") + self.res = dncnn(self.noised_img, self.train_phase) + self.denoised_img = self.noised_img - self.res + self.loss = tf.reduce_mean(tf.reduce_sum(tf.square(self.res - (self.noised_img - self.clean_img)), [1, 2, 3])) + self.Opt = tf.train.AdamOptimizer(1e-3).minimize(self.loss) + self.sess = tf.Session() + self.sess.run(tf.global_variables_initializer()) + + def train(self,train_dir): + filepath = train_dir + filenames = os.listdir(filepath) + print(filenames) + saver = tf.train.Saver() + for epoch in range(50): + for i in range(filenames.__len__()//BATCH_SIZE): + t = time.time() + cleaned_batch = np.zeros([BATCH_SIZE, IMG_H, IMG_W, IMG_C]) + for idx, filename in enumerate(filenames[i*BATCH_SIZE:i*BATCH_SIZE+BATCH_SIZE]): + cleaned_batch[idx, :, :, :] = np.array(Image.open(filepath+'/'+filename).resize((IMG_H,IMG_W))) + noised_batch = cleaned_batch + np.random.normal(0, SIGMA, cleaned_batch.shape) + self.sess.run(self.Opt, feed_dict={self.clean_img: cleaned_batch, self.noised_img: noised_batch, self.train_phase: True}) + if i % 1 == 0: + [loss, denoised_img] = self.sess.run([self.loss, self.denoised_img], feed_dict={self.clean_img: cleaned_batch, self.noised_img: noised_batch, self.train_phase: False}) + print("Epoch: %d, Step: %d, Loss: %g, Time %g"%(epoch, i, loss,time.time()-t)) + compared = np.concatenate((cleaned_batch[0, :, :, 0], noised_batch[0, :, :, 0], denoised_img[0, :, :, 0]), 1) + # Image.fromarray(np.uint8(compared)).save("./TrainingResults//"+str(epoch)+"_"+str(i)+".jpg") + if i % 500 == 0: + saver.save(self.sess, "./save_para//DnCNN.ckpt") + np.random.shuffle(filenames) + + def test(self, cleaned_path="./TestingSet//02.png"): + saver = tf.train.Saver() + saver.restore(self.sess, "./save_para/DnCNN.ckpt") + cleaned_img = np.reshape(np.array(misc.imresize(np.array(Image.open(cleaned_path)), [256, 256])), [1, 256, 256, 1]) + noised_img = cleaned_img + np.random.normal(0, SIGMA, cleaned_img.shape) + [denoised_img] = self.sess.run([self.denoised_img], feed_dict={self.clean_img: cleaned_img, self.noised_img: noised_img, self.train_phase: False}) + compared = np.concatenate((cleaned_img[0, :, :, 0], noised_img[0, :, :, 0], denoised_img[0, :, :, 0]), 1) + Image.fromarray(np.uint8(compared)).show() + + + diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DAE_model.py b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DAE_model.py index 39a15fb14a4e0beb6289ec91c507d17b790786f5..bbef767f8682d156eb536298dc27a0b0cd15d422 100644 --- a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DAE_model.py +++ b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DAE_model.py @@ -65,7 +65,7 @@ class denoiser(object): self.in_image = tf.placeholder(tf.float32, shape=[None, None, None, 3], 
name="input_image") image_bgr = self.in_image[..., ::-1] - weights = io.loadmat('/home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/DAE_sigma11.mat') + weights = io.loadmat('./src/DAE_sigma11.mat') with tf.variable_scope("dae", reuse=None): dae_net = network(weights=weights, image=image_bgr) print('====================dae================') diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DAE_sigma11.mat b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DAE_sigma11.mat new file mode 100644 index 0000000000000000000000000000000000000000..703eb34cd9806fd07b9aedfc91e2e40b0ecd96c0 Binary files /dev/null and b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DAE_sigma11.mat differ diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DMSPDeblur.py b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DMSPDeblur.py index 4052bf652080f3e78c0e5cb111bb0201e1c67365..725a0f45fd76b36b804304b3975d3e4377d072f9 100644 --- a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DMSPDeblur.py +++ b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/DMSPDeblur.py @@ -25,6 +25,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + import time import matplotlib as mpl mpl.use('Agg') @@ -53,7 +54,8 @@ def filter_image(image, kernel, mode='valid'): """ Implements color filtering (convolution using a flipped kernel) """ chs = [] for d in range(image.shape[2]): - channel = sig.convolve2d(image[:,:,d], np.flipud(np.fliplr(kernel)), mode=mode) + # channel = sig.convolve2d(image[:,:,d], np.flipud(np.fliplr(kernel)), mode=mode) + channel = sig.fftconvolve(image[:, :, d], np.flipud(np.fliplr(kernel)), mode=mode) chs.append(channel) return np.stack(chs, axis=2) @@ -61,7 +63,8 @@ def convolve_image(image, kernel, mode='valid'): """ Implements color image convolution """ chs = [] for d in range(image.shape[2]): - channel = sig.convolve2d(image[:,:,d], kernel, mode=mode) + # channel = sig.convolve2d(image[:,:,d], kernel, mode=mode) + channel = sig.fftconvolve(image[:, :, d], kernel, mode=mode) chs.append(channel) return np.stack(chs, axis=2) @@ -127,7 +130,7 @@ def DMSPDeblur(degraded, kernel, sigma_d, params): #offline inference - input_image = res + noise - input_image.tofile("/cache/model/dmsp_input_image.bin") # save the processed image as a bin file + # input_image.tofile("/cache/model/dmsp_input_image.bin") rec = params['denoiser'].denoise(res + noise,False) prior_grad = res - rec diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/config.py b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/config.py index b62cb5ac93e7a6783445ae5e76dfed6f098da81f..6f839b7f8fa8f130ec1f407c0e8e26cf25605862 100644 --- a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/config.py +++ b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/config.py @@ -1,35 +1,36 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# ============================================================================ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -IMG_H = 40 -IMG_W = 40 -IMG_C = 3 -DEPTH = 17 -BATCH_SIZE = 1 -EPOCHS = 50 -SIGMA = 11 -EPSILON = 1e-10 +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +IMG_H = 40 +IMG_W = 40 +IMG_C = 3 +DEPTH = 17 +BATCH_SIZE = 1 +EPOCHS = 50 +SIGMA = 11 +EPSILON = 1e-10 diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/demo_DMSP.py b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/demo_DMSP.py index a2312d17ea73719f3e10cbde9ce41bb75e086b0d..07443382be24f276e58b7f4068bf0ecabc0a605e 100644 --- a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/demo_DMSP.py +++ b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/demo_DMSP.py @@ -1,198 +1,188 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -#npu code -from npu_bridge.npu_init import * - -import numpy as np -import scipy.io as io -from PIL import Image -import tensorflow as tf - - -# The DMSP deblur function and the RGB filtering function (flipped convolution) -# from DMSPDeblur import DMSPDeblur, filter_image - -#2 -from DMSPDeblur import DMSPDeblur, filter_image -# The denoiser implementation -from DAE_model import denoiser - -# Limit the GPU access -# import os -# os.environ["CUDA_VISIBLE_DEVICES"]="0" - - -# configure the tensorflow and instantiate a DAE -config = tf.ConfigProto() - -#=========npu code============== -profiling_dir = "/cache/profiling" -dump_debug_dir = "/cache/dump" -model_dir = "/cache/model/" -os.makedirs(profiling_dir) -os.makedirs(dump_debug_dir) -os.makedirs(model_dir) -custom_op = config.graph_options.rewrite_options.custom_optimizers.add() -custom_op.name = "NpuOptimizer" -#=============disable the fusion strategy============= -custom_op.parameter_map["fusion_switch_file"].s = tf.compat.as_bytes("/home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/fusion_switch.cfg") -#==============enable mixed precision============ -# custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") -#===============overflow check================ -# custom_op.parameter_map["dump_path"].s = tf.compat.as_bytes("/cache/dump") -# custom_op.parameter_map["enable_dump_debug"].b = True -# custom_op.parameter_map["dump_debug_mode"].s = tf.compat.as_bytes("all") - - -# custom_op.parameter_map["profiling_mode"].b = True -# custom_op.parameter_map["profiling_options"].s = tf.compat.as_bytes('{"output":"/cache/profiling","task_trace":"on"}') -#=========npu code============== -config.gpu_options.allow_growth = True -config.allow_soft_placement = True -sess = tf.Session(config=config) - - - -#gpu code -import os -def listdir(base_dir, list_name): # list that collects the file paths - for file in os.listdir(base_dir): - file_path = os.path.join(base_dir, file) - if os.path.isdir(file_path): - listdir(file_path, list_name) - else: - list_name.append(file_path) - - -#==============npu code========================= -import argparse -import moxing as mox - -# parse the input argument data_url -parser = argparse.ArgumentParser() -parser.add_argument("--data_url", type=str, default="obs://train-dmsp/BSDS300/images/test/") -# config = parser.parse_args() -config, unparsed = parser.parse_known_args() -# create the data directory in the ModelArts container -data_dir = "cache/dataset" -os.makedirs(data_dir) -# copy the data from OBS into the ModelArts container -mox.file.copy_parallel(config.data_url, data_dir) -#======================================================================== -list_name = [] -base_dir = '/home/dataset/ILSVRC/Data/CLS-LOC/test' # folder path -listdir(data_dir, list_name) -size = len(list_name) -# Load data -sigma_d = 255 * .01 -matFile = io.loadmat('/home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/kernels.mat') -kernel = matFile['kernels'][0,0] -kernel = kernel / np.sum(kernel[:]) - -test_set = [] -test_dir = data_dir -listdir(test_dir, test_set) -non_blind= [] -i=1 - - -# print("==================training DAE=====================") -# from DAE import DAE_MODEL -# dncnn = DAE_MODEL() -# dncnn.train(data_dir) - - -print("============start
non-blind deblurring on Berkeley segmentation dataset==============") -params = {} -DAE = denoiser(sess) -params['denoiser'] = DAE -params['sigma_dae'] = 11.0 -params['num_iter'] = 300 -params['mu'] = 0.9 -params['alpha'] = 0.1 -for image_path in test_set: - if(i==51): - break - gt = Image.open(image_path) - gt = gt.resize((180,180)) - gt = np.array(gt,dtype=np.float32) - degraded = filter_image(gt, kernel) - noise = np.random.normal(0.0, sigma_d, degraded.shape).astype(np.float32) - degraded = degraded + noise - - img_degraded = Image.fromarray(np.clip(degraded, 0, 255).astype(dtype=np.uint8)) - img_degraded.save("/home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/data/degraded.png","png") - - # non-blind deblurring demo - # run DMSP - params['gt'] = gt # feed ground truth to monitor the PSNR at each iteration - - restored,psnr = DMSPDeblur(degraded, kernel, sigma_d, params) - non_blind.append(psnr) - img_restored = Image.fromarray(np.clip(restored, 0, 255).astype(dtype=np.uint8)) - img_restored.save("/home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/data/restored.png","png") - i+=1 - -print("non_blind PSNR is:",np.mean(non_blind)) - -print("============start noise-blind deblurring on Berkeley segmentation dataset==============") - -test_res = [] - -j = 0 -for test_image in test_set: - j+=1 - if(j==51): - break - gt = Image.open(test_image) - gt = gt.resize((180, 180)) - # nshape = np.array(gt, dtype=np.float32) - # gt = gt.resize((int(nshape.shape[1]),int(nshape.shape[0]))) - gt = np.array(gt, dtype=np.float32) - degraded = filter_image(gt, kernel) - params['gt'] = gt - - noise = np.random.normal(0.0, sigma_d, degraded.shape).astype(np.float32) - - degraded = degraded + noise - img_degraded = Image.fromarray(np.clip(degraded, 0, 255).astype(dtype=np.uint8)) - - restored_nb,psnr_test = DMSPDeblur(degraded, kernel, -1, params) - test_res.append(psnr_test) - img_restored_nb = Image.fromarray(np.clip(restored_nb, 0, 255).astype(dtype=np.uint8)) - img_restored_nb.save("/home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/restored_noise_blind.png","png") - -print("noise-blind PSNR is:",np.mean(test_res)) - -print("========================") -print(os.listdir(model_dir)) -print(os.listdir(profiling_dir)) -print("========================") -mox.file.copy_parallel(model_dir, "obs://train-dmsp/output/model/") -mox.file.copy_parallel(profiling_dir, "obs://train-dmsp/output/profiling/") +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +#npu code +from npu_bridge.npu_init import * + +import numpy as np +import scipy.io as io +from PIL import Image +import tensorflow as tf + + +# The DMSP deblur function and the RGB filtering function (flipped convolution) +# from DMSPDeblur import DMSPDeblur, filter_image + +#2 +from DMSPDeblur import DMSPDeblur, filter_image +# The denoiser implementation +from DAE_model import denoiser + +# Limit the GPU access +# import os +# os.environ["CUDA_VISIBLE_DEVICES"]="0" + + +# configure the tensorflow and instantiate a DAE +config = tf.ConfigProto() + +#=========npu code============== +custom_op = config.graph_options.rewrite_options.custom_optimizers.add() +custom_op.name = "NpuOptimizer" +#=============disable the fusion strategy============= +# custom_op.parameter_map["fusion_switch_file"].s = tf.compat.as_bytes("/home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/fusion_switch.cfg") +#==============enable mixed precision============ +# custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") +#===============overflow check================ +# custom_op.parameter_map["dump_path"].s = tf.compat.as_bytes("/cache/dump") +# custom_op.parameter_map["enable_dump_debug"].b = True +# custom_op.parameter_map["dump_debug_mode"].s = tf.compat.as_bytes("all") + +# profiling +# custom_op.parameter_map["profiling_mode"].b = True +# custom_op.parameter_map["profiling_options"].s = tf.compat.as_bytes('{"output":"/cache/profiling","task_trace":"on"}') +#=========npu code============== +config.gpu_options.allow_growth = True +config.allow_soft_placement = True +sess = tf.Session(config=config) + + + +#gpu code +import os +def listdir(base_dir, list_name): # list that collects the file paths + for file in os.listdir(base_dir): + file_path = os.path.join(base_dir, file) + if os.path.isdir(file_path): + listdir(file_path, list_name) + else: + list_name.append(file_path) +#==============npu code========================= +import argparse + +# parse the input argument data_url +parser = argparse.ArgumentParser() +parser.add_argument("--data_url", type=str, default="./BSDS300/images/train") +# config = parser.parse_args() +config, unparsed = parser.parse_known_args() +# create the data directory in the ModelArts container +data_dir = config.data_url +# os.makedirs(data_dir) +# copy the data from OBS into the ModelArts container +# mox.file.copy_parallel(config.data_url, data_dir) +#======================================================================== +list_name = [] +base_dir = './train' # folder path +listdir(data_dir, list_name) +size = len(list_name) +# Load data +sigma_d = 255 * .01 +matFile = io.loadmat('./src/kernels.mat') +kernel = matFile['kernels'][0,0] +kernel = kernel / np.sum(kernel[:]) + +test_set = [] +test_dir = data_dir +listdir(test_dir, test_set) +non_blind= [] +i=1 + +# print("==================training DAE=====================") +# from DAE import DAE_MODEL +# dncnn = DAE_MODEL() +# dncnn.train(data_dir) + +print("============start non-blind deblurring on Berkeley segmentation dataset==============") +params = {} +DAE = denoiser(sess) +params['denoiser'] = DAE +params['sigma_dae'] = 11.0 +params['num_iter'] = 300 +params['mu'] = 0.9 +params['alpha'] = 0.1 +for image_path in test_set: + if(i==21): + break + gt =
Image.open(image_path) + gt = gt.resize((180,180)) + gt = np.array(gt,dtype=np.float32) + degraded = filter_image(gt, kernel) + noise = np.random.normal(0.0, sigma_d, degraded.shape).astype(np.float32) + degraded = degraded + noise + + img_degraded = Image.fromarray(np.clip(degraded, 0, 255).astype(dtype=np.uint8)) + # img_degraded.save("/home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/data/degraded.png","png") + + # non-blind deblurring demo + # run DMSP + params['gt'] = gt # feed ground truth to monitor the PSNR at each iteration + + restored,psnr = DMSPDeblur(degraded, kernel, sigma_d, params) + non_blind.append(psnr) + img_restored = Image.fromarray(np.clip(restored, 0, 255).astype(dtype=np.uint8)) + # img_restored.save("/home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/data/restored.png","png") + i+=1 + +print("non_blind PSNR is:",np.mean(non_blind)) + +print("============start noise-blind deblurring on Berkeley segmentation dataset==============") + +test_res = [] + +j = 0 +for test_image in test_set: + j+=1 + if(j==21): + break + gt = Image.open(test_image) + gt = gt.resize((180, 180)) + # nshape = np.array(gt, dtype=np.float32) + # gt = gt.resize((int(nshape.shape[1]),int(nshape.shape[0]))) + gt = np.array(gt, dtype=np.float32) + degraded = filter_image(gt, kernel) + params['gt'] = gt + + noise = np.random.normal(0.0, sigma_d, degraded.shape).astype(np.float32) + + degraded = degraded + noise + img_degraded = Image.fromarray(np.clip(degraded, 0, 255).astype(dtype=np.uint8)) + + restored_nb,psnr_test = DMSPDeblur(degraded, kernel, -1, params) + test_res.append(psnr_test) + img_restored_nb = Image.fromarray(np.clip(restored_nb, 0, 255).astype(dtype=np.uint8)) + # img_restored_nb.save("/home/ma-user/modelarts/user-job-dir/code/dmsp-tensorflow/restored_noise_blind.png","png") + +print("noise-blind PSNR is:",np.mean(test_res)) + +# print("========================") +# print(os.listdir(model_dir)) +# print(os.listdir(profiling_dir)) +# print("========================") +# mox.file.copy_parallel(model_dir, "obs://train-dmsp/output/model/") +# mox.file.copy_parallel(profiling_dir, "obs://train-dmsp/output/profiling/") diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/kernels.mat b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/kernels.mat new file mode 100644 index 0000000000000000000000000000000000000000..753b645e197e5be5cf7a248f0d1e7466ea66a94a Binary files /dev/null and b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/kernels.mat differ diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/network.py b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/network.py index cf6be48f431ea017029f4a02d5931b34a01f0e56..97e5bb428516ba3545e14948671c126e2350640e 100644 --- a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/network.py +++ b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/network.py @@ -1,42 +1,42 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from ops import * -from config import * -import numpy as np - -class net: - def __init__(self, name): - self.name = name - - def __call__(self, inputs, train_phase): - with tf.variable_scope(self.name): - inputs = tf.nn.relu(conv("conv0", inputs, 64, 3, 1)) - for d in np.arange(1, DEPTH - 1): - inputs = tf.nn.relu(batchnorm(conv("conv_" + str(d + 1), inputs, 64, 3, 1), train_phase, "bn" + str(d))) - inputs = conv("conv" + str(DEPTH - 1), inputs, IMG_C, 3, 1) +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from ops import * +from config import * +import numpy as np + +class net: + def __init__(self, name): + self.name = name + + def __call__(self, inputs, train_phase): + with tf.variable_scope(self.name): + inputs = tf.nn.relu(conv("conv0", inputs, 64, 3, 1)) + for d in np.arange(1, DEPTH - 1): + inputs = tf.nn.relu(batchnorm(conv("conv_" + str(d + 1), inputs, 64, 3, 1), train_phase, "bn" + str(d))) + inputs = conv("conv" + str(DEPTH - 1), inputs, IMG_C, 3, 1) return inputs \ No newline at end of file diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/ops.py b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/ops.py index ef1e9adcb6ead600e477341e58176f9cc18783df..4191c5d68a58bdf1ceb849d1a2e142a025aae807 100644 --- a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/ops.py +++ b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/src/ops.py @@ -1,109 +1,110 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import tensorflow as tf - - - -def batchnorm(x, train_phase, scope_bn): - #Batch Normalization - #Ioffe S, Szegedy C. Batch normalization: accelerating deep network training by reducing internal covariate shift[J]. 2015:448-456. - with tf.variable_scope(scope_bn, reuse=tf.AUTO_REUSE): - beta = tf.get_variable(name='beta', shape=[x.shape[-1]], initializer=tf.constant_initializer([0.]), trainable=True) - gamma = tf.get_variable(name='gamma', shape=[x.shape[-1]], initializer=tf.constant_initializer([1.]), trainable=True) - batch_mean, batch_var = tf.nn.moments(x, [0, 1, 2], name='moments') - ema = tf.train.ExponentialMovingAverage(decay=0.5) - - def mean_var_with_update(): - ema_apply_op = ema.apply([batch_mean, batch_var]) - with tf.control_dependencies([ema_apply_op]): - return tf.identity(batch_mean), tf.identity(batch_var) - - mean, var = tf.cond(train_phase, mean_var_with_update, - lambda: (ema.average(batch_mean), ema.average(batch_var))) - normed = tf.nn.batch_normalization(x, mean, var, beta, gamma, 1e-3) - return normed - -def InstanceNorm(inputs, name): - with tf.variable_scope(name): - mean, var = tf.nn.moments(inputs, axes=[1, 2], keep_dims=True) - scale = tf.get_variable("scale", shape=mean.shape[-1], initializer=tf.constant_initializer([1.])) - shift = tf.get_variable("shift", shape=mean.shape[-1], initializer=tf.constant_initializer([0.])) - return (inputs - mean) * scale / tf.sqrt(var + 1e-10) + shift - -def conv(name, inputs, nums_out, ksize, strides, padding="SAME", is_SN=False): - with tf.variable_scope(name): - W = tf.get_variable("W", shape=[ksize, ksize, int(inputs.shape[-1]), nums_out], initializer=tf.truncated_normal_initializer(stddev=0.02)) - b = tf.get_variable("b", shape=[nums_out], initializer=tf.constant_initializer(0.)) - if is_SN: - return tf.nn.conv2d(inputs, spectral_norm(name, W), [1, strides, strides, 1], padding) + b - else: - return tf.nn.conv2d(inputs, W, [1, strides, strides, 1], padding) + b - -def uconv(name, inputs, nums_out, ksize, strides, padding="SAME"): - with tf.variable_scope(name): - w = tf.get_variable("W", shape=[ksize, ksize, nums_out, int(inputs.shape[-1])], initializer=tf.truncated_normal_initializer(stddev=0.02)) - b = tf.get_variable("b", [nums_out], initializer=tf.constant_initializer(0.)) - # inputs = tf.image.resize_nearest_neighbor(inputs, [H*strides, W*strides]) - # return tf.nn.conv2d(inputs, w, [1, 1, 1, 1], padding) + b - return 
tf.nn.conv2d_transpose(inputs, w, [tf.shape(inputs)[0], int(inputs.shape[1])*strides, int(inputs.shape[2])*strides, nums_out], [1, strides, strides, 1], padding=padding) + b - - -def fully_connected(name, inputs, nums_out): - with tf.variable_scope(name, reuse=tf.AUTO_REUSE): - W = tf.get_variable("W", [int(inputs.shape[-1]), nums_out], initializer=tf.truncated_normal_initializer(stddev=0.02)) - b = tf.get_variable("b", [nums_out], initializer=tf.constant_initializer(0.)) - return tf.matmul(inputs, W) + b - - -def spectral_norm(name, w, iteration=1): - #Spectral normalization which was published on ICLR2018,please refer to "https://www.researchgate.net/publication/318572189_Spectral_Normalization_for_Generative_Adversarial_Networks" - #This function spectral_norm is forked from "https://github.com/taki0112/Spectral_Normalization-Tensorflow" - w_shape = w.shape.as_list() - w = tf.reshape(w, [-1, w_shape[-1]]) - with tf.variable_scope(name, reuse=False): - u = tf.get_variable("u", [1, w_shape[-1]], initializer=tf.truncated_normal_initializer(), trainable=False) - u_hat = u - v_hat = None - - def l2_norm(v, eps=1e-12): - return v / (tf.reduce_sum(v ** 2) ** 0.5 + eps) - - for i in range(iteration): - v_ = tf.matmul(u_hat, tf.transpose(w)) - v_hat = l2_norm(v_) - u_ = tf.matmul(v_hat, w) - u_hat = l2_norm(u_) - sigma = tf.matmul(tf.matmul(v_hat, w), tf.transpose(u_hat)) - w_norm = w / sigma - with tf.control_dependencies([u.assign(u_hat)]): - w_norm = tf.reshape(w_norm, w_shape) - return w_norm - -def leaky_relu(x, slope=0.2): - return tf.maximum(x, slope*x) - +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import tensorflow as tf + + + +def batchnorm(x, train_phase, scope_bn): + #Batch Normalization + #Ioffe S, Szegedy C. Batch normalization: accelerating deep network training by reducing internal covariate shift[J]. 2015:448-456. 
+ with tf.variable_scope(scope_bn, reuse=tf.AUTO_REUSE): + beta = tf.get_variable(name='beta', shape=[x.shape[-1]], initializer=tf.constant_initializer([0.]), trainable=True) + gamma = tf.get_variable(name='gamma', shape=[x.shape[-1]], initializer=tf.constant_initializer([1.]), trainable=True) + batch_mean, batch_var = tf.nn.moments(x, [0, 1, 2], name='moments') + ema = tf.train.ExponentialMovingAverage(decay=0.5) + + def mean_var_with_update(): + ema_apply_op = ema.apply([batch_mean, batch_var]) + with tf.control_dependencies([ema_apply_op]): + return tf.identity(batch_mean), tf.identity(batch_var) + + mean, var = tf.cond(train_phase, mean_var_with_update, + lambda: (ema.average(batch_mean), ema.average(batch_var))) + normed = tf.nn.batch_normalization(x, mean, var, beta, gamma, 1e-3) + return normed + +def InstanceNorm(inputs, name): + with tf.variable_scope(name): + mean, var = tf.nn.moments(inputs, axes=[1, 2], keep_dims=True) + scale = tf.get_variable("scale", shape=mean.shape[-1], initializer=tf.constant_initializer([1.])) + shift = tf.get_variable("shift", shape=mean.shape[-1], initializer=tf.constant_initializer([0.])) + return (inputs - mean) * scale / tf.sqrt(var + 1e-10) + shift + +def conv(name, inputs, nums_out, ksize, strides, padding="SAME", is_SN=False): + with tf.variable_scope(name): + W = tf.get_variable("W", shape=[ksize, ksize, int(inputs.shape[-1]), nums_out], initializer=tf.truncated_normal_initializer(stddev=0.02)) + b = tf.get_variable("b", shape=[nums_out], initializer=tf.constant_initializer(0.)) + if is_SN: + return tf.nn.conv2d(inputs, spectral_norm(name, W), [1, strides, strides, 1], padding) + b + else: + return tf.nn.conv2d(inputs, W, [1, strides, strides, 1], padding) + b + +def uconv(name, inputs, nums_out, ksize, strides, padding="SAME"): + with tf.variable_scope(name): + w = tf.get_variable("W", shape=[ksize, ksize, nums_out, int(inputs.shape[-1])], initializer=tf.truncated_normal_initializer(stddev=0.02)) + b = tf.get_variable("b", [nums_out], initializer=tf.constant_initializer(0.)) + # inputs = tf.image.resize_nearest_neighbor(inputs, [H*strides, W*strides]) + # return tf.nn.conv2d(inputs, w, [1, 1, 1, 1], padding) + b + return tf.nn.conv2d_transpose(inputs, w, [tf.shape(inputs)[0], int(inputs.shape[1])*strides, int(inputs.shape[2])*strides, nums_out], [1, strides, strides, 1], padding=padding) + b + + +def fully_connected(name, inputs, nums_out): + with tf.variable_scope(name, reuse=tf.AUTO_REUSE): + W = tf.get_variable("W", [int(inputs.shape[-1]), nums_out], initializer=tf.truncated_normal_initializer(stddev=0.02)) + b = tf.get_variable("b", [nums_out], initializer=tf.constant_initializer(0.)) + return tf.matmul(inputs, W) + b + + +def spectral_norm(name, w, iteration=1): + #Spectral normalization which was published on ICLR2018,please refer to "https://www.researchgate.net/publication/318572189_Spectral_Normalization_for_Generative_Adversarial_Networks" + #This function spectral_norm is forked from "https://github.com/taki0112/Spectral_Normalization-Tensorflow" + w_shape = w.shape.as_list() + w = tf.reshape(w, [-1, w_shape[-1]]) + with tf.variable_scope(name, reuse=False): + u = tf.get_variable("u", [1, w_shape[-1]], initializer=tf.truncated_normal_initializer(), trainable=False) + u_hat = u + v_hat = None + + def l2_norm(v, eps=1e-12): + return v / (tf.reduce_sum(v ** 2) ** 0.5 + eps) + + for i in range(iteration): + v_ = tf.matmul(u_hat, tf.transpose(w)) + v_hat = l2_norm(v_) + u_ = tf.matmul(v_hat, w) + u_hat = l2_norm(u_) + sigma = 
tf.matmul(tf.matmul(v_hat, w), tf.transpose(u_hat)) + w_norm = w / sigma + with tf.control_dependencies([u.assign(u_hat)]): + w_norm = tf.reshape(w_norm, w_shape) + return w_norm + +def leaky_relu(x, slope=0.2): + return tf.maximum(x, slope*x) + diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/test/.keep b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/test/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/test/train_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..97a37af55bcbe367b09d251b597f55a1ee22a619 --- /dev/null +++ b/TensorFlow/contrib/cv/DMSP_ID1290_for_Tensorflow/test/train_performance_1p.sh @@ -0,0 +1,170 @@ +#!/bin/bash + +########################################################## +#########Do NOT modify lines 3 to 100 under any circumstances########## +#########Do NOT modify lines 3 to 100 under any circumstances########## +#########Do NOT modify lines 3 to 100 under any circumstances########## +########################################################## +# path of this shell script +cur_path=`echo $(cd $(dirname $0);pwd)` + +# check whether the current script is the performance one +perf_flag=`echo $0 | grep performance | wc -l` + +# name of the network being run +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# initialize path parameters +data_path="" +output_path="" + +# help message, no modification needed +if [[ $1 == --help || $1 == -h ]];then + echo "usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --obs_url # output path in OBS + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# parameter validation, no modification needed +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --obs_url* ]];then + obs_url=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# check that data_path was passed in, no modification needed +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be configured" + exit 1 +fi + +# check that output_path was passed in, no modification needed +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# set the console log file name, please keep; the file name is ${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# change to the code directory +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} + +# record the training start time, no modification needed +start_time=$(date +%s) +########################################################## +#########Do NOT modify lines 3 to 100 under any circumstances########## +#########Do NOT modify lines 3 to 100 under any circumstances########## +#########Do NOT modify lines 3 to 100 under any circumstances########## +########################################################## + +#========================================================= +#========================================================= +#========training command, modify according to your network==============
+#========================================================= +#========================================================= +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +batch_size=1 + +python3.7 ./src/demo_DMSP.py --data_url=${data_path}/BSDS300/images/train > ${print_log} 2>&1 + +# 性能相关数据计算 +StepTime=`grep "sec/batch" ${print_log} | tail -n 10 | awk '{print $7}' | awk '{sum+=$1} END {print sum/NR}'` +FPS=`grep "examples/sec" ${print_log} | tail -n 10 | awk '{print $5}' | tr -d '(' | awk '{sum+=$1} END {print sum/NR}'` + +# 提取所有loss打印信息 +# grep "loss :" ${print_log} | awk -F ":" '{print $4}' | awk -F "-" '{print $1}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt +# grep "d_loss :" ${print_log} | awk -F "|" '{print $2}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + +########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." + echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +# if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +# then + # mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +# fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance examples/sec : $FPS" +echo "Final Performance sec/batch : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +# echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +# ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +# echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/DeltaEncoder_ID1273_for_TensorFlow/README.md b/TensorFlow/contrib/cv/DeltaEncoder_ID1273_for_TensorFlow/README.md index 
bd461c583ae922d4952505ba31c9cdcde775b6be..4d9d28737828f1fac692f364bfdea7e2e9d542f0 100644 --- a/TensorFlow/contrib/cv/DeltaEncoder_ID1273_for_TensorFlow/README.md +++ b/TensorFlow/contrib/cv/DeltaEncoder_ID1273_for_TensorFlow/README.md @@ -99,6 +99,9 @@ num_ways: ways number. In the original paper, it was 5 |--|--| | GPU(V100)| 5.18s/epoch | | NPU(Ascend910)| 10s/epoch | +| NPU(Ascend910)-mixed precision enabled| 7s/epoch | +| NPU(Ascend910)-logging disabled| 3.5s/epoch | + #### 1-shot 5-way accuracy results ##### GPU results ``` diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/.keep b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/LICENSE b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof.
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/README.md b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..01f40a87c0e15b18f9f11cf17e24f74dbcb6b10a
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/README.md
@@ -0,0 +1,191 @@
+- [Basic Information](#基本信息.md)
+- [Overview](#概述.md)
+- [Training Environment Preparation](#训练环境准备.md)
+- [Quick Start](#快速上手.md)
+- [Training Results](#训练结果.md)
+- [Advanced Reference](#高级参考.md)
+
+<h2 id="基本信息.md">Basic Information</h2>
+
+**Publisher: Huawei**
+
+**Application Domain: CV**
+
+**Version:**
+
+**Modified: 2022.4.16**
+
+**Size:**
+
+**Framework: TensorFlow 1.15.0**
+
+**Model Format: h5**
+
+**Precision: Mixed**
+
+**Processor: Ascend 910**
+
+**Categories: Research**
+
+**Description: Training code for the EfficientDet network, based on the TensorFlow framework**
+
+<h2 id="概述.md">Overview</h2>
+
+- Reference paper:
+
+    https://arxiv.org/abs/1911.09070
+
+- Reference implementation:
+
+    https://github.com/xuannianz/EfficientDet
+
+- Implementation adapted to the Ascend AI Processor:
+
+    https://gitee.com/dw8023/ModelZoo-TensorFlow/tree/master/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow
+
+- To obtain the code at a specific commit_id via Git:
+
+  ```
+  git clone {repository_url}        # clone the repository
+  cd {repository_name}              # enter the model's repository directory
+  git checkout {branch}             # switch to the corresponding branch
+  git reset --hard {commit_id}      # reset the code to the corresponding commit_id
+  cd {code_path}                    # enter the model code path; not needed if the repository contains only this model
+  ```
+
+## Supported Features
+
+| Feature | Supported |
+|-------|------|
+| Distributed training | No |
+| Mixed precision | Yes |
+| Data parallelism | No |
+
+## Mixed Precision Training
+
+The Ascend 910 AI Processor provides automatic mixed precision: following a built-in optimization strategy, selected float32 operators across the network are automatically lowered to float16, improving system performance and reducing memory usage with very little loss of accuracy.
+
+## Enabling Mixed Precision
+
+Mixed precision is already enabled by default in the scripts. The precision_mode parameter is set as follows:
+
+  ```
+  custom_op = session_config.graph_options.rewrite_options.custom_optimizers.add()
+  custom_op.name = 'NpuOptimizer'
+  custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes(str(args.precision_mode))
+  ```
+
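+For reference, a fuller session configuration with mixed precision might look like the sketch below. This is illustrative only and assumes the standard CANN TensorFlow 1.15 session pattern; the "allow_mix_precision" value and the use_off_line flag are assumptions here, not values taken from train_sess.py.
+
+```python
+import tensorflow as tf
+from npu_bridge.npu_init import *
+from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig
+
+session_config = tf.ConfigProto()
+custom_op = session_config.graph_options.rewrite_options.custom_optimizers.add()
+custom_op.name = 'NpuOptimizer'
+custom_op.parameter_map["use_off_line"].b = True  # run the graph on the Ascend device (assumed flag)
+custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
+# Disable graph remapping, which conflicts with the NPU optimizer.
+session_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF
+
+sess = tf.Session(config=session_config)
+```
+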
+<h2 id="训练环境准备.md">Training Environment Preparation</h2>
+
+1. For hardware environment preparation, see the "[Driver and Firmware Installation and Upgrade Guide](https://support.huawei.com/enterprise/zh/category/ai-computing-platform-pid-1557196528909)" for your hardware product. The firmware and driver installed on the device must match the CANN version in use.
+2. Install Docker on the host and log in to the [Ascend Hub](https://ascendhub.huawei.com/#/detail?name=ascend-tensorflow-arm) to obtain the image.
+
+   The images supported by this model are listed in [Table 1](#zh-cn_topic_0000001074498056_table1519011227314).
+
+   **Table 1** Image list
+
+   | Image name | Image version | Compatible CANN version |
+   | --- | --- | --- |
+   | [ascend-tensorflow-arm](https://ascendhub.huawei.com/#/detail?name=ascend-tensorflow-arm) | 20.2.0 | 20.2 |
+
+<h2 id="快速上手.md">Quick Start</h2>
+
+- Dataset preparation
+1. Training uses the VOC2007 dataset; the dataset OBS link is:
+
+    obs://efficientdet-id0693/dataset/
+
+- The source code OBS link is:
+
+    obs://efficientdet-id0693/npu
+
+- Single-card training
+
+  1. Configure the training parameters.
+
+     In the script train_full_1p.sh, configure the parameters required by python train_sess.py.
+
+     ```
+     --pretrained_model    path to the pretrained model
+     (pretrained model link: https://pan.baidu.com/s/1OGnFSCCr3eTI3jO-v4hxNQ  extraction code: oy34)
+
+     pascal /home/dataset/VOCdevkit/VOC2007    location of the VOC dataset
+     ```
+
+  2. Configure the test parameters.
+
+     Then, in train_full_1p.sh, configure the parameters required by python common.py (a direct Python invocation sketch follows this list).
+
+     ```
+     --data_path='/home/dataset/VOCdevkit/VOC2007'    location of the VOC dataset
+     ```
+
+  3. Launch training and testing.
+
+     Launch single-card training (script: EfficientDet_ID0693_for_TensorFlow/train_full_1p.sh):
+
+     ```
+     bash train_full_1p.sh
+     ```
+
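+For reference, the evaluation in common.py can also be invoked directly from Python. A minimal sketch, with hypothetical paths that must be adapted to your environment (the checkpoint name is the default from common.py):
+
+```python
+import common
+
+# Paths below are illustrative only.
+common.main([
+    '--model_path=checkpoints/pascal_13_0.3418_0.5143.h5',
+    '--data_path=/home/dataset/VOCdevkit/VOC2007',
+])
+```
+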
+<h2 id="训练结果.md">Training Results</h2>
+
+- Accuracy comparison
+
+|Accuracy metric|Paper|GPU (measured)|NPU (measured)|
+|---|---|---|---|
+|mAP|xxx|72.6%|67.6%|
+
+- Performance comparison (not yet available)
+
+|Performance metric|Paper|GPU (measured)|NPU (measured)|
+|---|---|---|---|
+|FPS|XXX|YYY|ZZZ|
+
+
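+The mAP above is the mean over classes of the per-class average precision computed by common.py. The sketch below condenses that AP computation (py-faster-rcnn style, the same logic as _compute_ap in common.py) and checks it on a toy precision-recall curve:
+
+```python
+import numpy as np
+
+def average_precision(recall, precision):
+    # Append sentinel values, then make the precision envelope
+    # monotonically non-increasing from right to left.
+    mrec = np.concatenate(([0.], recall, [1.]))
+    mpre = np.concatenate(([0.], precision, [0.]))
+    for i in range(mpre.size - 1, 0, -1):
+        mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
+    # AP = sum of (delta recall) * precision where recall changes.
+    i = np.where(mrec[1:] != mrec[:-1])[0]
+    return np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
+
+# Precision 1.0 up to recall 0.5, then 0.5 up to recall 1.0 -> AP = 0.75.
+print(average_precision(np.array([0.5, 1.0]), np.array([1.0, 0.5])))
+```
+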
+<h2 id="高级参考.md">Advanced Reference</h2>
+
+## Scripts and Sample Code
+
+```
+├── callbacks.py                              //callback functions
+├── README.md                                 //code documentation
+├── common.py                                 //accuracy test code
+├── losses.py                                 //loss function definitions
+├── model.py                                  //model definition code
+├── layers.py                                 //layer definitions
+├── efficientdet.py                           //backbone network code
+├── requirements.txt                          //Python dependencies for training
+├── train_full_1p.sh                          //training launch script
+├── utils                                     //utilities for the training and test pipelines
+├── generators                                //dataset generator code
+├── augmentor                                 //data augmentation code
+├── eval                                      //accuracy evaluation tools
+│    ...
+```
+
+
+## Training Process
+
+1. Launch single-card training with the training command given under "Model Training".
+
+2. The reference scripts store trained models under checkpoints/*.h5.
+
+
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/augmentor/__init__.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/augmentor/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0f45a2c79aa9e815ecbb58bc3e5e8cc1ef8f1dc4
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/augmentor/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/augmentor/color.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/augmentor/color.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef99d6249eca4d0c1834a31b09a317c494fd8ea4
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/augmentor/color.py
@@ -0,0 +1,207 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+import numpy as np
+from PIL import Image, ImageEnhance, ImageOps
+
+
+def autocontrast(image, prob=0.5):
+    random_prob = np.random.uniform()
+    if random_prob > prob:
+        return image
+    # Images arrive as BGR (OpenCV); reverse the channels so PIL sees RGB.
+    image = Image.fromarray(image[..., ::-1])
+    image = ImageOps.autocontrast(image)
+    image = np.array(image)[..., ::-1]
+    return image
+
+
+def equalize(image, prob=0.5):
+    random_prob = np.random.uniform()
+    if random_prob > prob:
+        return image
+    image = Image.fromarray(image[..., ::-1])
+    image = ImageOps.equalize(image)
+    image = np.array(image)[..., ::-1]
+    return image
+
+
+def solarize(image, prob=0.5, threshold=128.):
+    random_prob = np.random.uniform()
+    if random_prob > prob:
+        return image
+    image = Image.fromarray(image[..., ::-1])
+    image = ImageOps.solarize(image, threshold=threshold)
+    image = np.array(image)[..., ::-1]
+    return image
+
+
+def sharpness(image, prob=0.5, min=0, max=2, factor=None):
+    random_prob = np.random.uniform()
+    if random_prob > prob:
+        return image
+    if factor is None:
+        # 0 gives a blurrier image, 1 the original image, 2 a sharper image
+        factor = np.random.uniform(min, max)
+    image = Image.fromarray(image[..., ::-1])
+    enhancer = ImageEnhance.Sharpness(image)
+    image = enhancer.enhance(factor=factor)
+    return np.array(image)[..., ::-1]
+
+
+def color(image, prob=0.5, min=0., max=1., factor=None):
+    random_prob = np.random.uniform()
+    if random_prob > prob:
+        return image
+    if factor is None:
+        # factor=0 returns a black-and-white image, factor=1 returns the original image
+        factor = np.random.uniform(min, max)
+    image = Image.fromarray(image[..., ::-1])
+    enhancer = ImageEnhance.Color(image)
+    image = enhancer.enhance(factor=factor)
+    return np.array(image)[..., ::-1]
+
+
+def contrast(image, prob=0.5, min=0.2, max=1., factor=None):
+    random_prob = np.random.uniform()
+    if random_prob > prob:
+        return image
+    if factor is None:
+        # factor=0 returns a gray image, factor=1 returns the original image
+        factor = np.random.uniform(min, max)
+    image = Image.fromarray(image[..., ::-1])
+    enhancer = ImageEnhance.Contrast(image)
+    image = enhancer.enhance(factor=factor)
+    return np.array(image)[..., ::-1]
+
+
+def brightness(image, prob=0.5, min=0.8, max=1., factor=None):
+    random_prob = np.random.uniform()
+    if random_prob > prob:
+        return image
+    if factor is None:
+        # factor=0 returns an all-black image, factor=1 returns the original image
+        factor = np.random.uniform(min, max)
+    image = Image.fromarray(image[..., ::-1])
+    enhancer = ImageEnhance.Brightness(image)
+    image = enhancer.enhance(factor=factor)
+    return np.array(image)[..., ::-1]
+
+
+class VisualEffect:
+    """
+    Struct holding parameters and applying image color transformation.
+
+    Args
+        solarize_threshold: The pixel-value threshold above which the solarize operation inverts pixels.
+        color_factor: A factor for adjusting color.
+        contrast_factor: A factor for adjusting contrast.
+        brightness_factor: A factor for adjusting brightness.
+        sharpness_factor: A factor for adjusting sharpness.
+ """ + + def __init__( + self, + color_factor=None, + contrast_factor=None, + brightness_factor=None, + sharpness_factor=None, + color_prob=0.5, + contrast_prob=0.5, + brightness_prob=0.5, + sharpness_prob=0.5, + autocontrast_prob=0.5, + equalize_prob=0.5, + solarize_prob=0.1, + solarize_threshold=128., + + ): + self.color_factor = color_factor + self.contrast_factor = contrast_factor + self.brightness_factor = brightness_factor + self.sharpness_factor = sharpness_factor + self.color_prob = color_prob + self.contrast_prob = contrast_prob + self.brightness_prob = brightness_prob + self.sharpness_prob = sharpness_prob + self.autocontrast_prob = autocontrast_prob + self.equalize_prob = equalize_prob + self.solarize_prob = solarize_prob + self.solarize_threshold = solarize_threshold + + def __call__(self, image): + """ + Apply a visual effect on the image. + + Args + image: Image to adjust + """ + random_enhance_id = np.random.randint(0, 4) + if random_enhance_id == 0: + image = color(image, prob=self.color_prob, factor=self.color_factor) + elif random_enhance_id == 1: + image = contrast(image, prob=self.contrast_prob, factor=self.contrast_factor) + elif random_enhance_id == 2: + image = brightness(image, prob=self.brightness_prob, factor=self.brightness_factor) + else: + image = sharpness(image, prob=self.sharpness_prob, factor=self.sharpness_factor) + + random_ops_id = np.random.randint(0, 3) + if random_ops_id == 0: + image = autocontrast(image, prob=self.autocontrast_prob) + elif random_ops_id == 1: + image = equalize(image, prob=self.equalize_prob) + else: + image = solarize(image, prob=self.solarize_prob, threshold=self.solarize_threshold) + return image + + +if __name__ == '__main__': + from generators.pascal import PascalVocGenerator + import cv2 + + train_generator = PascalVocGenerator( + 'datasets/VOC0712', + 'trainval', + skip_difficult=True, + anchors_path='voc_anchors_416.txt', + batch_size=1 + ) + visual_effect = VisualEffect() + for i in range(train_generator.size()): + image = train_generator.load_image(i) + image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR) + annotations = train_generator.load_annotations(i) + boxes = annotations['bboxes'] + for box in boxes.astype(np.int32): + cv2.rectangle(image, (box[0], box[1]), (box[2], box[3]), (0, 0, 255), 2) + src_image = image.copy() + image = visual_effect(image) + cv2.namedWindow('image', cv2.WINDOW_NORMAL) + cv2.imshow('image', np.concatenate([src_image, image], axis=1)) + cv2.waitKey(0) + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/augmentor/misc.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/augmentor/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..a4ae004a7df327f7b098f4b3959db131992d0f6e --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/augmentor/misc.py @@ -0,0 +1,292 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from npu_bridge.npu_init import * +import cv2 +import numpy as np +from augmentor.transform import translation_xy, change_transform_origin, scaling_xy +from utils import reorder_vertexes + + +def rotate(image, annotations, prob=0.5, border_value=(128, 128, 128)): + assert 'bboxes' in annotations, 'annotations should contain bboxes even if it is empty' + + random_prob = np.random.uniform() + if random_prob < (1 - prob): + return image, annotations + + rotate_degree = np.random.uniform(low=-10, high=10) + h, w = image.shape[:2] + # Compute the rotation matrix. + M = cv2.getRotationMatrix2D(center=(w / 2, h / 2), + angle=rotate_degree, + scale=1) + + # Get the sine and cosine from the rotation matrix. + abs_cos_angle = np.abs(M[0, 0]) + abs_sin_angle = np.abs(M[0, 1]) + + # Compute the new bounding dimensions of the image. + new_w = int(h * abs_sin_angle + w * abs_cos_angle) + new_h = int(h * abs_cos_angle + w * abs_sin_angle) + + # Adjust the rotation matrix to take into account the translation. + M[0, 2] += new_w // 2 - w // 2 + M[1, 2] += new_h // 2 - h // 2 + + # Rotate the image. + image = cv2.warpAffine(image, M=M, dsize=(new_w, new_h), flags=cv2.INTER_CUBIC, + borderMode=cv2.BORDER_CONSTANT, + borderValue=border_value) + + bboxes = annotations['bboxes'] + if bboxes.shape[0] != 0: + new_bboxes = [] + for bbox in bboxes: + x1, y1, x2, y2 = bbox + points = M.dot([ + [x1, x2, x1, x2], + [y1, y2, y2, y1], + [1, 1, 1, 1], + ]) + # Extract the min and max corners again. 
+ min_xy = np.sort(points, axis=1)[:, :2] + min_x = np.mean(min_xy[0]) + min_y = np.mean(min_xy[1]) + max_xy = np.sort(points, axis=1)[:, 2:] + max_x = np.mean(max_xy[0]) + max_y = np.mean(max_xy[1]) + new_bboxes.append([min_x, min_y, max_x, max_y]) + annotations['bboxes'] = np.array(new_bboxes, dtype=np.float32) + + if 'quadrangles' in annotations and annotations['quadrangles'].shape[0] != 0: + quadrangles = annotations['quadrangles'] + rotated_quadrangles = [] + for quadrangle in quadrangles: + quadrangle = np.concatenate([quadrangle, np.ones((4, 1))], axis=-1) + rotated_quadrangle = M.dot(quadrangle.T).T[:, :2] + quadrangle = reorder_vertexes(rotated_quadrangle) + rotated_quadrangles.append(quadrangle) + quadrangles = np.stack(rotated_quadrangles) + annotations['quadrangles'] = quadrangles + xmin = np.min(quadrangles, axis=1)[:, 0] + ymin = np.min(quadrangles, axis=1)[:, 1] + xmax = np.max(quadrangles, axis=1)[:, 0] + ymax = np.max(quadrangles, axis=1)[:, 1] + bboxes = np.stack([xmin, ymin, xmax, ymax], axis=1) + annotations['bboxes'] = bboxes + return image, annotations + + +def crop(image, annotations, prob=0.5): + assert 'bboxes' in annotations, 'annotations should contain bboxes even if it is empty' + + random_prob = np.random.uniform() + if random_prob < (1 - prob): + return image, annotations + h, w = image.shape[:2] + bboxes = annotations['bboxes'] + if bboxes.shape[0] != 0: + min_x1, min_y1 = np.min(bboxes, axis=0)[:2] + max_x2, max_y2 = np.max(bboxes, axis=0)[2:] + random_x1 = np.random.randint(0, max(min_x1 // 2, 1)) + random_y1 = np.random.randint(0, max(min_y1 // 2, 1)) + random_x2 = np.random.randint(max_x2 + 1, max(min(w, max_x2 + (w - max_x2) // 2), max_x2 + 2)) + random_y2 = np.random.randint(max_y2 + 1, max(min(h, max_y2 + (h - max_y2) // 2), max_y2 + 2)) + image = image[random_y1:random_y2, random_x1:random_x2] + bboxes[:, [0, 2]] = bboxes[:, [0, 2]] - random_x1 + bboxes[:, [1, 3]] = bboxes[:, [1, 3]] - random_y1 + if 'quadrangles' in annotations and annotations['quadrangles'].shape[0] != 0: + quadrangles = annotations['quadrangles'] + quadrangles[:, :, 0] = quadrangles[:, :, 0] - random_x1 + quadrangles[:, :, 1] = quadrangles[:, :, 1] - random_y1 + else: + random_x1 = np.random.randint(0, max(w // 8, 1)) + random_y1 = np.random.randint(0, max(h // 8, 1)) + random_x2 = np.random.randint(7 * w // 8, w - 1) + random_y2 = np.random.randint(7 * h // 8, h - 1) + image = image[random_y1:random_y2, random_x1:random_x2] + return image, annotations + + +def flipx(image, annotations, prob=0.5): + assert 'bboxes' in annotations, 'annotations should contain bboxes even if it is empty' + + random_prob = np.random.uniform() + if random_prob < (1 - prob): + return image, annotations + bboxes = annotations['bboxes'] + h, w = image.shape[:2] + image = image[:, ::-1] + if bboxes.shape[0] != 0: + tmp = bboxes.copy() + bboxes[:, 0] = w - 1 - bboxes[:, 2] + bboxes[:, 2] = w - 1 - tmp[:, 0] + if 'quadrangles' in annotations and annotations['quadrangles'].shape[0] != 0: + quadrangles = annotations['quadrangles'] + tmp = quadrangles.copy() + quadrangles[:, 0, 0] = w - 1 - quadrangles[:, 0, 0] + quadrangles[:, 1, 0] = w - 1 - tmp[:, 3, 0] + quadrangles[:, 1, 1] = tmp[:, 3, 1] + quadrangles[:, 2, 0] = w - 1 - quadrangles[:, 2, 0] + quadrangles[:, 3, 0] = w - 1 - tmp[:, 1, 0] + quadrangles[:, 3, 1] = tmp[:, 1, 1] + return image, annotations + + +def multi_scale(image, annotations, prob=1.): + assert 'bboxes' in annotations, 'annotations should contain bboxes even if it is empty' + + random_prob 
= np.random.uniform() + if random_prob < (1 - prob): + return image, annotations + h, w = image.shape[:2] + scale = np.random.choice(np.arange(0.7, 1.4, 0.1)) + nh, nw = int(round(h * scale)), int(round(w * scale)) + image = cv2.resize(image, (nw, nh), interpolation=cv2.INTER_LINEAR) + bboxes = annotations['bboxes'] + if bboxes.shape[0] != 0: + annotations['bboxes'] = np.round(bboxes * scale) + if 'quadrangles' in annotations and annotations['quadrangles'].shape[0] != 0: + quadrangles = annotations['quadrangles'] + annotations['quadrangles'] = np.round(quadrangles * scale) + return image, annotations + + +def translate(image, annotations, prob=0.5, border_value=(128, 128, 128)): + assert 'bboxes' in annotations, 'annotations should contain bboxes even if it is empty' + + random_prob = np.random.uniform() + if random_prob < (1 - prob): + return image, annotations + h, w = image.shape[:2] + bboxes = annotations['bboxes'] + if bboxes.shape[0] != 0: + min_x1, min_y1 = np.min(bboxes, axis=0)[:2].astype(np.int32) + max_x2, max_y2 = np.max(bboxes, axis=0)[2:].astype(np.int32) + translation_matrix = translation_xy(min=(min(-(min_x1 // 2), 0), min(-(min_y1 // 2), 0)), + max=(max((w - 1 - max_x2) // 2, 1), max((h - 1 - max_y2) // 2, 1)), + prob=1.) + else: + translation_matrix = translation_xy(min=(min(-w // 8, 0), min(-h // 8, 0)), + max=(max(w // 8, 1), max(h // 8, 1))) + translation_matrix = change_transform_origin(translation_matrix, (w / 2, h / 2)) + image = cv2.warpAffine( + image, + translation_matrix[:2, :], + dsize=(w, h), + flags=cv2.INTER_CUBIC, + borderMode=cv2.BORDER_CONSTANT, + borderValue=border_value, + ) + if bboxes.shape[0] != 0: + new_bboxes = [] + for bbox in bboxes: + x1, y1, x2, y2 = bbox + points = translation_matrix.dot([ + [x1, x2, x1, x2], + [y1, y2, y2, y1], + [1, 1, 1, 1], + ]) + min_x, min_y = np.min(points, axis=1)[:2] + max_x, max_y = np.max(points, axis=1)[:2] + new_bboxes.append([min_x, min_y, max_x, max_y]) + annotations['bboxes'] = np.array(new_bboxes).astype(np.float32) + + if 'quadrangles' in annotations and annotations['quadrangles'].shape[0] != 0: + quadrangles = annotations['quadrangles'] + translated_quadrangles = [] + for quadrangle in quadrangles: + quadrangle = np.concatenate([quadrangle, np.ones((4, 1))], axis=-1) + translated_quadrangle = translation_matrix.dot(quadrangle.T).T[:, :2] + quadrangle = reorder_vertexes(translated_quadrangle) + translated_quadrangles.append(quadrangle) + quadrangles = np.stack(translated_quadrangles) + annotations['quadrangles'] = quadrangles + xmin = np.min(quadrangles, axis=1)[:, 0] + ymin = np.min(quadrangles, axis=1)[:, 1] + xmax = np.max(quadrangles, axis=1)[:, 0] + ymax = np.max(quadrangles, axis=1)[:, 1] + bboxes = np.stack([xmin, ymin, xmax, ymax], axis=1) + annotations['bboxes'] = bboxes + + return image, annotations + + +class MiscEffect: + def __init__(self, multi_scale_prob=0.5, rotate_prob=0.05, flip_prob=0.5, crop_prob=0.5, translate_prob=0.5, + border_value=(128, 128, 128)): + self.multi_scale_prob = multi_scale_prob + self.rotate_prob = rotate_prob + self.flip_prob = flip_prob + self.crop_prob = crop_prob + self.translate_prob = translate_prob + self.border_value = border_value + + def __call__(self, image, annotations): + image, annotations = multi_scale(image, annotations, prob=self.multi_scale_prob) + image, annotations = rotate(image, annotations, prob=self.rotate_prob, border_value=self.border_value) + image, annotations = flipx(image, annotations, prob=self.flip_prob) + image, annotations = crop(image, 
annotations, prob=self.crop_prob) + image, annotations = translate(image, annotations, prob=self.translate_prob, border_value=self.border_value) + return image, annotations + + +if __name__ == '__main__': + from generators.csv_ import CSVGenerator + + train_generator = CSVGenerator('datasets/ic15/train.csv', + 'datasets/ic15/classes.csv', + detect_text=True, + batch_size=1, + phi=5, + shuffle_groups=False) + misc_effect = MiscEffect() + for i in range(train_generator.size()): + image = train_generator.load_image(i) + image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR) + annotations = train_generator.load_annotations(i) + boxes = annotations['bboxes'].astype(np.int32) + quadrangles = annotations['quadrangles'].astype(np.int32) + for box in boxes: + cv2.rectangle(image, (box[0], box[1]), (box[2], box[3]), (0, 0, 255), 1) + cv2.drawContours(image, quadrangles, -1, (0, 255, 255), 1) + src_image = image.copy() + # cv2.namedWindow('src_image', cv2.WINDOW_NORMAL) + cv2.imshow('src_image', src_image) + # image, annotations = misc_effect(image, annotations) + image, annotations = multi_scale(image, annotations, prob=1.) + image = image.copy() + boxes = annotations['bboxes'].astype(np.int32) + quadrangles = annotations['quadrangles'].astype(np.int32) + for box in boxes: + cv2.rectangle(image, (box[0], box[1]), (box[2], box[3]), (0, 255, 0), 1) + cv2.drawContours(image, quadrangles, -1, (255, 255, 0), 1) + cv2.namedWindow('image', cv2.WINDOW_NORMAL) + cv2.imshow('image', image) + cv2.waitKey(0) + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/augmentor/transform.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/augmentor/transform.py new file mode 100644 index 0000000000000000000000000000000000000000..f8cce349c97efaa707ab0b79d110b137daf1a202 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/augmentor/transform.py @@ -0,0 +1,537 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from npu_bridge.npu_init import * +import cv2 +import numpy as np + +identity_matrix = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) + + +def colvec(*args): + """ + Create a numpy array representing a column vector. 
+ """ + return np.array([args]).T + + +def transform_aabb(transform_matrix, aabb): + """ + Apply a transformation to an axis aligned bounding box. + + The result is a new AABB in the same coordinate system as the original AABB. + The new AABB contains all corner points of the original AABB after applying the given transformation. + + Args + transform: The transformation to apply. + x1: The minimum x value of the AABB. + y1: The minimum y value of the AABB. + x2: The maximum x value of the AABB. + y2: The maximum y value of the AABB. + Returns + The new AABB as tuple (x1, y1, x2, y2) + """ + x1, y1, x2, y2 = aabb + # Transform all 4 corners of the AABB. + points = transform_matrix.dot([ + [x1, x2, x1, x2], + [y1, y2, y2, y1], + [1, 1, 1, 1], + ]) + + # Extract the min and max corners again. + # (3, ) (min_x, min_y, 1) + min_corner = points.min(axis=1) + # (3, ) (max_x, max_y, 1) + max_corner = points.max(axis=1) + + return [min_corner[0], min_corner[1], max_corner[0], max_corner[1]] + + +def random_value(min, max): + return np.random.uniform(min, max) + + +def random_vector(min, max): + """ + Construct a random vector between min and max. + + Args + min: the minimum value for each component, (n, ) + max: the maximum value for each component, (n, ) + """ + min = np.array(min) + max = np.array(max) + assert min.shape == max.shape + assert len(min.shape) == 1 + return np.random.uniform(min, max) + + +def rotation(min=0, max=0, prob=0.5): + """ + Construct a homogeneous 2D rotation matrix. + + Args + min: a scalar for the minimum absolute angle in radians + max: a scalar for the maximum absolute angle in radians + Returns + the rotation matrix as 3 by 3 numpy array + """ + random_prob = np.random.uniform() + if random_prob > prob: + # angle: the angle in radians + angle = random_value(min=min, max=max) + return np.array([ + [np.cos(angle), -np.sin(angle), 0], + [np.sin(angle), np.cos(angle), 0], + [0, 0, 1] + ]) + else: + return identity_matrix + + +def translation_x(min=0, max=0, prob=0.5): + """ + Construct a homogeneous 2D translation matrix. + + Args: + min: a scalar for the minimum translation for x axis + max: a scalar for the maximum translation for x axis + + Returns: + the translation matrix as 3 by 3 numpy array + + """ + random_prob = np.random.uniform() + if random_prob > prob: + # translation: the translation 2D vector + translation = random_value(min=min, max=max) + return np.array([ + [1, 0, translation], + [0, 1, ], + [0, 0, 1] + ]) + else: + return identity_matrix + + +def translation_y(min=0, max=0, prob=0.5): + """ + Construct a homogeneous 2D translation matrix. + + Args: + min: a scalar for the minimum translation for y axis + max: a scalar for the maximum translation for y axis + + Returns: + the translation matrix as 3 by 3 numpy array + + """ + random_prob = np.random.uniform() + if random_prob > prob: + # translation: the translation 2D vector + translation = random_value(min=min, max=max) + return np.array([ + [1, 0], + [0, 1, translation], + [0, 0, 1] + ]) + else: + return identity_matrix + + +def translation_xy(min=(0, 0), max=(0, 0), prob=0.5): + """ + Construct a homogeneous 2D translation matrix. 
+ + Args: + min: a scalar for the minimum translation for y axis + max: a scalar for the maximum translation for y axis + + Returns: + the translation matrix as 3 by 3 numpy array + + """ + random_prob = np.random.uniform() + if random_prob < prob: + # translation: the translation 2D vector + dx = np.random.randint(min[0], max[0]) + dy = np.random.randint(min[1], max[1]) + return np.array([ + [1, 0, dx], + [0, 1, dy], + [0, 0, 1] + ]) + else: + return identity_matrix + + +def shear_x(min=0, max=0, prob=0.5): + """ + Construct a homogeneous 2D shear matrix. + + Args + min: the minimum shear angle in radians. + max: the maximum shear angle in radians. + Returns + the shear matrix as 3 by 3 numpy array + """ + random_prob = np.random.uniform() + if random_prob > prob: + # angle: the shear angle in radians + angle = random_value(min=min, max=max) + return np.array([ + [1, np.tan(angle), 0], + [0, 1, 0], + [0, 0, 1] + ]) + else: + return identity_matrix + + +def shear_y(min, max, prob=0.5): + """ + Construct a homogeneous 2D shear matrix. + + Args + min: the minimum shear angle in radians. + max: the maximum shear angle in radians. + Returns + the shear matrix as 3 by 3 numpy array + """ + random_prob = np.random.uniform() + if random_prob > prob: + # angle: the shear angle in radians + angle = random_value(min=min, max=max) + return np.array([ + [1, 0, 0], + [np.tan(angle), 1, 0], + [0, 0, 1] + ]) + else: + return identity_matrix + + +def scaling_x(min=0.9, max=1.1, prob=0.5): + """ + Construct a homogeneous 2D scaling matrix. + + Args + factor: a 2D vector for X and Y scaling + Returns + the zoom matrix as 3 by 3 numpy array + """ + + random_prob = np.random.uniform() + if random_prob > prob: + # angle: the shear angle in radians + factor = random_value(min=min, max=max) + return np.array([ + [factor, 0, 0], + [0, 1, 0], + [0, 0, 1] + ]) + else: + return identity_matrix + + +def scaling_y(min=0.9, max=1.1, prob=0.5): + """ + Construct a homogeneous 2D scaling matrix. + + Args + factor: a 2D vector for X and Y scaling + Returns + the zoom matrix as 3 by 3 numpy array + """ + + random_prob = np.random.uniform() + if random_prob > prob: + # angle: the shear angle in radians + factor = random_value(min=min, max=max) + return np.array([ + [1, 0, 0], + [0, factor, 0], + [0, 0, 1] + ]) + else: + return identity_matrix + + +def scaling_xy(min=(0.9, 0.9), max=(1.1, 1.1), prob=0.5): + """ + Construct a homogeneous 2D scaling matrix. + + Args + min: a 2D vector containing the minimum scaling factor for X and Y. + min: a 2D vector containing The maximum scaling factor for X and Y. + Returns + the zoom matrix as 3 by 3 numpy array + """ + + random_prob = np.random.uniform() + if random_prob > prob: + # factor: a 2D vector for X and Y scaling + factor = random_vector(min=min, max=max) + return np.array([ + [factor[0], 0, 0], + [0, factor[1], 0], + [0, 0, 1] + ]) + else: + return identity_matrix + + +def flip_x(prob=0.8): + """ + Construct a transformation randomly containing X/Y flips (or not). + + Args + flip_x_chance: The chance that the result will contain a flip along the X axis. + flip_y_chance: The chance that the result will contain a flip along the Y axis. + Returns + a homogeneous 3 by 3 transformation matrix + """ + random_prob = np.random.uniform() + if random_prob > prob: + # 1 - 2 * bool gives 1 for False and -1 for True. 
+ return np.array([ + [-1, 0, 0], + [0, 1, 0], + [0, 0, 1] + ]) + else: + return identity_matrix + + +def flip_y(prob=0.8): + """ + Construct a transformation randomly containing X/Y flips (or not). + + Args + flip_x_chance: The chance that the result will contain a flip along the X axis. + flip_y_chance: The chance that the result will contain a flip along the Y axis. + Returns + a homogeneous 3 by 3 transformation matrix + """ + random_prob = np.random.uniform() + if random_prob > prob: + # 1 - 2 * bool gives 1 for False and -1 for True. + return np.array([ + [1, 0, 0], + [0, -1, 0], + [0, 0, 1] + ]) + else: + return identity_matrix + + +def change_transform_origin(transform, center): + """ + Create a new transform representing the same transformation, only with the origin of the linear part changed. + + Args + transform: the transformation matrix + center: the new origin of the transformation + Returns + translate(center) * transform * translate(-center) + """ + center = np.array(center) + return np.linalg.multi_dot([np.array([[1, 0, center[0]], [0, 1, center[1]], [0, 0, 1]]), + transform, + np.array([[1, 0, -center[0]], [0, 1, -center[1]], [0, 0, 1]])]) + + +def random_transform( + min_rotation=0, + max_rotation=0, + min_translation=(0, 0), + max_translation=(0, 0), + min_shear=0, + max_shear=0, + min_scaling=(1, 1), + max_scaling=(1, 1), +): + """ + Create a random transformation. + + The transformation consists of the following operations in this order (from left to right): + * rotation + * translation + * shear + * scaling + * flip x (if applied) + * flip y (if applied) + + Note that by default, the data generators in `keras_retinanet.preprocessing.generators` interpret the translation + as factor of the image size. So an X translation of 0.1 would translate the image by 10% of it's width. + Set `relative_translation` to `False` in the `TransformParameters` of a data generator to have it interpret + the translation directly as pixel distances instead. + + Args + min_rotation: The minimum rotation in radians for the transform as scalar. + max_rotation: The maximum rotation in radians for the transform as scalar. + min_translation: The minimum translation for the transform as 2D column vector. + max_translation: The maximum translation for the transform as 2D column vector. + min_shear: The minimum shear angle for the transform in radians. + max_shear: The maximum shear angle for the transform in radians. + min_scaling: The minimum scaling for the transform as 2D column vector. + max_scaling: The maximum scaling for the transform as 2D column vector. + """ + return np.linalg.multi_dot([ + rotation(min_rotation, max_rotation), + translation_xy(min_translation, max_translation), + shear_x(min_shear, max_shear) if np.random.uniform() > 0.5 else shear_y(min_shear, max_shear), + scaling_xy(min_scaling, max_scaling), + flip_x() if np.random.uniform() > 0.5 else flip_y(), + ]) + + +def random_transform_generator(**kwargs): + """ + Create a random transform generator. + + The transformation consists of the following operations in this order (from left to right): + * rotation + * translation + * shear + * scaling + * flip x (if applied) + * flip y (if applied) + + Note that by default, the data generators in `keras_retinanet.preprocessing.generators` interpret the translation + as factor of the image size. So an X translation of 0.1 would translate the image by 10% of it's width. 
+ Set `relative_translation` to `False` in the `TransformParameters` of a data generator to have it interpret + the translation directly as pixel distances instead. + + Args + min_rotation: The minimum rotation in radians for the transform as scalar. + max_rotation: The maximum rotation in radians for the transform as scalar. + min_translation: The minimum translation for the transform as 2D column vector. + max_translation: The maximum translation for the transform as 2D column vector. + min_shear: The minimum shear angle for the transform in radians. + max_shear: The maximum shear angle for the transform in radians. + min_scaling: The minimum scaling for the transform as 2D column vector. + max_scaling: The maximum scaling for the transform as 2D column vector. + """ + + while True: + yield random_transform(**kwargs) + + +def adjust_transform_for_image(transform, image, relative_translation): + """ + Adjust a transformation for a specific image. + + The translation of the matrix will be scaled with the size of the image. + The linear part of the transformation will adjusted so that the origin of the transformation will be at the center of the image. + """ + height, width, channels = image.shape + + result = transform + + # Scale the translation with the image size if specified. + if relative_translation: + result[0:2, 2] *= [width, height] + + # Move the origin of transformation. + result = change_transform_origin(transform, (0.5 * width, 0.5 * height)) + + return result + + +class TransformParameters: + """ + Struct holding parameters determining how to apply a transformation to an image. + + Args + fill_mode: One of: 'constant', 'nearest', 'reflect', 'wrap' + interpolation: One of: 'nearest', 'linear', 'cubic', 'area', 'lanczos4' + cval: Fill value to use with fill_mode='constant' + relative_translation: If true (the default), interpret translation as a factor of the image size. + If false, interpret it as absolute pixels. + """ + + def __init__( + self, + fill_mode='nearest', + interpolation='linear', + cval=0, + relative_translation=True, + ): + self.fill_mode = fill_mode + self.cval = cval + self.interpolation = interpolation + self.relative_translation = relative_translation + + def cv_border_mode(self): + if self.fill_mode == 'constant': + return cv2.BORDER_CONSTANT + if self.fill_mode == 'nearest': + return cv2.BORDER_REPLICATE + if self.fill_mode == 'reflect': + return cv2.BORDER_REFLECT_101 + if self.fill_mode == 'wrap': + return cv2.BORDER_WRAP + + def cv_interpolation(self): + if self.interpolation == 'nearest': + return cv2.INTER_NEAREST + if self.interpolation == 'linear': + return cv2.INTER_LINEAR + if self.interpolation == 'cubic': + return cv2.INTER_CUBIC + if self.interpolation == 'area': + return cv2.INTER_AREA + if self.interpolation == 'lanczos4': + return cv2.INTER_LANCZOS4 + + +def apply_transform(matrix, image, params): + """ + Apply a transformation to an image. + + The origin of transformation is at the top left corner of the image. + + The matrix is interpreted such that a point (x, y) on the original image is moved to transform * (x, y) in the generated image. + Mathematically speaking, that means that the matrix is a transformation from the transformed image space to the original image space. + + Args + matrix: A homogeneous 3 by 3 matrix holding representing the transformation to apply. + image: The image to transform. 
+        params: The transform parameters (see TransformParameters)
+    """
+    output = cv2.warpAffine(
+        image,
+        matrix[:2, :],
+        dsize=(image.shape[1], image.shape[0]),
+        flags=params.cv_interpolation(),
+        borderMode=params.cv_border_mode(),
+        borderValue=params.cval,
+    )
+    return output
+
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/callbacks.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/callbacks.py
new file mode 100644
index 0000000000000000000000000000000000000000..de58b1279bf3edeb5e26c1abb8a9ff1ecba01e63
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/callbacks.py
@@ -0,0 +1,103 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import * +from tensorflow.keras.callbacks import Callback +import tensorflow.keras.backend as K +import numpy as np + + +class CosineAnnealingScheduler(Callback): + def __init__(self, cycle_iterations, min_lr, t_mu=2, start_iteration=0): + self.iteration_id = 0 + self.start_iteration = start_iteration + self.cycle_iteration_id = 0 + self.lrs = [] + self.min_lr = min_lr + self.cycle_iterations = cycle_iterations + self.t_mu = t_mu + super(CosineAnnealingScheduler, self).__init__() + + def on_batch_end(self, batch, logs): + if self.iteration_id > self.start_iteration: + # (1, 0) + cosine_decay = 0.5 * (1 + np.cos(np.pi * (self.cycle_iteration_id / self.cycle_iterations))) + decayed_lr = (self.max_lr - self.min_lr) * cosine_decay + self.min_lr + K.set_value(self.model.optimizer.lr, decayed_lr) + if self.cycle_iteration_id == self.cycle_iterations: + self.cycle_iteration_id = 0 + self.cycle_iterations = int(self.cycle_iterations * self.t_mu) + else: + self.cycle_iteration_id = self.cycle_iteration_id + 1 + self.lrs.append(decayed_lr) + elif self.iteration_id == self.start_iteration: + self.max_lr = K.get_value(self.model.optimizer.lr) + self.iteration_id += 1 + + def on_train_begin(self, logs={}): + self.max_lr = K.get_value(self.model.optimizer.lr) + + +class ExponentialScheduler(Callback): + def __init__(self, min_lr, max_lr, iterations): + self.factor = np.exp(np.log(max_lr / min_lr) / iterations) + self.min_lr = min_lr + self.max_lr = max_lr + # debug + self.lrs = [] + self.losses = [] + + def on_batch_end(self, batch, logs): + lr = K.get_value(self.model.optimizer.lr) + self.lrs.append(lr) + self.losses.append(logs["loss"]) + K.set_value(self.model.optimizer.lr, lr * self.factor) + + def on_train_begin(self, logs={}): + K.set_value(self.model.optimizer.lr, self.min_lr) + + +class LinearWarmUpScheduler(Callback): + def __init__(self, iterations, min_lr): + self.iterations = iterations + self.min_lr = min_lr + self.iteration_id = 0 + # debug + self.lrs = [] + + def on_batch_begin(self, batch, logs): + if self.iteration_id < self.iterations: + lr = (self.max_lr - self.min_lr) / self.iterations * (self.iteration_id + 1) + self.min_lr + K.set_value(self.model.optimizer.lr, lr) + self.iteration_id += 1 + self.lrs.append(K.get_value(self.model.optimizer.lr)) + + def on_train_begin(self, logs={}): + self.max_lr = K.get_value(self.model.optimizer.lr) + K.set_value(self.model.optimizer.lr, self.min_lr) + self.lrs.append(K.get_value(self.model.optimizer.lr)) + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/common.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/common.py new file mode 100644 index 0000000000000000000000000000000000000000..1524c639bb5a4db9c01443e36ada63564e3040da --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/common.py @@ -0,0 +1,328 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+from generators.pascal import PascalVocGenerator
+from model import efficientdet
+import argparse
+import os
+import sys
+from utils.compute_overlap import compute_overlap
+from utils.visualization import draw_detections, draw_annotations
+import numpy as np
+import cv2
+import progressbar
+
+assert (callable(progressbar.progressbar)), "Using wrong progressbar module, install 'progressbar2' instead."
+
+
+def _compute_ap(recall, precision):
+    """
+    Compute the average precision, given the recall and precision curves.
+
+    Code originally from https://github.com/rbgirshick/py-faster-rcnn.
+
+    Args:
+        recall: The recall curve (list).
+        precision: The precision curve (list).
+
+    Returns:
+        The average precision as computed in py-faster-rcnn.
+
+    """
+    # correct AP calculation
+    # first append sentinel values at the end
+    mrec = np.concatenate(([0.], recall, [1.]))
+    mpre = np.concatenate(([0.], precision, [0.]))
+
+    # compute the precision envelope
+    for i in range(mpre.size - 1, 0, -1):
+        mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
+
+    # to calculate area under PR curve, look for points
+    # where X axis (recall) changes value
+    i = np.where(mrec[1:] != mrec[:-1])[0]
+
+    # and sum (delta recall) * prec
+    ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
+    return ap
+
+
+def _get_detections(generator, model, score_threshold=0.05, max_detections=100, visualize=False):
+    """
+    Get the detections from the model using the generator.
+
+    The result is a list of lists such that the size is:
+        all_detections[num_images][num_classes] = detections[num_class_detections, 5]
+
+    Args:
+        generator: The generator used to run images through the model.
+        model: The model to run on the images.
+        score_threshold: The score confidence threshold to use.
+        max_detections: The maximum number of detections to use per image.
+        visualize: Show the visualized detections or not.
+
+    Returns:
+        A list of lists containing the detections for each image in the generator.
+ + """ + all_detections = [[None for i in range(generator.num_classes()) if generator.has_label(i)] for j in + range(generator.size())] + + for i in progressbar.progressbar(range(generator.size()), prefix='Running network: '): + image = generator.load_image(i) + src_image = image.copy() + h, w = image.shape[:2] + + anchors = generator.anchors + image, scale = generator.preprocess_image(image) + + # run network + boxes, scores, *_, labels = model.predict_on_batch([np.expand_dims(image, axis=0)]) + boxes /= scale + boxes[:, :, 0] = np.clip(boxes[:, :, 0], 0, w - 1) + boxes[:, :, 1] = np.clip(boxes[:, :, 1], 0, h - 1) + boxes[:, :, 2] = np.clip(boxes[:, :, 2], 0, w - 1) + boxes[:, :, 3] = np.clip(boxes[:, :, 3], 0, h - 1) + + # select indices which have a score above the threshold + indices = np.where(scores[0, :] > score_threshold)[0] + + # select those scores + scores = scores[0][indices] + + # find the order with which to sort the scores + scores_sort = np.argsort(-scores)[:max_detections] + + # select detections + # (n, 4) + image_boxes = boxes[0, indices[scores_sort], :] + # (n, ) + image_scores = scores[scores_sort] + # (n, ) + image_labels = labels[0, indices[scores_sort]] + # (n, 6) + detections = np.concatenate( + [image_boxes, np.expand_dims(image_scores, axis=1), np.expand_dims(image_labels, axis=1)], axis=1) + + if visualize: + draw_annotations(src_image, generator.load_annotations(i), label_to_name=generator.label_to_name) + draw_detections(src_image, detections[:5, :4], detections[:5, 4], detections[:5, 5].astype(np.int32), + label_to_name=generator.label_to_name, + score_threshold=score_threshold) + + # cv2.imwrite(os.path.join(save_path, '{}.png'.format(i)), raw_image) + cv2.namedWindow('{}'.format(i), cv2.WINDOW_NORMAL) + cv2.imshow('{}'.format(i), src_image) + cv2.waitKey(0) + + # copy detections to all_detections + for class_id in range(generator.num_classes()): + all_detections[i][class_id] = detections[detections[:, -1] == class_id, :-1] + + return all_detections + + +def _get_annotations(generator): + """ + Get the ground truth annotations from the generator. + + The result is a list of lists such that the size is: + all_annotations[num_images][num_classes] = annotations[num_class_annotations, 5] + + Args: + generator: The generator used to retrieve ground truth annotations. + + Returns: + A list of lists containing the annotations for each image in the generator. + + """ + all_annotations = [[None for i in range(generator.num_classes())] for j in range(generator.size())] + + for i in progressbar.progressbar(range(generator.size()), prefix='Parsing annotations: '): + # load the annotations + annotations = generator.load_annotations(i) + + # copy detections to all_annotations + for label in range(generator.num_classes()): + if not generator.has_label(label): + continue + + all_annotations[i][label] = annotations['bboxes'][annotations['labels'] == label, :].copy() + + return all_annotations + + +def evaluate( + generator, + model, + iou_threshold=0.5, + score_threshold=0.01, + max_detections=100, + visualize=False, + epoch=0 +): + """ + Evaluate a given dataset using a given model. + + Args: + generator: The generator that represents the dataset to evaluate. + model: The model to evaluate. + iou_threshold: The threshold used to consider when a detection is positive or negative. + score_threshold: The score confidence threshold to use for detections. + max_detections: The maximum number of detections to use per image. + visualize: Show the visualized detections or not. 
+
+    Returns:
+        A dict mapping class labels to (average precision, number of annotations) tuples.
+
+    """
+    # gather all detections and annotations
+    all_detections = _get_detections(generator, model, score_threshold=score_threshold, max_detections=max_detections,
+                                     visualize=visualize)
+    all_annotations = _get_annotations(generator)
+    average_precisions = {}
+    num_tp = 0
+    num_fp = 0
+
+    # process detections and annotations
+    for label in range(generator.num_classes()):
+        if not generator.has_label(label):
+            continue
+
+        false_positives = np.zeros((0,))
+        true_positives = np.zeros((0,))
+        scores = np.zeros((0,))
+        num_annotations = 0.0
+
+        for i in range(generator.size()):
+            detections = all_detections[i][label]
+            annotations = all_annotations[i][label]
+            num_annotations += annotations.shape[0]
+            detected_annotations = []
+
+            for d in detections:
+                scores = np.append(scores, d[4])
+
+                if annotations.shape[0] == 0:
+                    false_positives = np.append(false_positives, 1)
+                    true_positives = np.append(true_positives, 0)
+                    continue
+                overlaps = compute_overlap(np.expand_dims(d, axis=0), annotations)
+                assigned_annotation = np.argmax(overlaps, axis=1)
+                max_overlap = overlaps[0, assigned_annotation]
+
+                if max_overlap >= iou_threshold and assigned_annotation not in detected_annotations:
+                    false_positives = np.append(false_positives, 0)
+                    true_positives = np.append(true_positives, 1)
+                    detected_annotations.append(assigned_annotation)
+                else:
+                    false_positives = np.append(false_positives, 1)
+                    true_positives = np.append(true_positives, 0)
+
+        # a class without any ground-truth annotations is scored with an AP of 0
+        if num_annotations == 0:
+            average_precisions[label] = 0, 0
+            continue
+
+        # sort by score
+        indices = np.argsort(-scores)
+        false_positives = false_positives[indices]
+        true_positives = true_positives[indices]
+
+        # compute false positives and true positives
+        false_positives = np.cumsum(false_positives)
+        true_positives = np.cumsum(true_positives)
+
+        if false_positives.shape[0] > 0:
+            num_fp += false_positives[-1]
+        if true_positives.shape[0] > 0:
+            num_tp += true_positives[-1]
+
+        # compute recall and precision
+        recall = true_positives / num_annotations
+        precision = true_positives / np.maximum(true_positives + false_positives, np.finfo(np.float64).eps)
+
+        # compute average precision
+        average_precision = _compute_ap(recall, precision)
+        average_precisions[label] = average_precision, num_annotations
+    print('num_fp={}, num_tp={}'.format(num_fp, num_tp))
+
+    return average_precisions
+
+
+def parse_args(args):
+    """
+    Parse the arguments.
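+
+    Args:
+        args: The list of command-line tokens to parse (typically sys.argv[1:]).
+
+    Returns:
+        The parsed argparse.Namespace.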
+    """
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--model_path', help='path to the trained model weights (.h5)', default='/home/dingwei/efficientdet/checkpoints/pascal_13_0.3418_0.5143.h5')
+    parser.add_argument('--data_path', help='path to dataset', default='/home/dataset/VOCdevkit/VOC2007')
+    # parse the arguments once and reuse the parsed namespace
+    parsed_args = parser.parse_args(args)
+    print(vars(parsed_args))
+    return parsed_args
+
+
+def main(args=None):
+    if args is None:
+        args = sys.argv[1:]
+    args = parse_args(args)
+
+    phi = 0
+    weighted_bifpn = False
+    common_args = {
+        'batch_size': 4,
+        'phi': phi,
+    }
+    test_generator = PascalVocGenerator(
+        args.data_path,
+        'test',
+        shuffle_groups=False,
+        skip_truncated=False,
+        skip_difficult=True,
+        **common_args
+    )
+    input_shape = (test_generator.image_size, test_generator.image_size)
+    anchors = test_generator.anchors
+    num_classes = test_generator.num_classes()
+    model, prediction_model = efficientdet(phi=phi, num_classes=num_classes, weighted_bifpn=weighted_bifpn)
+    prediction_model.load_weights(args.model_path, by_name=True)
+    average_precisions = evaluate(test_generator, prediction_model, visualize=False)
+    # compute per class average precision
+    total_instances = []
+    precisions = []
+    for label, (average_precision, num_annotations) in average_precisions.items():
+        print('{:.0f} instances of class'.format(num_annotations), test_generator.label_to_name(label),
+              'with average precision: {:.4f}'.format(average_precision))
+        total_instances.append(num_annotations)
+        precisions.append(average_precision)
+    mean_ap = sum(precisions) / sum(x > 0 for x in total_instances)
+    print('mAP: {:.4f}'.format(mean_ap))
+
+
+if __name__ == '__main__':
+    main()
+
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/efficientnet.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/efficientnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..7793514242161923299e0d931a1227ef22139dfc
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/efficientnet.py
@@ -0,0 +1,577 @@
+# Copyright 2019 The TensorFlow Authors, Pavel Yakubovskiy, Björn Barz. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Contains definitions for EfficientNet model.
+
+[1] Mingxing Tan, Quoc V. Le
+  EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks.
+ ICML'19, https://arxiv.org/abs/1905.11946 +""" + +# Code of this model implementation is mostly written by +# Björn Barz ([@Callidior](https://github.com/Callidior)) + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from npu_bridge.npu_init import * + +import os +import json +import math +import string +import collections +import numpy as np + +from six.moves import xrange +from keras_applications.imagenet_utils import _obtain_input_shape +from keras_applications.imagenet_utils import decode_predictions +from keras_applications.imagenet_utils import preprocess_input as _preprocess_input + +from utils import get_submodules_from_kwargs +from layers import BatchNormalization + +backend = None +layers = None +models = None +keras_utils = None + + +BASE_WEIGHTS_PATH = ( + 'https://github.com/Callidior/keras-applications/' + 'releases/download/efficientnet/') + +WEIGHTS_HASHES = { + 'efficientnet-b0': ('163292582f1c6eaca8e7dc7b51b01c61' + '5b0dbc0039699b4dcd0b975cc21533dc', + 'c1421ad80a9fc67c2cc4000f666aa507' + '89ce39eedb4e06d531b0c593890ccff3'), + 'efficientnet-b1': ('d0a71ddf51ef7a0ca425bab32b7fa7f1' + '6043ee598ecee73fc674d9560c8f09b0', + '75de265d03ac52fa74f2f510455ba64f' + '9c7c5fd96dc923cd4bfefa3d680c4b68'), + 'efficientnet-b2': ('bb5451507a6418a574534aa76a91b106' + 'f6b605f3b5dde0b21055694319853086', + '433b60584fafba1ea3de07443b74cfd3' + '2ce004a012020b07ef69e22ba8669333'), + 'efficientnet-b3': ('03f1fba367f070bd2545f081cfa7f3e7' + '6f5e1aa3b6f4db700f00552901e75ab9', + 'c5d42eb6cfae8567b418ad3845cfd63a' + 'a48b87f1bd5df8658a49375a9f3135c7'), + 'efficientnet-b4': ('98852de93f74d9833c8640474b2c698d' + 'b45ec60690c75b3bacb1845e907bf94f', + '7942c1407ff1feb34113995864970cd4' + 'd9d91ea64877e8d9c38b6c1e0767c411'), + 'efficientnet-b5': ('30172f1d45f9b8a41352d4219bf930ee' + '3339025fd26ab314a817ba8918fefc7d', + '9d197bc2bfe29165c10a2af8c2ebc675' + '07f5d70456f09e584c71b822941b1952'), + 'efficientnet-b6': ('f5270466747753485a082092ac9939ca' + 'a546eb3f09edca6d6fff842cad938720', + '1d0923bb038f2f8060faaf0a0449db4b' + '96549a881747b7c7678724ac79f427ed'), + 'efficientnet-b7': ('876a41319980638fa597acbbf956a82d' + '10819531ff2dcb1a52277f10c7aefa1a', + '60b56ff3a8daccc8d96edfd40b204c11' + '3e51748da657afd58034d54d3cec2bac') +} + +BlockArgs = collections.namedtuple('BlockArgs', [ + 'kernel_size', 'num_repeat', 'input_filters', 'output_filters', + 'expand_ratio', 'id_skip', 'strides', 'se_ratio' +]) +# defaults will be a public argument for namedtuple in Python 3.7 +# https://docs.python.org/3/library/collections.html#collections.namedtuple +BlockArgs.__new__.__defaults__ = (None,) * len(BlockArgs._fields) + +DEFAULT_BLOCKS_ARGS = [ + BlockArgs(kernel_size=3, num_repeat=1, input_filters=32, output_filters=16, + expand_ratio=1, id_skip=True, strides=[1, 1], se_ratio=0.25), + BlockArgs(kernel_size=3, num_repeat=2, input_filters=16, output_filters=24, + expand_ratio=6, id_skip=True, strides=[2, 2], se_ratio=0.25), + BlockArgs(kernel_size=5, num_repeat=2, input_filters=24, output_filters=40, + expand_ratio=6, id_skip=True, strides=[2, 2], se_ratio=0.25), + BlockArgs(kernel_size=3, num_repeat=3, input_filters=40, output_filters=80, + expand_ratio=6, id_skip=True, strides=[2, 2], se_ratio=0.25), + BlockArgs(kernel_size=5, num_repeat=3, input_filters=80, output_filters=112, + expand_ratio=6, id_skip=True, strides=[1, 1], se_ratio=0.25), + BlockArgs(kernel_size=5, num_repeat=4, input_filters=112, output_filters=192, + expand_ratio=6, 
id_skip=True, strides=[2, 2], se_ratio=0.25),
+    BlockArgs(kernel_size=3, num_repeat=1, input_filters=192, output_filters=320,
+              expand_ratio=6, id_skip=True, strides=[1, 1], se_ratio=0.25)
+]
+
+CONV_KERNEL_INITIALIZER = {
+    'class_name': 'VarianceScaling',
+    'config': {
+        'scale': 2.0,
+        'mode': 'fan_out',
+        # EfficientNet actually uses an untruncated normal distribution for
+        # initializing conv layers, but keras.initializers.VarianceScaling uses
+        # a truncated distribution.
+        # We decided against a custom initializer for better serializability.
+        'distribution': 'normal'
+    }
+}
+
+DENSE_KERNEL_INITIALIZER = {
+    'class_name': 'VarianceScaling',
+    'config': {
+        'scale': 1. / 3.,
+        'mode': 'fan_out',
+        'distribution': 'uniform'
+    }
+}
+
+
+def preprocess_input(x, **kwargs):
+    kwargs = {k: v for k, v in kwargs.items() if k in ['backend', 'layers', 'models', 'utils']}
+    return _preprocess_input(x, mode='torch', **kwargs)
+
+
+def get_swish(**kwargs):
+    backend, layers, models, keras_utils = get_submodules_from_kwargs(kwargs)
+
+    def swish(x):
+        """Swish activation function: x * sigmoid(x).
+        Reference: [Searching for Activation Functions](https://arxiv.org/abs/1710.05941)
+        """
+
+        if backend.backend() == 'tensorflow':
+            try:
+                # The native TF implementation has a more
+                # memory-efficient gradient implementation
+                return backend.tf.nn.swish(x)
+            except AttributeError:
+                pass
+
+        return x * backend.sigmoid(x)
+
+    return swish
+
+
+def get_dropout(**kwargs):
+    """Wrapper around a Dropout variant that fixes the problem of a ``None``
+    entry in ``noise_shape`` under tf.keras. The FixedDropout class cannot be
+    defined at module level because the backend modules it inherits from are
+    only available once the submodule kwargs have been passed in.
+
+    Issue:
+        https://github.com/tensorflow/tensorflow/issues/30946
+    """
+    backend, layers, models, keras_utils = get_submodules_from_kwargs(kwargs)
+
+    class FixedDropout(layers.Dropout):
+        def _get_noise_shape(self, inputs):
+            if self.noise_shape is None:
+                return self.noise_shape
+
+            # replace None entries with the corresponding symbolic dimension
+            symbolic_shape = backend.shape(inputs)
+            noise_shape = [symbolic_shape[axis] if shape is None else shape
+                           for axis, shape in enumerate(self.noise_shape)]
+            return tuple(noise_shape)
+
+    return FixedDropout
+
+
+def round_filters(filters, width_coefficient, depth_divisor):
+    """Round number of filters based on width multiplier."""
+
+    filters *= width_coefficient
+    new_filters = int(filters + depth_divisor / 2) // depth_divisor * depth_divisor
+    new_filters = max(depth_divisor, new_filters)
+    # Make sure that round down does not go down by more than 10%.
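+    # Worked example (illustrative values): filters=32 with
+    # width_coefficient=1.2 and depth_divisor=8 gives 38.4, which snaps to the
+    # nearest multiple of 8 (40); since 40 >= 0.9 * 38.4, the correction below
+    # is not applied and 40 is returned.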
+    if new_filters < 0.9 * filters:
+        new_filters += depth_divisor
+    return int(new_filters)
+
+
+def round_repeats(repeats, depth_coefficient):
+    """Round number of repeats based on depth multiplier."""
+
+    return int(math.ceil(depth_coefficient * repeats))
+
+
+def mb_conv_block(inputs, block_args, activation, drop_rate=None, prefix='', freeze_bn=False):
+    """Mobile Inverted Residual Bottleneck."""
+
+    has_se = (block_args.se_ratio is not None) and (0 < block_args.se_ratio <= 1)
+    bn_axis = 3 if backend.image_data_format() == 'channels_last' else 1
+
+    # workaround for tf.keras Dropout not handling None entries in noise_shape
+    Dropout = get_dropout(
+        backend=backend,
+        layers=layers,
+        models=models,
+        utils=keras_utils
+    )
+
+    # Expansion phase
+    filters = block_args.input_filters * block_args.expand_ratio
+    if block_args.expand_ratio != 1:
+        x = layers.Conv2D(filters, 1,
+                          padding='same',
+                          use_bias=False,
+                          kernel_initializer=CONV_KERNEL_INITIALIZER,
+                          name=prefix + 'expand_conv')(inputs)
+        # x = BatchNormalization(freeze=freeze_bn, axis=bn_axis, name=prefix + 'expand_bn')(x)
+        x = layers.BatchNormalization(axis=bn_axis, name=prefix + 'expand_bn')(x)
+        x = layers.Activation(activation, name=prefix + 'expand_activation')(x)
+    else:
+        x = inputs
+
+    # Depthwise Convolution
+    x = layers.DepthwiseConv2D(block_args.kernel_size,
+                               strides=block_args.strides,
+                               padding='same',
+                               use_bias=False,
+                               depthwise_initializer=CONV_KERNEL_INITIALIZER,
+                               name=prefix + 'dwconv')(x)
+    # x = BatchNormalization(freeze=freeze_bn, axis=bn_axis, name=prefix + 'bn')(x)
+    x = layers.BatchNormalization(axis=bn_axis, name=prefix + 'bn')(x)
+    x = layers.Activation(activation, name=prefix + 'activation')(x)
+
+    # Squeeze and Excitation phase
+    if has_se:
+        num_reduced_filters = max(1, int(
+            block_args.input_filters * block_args.se_ratio
+        ))
+        se_tensor = layers.GlobalAveragePooling2D(name=prefix + 'se_squeeze')(x)
+
+        target_shape = (1, 1, filters) if backend.image_data_format() == 'channels_last' else (filters, 1, 1)
+        se_tensor = layers.Reshape(target_shape, name=prefix + 'se_reshape')(se_tensor)
+        se_tensor = layers.Conv2D(num_reduced_filters, 1,
+                                  activation=activation,
+                                  padding='same',
+                                  use_bias=True,
+                                  kernel_initializer=CONV_KERNEL_INITIALIZER,
+                                  name=prefix + 'se_reduce')(se_tensor)
+        se_tensor = layers.Conv2D(filters, 1,
+                                  activation='sigmoid',
+                                  padding='same',
+                                  use_bias=True,
+                                  kernel_initializer=CONV_KERNEL_INITIALIZER,
+                                  name=prefix + 'se_expand')(se_tensor)
+        if backend.backend() == 'theano':
+            # For the Theano backend, we have to explicitly make
+            # the excitation weights broadcastable.
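+            # The reshaped SE tensor has singleton spatial axes; Theano does
+            # not broadcast these implicitly, so they are marked broadcastable
+            # before the elementwise multiply below.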
+            pattern = ([True, True, True, False] if backend.image_data_format() == 'channels_last'
+                       else [True, False, True, True])
+            se_tensor = layers.Lambda(
+                lambda x: backend.pattern_broadcast(x, pattern),
+                name=prefix + 'se_broadcast')(se_tensor)
+        x = layers.multiply([x, se_tensor], name=prefix + 'se_excite')
+
+    # Output phase
+    x = layers.Conv2D(block_args.output_filters, 1,
+                      padding='same',
+                      use_bias=False,
+                      kernel_initializer=CONV_KERNEL_INITIALIZER,
+                      name=prefix + 'project_conv')(x)
+    # x = BatchNormalization(freeze=freeze_bn, axis=bn_axis, name=prefix + 'project_bn')(x)
+    x = layers.BatchNormalization(axis=bn_axis, name=prefix + 'project_bn')(x)
+    if block_args.id_skip and all(
+            s == 1 for s in block_args.strides
+    ) and block_args.input_filters == block_args.output_filters:
+        if drop_rate and (drop_rate > 0):
+            x = Dropout(drop_rate,
+                        noise_shape=(None, 1, 1, 1),
+                        name=prefix + 'drop')(x)
+        x = layers.add([x, inputs], name=prefix + 'add')
+
+    return x
+
+
+def EfficientNet(width_coefficient,
+                 depth_coefficient,
+                 default_resolution,
+                 dropout_rate=0.2,
+                 drop_connect_rate=0.2,
+                 depth_divisor=8,
+                 blocks_args=DEFAULT_BLOCKS_ARGS,
+                 model_name='efficientnet',
+                 include_top=True,
+                 weights='imagenet',
+                 input_tensor=None,
+                 input_shape=None,
+                 pooling=None,
+                 classes=1000,
+                 freeze_bn=False,
+                 **kwargs):
+    """Instantiates the EfficientNet architecture using given scaling coefficients.
+    Optionally loads weights pre-trained on ImageNet.
+    Note that the data format convention used by the model is
+    the one specified in your Keras config at `~/.keras/keras.json`.
+    # Arguments
+        width_coefficient: float, scaling coefficient for network width.
+        depth_coefficient: float, scaling coefficient for network depth.
+        default_resolution: int, default input image size.
+        dropout_rate: float, dropout rate before final classifier layer.
+        drop_connect_rate: float, dropout rate at skip connections.
+        depth_divisor: int, all filter counts are rounded to a multiple of this value.
+        blocks_args: A list of BlockArgs to construct block modules.
+        model_name: string, model name.
+        include_top: whether to include the fully-connected
+            layer at the top of the network.
+        weights: one of `None` (random initialization),
+            'imagenet' (pre-training on ImageNet),
+            or the path to the weights file to be loaded.
+        input_tensor: optional Keras tensor
+            (i.e. output of `layers.Input()`)
+            to use as image input for the model.
+        input_shape: optional shape tuple, only to be specified
+            if `include_top` is False.
+            It should have exactly 3 input channels.
+        pooling: optional pooling mode for feature extraction
+            when `include_top` is `False`.
+            - `None` means that the output of the model will be
+                the 4D tensor output of the
+                last convolutional layer.
+            - `avg` means that global average pooling
+                will be applied to the output of the
+                last convolutional layer, and thus
+                the output of the model will be a 2D tensor.
+            - `max` means that global max pooling will
+                be applied.
+        classes: optional number of classes to classify images
+            into, only to be specified if `include_top` is True, and
+            if no `weights` argument is specified.
+        freeze_bn: whether batch-normalization layers should be frozen;
+            kept for API compatibility, but currently without effect because
+            the freezing `BatchNormalization` wrapper is commented out in this port.
+    # Returns
+        The list of backbone feature maps collected at each resolution
+        reduction (this port returns detector features rather than a full
+        classification model).
+    # Raises
+        ValueError: in case of invalid argument for `weights`,
+            or invalid input shape.
+ """ + global backend, layers, models, keras_utils + backend, layers, models, keras_utils = get_submodules_from_kwargs(kwargs) + features = [] + if not (weights in {'imagenet', None} or os.path.exists(weights)): + raise ValueError('The `weights` argument should be either ' + '`None` (random initialization), `imagenet` ' + '(pre-training on ImageNet), ' + 'or the path to the weights file to be loaded.') + + if weights == 'imagenet' and include_top and classes != 1000: + raise ValueError('If using `weights` as `"imagenet"` with `include_top`' + ' as true, `classes` should be 1000') + + # Determine proper input shape + input_shape = _obtain_input_shape(input_shape, + default_size=default_resolution, + min_size=32, + data_format=backend.image_data_format(), + require_flatten=include_top, + weights=weights) + + if input_tensor is None: + img_input = layers.Input(shape=input_shape) + else: + if backend.backend() == 'tensorflow': + from tensorflow.python.keras.backend import is_keras_tensor + else: + is_keras_tensor = backend.is_keras_tensor + if not is_keras_tensor(input_tensor): + img_input = layers.Input(tensor=input_tensor, shape=input_shape) + else: + img_input = input_tensor + + bn_axis = 3 if backend.image_data_format() == 'channels_last' else 1 + activation = get_swish(**kwargs) + + # Build stem + x = img_input + x = layers.Conv2D(round_filters(32, width_coefficient, depth_divisor), 3, + strides=(2, 2), + padding='same', + use_bias=False, + kernel_initializer=CONV_KERNEL_INITIALIZER, + name='stem_conv')(x) + # x = BatchNormalization(freeze=freeze_bn, axis=bn_axis, name='stem_bn')(x) + x = layers.BatchNormalization(axis=bn_axis, name='stem_bn')(x) + x = layers.Activation(activation, name='stem_activation')(x) + # Build blocks + num_blocks_total = sum(block_args.num_repeat for block_args in blocks_args) + block_num = 0 + for idx, block_args in enumerate(blocks_args): + assert block_args.num_repeat > 0 + # Update block input and output filters based on depth multiplier. + block_args = block_args._replace( + input_filters=round_filters(block_args.input_filters, + width_coefficient, depth_divisor), + output_filters=round_filters(block_args.output_filters, + width_coefficient, depth_divisor), + num_repeat=round_repeats(block_args.num_repeat, depth_coefficient)) + + # The first block needs to take care of stride and filter size increase. 
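+        # drop_connect rate grows linearly with the block index, e.g. with
+        # drop_connect_rate=0.2 and 16 blocks in total, the 8th block is
+        # built with drop_rate = 0.2 * 8 / 16 = 0.1 (illustrative numbers).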
+ drop_rate = drop_connect_rate * float(block_num) / num_blocks_total + x = mb_conv_block(x, block_args, + activation=activation, + drop_rate=drop_rate, + prefix='block{}a_'.format(idx + 1), + freeze_bn=freeze_bn + ) + block_num += 1 + if block_args.num_repeat > 1: + # pylint: disable=protected-access + block_args = block_args._replace( + input_filters=block_args.output_filters, strides=[1, 1]) + # pylint: enable=protected-access + for bidx in xrange(block_args.num_repeat - 1): + drop_rate = drop_connect_rate * float(block_num) / num_blocks_total + block_prefix = 'block{}{}_'.format( + idx + 1, + string.ascii_lowercase[bidx + 1] + ) + x = mb_conv_block(x, block_args, + activation=activation, + drop_rate=drop_rate, + prefix=block_prefix, + freeze_bn=freeze_bn + ) + block_num += 1 + if idx < len(blocks_args) - 1 and blocks_args[idx + 1].strides[0] == 2: + features.append(x) + elif idx == len(blocks_args) - 1: + features.append(x) + return features + + +def EfficientNetB0(include_top=True, + weights='imagenet', + input_tensor=None, + input_shape=None, + pooling=None, + classes=1000, + **kwargs): + return EfficientNet(1.0, 1.0, 224, 0.2, + model_name='efficientnet-b0', + include_top=include_top, weights=weights, + input_tensor=input_tensor, input_shape=input_shape, + pooling=pooling, classes=classes, + **kwargs) + + +def EfficientNetB1(include_top=True, + weights='imagenet', + input_tensor=None, + input_shape=None, + pooling=None, + classes=1000, + **kwargs): + return EfficientNet(1.0, 1.1, 240, 0.2, + model_name='efficientnet-b1', + include_top=include_top, weights=weights, + input_tensor=input_tensor, input_shape=input_shape, + pooling=pooling, classes=classes, + **kwargs) + + +def EfficientNetB2(include_top=True, + weights='imagenet', + input_tensor=None, + input_shape=None, + pooling=None, + classes=1000, + **kwargs): + return EfficientNet(1.1, 1.2, 260, 0.3, + model_name='efficientnet-b2', + include_top=include_top, weights=weights, + input_tensor=input_tensor, input_shape=input_shape, + pooling=pooling, classes=classes, + **kwargs) + + +def EfficientNetB3(include_top=True, + weights='imagenet', + input_tensor=None, + input_shape=None, + pooling=None, + classes=1000, + **kwargs): + return EfficientNet(1.2, 1.4, 300, 0.3, + model_name='efficientnet-b3', + include_top=include_top, weights=weights, + input_tensor=input_tensor, input_shape=input_shape, + pooling=pooling, classes=classes, + **kwargs) + + +def EfficientNetB4(include_top=True, + weights='imagenet', + input_tensor=None, + input_shape=None, + pooling=None, + classes=1000, + **kwargs): + return EfficientNet(1.4, 1.8, 380, 0.4, + model_name='efficientnet-b4', + include_top=include_top, weights=weights, + input_tensor=input_tensor, input_shape=input_shape, + pooling=pooling, classes=classes, + **kwargs) + + +def EfficientNetB5(include_top=True, + weights='imagenet', + input_tensor=None, + input_shape=None, + pooling=None, + classes=1000, + **kwargs): + return EfficientNet(1.6, 2.2, 456, 0.4, + model_name='efficientnet-b5', + include_top=include_top, weights=weights, + input_tensor=input_tensor, input_shape=input_shape, + pooling=pooling, classes=classes, + **kwargs) + + +def EfficientNetB6(include_top=True, + weights='imagenet', + input_tensor=None, + input_shape=None, + pooling=None, + classes=1000, + **kwargs): + return EfficientNet(1.8, 2.6, 528, 0.5, + model_name='efficientnet-b6', + include_top=include_top, weights=weights, + input_tensor=input_tensor, input_shape=input_shape, + pooling=pooling, classes=classes, + **kwargs) + + 
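+# Minimal usage sketch (illustrative only, not part of the original
+# implementation; assumes a standalone `keras` install providing the
+# submodules expected by get_submodules_from_kwargs):
+#
+#   import keras
+#   features = EfficientNetB0(include_top=False, weights=None,
+#                             input_shape=(512, 512, 3),
+#                             backend=keras.backend, layers=keras.layers,
+#                             models=keras.models, utils=keras.utils)
+#   # `features` is the list of backbone feature maps collected at each
+#   # resolution reduction, ready to be consumed by the BiFPN.
+
+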
+def EfficientNetB7(include_top=True, + weights='imagenet', + input_tensor=None, + input_shape=None, + pooling=None, + classes=1000, + **kwargs): + return EfficientNet(2.0, 3.1, 600, 0.5, + model_name='efficientnet-b7', + include_top=include_top, weights=weights, + input_tensor=input_tensor, input_shape=input_shape, + pooling=pooling, classes=classes, + **kwargs) + + +setattr(EfficientNetB0, '__doc__', EfficientNet.__doc__) +setattr(EfficientNetB1, '__doc__', EfficientNet.__doc__) +setattr(EfficientNetB2, '__doc__', EfficientNet.__doc__) +setattr(EfficientNetB3, '__doc__', EfficientNet.__doc__) +setattr(EfficientNetB4, '__doc__', EfficientNet.__doc__) +setattr(EfficientNetB5, '__doc__', EfficientNet.__doc__) +setattr(EfficientNetB6, '__doc__', EfficientNet.__doc__) +setattr(EfficientNetB7, '__doc__', EfficientNet.__doc__) + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/eval/__init__.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/eval/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0f45a2c79aa9e815ecbb58bc3e5e8cc1ef8f1dc4 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/eval/__init__.py @@ -0,0 +1,27 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. \ No newline at end of file diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/eval/coco.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/eval/coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0a9113e38a83c27594b374f7ba832b4f821cee60 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/eval/coco.py @@ -0,0 +1,204 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from npu_bridge.npu_init import * + +# import keras +from tensorflow import keras +import tensorflow as tf + +from pycocotools.cocoeval import COCOeval +import numpy as np +import json +from tqdm import trange +import cv2 + +from generators.coco import CocoGenerator + + +def evaluate(generator, model, threshold=0.01): + """ + Use the pycocotools to evaluate a COCO model on a dataset. + + Args + generator: The generator for generating the evaluation data. + model: The model to evaluate. + threshold: The score threshold to use. + """ + # start collecting results + results = [] + image_ids = [] + for index in trange(generator.size(), desc='COCO evaluation: '): + image = generator.load_image(index) + src_image = image.copy() + h, w = image.shape[:2] + + image, scale = generator.preprocess_image(image) + + # run network + boxes, scores, labels = model.predict_on_batch([np.expand_dims(image, axis=0)]) + boxes /= scale + boxes[:, :, 0] = np.clip(boxes[:, :, 0], 0, w - 1) + boxes[:, :, 1] = np.clip(boxes[:, :, 1], 0, h - 1) + boxes[:, :, 2] = np.clip(boxes[:, :, 2], 0, w - 1) + boxes[:, :, 3] = np.clip(boxes[:, :, 3], 0, h - 1) + + # change to (x, y, w, h) (MS COCO standard) + boxes[:, :, 2] -= boxes[:, :, 0] + boxes[:, :, 3] -= boxes[:, :, 1] + + # select indices which have a score above the threshold + indices = np.where(scores[0, :] > threshold)[0] + boxes = boxes[0, indices] + scores = scores[0, indices] + class_ids = labels[0, indices] + + # compute predicted labels and scores + for box, score, class_id in zip(boxes, scores, class_ids): + # append detection for each positively labeled class + image_result = { + 'image_id': generator.image_ids[index], + 'category_id': int(class_id) + 1, + 'score': float(score), + 'bbox': box.tolist(), + } + # append detection to results + results.append(image_result) + + # box = np.round(box).astype(np.int32) + # class_name = generator.label_to_name(generator.coco_label_to_label(class_id + 1)) + # ret, baseline = cv2.getTextSize(class_name, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1) + # cv2.rectangle(src_image, (box[0], box[1]), (box[0] + box[2], box[1] + box[3]), (0, 255, 0), 1) + # cv2.putText(src_image, class_name, (box[0], box[1] + box[3] - baseline), cv2.FONT_HERSHEY_SIMPLEX, 0.5, + # (0, 0, 0), 1) + # cv2.namedWindow('image', cv2.WINDOW_NORMAL) + # cv2.imshow('image', src_image) + # cv2.waitKey(0) + + # append image to list of processed images + image_ids.append(generator.image_ids[index]) + + if not len(results): + return + + # write output + json.dump(results, open('{}_bbox_results.json'.format(generator.set_name), 'w'), indent=4) + json.dump(image_ids, open('{}_processed_image_ids.json'.format(generator.set_name), 'w'), indent=4) + + # # load results in COCO evaluation tool + # coco_true = generator.coco + # coco_pred = coco_true.loadRes('{}_bbox_results.json'.format(generator.set_name)) + # 
+ # # run COCO evaluation + # coco_eval = COCOeval(coco_true, coco_pred, 'bbox') + # coco_eval.params.imgIds = image_ids + # coco_eval.evaluate() + # coco_eval.accumulate() + # coco_eval.summarize() + # return coco_eval.stats + + +class Evaluate(keras.callbacks.Callback): + """ Performs COCO evaluation on each epoch. + """ + + def __init__(self, generator, model, tensorboard=None, threshold=0.01): + """ Evaluate callback initializer. + + Args + generator : The generator used for creating validation data. + model: prediction model + tensorboard : If given, the results will be written to tensorboard. + threshold : The score threshold to use. + """ + self.generator = generator + self.active_model = model + self.threshold = threshold + self.tensorboard = tensorboard + + super(Evaluate, self).__init__() + + def on_epoch_end(self, epoch, logs=None): + logs = logs or {} + + coco_tag = ['AP @[ IoU=0.50:0.95 | area= all | maxDets=100 ]', + 'AP @[ IoU=0.50 | area= all | maxDets=100 ]', + 'AP @[ IoU=0.75 | area= all | maxDets=100 ]', + 'AP @[ IoU=0.50:0.95 | area= small | maxDets=100 ]', + 'AP @[ IoU=0.50:0.95 | area=medium | maxDets=100 ]', + 'AP @[ IoU=0.50:0.95 | area= large | maxDets=100 ]', + 'AR @[ IoU=0.50:0.95 | area= all | maxDets= 1 ]', + 'AR @[ IoU=0.50:0.95 | area= all | maxDets= 10 ]', + 'AR @[ IoU=0.50:0.95 | area= all | maxDets=100 ]', + 'AR @[ IoU=0.50:0.95 | area= small | maxDets=100 ]', + 'AR @[ IoU=0.50:0.95 | area=medium | maxDets=100 ]', + 'AR @[ IoU=0.50:0.95 | area= large | maxDets=100 ]'] + coco_eval_stats = evaluate(self.generator, self.active_model, self.threshold) + if coco_eval_stats is not None and self.tensorboard is not None: + if tf.version.VERSION < '2.0.0' and self.tensorboard.writer is not None: + summary = tf.Summary() + for index, result in enumerate(coco_eval_stats): + summary_value = summary.value.add() + summary_value.simple_value = result + summary_value.tag = '{}. {}'.format(index + 1, coco_tag[index]) + self.tensorboard.writer.add_summary(summary, epoch) + logs[coco_tag[index]] = result + else: + for index, result in enumerate(coco_eval_stats): + tag = '{}. {}'.format(index + 1, coco_tag[index]) + tf.summary.scalar(tag, result, epoch) + + +if __name__ == '__main__': + npu_keras_sess = set_keras_session_npu_config() + from model import efficientdet + import os + from generators.coco import CocoGenerator + + os.environ['CUDA_VISIBLE_DEVICES'] = '0' + + phi = 2 + weighted_bifpn = True + model_path = 'efficientdet-d2.h5' + common_args = { + 'batch_size': 1, + 'phi': phi, + } + + test_generator = CocoGenerator( + '/cache/MScoco', + 'test-dev2017', + shuffle_groups=False, + **common_args + ) + num_classes = test_generator.num_classes() + model, prediction_model = efficientdet(phi=phi, num_classes=num_classes, weighted_bifpn=weighted_bifpn, + score_threshold=0.01) + prediction_model.load_weights(model_path, by_name=True) + evaluate(test_generator, prediction_model, threshold=0.01) + close_session(npu_keras_sess) + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/eval/pascal.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/eval/pascal.py new file mode 100644 index 0000000000000000000000000000000000000000..767ac263ca75a2a48da0edbb1ec050e9ac4ff00f --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/eval/pascal.py @@ -0,0 +1,119 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+
+# import keras
+from tensorflow import keras
+import tensorflow as tf
+from eval.common import evaluate
+
+
+class Evaluate(keras.callbacks.Callback):
+    """
+    Evaluation callback for arbitrary datasets.
+    """
+
+    def __init__(
+            self,
+            generator,
+            model,
+            iou_threshold=0.5,
+            score_threshold=0.01,
+            max_detections=100,
+            save_path=None,
+            tensorboard=None,
+            weighted_average=False,
+            verbose=1
+    ):
+        """
+        Evaluate a given dataset using a given model at the end of every epoch during training.
+
+        Args:
+            generator: The generator that represents the dataset to evaluate.
+            model: The prediction model to evaluate.
+            iou_threshold: The IoU threshold used to decide whether a detection counts as a positive match.
+            score_threshold: The score confidence threshold to use for detections.
+            max_detections: The maximum number of detections to use per image.
+            save_path: The path to save images with visualized detections to.
+            tensorboard: Instance of keras.callbacks.TensorBoard used to log the mAP value.
+            weighted_average: Compute the mAP using the weighted average of precisions among classes.
+            verbose: The verbosity level; defaults to 1.
+ """ + self.generator = generator + self.iou_threshold = iou_threshold + self.score_threshold = score_threshold + self.max_detections = max_detections + self.save_path = save_path + self.tensorboard = tensorboard + self.weighted_average = weighted_average + self.verbose = verbose + self.active_model = model + + super(Evaluate, self).__init__() + + def on_epoch_end(self, epoch, logs=None): + logs = logs or {} + + # run evaluation + average_precisions = evaluate( + self.generator, + self.active_model, + iou_threshold=self.iou_threshold, + score_threshold=self.score_threshold, + max_detections=self.max_detections, + visualize=False + ) + + # compute per class average precision + total_instances = [] + precisions = [] + for label, (average_precision, num_annotations) in average_precisions.items(): + if self.verbose == 1: + print('{:.0f} instances of class'.format(num_annotations), + self.generator.label_to_name(label), 'with average precision: {:.4f}'.format(average_precision)) + total_instances.append(num_annotations) + precisions.append(average_precision) + if self.weighted_average: + self.mean_ap = sum([a * b for a, b in zip(total_instances, precisions)]) / sum(total_instances) + else: + self.mean_ap = sum(precisions) / sum(x > 0 for x in total_instances) + + if self.tensorboard is not None: + if tf.version.VERSION < '2.0.0' and self.tensorboard.writer is not None: + summary = tf.Summary() + summary_value = summary.value.add() + summary_value.simple_value = self.mean_ap + summary_value.tag = "mAP" + self.tensorboard.writer.add_summary(summary, epoch) + else: + tf.summary.scalar('mAP', self.mean_ap, epoch) + + logs['mAP'] = self.mean_ap + + if self.verbose == 1: + print('mAP: {:.4f}'.format(self.mean_ap)) + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/__init__.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0f45a2c79aa9e815ecbb58bc3e5e8cc1ef8f1dc4 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/__init__.py @@ -0,0 +1,27 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/coco.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..48f4d42b44a05310be292d439e72b80d1ac15c2f
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/coco.py
@@ -0,0 +1,175 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+
+from generators.common import Generator
+import os
+import numpy as np
+from pycocotools.coco import COCO
+import cv2
+
+
+class CocoGenerator(Generator):
+    """
+    Generate data from the COCO dataset.
+    See https://github.com/cocodataset/cocoapi/tree/master/PythonAPI for more information.
+    """
+
+    def __init__(self, data_dir, set_name, **kwargs):
+        """
+        Initialize a COCO data generator.
+
+        Args
+            data_dir: Path to where the COCO dataset is stored.
+            set_name: Name of the set to parse.
+        """
+        self.data_dir = data_dir
+        self.set_name = set_name
+        if set_name in ['train2017', 'val2017']:
+            self.coco = COCO(os.path.join(data_dir, 'annotations', 'instances_' + set_name + '.json'))
+        else:
+            self.coco = COCO(os.path.join(data_dir, 'annotations', 'image_info_' + set_name + '.json'))
+        self.image_ids = self.coco.getImgIds()
+
+        self.load_classes()
+
+        super(CocoGenerator, self).__init__(**kwargs)
+
+    def load_classes(self):
+        """
+        Loads the class to label mapping (and inverse) for COCO.
+        """
+        # load class names (name -> label)
+        categories = self.coco.loadCats(self.coco.getCatIds())
+        categories.sort(key=lambda x: x['id'])
+
+        self.classes = {}
+        self.coco_labels = {}
+        self.coco_labels_inverse = {}
+        for c in categories:
+            self.coco_labels[len(self.classes)] = c['id']
+            self.coco_labels_inverse[c['id']] = len(self.classes)
+            self.classes[c['name']] = len(self.classes)
+
+        # also load the reverse (label -> name)
+        self.labels = {}
+        for key, value in self.classes.items():
+            self.labels[value] = key
+
+    def size(self):
+        """ Size of the COCO dataset.
+        """
+        return len(self.image_ids)
+
+    def num_classes(self):
+        """ Number of class slots used by the network. COCO has 80 classes,
+        but its category ids run up to 90, so 90 slots are used here.
+        """
+        return 90
+
+    def has_label(self, label):
+        """ Return True if label is a known label.
+ """ + return label in self.labels + + def has_name(self, name): + """ Returns True if name is a known class. + """ + return name in self.classes + + def name_to_label(self, name): + """ Map name to label. + """ + return self.classes[name] + + def label_to_name(self, label): + """ Map label to name. + """ + return self.labels[label] + + def coco_label_to_label(self, coco_label): + """ Map COCO label to the label as used in the network. + COCO has some gaps in the order of labels. The highest label is 90, but there are 80 classes. + """ + return self.coco_labels_inverse[coco_label] + + def coco_label_to_name(self, coco_label): + """ Map COCO label to name. + """ + return self.label_to_name(self.coco_label_to_label(coco_label)) + + def label_to_coco_label(self, label): + """ Map label as used by the network to labels as used by COCO. + """ + return self.coco_labels[label] + + def image_aspect_ratio(self, image_index): + """ Compute the aspect ratio for an image with image_index. + """ + image = self.coco.loadImgs(self.image_ids[image_index])[0] + return float(image['width']) / float(image['height']) + + def load_image(self, image_index): + """ + Load an image at the image_index. + """ + # {'license': 2, 'file_name': '000000259765.jpg', 'coco_url': 'http://images.cocodataset.org/test2017/000000259765.jpg', 'height': 480, 'width': 640, 'date_captured': '2013-11-21 04:02:31', 'id': 259765} + image_info = self.coco.loadImgs(self.image_ids[image_index])[0] + path = os.path.join(self.data_dir, 'images', self.set_name, image_info['file_name']) + image = cv2.imread(path) + image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + return image + + def load_annotations(self, image_index): + """ Load annotations for an image_index. + """ + # get ground truth annotations + annotations_ids = self.coco.getAnnIds(imgIds=self.image_ids[image_index], iscrowd=False) + annotations = {'labels': np.empty((0,), dtype=np.float32), 'bboxes': np.empty((0, 4), dtype=np.float32)} + + # some images appear to miss annotations (like image with id 257034) + if len(annotations_ids) == 0: + return annotations + + # parse annotations + coco_annotations = self.coco.loadAnns(annotations_ids) + for idx, a in enumerate(coco_annotations): + # some annotations have basically no width / height, skip them + if a['bbox'][2] < 1 or a['bbox'][3] < 1: + continue + + annotations['labels'] = np.concatenate( + [annotations['labels'], [a['category_id'] - 1]], axis=0) + annotations['bboxes'] = np.concatenate([annotations['bboxes'], [[ + a['bbox'][0], + a['bbox'][1], + a['bbox'][0] + a['bbox'][2], + a['bbox'][1] + a['bbox'][3], + ]]], axis=0) + + return annotations + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/common.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/common.py new file mode 100644 index 0000000000000000000000000000000000000000..ce23e0591a21f874cc6b7bc27ff5c11b982dbdaf --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/common.py @@ -0,0 +1,514 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+import numpy as np
+import random
+import warnings
+import cv2
+from tensorflow import keras
+
+from utils.anchors import anchors_for_shape, anchor_targets_bbox, AnchorParameters
+
+
+class Generator(keras.utils.Sequence):
+    """
+    Abstract generator class.
+    """
+
+    def __init__(
+            self,
+            phi=0,
+            image_sizes=(512, 640, 768, 896, 1024, 1280, 1408),
+            misc_effect=None,
+            visual_effect=None,
+            batch_size=1,
+            group_method='random',  # one of 'none', 'random', 'ratio'
+            shuffle_groups=True,
+            detect_text=False,
+            detect_quadrangle=False,
+    ):
+        """
+        Initialize Generator object.
+
+        Args:
+            phi: Scaling coefficient; selects the input resolution from image_sizes.
+            batch_size: The size of the batches to generate.
+            group_method: Determines how images are grouped together (defaults to 'random', one of ('none', 'random', 'ratio')).
+            shuffle_groups: If True, shuffles the groups each epoch.
+            image_sizes: Tuple of square input resolutions, indexed by phi.
+        """
+        self.misc_effect = misc_effect
+        self.visual_effect = visual_effect
+        self.batch_size = int(batch_size)
+        self.group_method = group_method
+        self.shuffle_groups = shuffle_groups
+        self.detect_text = detect_text
+        self.detect_quadrangle = detect_quadrangle
+        self.image_size = image_sizes[phi]
+        self.groups = None
+        self.anchor_parameters = AnchorParameters.default if not self.detect_text else AnchorParameters(
+            ratios=(0.25, 0.5, 1., 2.),
+            sizes=(16, 32, 64, 128, 256))
+        self.anchors = anchors_for_shape((self.image_size, self.image_size), anchor_params=self.anchor_parameters)
+        self.num_anchors = self.anchor_parameters.num_anchors()
+
+        # Define groups
+        self.group_images()
+
+        # Shuffle when initializing
+        if self.shuffle_groups:
+            random.shuffle(self.groups)
+
+    def on_epoch_end(self):
+        if self.shuffle_groups:
+            random.shuffle(self.groups)
+
+    def size(self):
+        """
+        Size of the dataset.
+        """
+        raise NotImplementedError('size method not implemented')
+
+    def get_anchors(self):
+        """
+        Loads the anchors from a txt file.
+        Note: relies on self.anchors_path being provided by a subclass.
+        """
+        with open(self.anchors_path) as f:
+            anchors = f.readline()
+        anchors = [float(x) for x in anchors.split(',')]
+        # (N, 2), wh
+        return np.array(anchors).reshape(-1, 2)
+
+    def num_classes(self):
+        """
+        Number of classes in the dataset.
+        """
+        raise NotImplementedError('num_classes method not implemented')
+
+    def has_label(self, label):
+        """
+        Returns True if label is a known label.
+        """
+        raise NotImplementedError('has_label method not implemented')
+
+    def has_name(self, name):
+        """
+        Returns True if name is a known class.
+        """
+        raise NotImplementedError('has_name method not implemented')
+
+    def name_to_label(self, name):
+        """
+        Map name to label.
+        """
+        raise NotImplementedError('name_to_label method not implemented')
+
+    def label_to_name(self, label):
+        """
+        Map label to name.
+        """
+        raise NotImplementedError('label_to_name method not implemented')
+
+    def image_aspect_ratio(self, image_index):
+        """
+        Compute the aspect ratio for an image with image_index.
+        """
+        raise NotImplementedError('image_aspect_ratio method not implemented')
+
+    def load_image(self, image_index):
+        """
+        Load an image at the image_index.
+        """
+        raise NotImplementedError('load_image method not implemented')
+
+    def load_annotations(self, image_index):
+        """
+        Load annotations for an image_index.
+        """
+        raise NotImplementedError('load_annotations method not implemented')
+
+    def load_annotations_group(self, group):
+        """
+        Load annotations for all images in group.
+        """
+        annotations_group = [self.load_annotations(image_index) for image_index in group]
+        for annotations in annotations_group:
+            assert (isinstance(annotations,
+                               dict)), '\'load_annotations\' should return a list of dictionaries, received: {}'.format(
+                type(annotations))
+            assert (
+                    'labels' in annotations), '\'load_annotations\' should return a list of dictionaries that contain \'labels\' and \'bboxes\'.'
+            assert (
+                    'bboxes' in annotations), '\'load_annotations\' should return a list of dictionaries that contain \'labels\' and \'bboxes\'.'
+
+        return annotations_group
+
+    def filter_annotations(self, image_group, annotations_group, group):
+        """
+        Filter annotations by removing those that are outside of the image bounds or whose width/height is not positive.
+        """
+        # test all annotations
+        for index, (image, annotations) in enumerate(zip(image_group, annotations_group)):
+            # test x2 < x1 | y2 < y1 | x1 < 0 | y1 < 0 | x2 <= 0 | y2 <= 0 | x2 >= image.shape[1] | y2 >= image.shape[0]
+            invalid_indices = np.where(
+                (annotations['bboxes'][:, 2] <= annotations['bboxes'][:, 0]) |
+                (annotations['bboxes'][:, 3] <= annotations['bboxes'][:, 1]) |
+                (annotations['bboxes'][:, 0] < 0) |
+                (annotations['bboxes'][:, 1] < 0) |
+                (annotations['bboxes'][:, 2] <= 0) |
+                (annotations['bboxes'][:, 3] <= 0) |
+                (annotations['bboxes'][:, 2] > image.shape[1]) |
+                (annotations['bboxes'][:, 3] > image.shape[0])
+            )[0]
+
+            # delete invalid indices
+            if len(invalid_indices):
+                warnings.warn('Image with id {} (shape {}) contains the following invalid boxes: {}.'.format(
+                    group[index],
+                    image.shape,
+                    annotations['bboxes'][invalid_indices, :]
+                ))
+                for k in annotations_group[index].keys():
+                    annotations_group[index][k] = np.delete(annotations[k], invalid_indices, axis=0)
+        # if annotations['bboxes'].shape[0] == 0:
+        #     warnings.warn('Image with id {} (shape {}) contains no valid boxes before transform'.format(
+        #         group[index],
+        #         image.shape,
+        #     ))
+        return image_group, annotations_group
+
+    def clip_transformed_annotations(self, image_group, annotations_group, group):
+        """
+        Clip annotation boxes to the image bounds and drop boxes that become too small after clipping.
+        """
+        # test all annotations
+        filtered_image_group = []
+        filtered_annotations_group = []
+        for index, (image, annotations) in enumerate(zip(image_group, annotations_group)):
+            image_height = image.shape[0]
+            image_width = image.shape[1]
+            # x1
+            annotations['bboxes'][:, 0] = np.clip(annotations['bboxes'][:, 0], 0, image_width - 2)
+            # y1
+            annotations['bboxes'][:, 1] = np.clip(annotations['bboxes'][:, 1], 0, image_height - 2)
+            # x2
+            annotations['bboxes'][:, 2] = np.clip(annotations['bboxes'][:, 2], 1, image_width - 1)
+            # y2
+            annotations['bboxes'][:, 3] = np.clip(annotations['bboxes'][:, 3], 1, image_height - 1)
+            # after clipping, drop boxes whose width or height fell below 3 pixels
+            small_indices = np.where(
+                (annotations['bboxes'][:, 2] - annotations['bboxes'][:, 0] < 3) |
+                (annotations['bboxes'][:, 3] - annotations['bboxes'][:, 1] < 3)
+            )[0]
+
+            # delete invalid indices
+            if len(small_indices):
+                for k in annotations_group[index].keys():
+                    annotations_group[index][k] = np.delete(annotations[k], small_indices, axis=0)
+                # import cv2
+                # for invalid_index in small_indices:
+                #     x1, y1, x2, y2 = annotations['bboxes'][invalid_index]
+                #     label = annotations['labels'][invalid_index]
+                #     class_name = self.labels[label]
+                #     print('width: {}'.format(x2 - x1))
+                #     print('height: {}'.format(y2 - y1))
+                #     cv2.rectangle(image, (int(round(x1)), int(round(y1))), (int(round(x2)), int(round(y2))), (0, 255, 0), 2)
+                #     cv2.putText(image, class_name, (int(round(x1)), int(round(y1))), cv2.FONT_HERSHEY_SIMPLEX, 1.0, (0, 0, 255), 1)
+                #     cv2.namedWindow('image', cv2.WINDOW_NORMAL)
+                #     cv2.imshow('image', image)
+                #     cv2.waitKey(0)
+            filtered_image_group.append(image)
+            filtered_annotations_group.append(annotations_group[index])
+
+        return filtered_image_group, filtered_annotations_group
+
+    def load_image_group(self, group):
+        """
+        Load images for all images in a group.
+        """
+        return [self.load_image(image_index) for image_index in group]
+
+    def random_visual_effect_group_entry(self, image, annotations):
+        """
+        Randomly transforms image and annotation.
+        """
+        # apply visual effect
+        image = self.visual_effect(image)
+        return image, annotations
+
+    def random_visual_effect_group(self, image_group, annotations_group):
+        """
+        Randomly apply visual effect on each image.
+        """
+        assert (len(image_group) == len(annotations_group))
+
+        if self.visual_effect is None:
+            # do nothing
+            return image_group, annotations_group
+
+        for index in range(len(image_group)):
+            # apply effect on a single group entry
+            image_group[index], annotations_group[index] = self.random_visual_effect_group_entry(
+                image_group[index], annotations_group[index]
+            )
+
+        return image_group, annotations_group
+
+    def random_misc_group_entry(self, image, annotations):
+        """
+        Randomly transforms image and annotation.
+        """
+        # randomly transform both image and annotations
+        image, annotations = self.misc_effect(image, annotations)
+        return image, annotations
+
+    def random_misc_group(self, image_group, annotations_group):
+        """
+        Randomly transforms each image and its annotations.
+        """
+
+        assert (len(image_group) == len(annotations_group))
+
+        if self.misc_effect is None:
+            return image_group, annotations_group
+
+        for index in range(len(image_group)):
+            # transform a single group entry
+            image_group[index], annotations_group[index] = self.random_misc_group_entry(image_group[index],
+                                                                                        annotations_group[index])
+
+        return image_group, annotations_group
+
+    def preprocess_group_entry(self, image, annotations):
+        """
+        Preprocess image and its annotations.
+        """
+
+        # preprocess the image
+        image, scale = self.preprocess_image(image)
+
+        # apply resizing to annotations too
+        annotations['bboxes'] *= scale
+        if self.detect_quadrangle:
+            annotations['quadrangles'] *= scale
+        return image, annotations
+
+    def preprocess_group(self, image_group, annotations_group):
+        """
+        Preprocess each image and its annotations in its group.
+        """
+        assert (len(image_group) == len(annotations_group))
+
+        for index in range(len(image_group)):
+            # preprocess a single group entry
+            image_group[index], annotations_group[index] = self.preprocess_group_entry(image_group[index],
+                                                                                       annotations_group[index])
+
+        return image_group, annotations_group
+
+    def group_images(self):
+        """
+        Order the images according to self.group_method and make groups of self.batch_size.
+        """
+        # determine the order of the images
+
+        order = list(range(self.size()))
+        if self.group_method == 'random':
+            random.shuffle(order)
+        elif self.group_method == 'ratio':
+            order.sort(key=lambda x: self.image_aspect_ratio(x))
+
+        # divide into groups, one group = one batch
+        self.groups = [[order[x % len(order)] for x in range(i, i + self.batch_size)] for i in
+                       range(0, len(order), self.batch_size)]
+
+    def compute_inputs(self, image_group, annotations_group):
+        """
+        Compute inputs for the network using an image_group.
+        """
+        batch_images = np.array(image_group).astype(np.float32)
+        return [batch_images]
+
+    def compute_alphas_and_ratios(self, annotations_group):
+        for i, annotations in enumerate(annotations_group):
+            quadrangles = annotations['quadrangles']
+            alphas = np.zeros((quadrangles.shape[0], 4), dtype=np.float32)
+            xmin = np.min(quadrangles, axis=1)[:, 0]
+            ymin = np.min(quadrangles, axis=1)[:, 1]
+            xmax = np.max(quadrangles, axis=1)[:, 0]
+            ymax = np.max(quadrangles, axis=1)[:, 1]
+            # alpha1, alpha2, alpha3, alpha4
+            alphas[:, 0] = (quadrangles[:, 0, 0] - xmin) / (xmax - xmin)
+            alphas[:, 1] = (quadrangles[:, 1, 1] - ymin) / (ymax - ymin)
+            alphas[:, 2] = (xmax - quadrangles[:, 2, 0]) / (xmax - xmin)
+            alphas[:, 3] = (ymax - quadrangles[:, 3, 1]) / (ymax - ymin)
+            annotations['alphas'] = alphas
+            # ratio
+            area1 = 0.5 * alphas[:, 0] * (1 - alphas[:, 3])
+            area2 = 0.5 * alphas[:, 1] * (1 - alphas[:, 0])
+            area3 = 0.5 * alphas[:, 2] * (1 - alphas[:, 1])
+            area4 = 0.5 * alphas[:, 3] * (1 - alphas[:, 2])
+            annotations['ratios'] = 1 - area1 - area2 - area3 - area4
+
+    def compute_targets(self, image_group, annotations_group):
+        """
+        Compute target outputs for the network using images and their annotations.
+        """
+
+        batches_targets = anchor_targets_bbox(
+            self.anchors,
+            image_group,
+            annotations_group,
+            num_classes=self.num_classes(),
+            detect_quadrangle=self.detect_quadrangle
+        )
+        return list(batches_targets)
+
+    def compute_inputs_targets(self, group, debug=False):
+        """
+        Compute inputs and target outputs for the network.
+ """ + + # load images and annotations + # list + image_group = self.load_image_group(group) + annotations_group = self.load_annotations_group(group) + + # check validity of annotations + image_group, annotations_group = self.filter_annotations(image_group, annotations_group, group) + + # randomly apply visual effect + image_group, annotations_group = self.random_visual_effect_group(image_group, annotations_group) + + # randomly transform data + # image_group, annotations_group = self.random_transform_group(image_group, annotations_group) + + # randomly apply misc effect + image_group, annotations_group = self.random_misc_group(image_group, annotations_group) + + # perform preprocessing steps + image_group, annotations_group = self.preprocess_group(image_group, annotations_group) + + # check validity of annotations + image_group, annotations_group = self.clip_transformed_annotations(image_group, annotations_group, group) + + assert len(image_group) != 0 + assert len(image_group) == len(annotations_group) + + if self.detect_quadrangle: + # compute alphas and ratio for targets + self.compute_alphas_and_ratios(annotations_group) + + # compute network inputs + inputs = self.compute_inputs(image_group, annotations_group) + + # compute network targets + targets = self.compute_targets(image_group, annotations_group) + + if debug: + return inputs, targets, annotations_group + + return inputs, targets + + def __len__(self): + """ + Number of batches for generator. + """ + + return len(self.groups) + + def __getitem__(self, index): + """ + Keras sequence method for generating batches. + """ + group = self.groups[index] + inputs, targets = self.compute_inputs_targets(group) + return inputs, targets + + def preprocess_image(self, image): + # image, RGB + image_height, image_width = image.shape[:2] + if image_height > image_width: + scale = self.image_size / image_height + resized_height = self.image_size + resized_width = int(image_width * scale) + else: + scale = self.image_size / image_width + resized_height = int(image_height * scale) + resized_width = self.image_size + + image = cv2.resize(image, (resized_width, resized_height)) + image = image.astype(np.float32) + image /= 255. + mean = [0.485, 0.456, 0.406] + std = [0.229, 0.224, 0.225] + image -= mean + image /= std + pad_h = self.image_size - resized_height + pad_w = self.image_size - resized_width + image = np.pad(image, [(0, pad_h), (0, pad_w), (0, 0)], mode='constant') + return image, scale + + def get_augmented_data(self, group): + """ + Compute inputs and target outputs for the network. 
+ """ + + # load images and annotations + # list + image_group = self.load_image_group(group) + annotations_group = self.load_annotations_group(group) + + # check validity of annotations + image_group, annotations_group = self.filter_annotations(image_group, annotations_group, group) + + # randomly apply visual effect + # image_group, annotations_group = self.random_visual_effect_group(image_group, annotations_group) + + # randomly transform data + # image_group, annotations_group = self.random_transform_group(image_group, annotations_group) + + # randomly apply misc effect + # image_group, annotations_group = self.random_misc_group(image_group, annotations_group) + + # perform preprocessing steps + image_group, annotations_group = self.preprocess_group(image_group, annotations_group) + + # check validity of annotations + image_group, annotations_group = self.clip_transformed_annotations(image_group, annotations_group, group) + + assert len(image_group) != 0 + assert len(image_group) == len(annotations_group) + + # compute alphas for targets + self.compute_alphas_and_ratios(annotations_group) + + return image_group, annotations_group + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/csv_.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/csv_.py new file mode 100644 index 0000000000000000000000000000000000000000..65eb2cd67e668a7d43d2e87901c6685607d1bff9 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/csv_.py @@ -0,0 +1,369 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from npu_bridge.npu_init import * + +from generators.common import Generator +import cv2 +import numpy as np +from PIL import Image +from six import raise_from +import csv +import sys +import os.path as osp +from collections import OrderedDict + + +def _parse(value, function, fmt): + """ + Parse a string into a value, and format a nice ValueError if it fails. + + Returns `function(value)`. + Any `ValueError` raised is catched and a new `ValueError` is raised + with message `fmt.format(e)`, where `e` is the caught `ValueError`. 
+ """ + try: + return function(value) + except ValueError as e: + raise_from(ValueError(fmt.format(e)), None) + + +def _read_classes(csv_reader): + """ + Parse the classes file given by csv_reader. + """ + result = OrderedDict() + for line, row in enumerate(csv_reader): + line += 1 + + try: + class_name, class_id = row + except ValueError: + raise_from(ValueError('line {}: format should be \'class_name,class_id\''.format(line)), None) + class_id = _parse(class_id, int, 'line {}: malformed class ID: {{}}'.format(line)) + + if class_name in result: + raise ValueError('line {}: duplicate class name: \'{}\''.format(line, class_name)) + result[class_name] = class_id + return result + + +def _read_quadrangle_annotations(csv_reader, classes, detect_text=False): + """ + Read annotations from the csv_reader. + Args: + csv_reader: csv reader of args.annotations_path + classes: list[str] all the class names read from args.classes_path + + Returns: + result: dict, dict is like {image_path: [{'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2, + 'x3': x3, 'y3': y3, 'x4': x4, 'y4': y4, 'class': class_name}]} + + """ + result = OrderedDict() + for line, row in enumerate(csv_reader, 1): + try: + img_file, x1, y1, x2, y2, x3, y3, x4, y4, class_name = row[:10] + if img_file not in result: + result[img_file] = [] + + # If a row contains only an image path, it's an image without annotations. + if (x1, y1, x2, y2, x3, y3, x4, y4, class_name) == ('', '', '', '', '', '', '', '', ''): + continue + + x1 = _parse(x1, int, 'line {}: malformed x1: {{}}'.format(line)) + y1 = _parse(y1, int, 'line {}: malformed y1: {{}}'.format(line)) + x2 = _parse(x2, int, 'line {}: malformed x2: {{}}'.format(line)) + y2 = _parse(y2, int, 'line {}: malformed y2: {{}}'.format(line)) + x3 = _parse(x3, int, 'line {}: malformed x3: {{}}'.format(line)) + y3 = _parse(y3, int, 'line {}: malformed y3: {{}}'.format(line)) + x4 = _parse(x4, int, 'line {}: malformed x4: {{}}'.format(line)) + y4 = _parse(y4, int, 'line {}: malformed y4: {{}}'.format(line)) + + # check if the current class name is correctly present + if detect_text: + if class_name == '###': + continue + else: + class_name = 'text' + + if class_name not in classes: + raise ValueError(f'line {line}: unknown class name: \'{class_name}\' (classes: {classes})') + + result[img_file].append({'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2, + 'x3': x3, 'y3': y3, 'x4': x4, 'y4': y4, 'class': class_name}) + except ValueError: + raise_from(ValueError( + f'line {line}: format should be \'img_file,x1,y1,x2,y2,x3,y3,x4,y4,class_name\' or \'img_file,,,,,\''), + None) + + return result + + +def _read_annotations(csv_reader, classes): + """ + Read annotations from the csv_reader. + Args: + csv_reader: csv reader of args.annotations_path + classes: list[str] all the class names read from args.classes_path + + Returns: + result: dict, dict is like {image_path: [{'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2, 'class': class_name}]} + + """ + result = OrderedDict() + for line, row in enumerate(csv_reader, 1): + try: + img_file, x1, y1, x2, y2, class_name = row[:10] + if img_file not in result: + result[img_file] = [] + + # If a row contains only an image path, it's an image without annotations. 
+ if (x1, y1, x2, y2, class_name) == ('', '', '', '', ''): + continue + + x1 = _parse(x1, int, 'line {}: malformed x1: {{}}'.format(line)) + y1 = _parse(y1, int, 'line {}: malformed y1: {{}}'.format(line)) + x2 = _parse(x2, int, 'line {}: malformed x2: {{}}'.format(line)) + y2 = _parse(y2, int, 'line {}: malformed y2: {{}}'.format(line)) + + if class_name not in classes: + raise ValueError(f'line {line}: unknown class name: \'{class_name}\' (classes: {classes})') + + result[img_file].append({'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2, 'class': class_name}) + except ValueError: + raise_from(ValueError( + f'line {line}: format should be \'img_file,x1,y1,x2,y2,class_name\' or \'img_file,,,,,\''), + None) + + return result + + +def _open_for_csv(path): + """ + Open a file with flags suitable for csv.reader. + + This is different for python2 it means with mode 'rb', for python3 this means 'r' with "universal newlines". + """ + if sys.version_info[0] < 3: + return open(path, 'rb') + else: + return open(path, 'r', newline='') + + +class CSVGenerator(Generator): + """ + Generate data for a custom CSV dataset. + + See https://github.com/fizyr/keras-retinanet#csv-datasets for more information. + """ + + def __init__( + self, + csv_data_file, + csv_class_file, + base_dir=None, + detect_quadrangle=False, + detect_text=False, + **kwargs + ): + """ + Initialize a CSV data generator. + + Args + csv_data_file: Path to the CSV annotations file. + csv_class_file: Path to the CSV classes file. + detect_text: if do text detection + base_dir: Directory w.r.t. where the files are to be searched (defaults to the directory containing the csv_data_file). + """ + self.image_names = [] + self.image_data = {} + self.base_dir = base_dir + self.detect_quadrangle = detect_quadrangle + self.detect_text = detect_text + + # Take base_dir from annotations file if not explicitly specified. + if self.base_dir is None: + if osp.exists(csv_data_file): + self.base_dir = '' + else: + self.base_dir = osp.dirname(csv_data_file) + + # parse the provided class file + try: + with _open_for_csv(csv_class_file) as file: + # class_name --> class_id + self.classes = _read_classes(csv.reader(file, delimiter=',')) + except ValueError as e: + raise_from(ValueError('invalid CSV class file: {}: {}'.format(csv_class_file, e)), None) + + self.labels = {} + # class_id --> class_name + for key, value in self.classes.items(): + self.labels[value] = key + + # csv with img_path, x1, y1, x2, y2, x3, y3, x4, y4, class_name + try: + with _open_for_csv(csv_data_file) as file: + # {'img_path1':[{'x1':xx,'y1':xx,'x2':xx,'y2':xx,'x3':xx,'y3':xx,'x4':xx,'y4':xx, 'class':xx}...],...} + if self.detect_quadrangle: + self.image_data = _read_quadrangle_annotations(csv.reader(file, delimiter=','), self.classes, + self.detect_text) + else: + self.image_data = _read_annotations(csv.reader(file, delimiter=','), self.classes) + except ValueError as e: + raise_from(ValueError('invalid CSV annotations file: {}: {}'.format(csv_data_file, e)), None) + self.image_names = list(self.image_data.keys()) + + super(CSVGenerator, self).__init__(detect_text=detect_text, detect_quadrangle=detect_quadrangle, **kwargs) + + def size(self): + """ + Size of the dataset. + """ + return len(self.image_names) + + def num_classes(self): + """ + Number of classes in the dataset. + """ + return max(self.classes.values()) + 1 + + def has_label(self, label): + """ + Return True if label is a known label. 
+ """ + return label in self.labels + + def has_name(self, name): + """ + Returns True if name is a known class. + """ + return name in self.classes + + def name_to_label(self, name): + """ + Map name to label. + """ + return self.classes[name] + + def label_to_name(self, label): + """ + Map label to name. + """ + return self.labels[label] + + def image_path(self, image_index): + """ + Returns the image path for image_index. + """ + return osp.join(self.base_dir, self.image_names[image_index]) + + def image_aspect_ratio(self, image_index): + """ + Compute the aspect ratio for an image with image_index. + """ + # PIL is fast for metadata + image = Image.open(self.image_path(image_index)) + return float(image.width) / float(image.height) + + def load_image(self, image_index): + """ + Load an image at the image_index. + """ + image = cv2.imread(self.image_path(image_index)) + image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + return image + + def load_annotations(self, image_index): + """ + Load annotations for an image_index. + """ + path = self.image_names[image_index] + annotations = {'labels': np.empty((0,), dtype=np.int32), + 'bboxes': np.empty((0, 4), dtype=np.float32), + 'quadrangles': np.empty((0, 4, 2), dtype=np.float32), + } + + for idx, annot in enumerate(self.image_data[path]): + annotations['labels'] = np.concatenate((annotations['labels'], [self.name_to_label(annot['class'])])) + if self.detect_quadrangle: + quadrangle = np.array([[float(annot['x1']), float(annot['y1'])], + [float(annot['x2']), float(annot['y2'])], + [float(annot['x3']), float(annot['y3'])], + [float(annot['x4']), float(annot['y4'])]]) + ordered_quadrangle = self.reorder_vertexes(quadrangle) + annotations['quadrangles'] = np.concatenate((annotations['quadrangles'], ordered_quadrangle[None])) + annotations['bboxes'] = np.concatenate((annotations['bboxes'], [[ + float(min(annot['x1'], annot['x2'], annot['x3'], annot['x4'])), + float(min(annot['y1'], annot['y2'], annot['y3'], annot['y4'])), + float(max(annot['x1'], annot['x2'], annot['x3'], annot['x4'])), + float(max(annot['y1'], annot['y2'], annot['y3'], annot['y4'])), + ]])) + else: + annotations['bboxes'] = np.concatenate((annotations['bboxes'], [[ + float(annot['x1']), + float(annot['y1']), + float(annot['x2']), + float(annot['y2']), + ]])) + return annotations + + def reorder_vertexes(self, vertexes): + """ + reorder vertexes as the paper shows, (top, right, bottom, left) + Args: + vertexes: + + Returns: + + """ + assert vertexes.shape == (4, 2) + xmin, ymin = np.min(vertexes, axis=0) + xmax, ymax = np.max(vertexes, axis=0) + + # determine the first point with the smallest y, + # if two vertexes has same y, choose that with smaller x, + ordered_idxes = np.argsort(vertexes, axis=0) + ymin1_idx = ordered_idxes[0, 1] + ymin2_idx = ordered_idxes[1, 1] + if vertexes[ymin1_idx, 1] == vertexes[ymin2_idx, 1]: + if vertexes[ymin1_idx, 0] <= vertexes[ymin2_idx, 0]: + first_vertex_idx = ymin1_idx + else: + first_vertex_idx = ymin2_idx + else: + first_vertex_idx = ymin1_idx + ordered_idxes = [(first_vertex_idx + i) % 4 for i in range(4)] + ordered_vertexes = vertexes[ordered_idxes] + # drag the point to the corresponding edge + ordered_vertexes[0, 1] = ymin + ordered_vertexes[1, 0] = xmax + ordered_vertexes[2, 1] = ymax + ordered_vertexes[3, 0] = xmin + return ordered_vertexes + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/pascal.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/pascal.py new file mode 100644 index 
0000000000000000000000000000000000000000..ab94140b9c968ae8c85314b80bea7f194477aba6 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/generators/pascal.py @@ -0,0 +1,297 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from npu_bridge.npu_init import * + +from generators.common import Generator +import os +os.system("pwd") +os.system("ls") +import numpy as np +from six import raise_from +import cv2 +import xml.etree.ElementTree as ET + +voc_classes = { + 'aeroplane': 0, + 'bicycle': 1, + 'bird': 2, + 'boat': 3, + 'bottle': 4, + 'bus': 5, + 'car': 6, + 'cat': 7, + 'chair': 8, + 'cow': 9, + 'diningtable': 10, + 'dog': 11, + 'horse': 12, + 'motorbike': 13, + 'person': 14, + 'pottedplant': 15, + 'sheep': 16, + 'sofa': 17, + 'train': 18, + 'tvmonitor': 19 +} + + +def _findNode(parent, name, debug_name=None, parse=None): + if debug_name is None: + debug_name = name + + result = parent.find(name) + if result is None: + raise ValueError('missing element \'{}\''.format(debug_name)) + if parse is not None: + try: + return parse(result.text) + except ValueError as e: + raise_from(ValueError('illegal value for \'{}\': {}'.format(debug_name, e)), None) + return result + + +class PascalVocGenerator(Generator): + """ + Generate data for a Pascal VOC dataset. + + See http://host.robots.ox.ac.uk/pascal/VOC/ for more information. + """ + + def __init__( + self, + data_dir, + set_name, + classes=voc_classes, + image_extension='.jpg', + skip_truncated=False, + skip_difficult=False, + **kwargs + ): + """ + Initialize a Pascal VOC data generator. 
+
+        Args:
+            data_dir: the path of directory which contains ImageSets directory
+            set_name: test|trainval|train|val
+            classes: class names to id mapping
+            image_extension: image filename ext
+            skip_truncated: if True, drop objects marked <truncated> in the XML
+            skip_difficult: if True, drop objects marked <difficult> in the XML
+            **kwargs:
+        """
+        self.data_dir = data_dir
+        self.set_name = set_name
+        self.classes = classes
+
+        self.image_names = [l.strip().split(None, 1)[0] for l in
+                            open(os.path.join(data_dir, 'ImageSets', 'Main', set_name + '.txt')).readlines()]
+        self.image_extension = image_extension
+        self.skip_truncated = skip_truncated
+        self.skip_difficult = skip_difficult
+        # class ids to names mapping
+        self.labels = {}
+        for key, value in self.classes.items():
+            self.labels[value] = key
+
+        super(PascalVocGenerator, self).__init__(**kwargs)
+
+    def size(self):
+        """
+        Size of the dataset.
+        """
+        return len(self.image_names)
+
+    def num_classes(self):
+        """
+        Number of classes in the dataset.
+        """
+        return len(self.classes)
+
+    def has_label(self, label):
+        """
+        Return True if label is a known label.
+        """
+        return label in self.labels
+
+    def has_name(self, name):
+        """
+        Returns True if name is a known class.
+        """
+        return name in self.classes
+
+    def name_to_label(self, name):
+        """
+        Map name to label.
+        """
+        return self.classes[name]
+
+    def label_to_name(self, label):
+        """
+        Map label to name.
+        """
+        return self.labels[label]
+
+    def image_aspect_ratio(self, image_index):
+        """
+        Compute the aspect ratio for an image with image_index.
+        """
+        path = os.path.join(self.data_dir, 'JPEGImages', self.image_names[image_index] + self.image_extension)
+        image = cv2.imread(path)
+        h, w = image.shape[:2]
+        return float(w) / float(h)
+
+    def load_image(self, image_index):
+        """
+        Load an image at the image_index.
+        """
+        path = os.path.join(self.data_dir, 'JPEGImages', self.image_names[image_index] + self.image_extension)
+        image = cv2.imread(path)
+        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
+        return image
+
+    def __parse_annotation(self, element):
+        """
+        Parse an annotation given an XML element.
+        """
+        truncated = _findNode(element, 'truncated', parse=int)
+        difficult = _findNode(element, 'difficult', parse=int)
+
+        class_name = _findNode(element, 'name').text
+        if class_name not in self.classes:
+            raise ValueError('class name \'{}\' not found in classes: {}'.format(class_name, list(self.classes.keys())))
+
+        box = np.zeros((4,))
+        label = self.name_to_label(class_name)
+
+        bndbox = _findNode(element, 'bndbox')
+        box[0] = _findNode(bndbox, 'xmin', 'bndbox.xmin', parse=float) - 1
+        box[1] = _findNode(bndbox, 'ymin', 'bndbox.ymin', parse=float) - 1
+        box[2] = _findNode(bndbox, 'xmax', 'bndbox.xmax', parse=float) - 1
+        box[3] = _findNode(bndbox, 'ymax', 'bndbox.ymax', parse=float) - 1
+
+        return truncated, difficult, box, label
+
+    def __parse_annotations(self, xml_root):
+        """
+        Parse all annotations under the xml_root.
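+
+        Each <object> element is expected to look like this (values illustrative):
+
+            <object>
+                <name>dog</name>
+                <truncated>0</truncated>
+                <difficult>0</difficult>
+                <bndbox>
+                    <xmin>48</xmin><ymin>240</ymin><xmax>195</xmax><ymax>371</ymax>
+                </bndbox>
+            </object>
+
+        and contributes one row to annotations['bboxes'] (VOC's 1-based pixel
+        coordinates are shifted to 0-based by __parse_annotation) and one entry
+        in annotations['labels'].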
+ """ + annotations = {'labels': np.empty((0,), dtype=np.int32), + 'bboxes': np.empty((0, 4))} + for i, element in enumerate(xml_root.iter('object')): + try: + truncated, difficult, box, label = self.__parse_annotation(element) + except ValueError as e: + raise_from(ValueError('could not parse object #{}: {}'.format(i, e)), None) + + if truncated and self.skip_truncated: + continue + if difficult and self.skip_difficult: + continue + + annotations['bboxes'] = np.concatenate([annotations['bboxes'], [box]]) + annotations['labels'] = np.concatenate([annotations['labels'], [label]]) + + return annotations + + def load_annotations(self, image_index): + """ + Load annotations for an image_index. + """ + filename = self.image_names[image_index] + '.xml' + try: + tree = ET.parse(os.path.join(self.data_dir, 'Annotations', filename)) + return self.__parse_annotations(tree.getroot()) + except ET.ParseError as e: + raise_from(ValueError('invalid annotations file: {}: {}'.format(filename, e)), None) + except ValueError as e: + raise_from(ValueError('invalid annotations file: {}: {}'.format(filename, e)), None) + + +if __name__ == '__main__': + train_generator = PascalVocGenerator( + '/cache/VOCdevkit/VOC2012', + 'train', + phi=0, + skip_difficult=True, + batch_size=1, + misc_effect=None, + visual_effect=None, + ) + mean = [0.485, 0.456, 0.406] + std = [0.229, 0.224, 0.225] + anchors = train_generator.anchors + for batch_inputs, batch_targets in train_generator: + image = batch_inputs[0][0] + image[..., 0] *= std[0] + image[..., 1] *= std[1] + image[..., 2] *= std[2] + image[..., 0] += mean[0] + image[..., 1] += mean[1] + image[..., 2] += mean[2] + image *= 255. + + regression = batch_targets[0][0] + valid_ids = np.where(regression[:, -1] == 1)[0] + boxes = anchors[valid_ids] + deltas = regression[valid_ids] + class_ids = np.argmax(batch_targets[1][0][valid_ids], axis=-1) + mean_ = [0, 0, 0, 0] + std_ = [0.2, 0.2, 0.2, 0.2] + + width = boxes[:, 2] - boxes[:, 0] + height = boxes[:, 3] - boxes[:, 1] + + x1 = boxes[:, 0] + (deltas[:, 0] * std_[0] + mean_[0]) * width + y1 = boxes[:, 1] + (deltas[:, 1] * std_[1] + mean_[1]) * height + x2 = boxes[:, 2] + (deltas[:, 2] * std_[2] + mean_[2]) * width + y2 = boxes[:, 3] + (deltas[:, 3] * std_[3] + mean_[3]) * height + for x1_, y1_, x2_, y2_, class_id in zip(x1, y1, x2, y2, class_ids): + x1_, y1_, x2_, y2_ = int(x1_), int(y1_), int(x2_), int(y2_) + cv2.rectangle(image, (x1_, y1_), (x2_, y2_), (0, 255, 0), 2) + class_name = train_generator.labels[class_id] + label = class_name + ret, baseline = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.3, 1) + cv2.rectangle(image, (x1_, y2_ - ret[1] - baseline), (x1_ + ret[0], y2_), (255, 255, 255), -1) + cv2.putText(image, label, (x1_, y2_ - baseline), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1) + cv2.imshow('image', image.astype(np.uint8)[..., ::-1]) + cv2.waitKey(0) + # 36864, 46080, 48384, 48960, 49104 + # if first_valid_id < 36864: + # stride = 8 + # elif 36864 <= first_valid_id < 46080: + # stride = 16 + # elif 46080 <= first_valid_id < 48384: + # stride = 32 + # elif 48384 <= first_valid_id < 48960: + # stride = 64 + # else: + # stride = 128 + pass + + + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/initializers.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/initializers.py new file mode 100644 index 0000000000000000000000000000000000000000..87b54577a31457c906eea922615f4ff2fa19ef13 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/initializers.py @@ 
-0,0 +1,54 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from npu_bridge.npu_init import * + +# import keras +from tensorflow import keras + +import numpy as np +import math + + +class PriorProbability(keras.initializers.Initializer): + """ Apply a prior probability to the weights. + """ + + def __init__(self, probability=0.01): + self.probability = probability + + def get_config(self): + return { + 'probability': self.probability + } + + def __call__(self, shape, dtype=None): + # set bias to -log((1 - p)/p) for foreground + result = np.ones(shape, dtype=np.float32) * -math.log((1 - self.probability) / self.probability) + + return result + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/layers.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..02b400db22e451f86a83959a36ba299fe5408110 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/layers.py @@ -0,0 +1,407 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from npu_bridge.npu_init import * +from tensorflow import keras +import tensorflow as tf + + +class BatchNormalization(keras.layers.BatchNormalization): + """ + Identical to keras.layers.BatchNormalization, but adds the option to freeze parameters. + """ + + def __init__(self, freeze, *args, **kwargs): + self.freeze = freeze + super(BatchNormalization, self).__init__(*args, **kwargs) + + # set to non-trainable if freeze is true + self.trainable = not self.freeze + + def call(self, inputs, training=None, **kwargs): + # return super.call, but set training + if not training: + return super(BatchNormalization, self).call(inputs, training=False) + else: + return super(BatchNormalization, self).call(inputs, training=(not self.freeze)) + + def get_config(self): + config = super(BatchNormalization, self).get_config() + config.update({'freeze': self.freeze}) + return config + + +class wBiFPNAdd(keras.layers.Layer): + def __init__(self, epsilon=1e-4, **kwargs): + super(wBiFPNAdd, self).__init__(**kwargs) + self.epsilon = epsilon + + def build(self, input_shape): + num_in = len(input_shape) + self.w = self.add_weight(name=self.name, + shape=(num_in,), + initializer=keras.initializers.constant(1 / num_in), + trainable=True, + dtype=tf.float32) + + def call(self, inputs, **kwargs): + w = keras.activations.relu(self.w) + x = tf.reduce_sum([w[i] * inputs[i] for i in range(len(inputs))], axis=0) + x = x / (tf.reduce_sum(w) + self.epsilon) + return x + + def compute_output_shape(self, input_shape): + return input_shape[0] + + def get_config(self): + config = super(wBiFPNAdd, self).get_config() + config.update({ + 'epsilon': self.epsilon + }) + return config + + +def bbox_transform_inv(boxes, deltas, scale_factors=None): + cxa = (boxes[..., 0] + boxes[..., 2]) / 2 + cya = (boxes[..., 1] + boxes[..., 3]) / 2 + wa = boxes[..., 2] - boxes[..., 0] + ha = boxes[..., 3] - boxes[..., 1] + ty, tx, th, tw = deltas[..., 0], deltas[..., 1], deltas[..., 2], deltas[..., 3] + if scale_factors: + ty *= scale_factors[0] + tx *= scale_factors[1] + th *= scale_factors[2] + tw *= scale_factors[3] + w = tf.exp(tw) * wa + h = tf.exp(th) * ha + cy = ty * ha + cya + cx = tx * wa + cxa + ymin = cy - h / 2. + xmin = cx - w / 2. + ymax = cy + h / 2. + xmax = cx + w / 2. 
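+    # Worked example (illustrative): an anchor (10, 10, 30, 30) has center
+    # (20, 20) and size 20 x 20. With scale_factors=None, deltas
+    # (ty, tx, th, tw) = (0.5, 0.0, 0.0, log 2) move the center to (20, 30)
+    # and double the width, giving the box (0, 20, 40, 40).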
+ return tf.stack([xmin, ymin, xmax, ymax], axis=-1) + + +class ClipBoxes(keras.layers.Layer): + def call(self, inputs, **kwargs): + image, boxes = inputs + shape = keras.backend.cast(keras.backend.shape(image), keras.backend.floatx()) + height = shape[1] + width = shape[2] + x1 = tf.clip_by_value(boxes[:, :, 0], 0, width - 1) + y1 = tf.clip_by_value(boxes[:, :, 1], 0, height - 1) + x2 = tf.clip_by_value(boxes[:, :, 2], 0, width - 1) + y2 = tf.clip_by_value(boxes[:, :, 3], 0, height - 1) + + return keras.backend.stack([x1, y1, x2, y2], axis=2) + + def compute_output_shape(self, input_shape): + return input_shape[1] + + +class RegressBoxes(keras.layers.Layer): + def __init__(self, *args, **kwargs): + super(RegressBoxes, self).__init__(*args, **kwargs) + + def call(self, inputs, **kwargs): + anchors, regression = inputs + return bbox_transform_inv(anchors, regression) + + def compute_output_shape(self, input_shape): + return input_shape[0] + + def get_config(self): + config = super(RegressBoxes, self).get_config() + return config + + +def filter_detections( + boxes, + classification, + alphas=None, + ratios=None, + class_specific_filter=True, + nms=True, + score_threshold=0.01, + max_detections=100, + nms_threshold=0.5, + detect_quadrangle=False, +): + """ + Filter detections using the boxes and classification values. + + Args + boxes: Tensor of shape (num_boxes, 4) containing the boxes in (x1, y1, x2, y2) format. + classification: Tensor of shape (num_boxes, num_classes) containing the classification scores. + other: List of tensors of shape (num_boxes, ...) to filter along with the boxes and classification scores. + class_specific_filter: Whether to perform filtering per class, or take the best scoring class and filter those. + nms: Flag to enable/disable non maximum suppression. + score_threshold: Threshold used to prefilter the boxes with. + max_detections: Maximum number of detections to keep. + nms_threshold: Threshold for the IoU value to determine when a box should be suppressed. + + Returns + A list of [boxes, scores, labels, other[0], other[1], ...]. + boxes is shaped (max_detections, 4) and contains the (x1, y1, x2, y2) of the non-suppressed boxes. + scores is shaped (max_detections,) and contains the scores of the predicted class. + labels is shaped (max_detections,) and contains the predicted label. + other[i] is shaped (max_detections, ...) and contains the filtered other[i] data. + In case there are less than max_detections detections, the tensors are padded with -1's. 
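+
+        For instance (illustrative), with max_detections=3 and only two boxes
+        surviving the score threshold and NMS, the padded outputs look like:
+
+            boxes  -> [[12., 30., 96., 80.], [40., 8., 70., 60.], [-1., -1., -1., -1.]]
+            scores -> [0.92, 0.54, -1.]
+            labels -> [7, 2, -1]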
+ """ + + def _filter_detections(scores_, labels_): + # threshold based on score + # (num_score_keeps, 1) + indices_ = tf.where(keras.backend.greater(scores_, score_threshold)) + + if nms: + # (num_score_keeps, 4) + filtered_boxes = tf.gather_nd(boxes, indices_) + # In [4]: scores = np.array([0.1, 0.5, 0.4, 0.2, 0.7, 0.2]) + # In [5]: tf.greater(scores, 0.4) + # Out[5]: + # In [6]: tf.where(tf.greater(scores, 0.4)) + # Out[6]: + # + # + # In [7]: tf.gather(scores, tf.where(tf.greater(scores, 0.4))) + # Out[7]: + # + filtered_scores = keras.backend.gather(scores_, indices_)[:, 0] + + # perform NMS + # filtered_boxes = tf.concat([filtered_boxes[..., 1:2], filtered_boxes[..., 0:1], + # filtered_boxes[..., 3:4], filtered_boxes[..., 2:3]], axis=-1) + nms_indices = tf.image.non_max_suppression(filtered_boxes, filtered_scores, max_output_size=max_detections, + iou_threshold=nms_threshold) + + # filter indices based on NMS + # (num_score_nms_keeps, 1) + indices_ = keras.backend.gather(indices_, nms_indices) + + # add indices to list of all indices + # (num_score_nms_keeps, ) + labels_ = tf.gather_nd(labels_, indices_) + # (num_score_nms_keeps, 2) + indices_ = keras.backend.stack([indices_[:, 0], labels_], axis=1) + + return indices_ + + if class_specific_filter: + all_indices = [] + # perform per class filtering + for c in range(int(classification.shape[1])): + scores = classification[:, c] + labels = c * tf.ones((keras.backend.shape(scores)[0],), dtype='int64') + all_indices.append(_filter_detections(scores, labels)) + + # concatenate indices to single tensor + # (concatenated_num_score_nms_keeps, 2) + indices = keras.backend.concatenate(all_indices, axis=0) + else: + scores = keras.backend.max(classification, axis=1) + labels = keras.backend.argmax(classification, axis=1) + indices = _filter_detections(scores, labels) + + # select top k + scores = tf.gather_nd(classification, indices) + labels = indices[:, 1] + scores, top_indices = tf.nn.top_k(scores, k=keras.backend.minimum(max_detections, keras.backend.shape(scores)[0])) + + # filter input using the final set of indices + indices = keras.backend.gather(indices[:, 0], top_indices) + boxes = keras.backend.gather(boxes, indices) + labels = keras.backend.gather(labels, top_indices) + + # zero pad the outputs + pad_size = keras.backend.maximum(0, max_detections - keras.backend.shape(scores)[0]) + boxes = tf.pad(boxes, [[0, pad_size], [0, 0]], constant_values=-1) + scores = tf.pad(scores, [[0, pad_size]], constant_values=-1) + labels = tf.pad(labels, [[0, pad_size]], constant_values=-1) + labels = keras.backend.cast(labels, 'int32') + + # set shapes, since we know what they are + boxes.set_shape([max_detections, 4]) + scores.set_shape([max_detections]) + labels.set_shape([max_detections]) + + if detect_quadrangle: + alphas = keras.backend.gather(alphas, indices) + ratios = keras.backend.gather(ratios, indices) + alphas = tf.pad(alphas, [[0, pad_size], [0, 0]], constant_values=-1) + ratios = tf.pad(ratios, [[0, pad_size]], constant_values=-1) + alphas.set_shape([max_detections, 4]) + ratios.set_shape([max_detections]) + return [boxes, scores, alphas, ratios, labels] + else: + return [boxes, scores, labels] + + +class FilterDetections(keras.layers.Layer): + """ + Keras layer for filtering detections using score threshold and NMS. 
+ """ + + def __init__( + self, + nms=True, + class_specific_filter=True, + nms_threshold=0.5, + score_threshold=0.01, + max_detections=100, + parallel_iterations=32, + detect_quadrangle=False, + **kwargs + ): + """ + Filters detections using score threshold, NMS and selecting the top-k detections. + + Args + nms: Flag to enable/disable NMS. + class_specific_filter: Whether to perform filtering per class, or take the best scoring class and filter those. + nms_threshold: Threshold for the IoU value to determine when a box should be suppressed. + score_threshold: Threshold used to prefilter the boxes with. + max_detections: Maximum number of detections to keep. + parallel_iterations: Number of batch items to process in parallel. + """ + self.nms = nms + self.class_specific_filter = class_specific_filter + self.nms_threshold = nms_threshold + self.score_threshold = score_threshold + self.max_detections = max_detections + self.parallel_iterations = parallel_iterations + self.detect_quadrangle = detect_quadrangle + super(FilterDetections, self).__init__(**kwargs) + + def call(self, inputs, **kwargs): + """ + Constructs the NMS graph. + + Args + inputs : List of [boxes, classification, other[0], other[1], ...] tensors. + """ + boxes = inputs[0] + classification = inputs[1] + if self.detect_quadrangle: + alphas = inputs[2] + ratios = inputs[3] + + # wrap nms with our parameters + def _filter_detections(args): + boxes_ = args[0] + classification_ = args[1] + alphas_ = args[2] if self.detect_quadrangle else None + ratios_ = args[3] if self.detect_quadrangle else None + + return filter_detections( + boxes_, + classification_, + alphas_, + ratios_, + nms=self.nms, + class_specific_filter=self.class_specific_filter, + score_threshold=self.score_threshold, + max_detections=self.max_detections, + nms_threshold=self.nms_threshold, + detect_quadrangle=self.detect_quadrangle, + ) + + # call filter_detections on each batch item + if self.detect_quadrangle: + outputs = tf.map_fn( + _filter_detections, + elems=[boxes, classification, alphas, ratios], + dtype=['float32', 'float32', 'float32', 'float32', 'int32'], + parallel_iterations=self.parallel_iterations + ) + else: + outputs = tf.map_fn( + _filter_detections, + elems=[boxes, classification], + dtype=['float32', 'float32', 'int32'], + parallel_iterations=self.parallel_iterations + ) + + return outputs + + def compute_output_shape(self, input_shape): + """ + Computes the output shapes given the input shapes. + + Args + input_shape : List of input shapes [boxes, classification]. + + Returns + List of tuples representing the output shapes: + [filtered_boxes.shape, filtered_scores.shape, filtered_labels.shape, filtered_other[0].shape, filtered_other[1].shape, ...] + """ + if self.detect_quadrangle: + return [ + (input_shape[0][0], self.max_detections, 4), + (input_shape[1][0], self.max_detections), + (input_shape[1][0], self.max_detections, 4), + (input_shape[1][0], self.max_detections), + (input_shape[1][0], self.max_detections), + ] + else: + return [ + (input_shape[0][0], self.max_detections, 4), + (input_shape[1][0], self.max_detections), + (input_shape[1][0], self.max_detections), + ] + + def compute_mask(self, inputs, mask=None): + """ + This is required in Keras when there is more than 1 output. + """ + return (len(inputs) + 1) * [None] + + def get_config(self): + """ + Gets the configuration of this layer. + + Returns + Dictionary containing the parameters of this layer. 
+ """ + config = super(FilterDetections, self).get_config() + config.update({ + 'nms': self.nms, + 'class_specific_filter': self.class_specific_filter, + 'nms_threshold': self.nms_threshold, + 'score_threshold': self.score_threshold, + 'max_detections': self.max_detections, + 'parallel_iterations': self.parallel_iterations, + }) + + return config + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/losses.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/losses.py new file mode 100644 index 0000000000000000000000000000000000000000..70a6a9e34183139ece67157a5793101e8cef8dc3 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/losses.py @@ -0,0 +1,202 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.""" + +from npu_bridge.npu_init import * + +# import keras +from tensorflow import keras +import tensorflow as tf + + +def focal(alpha=0.25, gamma=1.5): + """ + Create a functor for computing the focal loss. + + Args + alpha: Scale the focal weight with alpha. + gamma: Take the power of the focal weight with gamma. + + Returns + A functor that computes the focal loss using the alpha and gamma. + """ + + def _focal(y_true, y_pred): + """ + Compute the focal loss given the target tensor and the predicted tensor. + + As defined in https://arxiv.org/abs/1708.02002 + + Args + y_true: Tensor of target data from the generator with shape (B, N, num_classes). + y_pred: Tensor of predicted data from the network with shape (B, N, num_classes). + + Returns + The focal loss of y_pred w.r.t. y_true. 
+ """ + labels = y_true[:, :, :-1] + # -1 for ignore, 0 for background, 1 for object + anchor_state = y_true[:, :, -1] + classification = y_pred + + # filter out "ignore" anchors + indices = tf.where(keras.backend.not_equal(anchor_state, -1)) + labels = tf.gather_nd(labels, indices) + classification = tf.gather_nd(classification, indices) + + # compute the focal loss + alpha_factor = keras.backend.ones_like(labels) * alpha + alpha_factor = tf.where(keras.backend.equal(labels, 1), alpha_factor, 1 - alpha_factor) + # (1 - 0.99) ** 2 = 1e-4, (1 - 0.9) ** 2 = 1e-2 + focal_weight = tf.where(keras.backend.equal(labels, 1), 1 - classification, classification) + focal_weight = alpha_factor * focal_weight ** gamma + cls_loss = focal_weight * keras.backend.binary_crossentropy(labels, classification) + + # compute the normalizer: the number of positive anchors + normalizer = tf.where(keras.backend.equal(anchor_state, 1)) + normalizer = keras.backend.cast(keras.backend.shape(normalizer)[0], keras.backend.floatx()) + normalizer = keras.backend.maximum(keras.backend.cast_to_floatx(1.0), normalizer) + + return keras.backend.sum(cls_loss) / normalizer + + return _focal + + +def smooth_l1(sigma=3.0): + """ + Create a smooth L1 loss functor. + Args + sigma: This argument defines the point where the loss changes from L2 to L1. + Returns + A functor for computing the smooth L1 loss given target data and predicted data. + """ + sigma_squared = sigma ** 2 + + def _smooth_l1(y_true, y_pred): + """ Compute the smooth L1 loss of y_pred w.r.t. y_true. + Args + y_true: Tensor from the generator of shape (B, N, 5). The last value for each box is the state of the anchor (ignore, negative, positive). + y_pred: Tensor from the network of shape (B, N, 4). + Returns + The smooth L1 loss of y_pred w.r.t. y_true. + """ + # separate target and state + regression = y_pred + regression_target = y_true[:, :, :-1] + anchor_state = y_true[:, :, -1] + + # filter out "ignore" anchors + indices = tf.where(keras.backend.equal(anchor_state, 1)) + regression = tf.gather_nd(regression, indices) + regression_target = tf.gather_nd(regression_target, indices) + + # compute smooth L1 loss + # f(x) = 0.5 * (sigma * x)^2 if |x| < 1 / sigma / sigma + # |x| - 0.5 / sigma / sigma otherwise + regression_diff = regression - regression_target + regression_diff = keras.backend.abs(regression_diff) + regression_loss = tf.where( + keras.backend.less(regression_diff, 1.0 / sigma_squared), + 0.5 * sigma_squared * keras.backend.pow(regression_diff, 2), + regression_diff - 0.5 / sigma_squared + ) + + # compute the normalizer: the number of positive anchors + normalizer = keras.backend.maximum(1, keras.backend.shape(indices)[0]) + normalizer = keras.backend.cast(normalizer, dtype=keras.backend.floatx()) + return keras.backend.sum(regression_loss) / normalizer + + return _smooth_l1 + + +def smooth_l1_quad(sigma=3.0): + """ + Create a smooth L1 loss functor. + + Args + sigma: This argument defines the point where the loss changes from L2 to L1. + + Returns + A functor for computing the smooth L1 loss given target data and predicted data. + """ + sigma_squared = sigma ** 2 + + def _smooth_l1(y_true, y_pred): + """ Compute the smooth L1 loss of y_pred w.r.t. y_true. + + Args + y_true: Tensor from the generator of shape (B, N, 5). The last value for each box is the state of the anchor (ignore, negative, positive). + y_pred: Tensor from the network of shape (B, N, 4). + + Returns + The smooth L1 loss of y_pred w.r.t. y_true. 
+ """ + # separate target and state + regression = y_pred + regression = tf.concat([regression[..., :4], tf.sigmoid(regression[..., 4:9])], axis=-1) + regression_target = y_true[:, :, :-1] + anchor_state = y_true[:, :, -1] + + # filter out "ignore" anchors + indices = tf.where(keras.backend.equal(anchor_state, 1)) + regression = tf.gather_nd(regression, indices) + regression_target = tf.gather_nd(regression_target, indices) + + # compute smooth L1 loss + # f(x) = 0.5 * (sigma * x)^2 if |x| < 1 / sigma / sigma + # |x| - 0.5 / sigma / sigma otherwise + regression_diff = regression - regression_target + regression_diff = keras.backend.abs(regression_diff) + box_regression_loss = tf.where( + keras.backend.less(regression_diff[..., :4], 1.0 / sigma_squared), + 0.5 * sigma_squared * keras.backend.pow(regression_diff[..., :4], 2), + regression_diff[..., :4] - 0.5 / sigma_squared + ) + + alpha_regression_loss = tf.where( + keras.backend.less(regression_diff[..., 4:8], 1.0 / sigma_squared), + 0.5 * sigma_squared * keras.backend.pow(regression_diff[..., 4:8], 2), + regression_diff[..., 4:8] - 0.5 / sigma_squared + ) + + ratio_regression_loss = tf.where( + keras.backend.less(regression_diff[..., 8], 1.0 / sigma_squared), + 0.5 * sigma_squared * keras.backend.pow(regression_diff[..., 8], 2), + regression_diff[..., 8] - 0.5 / sigma_squared + ) + # compute the normalizer: the number of positive anchors + normalizer = keras.backend.maximum(1, keras.backend.shape(indices)[0]) + normalizer = keras.backend.cast(normalizer, dtype=keras.backend.floatx()) + + box_regression_loss = tf.reduce_sum(box_regression_loss) / normalizer + alpha_regression_loss = tf.reduce_sum(alpha_regression_loss) / normalizer + ratio_regression_loss = tf.reduce_sum(ratio_regression_loss) / normalizer + + return box_regression_loss + alpha_regression_loss + 16 * ratio_regression_loss + + return _smooth_l1 + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/model.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/model.py new file mode 100644 index 0000000000000000000000000000000000000000..a8e69cc84333112337636cbdb1775a8692c1755e --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/model.py @@ -0,0 +1,505 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from npu_bridge.npu_init import * +from functools import reduce + +# from keras import layers +# from keras import initializers +# from keras import models +# from keras_ import EfficientNetB0, EfficientNetB1, EfficientNetB2 +# from keras_ import EfficientNetB3, EfficientNetB4, EfficientNetB5, EfficientNetB6 + +import tensorflow as tf +from tensorflow.keras import layers +from tensorflow.keras import initializers +from tensorflow.keras import models +from tfkeras import EfficientNetB0, EfficientNetB1, EfficientNetB2 +from tfkeras import EfficientNetB3, EfficientNetB4, EfficientNetB5, EfficientNetB6 + +from layers import ClipBoxes, RegressBoxes, FilterDetections, wBiFPNAdd, BatchNormalization +from initializers import PriorProbability +from utils.anchors import anchors_for_shape +import numpy as np + +w_bifpns = [64, 88, 112, 160, 224, 288, 384] +d_bifpns = [3, 4, 5, 6, 7, 7, 8] +d_heads = [3, 3, 3, 4, 4, 4, 5] +image_sizes = [512, 640, 768, 896, 1024, 1280, 1408] +backbones = [EfficientNetB0, EfficientNetB1, EfficientNetB2, + EfficientNetB3, EfficientNetB4, EfficientNetB5, EfficientNetB6] + +MOMENTUM = 0.997 +EPSILON = 1e-4 + + +def SeparableConvBlock(num_channels, kernel_size, strides, name, freeze_bn=False): + f1 = layers.SeparableConv2D(num_channels, kernel_size=kernel_size, strides=strides, padding='same', + use_bias=True, name=f'{name}/conv') + f2 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, name=f'{name}/bn') + # f2 = BatchNormalization(freeze=freeze_bn, name=f'{name}/bn') + return reduce(lambda f, g: lambda *args, **kwargs: g(f(*args, **kwargs)), (f1, f2)) + + +def ConvBlock(num_channels, kernel_size, strides, name, freeze_bn=False): + f1 = layers.Conv2D(num_channels, kernel_size=kernel_size, strides=strides, padding='same', + use_bias=True, name='{}_conv'.format(name)) + f2 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, name='{}_bn'.format(name)) + # f2 = BatchNormalization(freeze=freeze_bn, name='{}_bn'.format(name)) + f3 = layers.ReLU(name='{}_relu'.format(name)) + return reduce(lambda f, g: lambda *args, **kwargs: g(f(*args, **kwargs)), (f1, f2, f3)) + + +def build_wBiFPN(features, num_channels, id, freeze_bn=False): + if id == 0: + _, _, C3, C4, C5 = features + P3_in = C3 + P4_in = C4 + P5_in = C5 + P6_in = layers.Conv2D(num_channels, kernel_size=1, padding='same', name='resample_p6/conv2d')(C5) + P6_in = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, name='resample_p6/bn')(P6_in) + # P6_in = BatchNormalization(freeze=freeze_bn, name='resample_p6/bn')(P6_in) + P6_in = layers.MaxPooling2D(pool_size=3, strides=2, padding='same', name='resample_p6/maxpool')(P6_in) + P7_in = layers.MaxPooling2D(pool_size=3, strides=2, padding='same', name='resample_p7/maxpool')(P6_in) + P7_U = layers.UpSampling2D()(P7_in) + P6_td = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode0/add')([P6_in, P7_U]) + P6_td = layers.Activation(lambda x: tf.nn.swish(x))(P6_td) + P6_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode0/op_after_combine5')(P6_td) + P5_in_1 = layers.Conv2D(num_channels, kernel_size=1, padding='same', + name=f'fpn_cells/cell_{id}/fnode1/resample_0_2_6/conv2d')(P5_in) + P5_in_1 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, + name=f'fpn_cells/cell_{id}/fnode1/resample_0_2_6/bn')(P5_in_1) + # P5_in_1 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode1/resample_0_2_6/bn')(P5_in_1) + P6_U = layers.UpSampling2D()(P6_td) + P5_td = 
wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode1/add')([P5_in_1, P6_U]) + P5_td = layers.Activation(lambda x: tf.nn.swish(x))(P5_td) + P5_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode1/op_after_combine6')(P5_td) + P4_in_1 = layers.Conv2D(num_channels, kernel_size=1, padding='same', + name=f'fpn_cells/cell_{id}/fnode2/resample_0_1_7/conv2d')(P4_in) + P4_in_1 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, + name=f'fpn_cells/cell_{id}/fnode2/resample_0_1_7/bn')(P4_in_1) + # P4_in_1 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode2/resample_0_1_7/bn')(P4_in_1) + P5_U = layers.UpSampling2D()(P5_td) + P4_td = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode2/add')([P4_in_1, P5_U]) + P4_td = layers.Activation(lambda x: tf.nn.swish(x))(P4_td) + P4_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode2/op_after_combine7')(P4_td) + P3_in = layers.Conv2D(num_channels, kernel_size=1, padding='same', + name=f'fpn_cells/cell_{id}/fnode3/resample_0_0_8/conv2d')(P3_in) + P3_in = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, + name=f'fpn_cells/cell_{id}/fnode3/resample_0_0_8/bn')(P3_in) + # P3_in = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode3/resample_0_0_8/bn')(P3_in) + P4_U = layers.UpSampling2D()(P4_td) + P3_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode3/add')([P3_in, P4_U]) + P3_out = layers.Activation(lambda x: tf.nn.swish(x))(P3_out) + P3_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode3/op_after_combine8')(P3_out) + P4_in_2 = layers.Conv2D(num_channels, kernel_size=1, padding='same', + name=f'fpn_cells/cell_{id}/fnode4/resample_0_1_9/conv2d')(P4_in) + P4_in_2 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, + name=f'fpn_cells/cell_{id}/fnode4/resample_0_1_9/bn')(P4_in_2) + # P4_in_2 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode4/resample_0_1_9/bn')(P4_in_2) + P3_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P3_out) + P4_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode4/add')([P4_in_2, P4_td, P3_D]) + P4_out = layers.Activation(lambda x: tf.nn.swish(x))(P4_out) + P4_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode4/op_after_combine9')(P4_out) + + P5_in_2 = layers.Conv2D(num_channels, kernel_size=1, padding='same', + name=f'fpn_cells/cell_{id}/fnode5/resample_0_2_10/conv2d')(P5_in) + P5_in_2 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, + name=f'fpn_cells/cell_{id}/fnode5/resample_0_2_10/bn')(P5_in_2) + # P5_in_2 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode5/resample_0_2_10/bn')(P5_in_2) + P4_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P4_out) + P5_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode5/add')([P5_in_2, P5_td, P4_D]) + P5_out = layers.Activation(lambda x: tf.nn.swish(x))(P5_out) + P5_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode5/op_after_combine10')(P5_out) + + P5_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P5_out) + P6_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode6/add')([P6_in, P6_td, P5_D]) + P6_out = layers.Activation(lambda x: tf.nn.swish(x))(P6_out) + P6_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + 
name=f'fpn_cells/cell_{id}/fnode6/op_after_combine11')(P6_out) + + P6_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P6_out) + P7_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode7/add')([P7_in, P6_D]) + P7_out = layers.Activation(lambda x: tf.nn.swish(x))(P7_out) + P7_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode7/op_after_combine12')(P7_out) + + else: + P3_in, P4_in, P5_in, P6_in, P7_in = features + P7_U = layers.UpSampling2D()(P7_in) + P6_td = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode0/add')([P6_in, P7_U]) + P6_td = layers.Activation(lambda x: tf.nn.swish(x))(P6_td) + P6_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode0/op_after_combine5')(P6_td) + P6_U = layers.UpSampling2D()(P6_td) + P5_td = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode1/add')([P5_in, P6_U]) + P5_td = layers.Activation(lambda x: tf.nn.swish(x))(P5_td) + P5_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode1/op_after_combine6')(P5_td) + P5_U = layers.UpSampling2D()(P5_td) + P4_td = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode2/add')([P4_in, P5_U]) + P4_td = layers.Activation(lambda x: tf.nn.swish(x))(P4_td) + P4_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode2/op_after_combine7')(P4_td) + P4_U = layers.UpSampling2D()(P4_td) + P3_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode3/add')([P3_in, P4_U]) + P3_out = layers.Activation(lambda x: tf.nn.swish(x))(P3_out) + P3_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode3/op_after_combine8')(P3_out) + P3_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P3_out) + P4_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode4/add')([P4_in, P4_td, P3_D]) + P4_out = layers.Activation(lambda x: tf.nn.swish(x))(P4_out) + P4_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode4/op_after_combine9')(P4_out) + + P4_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P4_out) + P5_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode5/add')([P5_in, P5_td, P4_D]) + P5_out = layers.Activation(lambda x: tf.nn.swish(x))(P5_out) + P5_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode5/op_after_combine10')(P5_out) + + P5_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P5_out) + P6_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode6/add')([P6_in, P6_td, P5_D]) + P6_out = layers.Activation(lambda x: tf.nn.swish(x))(P6_out) + P6_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode6/op_after_combine11')(P6_out) + + P6_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P6_out) + P7_out = wBiFPNAdd(name=f'fpn_cells/cell_{id}/fnode7/add')([P7_in, P6_D]) + P7_out = layers.Activation(lambda x: tf.nn.swish(x))(P7_out) + P7_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode7/op_after_combine12')(P7_out) + return P3_out, P4_td, P5_td, P6_td, P7_out + + +def build_BiFPN(features, num_channels, id, freeze_bn=False): + if id == 0: + _, _, C3, C4, C5 = features + P3_in = C3 + P4_in = C4 + P5_in = C5 + P6_in = layers.Conv2D(num_channels, kernel_size=1, padding='same', name='resample_p6/conv2d')(C5) + P6_in = 
layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, name='resample_p6/bn')(P6_in) + # P6_in = BatchNormalization(freeze=freeze_bn, name='resample_p6/bn')(P6_in) + P6_in = layers.MaxPooling2D(pool_size=3, strides=2, padding='same', name='resample_p6/maxpool')(P6_in) + P7_in = layers.MaxPooling2D(pool_size=3, strides=2, padding='same', name='resample_p7/maxpool')(P6_in) + P7_U = layers.UpSampling2D()(P7_in) + P6_td = layers.Add(name=f'fpn_cells/cell_{id}/fnode0/add')([P6_in, P7_U]) + P6_td = layers.Activation(lambda x: tf.nn.swish(x))(P6_td) + P6_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode0/op_after_combine5')(P6_td) + P5_in_1 = layers.Conv2D(num_channels, kernel_size=1, padding='same', + name=f'fpn_cells/cell_{id}/fnode1/resample_0_2_6/conv2d')(P5_in) + P5_in_1 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, + name=f'fpn_cells/cell_{id}/fnode1/resample_0_2_6/bn')(P5_in_1) + # P5_in_1 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode1/resample_0_2_6/bn')(P5_in_1) + P6_U = layers.UpSampling2D()(P6_td) + P5_td = layers.Add(name=f'fpn_cells/cell_{id}/fnode1/add')([P5_in_1, P6_U]) + P5_td = layers.Activation(lambda x: tf.nn.swish(x))(P5_td) + P5_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode1/op_after_combine6')(P5_td) + P4_in_1 = layers.Conv2D(num_channels, kernel_size=1, padding='same', + name=f'fpn_cells/cell_{id}/fnode2/resample_0_1_7/conv2d')(P4_in) + P4_in_1 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, + name=f'fpn_cells/cell_{id}/fnode2/resample_0_1_7/bn')(P4_in_1) + # P4_in_1 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode2/resample_0_1_7/bn')(P4_in_1) + P5_U = layers.UpSampling2D()(P5_td) + P4_td = layers.Add(name=f'fpn_cells/cell_{id}/fnode2/add')([P4_in_1, P5_U]) + P4_td = layers.Activation(lambda x: tf.nn.swish(x))(P4_td) + P4_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode2/op_after_combine7')(P4_td) + P3_in = layers.Conv2D(num_channels, kernel_size=1, padding='same', + name=f'fpn_cells/cell_{id}/fnode3/resample_0_0_8/conv2d')(P3_in) + P3_in = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, + name=f'fpn_cells/cell_{id}/fnode3/resample_0_0_8/bn')(P3_in) + # P3_in = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode3/resample_0_0_8/bn')(P3_in) + P4_U = layers.UpSampling2D()(P4_td) + P3_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode3/add')([P3_in, P4_U]) + P3_out = layers.Activation(lambda x: tf.nn.swish(x))(P3_out) + P3_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode3/op_after_combine8')(P3_out) + P4_in_2 = layers.Conv2D(num_channels, kernel_size=1, padding='same', + name=f'fpn_cells/cell_{id}/fnode4/resample_0_1_9/conv2d')(P4_in) + P4_in_2 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, + name=f'fpn_cells/cell_{id}/fnode4/resample_0_1_9/bn')(P4_in_2) + # P4_in_2 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode4/resample_0_1_9/bn')(P4_in_2) + P3_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P3_out) + P4_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode4/add')([P4_in_2, P4_td, P3_D]) + P4_out = layers.Activation(lambda x: tf.nn.swish(x))(P4_out) + P4_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + 
name=f'fpn_cells/cell_{id}/fnode4/op_after_combine9')(P4_out) + + P5_in_2 = layers.Conv2D(num_channels, kernel_size=1, padding='same', + name=f'fpn_cells/cell_{id}/fnode5/resample_0_2_10/conv2d')(P5_in) + P5_in_2 = layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, + name=f'fpn_cells/cell_{id}/fnode5/resample_0_2_10/bn')(P5_in_2) + # P5_in_2 = BatchNormalization(freeze=freeze_bn, name=f'fpn_cells/cell_{id}/fnode5/resample_0_2_10/bn')(P5_in_2) + P4_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P4_out) + P5_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode5/add')([P5_in_2, P5_td, P4_D]) + P5_out = layers.Activation(lambda x: tf.nn.swish(x))(P5_out) + P5_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode5/op_after_combine10')(P5_out) + + P5_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P5_out) + P6_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode6/add')([P6_in, P6_td, P5_D]) + P6_out = layers.Activation(lambda x: tf.nn.swish(x))(P6_out) + P6_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode6/op_after_combine11')(P6_out) + + P6_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P6_out) + P7_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode7/add')([P7_in, P6_D]) + P7_out = layers.Activation(lambda x: tf.nn.swish(x))(P7_out) + P7_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode7/op_after_combine12')(P7_out) + + else: + P3_in, P4_in, P5_in, P6_in, P7_in = features + P7_U = layers.UpSampling2D()(P7_in) + P6_td = layers.Add(name=f'fpn_cells/cell_{id}/fnode0/add')([P6_in, P7_U]) + P6_td = layers.Activation(lambda x: tf.nn.swish(x))(P6_td) + P6_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode0/op_after_combine5')(P6_td) + P6_U = layers.UpSampling2D()(P6_td) + P5_td = layers.Add(name=f'fpn_cells/cell_{id}/fnode1/add')([P5_in, P6_U]) + P5_td = layers.Activation(lambda x: tf.nn.swish(x))(P5_td) + P5_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode1/op_after_combine6')(P5_td) + P5_U = layers.UpSampling2D()(P5_td) + P4_td = layers.Add(name=f'fpn_cells/cell_{id}/fnode2/add')([P4_in, P5_U]) + P4_td = layers.Activation(lambda x: tf.nn.swish(x))(P4_td) + P4_td = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode2/op_after_combine7')(P4_td) + P4_U = layers.UpSampling2D()(P4_td) + P3_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode3/add')([P3_in, P4_U]) + P3_out = layers.Activation(lambda x: tf.nn.swish(x))(P3_out) + P3_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode3/op_after_combine8')(P3_out) + P3_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P3_out) + P4_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode4/add')([P4_in, P4_td, P3_D]) + P4_out = layers.Activation(lambda x: tf.nn.swish(x))(P4_out) + P4_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode4/op_after_combine9')(P4_out) + + P4_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P4_out) + P5_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode5/add')([P5_in, P5_td, P4_D]) + P5_out = layers.Activation(lambda x: tf.nn.swish(x))(P5_out) + P5_out = 
SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode5/op_after_combine10')(P5_out) + + P5_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P5_out) + P6_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode6/add')([P6_in, P6_td, P5_D]) + P6_out = layers.Activation(lambda x: tf.nn.swish(x))(P6_out) + P6_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode6/op_after_combine11')(P6_out) + + P6_D = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(P6_out) + P7_out = layers.Add(name=f'fpn_cells/cell_{id}/fnode7/add')([P7_in, P6_D]) + P7_out = layers.Activation(lambda x: tf.nn.swish(x))(P7_out) + P7_out = SeparableConvBlock(num_channels=num_channels, kernel_size=3, strides=1, + name=f'fpn_cells/cell_{id}/fnode7/op_after_combine12')(P7_out) + return P3_out, P4_td, P5_td, P6_td, P7_out + + +class BoxNet(models.Model): + def __init__(self, width, depth, num_anchors=9, separable_conv=True, freeze_bn=False, detect_quadrangle=False, **kwargs): + super(BoxNet, self).__init__(**kwargs) + self.width = width + self.depth = depth + self.num_anchors = num_anchors + self.separable_conv = separable_conv + self.detect_quadrangle = detect_quadrangle + num_values = 9 if detect_quadrangle else 4 + options = { + 'kernel_size': 3, + 'strides': 1, + 'padding': 'same', + 'bias_initializer': 'zeros', + } + if separable_conv: + kernel_initializer = { + 'depthwise_initializer': initializers.VarianceScaling(), + 'pointwise_initializer': initializers.VarianceScaling(), + } + options.update(kernel_initializer) + self.convs = [layers.SeparableConv2D(filters=width, name=f'{self.name}/box-{i}', **options) for i in + range(depth)] + self.head = layers.SeparableConv2D(filters=num_anchors * num_values, + name=f'{self.name}/box-predict', **options) + else: + kernel_initializer = { + 'kernel_initializer': initializers.RandomNormal(mean=0.0, stddev=0.01, seed=None) + } + options.update(kernel_initializer) + self.convs = [layers.Conv2D(filters=width, name=f'{self.name}/box-{i}', **options) for i in range(depth)] + self.head = layers.Conv2D(filters=num_anchors * num_values, name=f'{self.name}/box-predict', **options) + self.bns = [ + [layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, name=f'{self.name}/box-{i}-bn-{j}') for j in + range(3, 8)] + for i in range(depth)] + # self.bns = [[BatchNormalization(freeze=freeze_bn, name=f'{self.name}/box-{i}-bn-{j}') for j in range(3, 8)] + # for i in range(depth)] + self.relu = layers.Lambda(lambda x: tf.nn.swish(x)) + self.reshape = layers.Reshape((-1, num_values)) + self.level = 0 + + def call(self, inputs, **kwargs): + feature, level = inputs + for i in range(self.depth): + feature = self.convs[i](feature) + feature = self.bns[i][self.level](feature) + feature = self.relu(feature) + outputs = self.head(feature) + outputs = self.reshape(outputs) + self.level += 1 + return outputs + + +class ClassNet(models.Model): + def __init__(self, width, depth, num_classes=20, num_anchors=9, separable_conv=True, freeze_bn=False, **kwargs): + super(ClassNet, self).__init__(**kwargs) + self.width = width + self.depth = depth + self.num_classes = num_classes + self.num_anchors = num_anchors + self.separable_conv = separable_conv + options = { + 'kernel_size': 3, + 'strides': 1, + 'padding': 'same', + } + if self.separable_conv: + kernel_initializer = { + 'depthwise_initializer': initializers.VarianceScaling(), + 'pointwise_initializer': 
initializers.VarianceScaling(), + } + options.update(kernel_initializer) + self.convs = [layers.SeparableConv2D(filters=width, bias_initializer='zeros', name=f'{self.name}/class-{i}', + **options) + for i in range(depth)] + self.head = layers.SeparableConv2D(filters=num_classes * num_anchors, + bias_initializer=PriorProbability(probability=0.01), + name=f'{self.name}/class-predict', **options) + else: + kernel_initializer = { + 'kernel_initializer': initializers.RandomNormal(mean=0.0, stddev=0.01, seed=None) + } + options.update(kernel_initializer) + self.convs = [layers.Conv2D(filters=width, bias_initializer='zeros', name=f'{self.name}/class-{i}', + **options) + for i in range(depth)] + self.head = layers.Conv2D(filters=num_classes * num_anchors, + bias_initializer=PriorProbability(probability=0.01), + name='class-predict', **options) + self.bns = [ + [layers.BatchNormalization(momentum=MOMENTUM, epsilon=EPSILON, name=f'{self.name}/class-{i}-bn-{j}') for j + in range(3, 8)] + for i in range(depth)] + # self.bns = [[BatchNormalization(freeze=freeze_bn, name=f'{self.name}/class-{i}-bn-{j}') for j in range(3, 8)] + # for i in range(depth)] + self.relu = layers.Lambda(lambda x: tf.nn.swish(x)) + self.reshape = layers.Reshape((-1, num_classes)) + self.activation = layers.Activation('sigmoid') + self.level = 0 + + def call(self, inputs, **kwargs): + feature, level = inputs + for i in range(self.depth): + feature = self.convs[i](feature) + feature = self.bns[i][self.level](feature) + feature = self.relu(feature) + outputs = self.head(feature) + outputs = self.reshape(outputs) + outputs = self.activation(outputs) + self.level += 1 + return outputs + + +def efficientdet(phi, num_classes=20, num_anchors=9, weighted_bifpn=False, freeze_bn=False, + score_threshold=0.01, detect_quadrangle=False, anchor_parameters=None, separable_conv=True): + assert phi in range(7) + input_size = image_sizes[phi] + input_shape = (input_size, input_size, 3) + image_input = layers.Input(input_shape) + w_bifpn = w_bifpns[phi] + d_bifpn = d_bifpns[phi] + w_head = w_bifpn + d_head = d_heads[phi] + backbone_cls = backbones[phi] + features = backbone_cls(input_tensor=image_input, freeze_bn=freeze_bn) + if weighted_bifpn: + fpn_features = features + for i in range(d_bifpn): + fpn_features = build_wBiFPN(fpn_features, w_bifpn, i, freeze_bn=freeze_bn) + else: + fpn_features = features + for i in range(d_bifpn): + fpn_features = build_BiFPN(fpn_features, w_bifpn, i, freeze_bn=freeze_bn) + box_net = BoxNet(w_head, d_head, num_anchors=num_anchors, separable_conv=separable_conv, freeze_bn=freeze_bn, + detect_quadrangle=detect_quadrangle, name='box_net') + class_net = ClassNet(w_head, d_head, num_classes=num_classes, num_anchors=num_anchors, + separable_conv=separable_conv, freeze_bn=freeze_bn, name='class_net') + classification = [class_net([feature, i]) for i, feature in enumerate(fpn_features)] + classification = layers.Concatenate(axis=1, name='classification')(classification) + regression = [box_net([feature, i]) for i, feature in enumerate(fpn_features)] + regression = layers.Concatenate(axis=1, name='regression')(regression) + + model = models.Model(inputs=[image_input], outputs=[classification, regression], name='efficientdet') + + # apply predicted regression to anchors + anchors = anchors_for_shape((input_size, input_size), anchor_params=anchor_parameters) + anchors_input = np.expand_dims(anchors, axis=0) + boxes = RegressBoxes(name='boxes')([anchors_input, regression[..., :4]]) + boxes = 
ClipBoxes(name='clipped_boxes')([image_input, boxes]) + + # filter detections (apply NMS / score threshold / select top-k) + if detect_quadrangle: + detections = FilterDetections( + name='filtered_detections', + score_threshold=score_threshold, + detect_quadrangle=True + )([boxes, classification, regression[..., 4:8], regression[..., 8]]) + else: + detections = FilterDetections( + name='filtered_detections', + score_threshold=score_threshold + )([boxes, classification]) + + prediction_model = models.Model(inputs=[image_input], outputs=detections, name='efficientdet_p') + return model, prediction_model + + +if __name__ == '__main__': + npu_keras_sess = set_keras_session_npu_config() + x, y = efficientdet(1) + close_session(npu_keras_sess) + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/model_level.txt b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/model_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..357d87784ead1792c24762291a4801d2b2018b08 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/model_level.txt @@ -0,0 +1,3 @@ +FuncStatus:OK +PerfStatus:NOK +PrecisionStatus:NOK \ No newline at end of file diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/requirements.txt b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..05d268dbbb610fe6f529dc0f44a63ed7fdcea363 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/requirements.txt @@ -0,0 +1,130 @@ +absl-py==0.13.0 +addict==2.4.0 +albumentations @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/albumentations-0.4.5-cp37-none-any.whl +asgiref==3.4.1 +astor==0.8.1 +attrs==19.3.0 +auto-tune @ file:///tmp/selfgz1419329419/fwkacllib/lib64/auto_tune-0.1.0-py3-none-any.whl +backcall==0.2.0 +boto3==1.12.22 +botocore==1.15.49 +certifi==2020.6.20 +cffi @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/cffi-1.14.0-cp37-cp37m-linux_aarch64.whl +chardet==3.0.4 +charset-normalizer==2.0.4 +click==8.0.1 +cloudpickle==1.3.0 +cycler==0.10.0 +Cython @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/Cython-0.29.14-cp37-cp37m-linux_aarch64.whl +dask==2.18.1 +decorator==4.4.1 +deep-moxing @ http://100.95.151.167:6868/aarch64/euler/dls-release/ubuntu-16.04/deep-moxing/latest/deep_moxing-1.0.2.e45a4759-py3-none-any.whl +Django==3.2.6 +docutils==0.15.2 +esdk-obs-python==3.20.1 +et-xmlfile==1.1.0 +Flask==1.1.1 +gast==0.2.2 +google-pasta==0.2.0 +grpcio @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/grpcio-1.26.0-cp37-cp37m-linux_aarch64.whl +grpcio-tools @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/grpcio_tools-1.26.0-cp37-cp37m-linux_aarch64.whl +gunicorn==20.0.4 +h5py @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/h5py-2.10.0-cp37-cp37m-linux_aarch64.whl +hccl @ file:///tmp/selfgz1419329419/fwkacllib/lib64/hccl-0.1.0-py3-none-any.whl +huaweicloud-sdk-python-modelarts-dataset @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/huaweicloud_sdk_python_modelarts_dataset-0.1.5-py2.py3-none-any.whl +idna==2.10 +image==1.5.28 +imageio==2.9.0 +imgaug==0.2.6 +importlib-metadata==4.8.1 +ipykernel==5.3.4 +ipython==7.25.0 +ipython-genutils==0.2.0 +itsdangerous==2.0.1 +jdcal==1.4.1 +jedi==0.18.0 +Jinja2==3.0.1 +jmespath==0.10.0 
+jupyter-client==6.1.12 +jupyter-core==4.7.1 +Keras==2.3.1 +Keras-Applications==1.0.8 +Keras-Preprocessing==1.1.2 +kfac==0.2.0 +kiwisolver @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/kiwisolver-1.1.0-cp37-cp37m-linux_aarch64.whl +lazy-import==0.2.2 +llvmlite @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/llvmlite-0.31.0-cp37-cp37m-linux_aarch64.whl +lxml @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/lxml-4.4.2-cp37-cp37m-linux_aarch64.whl +Markdown==3.3.4 +MarkupSafe==2.0.1 +matplotlib @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/matplotlib-3.1.2-cp37-cp37m-linux_aarch64.whl +matplotlib-inline==0.1.2 +mmcv @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/mmcv-0.2.14-cp37-cp37m-linux_aarch64.whl +modelarts-service @ http://100.95.151.167:6868/aarch64/euler/dls-release/euler-2.8/modelarts_service/modelarts_service-1.0.2-py3-none-any.whl +moxing-framework @ http://100.95.151.167:6868/aarch64/euler/dls-release/ubuntu-16.04/moxing_framework/moxing_framework-2.0.0.rc2.4b57a67b-py2.py3-none-any.whl +moxing-tensorflow @ http://100.95.151.167:6868/aarch64/euler/dls-release/ubuntu-16.04/moxing_tensorflow/moxing_tensorflow-2.0.0.rc2.65f98f7d-py2.py3-none-any.whl +mpmath==1.2.1 +networkx==2.6.2 +npu-bridge @ file:///tmp/selfgz1881668/tfplugin/bin/npu_bridge-1.15.0-py3-none-any.whl +numba==0.49.1 +numexpr @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/numexpr-2.7.1-cp37-cp37m-linux_aarch64.whl +numpy==1.19.3 +opencv-contrib-python==4.5.4.60 +opencv-python-headless==4.5.4.60 +openpyxl==3.0.3 +opt-einsum==3.3.0 +pandas @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/pandas-0.24.2-cp37-cp37m-linux_aarch64.whl +parso==0.8.2 +pathlib2==2.3.6 +pexpect==4.8.0 +pickleshare==0.7.5 +Pillow @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/Pillow-7.0.0-cp37-cp37m-linux_aarch64.whl +prometheus-client==0.8.0 +prompt-toolkit==3.0.19 +protobuf @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/protobuf-3.11.3-cp37-cp37m-linux_aarch64.whl +psutil @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/psutil-5.7.0-cp37-cp37m-linux_aarch64.whl +ptyprocess==0.7.0 +pycocotools @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/pycocotools-2.0.0-cp37-cp37m-linux_aarch64.whl +pycparser==2.20 +Pygments==2.9.0 +pyparsing==2.4.7 +python-dateutil==2.8.2 +pytz==2021.1 +PyWavelets @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/PyWavelets-1.1.1-cp37-cp37m-linux_aarch64.whl +PyYAML @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/PyYAML-5.3.1-cp37-cp37m-linux_aarch64.whl +pyzmq==22.1.0 +requests==2.26.0 +s3transfer==0.3.7 +schedule-search @ file:///tmp/selfgz1419329419/fwkacllib/lib64/schedule_search-0.1.0-py3-none-any.whl +scikit-image @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/scikit_image-0.17.2-cp37-cp37m-linux_aarch64.whl +scikit-learn @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/scikit_learn-0.20.0-cp37-cp37m-linux_aarch64.whl +scipy @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/scipy-1.3.3-cp37-cp37m-linux_aarch64.whl +Shapely==1.7.1 +six==1.16.0 +sqlparse==0.4.1 +sympy==1.4 +tables @ 
http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/tables-3.6.1-cp37-cp37m-linux_aarch64.whl
+te @ file:///tmp/selfgz1419329419/fwkacllib/lib64/te-0.4.0-py3-none-any.whl
+tensorboard==1.15.0
+tensorflow @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/tensorflow-1.15.0-cp37-cp37m-linux_aarch64.whl
+tensorflow-estimator==1.15.1
+tensorflow-probability==0.10.1
+termcolor==1.1.0
+terminaltables==3.1.0
+tf-slim==1.1.0
+tflearn==0.5.0
+tifffile==2021.8.30
+toml==0.10.1
+topi @ file:///tmp/selfgz1419329419/fwkacllib/lib64/topi-0.4.0-py3-none-any.whl
+tornado==6.1
+tqdm==4.46.1
+traitlets==5.0.5
+typing-extensions==3.10.0.2
+umap-learn==0.4.6
+umap-learn-modified @ http://100.95.151.167:6868/aarch64/euler/dls-release/euleros-arm/compiled-wheel/umap_learn_modified-0.3.8-py3-none-any.whl
+urllib3==1.26.6
+wcwidth==0.2.5
+Werkzeug==2.0.1
+wrapt==1.12.1
+xmltodict==0.12.0
+zipp==3.5.0
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/test/.keep b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/test/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/test/train_full_1p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..716e562111189bd0c5333c769d5de0d14c8af28d
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/test/train_full_1p.sh
@@ -0,0 +1,214 @@
+#!/bin/bash
+
+##########################################################
+######### Do NOT modify lines 3 to 100 ##########
+######### Do NOT modify lines 3 to 100 ##########
+######### Do NOT modify lines 3 to 100 ##########
+##########################################################
+# Path of this shell script
+cur_path=`echo $(cd $(dirname $0);pwd)`
+
+# Check whether this script is the performance variant
+perf_flag=`echo $0 | grep performance | wc -l`
+
+# Name of the network being trained
+Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'`
+
+export RANK_SIZE=1
+export RANK_ID=0
+export JOB_ID=10087
+
+# Initialize path parameters
+data_path=''
+output_path=''
+ckpt_path=''
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage: ./train_full_1p.sh <args>"
+    echo " "
+    echo "parameter explain:
+    --data_path              # dataset of training
+    --output_path            # output of training
+    --train_steps            # max_step for training
+    --train_epochs           # max_epoch for training
+    --batch_size             # batch size
+    -h/--help                show help message
+    "
+    exit 1
+fi
+
+# Parameter validation; no modification needed
+for para in $*
+do
+    if [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --output_path* ]];then
+        output_path=`echo ${para#*=}`
+    elif [[ $para == --train_steps* ]];then
+        train_steps=`echo ${para#*=}`
+    elif [[ $para == --train_epochs* ]];then
+        train_epochs=`echo ${para#*=}`
+    elif [[ $para == --batch_size* ]];then
+        batch_size=`echo ${para#*=}`
+    fi
+done
+
+# Verify that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+
+# Check whether output_path was passed in and default it if not; no modification needed
+if [[ $output_path == "" ]];then
+    output_path="./test/output/${ASCEND_DEVICE_ID}"
+fi
+
+# Set the console log file name; keep this, the file name is ${print_log}
+print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log"
+modelarts_flag=${MODELARTS_MODEL_PATH}
+if [ x"${modelarts_flag}" != x ];
+then
+    echo "running without etp..."
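+    # On ModelArts the platform writes the per-device training log under
+    # /home/ma-user/modelarts/log/ (the file name contains "proc-rank"),
+    # so reuse that file as ${print_log} instead of the local default.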
+    print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank`
+    print_log="/home/ma-user/modelarts/log/${print_log_name}"
+fi
+echo "### get your log here : ${print_log}"
+
+CaseName=""
+function get_casename()
+{
+    if [ x"${perf_flag}" = x1 ];
+    then
+        CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf'
+    else
+        CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc'
+    fi
+}
+
+# Change to the code directory
+cd ${cur_path}/../
+rm -rf ./test/output/${ASCEND_DEVICE_ID}
+mkdir -p ./test/output/${ASCEND_DEVICE_ID}
+
+# Record the training start time; no modification needed
+start_time=$(date +%s)
+##########################################################
+######### Do NOT modify lines 3 to 100 ##########
+######### Do NOT modify lines 3 to 100 ##########
+######### Do NOT modify lines 3 to 100 ##########
+##########################################################
+
+#=========================================================
+#=========================================================
+#======== Training command; adapt it to your network =====
+#=========================================================
+#=========================================================
+# Basic parameters; review and adjust for your model
+# The training dataset lives under ${data_path}; use this variable directly
+# Training outputs go under ${output_path}; use this variable directly
+# Other basic parameters may be added, but keep batch_size and set it to the correct value
+batch_size=4
+
+if [ x"${modelarts_flag}" != x ];
+then
+    python ./train_sess.py --snapshot imagenet --phi 0 \
+        --no-evaluation --random-transform \
+        --compute-val-loss --freeze-backbone --step1 \
+        --batch-size 32 --steps 1000 --epochs=40 \
+        --pretrained_model=${data_path}/dataset/weights_file/efficientnet-b0_weights_tf_dim_ordering_tf_kernels_autoaugment_notop.h5 \
+        pascal ${data_path}/dataset/VOCdevkit/VOC2007 > ${print_log} 2>&1
+
+    python ./train_sess.py --snapshot checkpoints/pascal_10.h5 --phi 0 \
+        --no-evaluation --random-transform \
+        --compute-val-loss --freeze-bn \
+        --batch-size 4 --steps 10000 --epochs=10 \
+        pascal ${data_path}/dataset/VOCdevkit/VOC2007 >> ${print_log} 2>&1
+
+    python ./common.py --model_path='checkpoints/pascal_10.h5' \
+        --data_path=${data_path}/dataset/VOCdevkit/VOC2007 >> ${print_log} 2>&1
+
+else
+    python3 ./train_sess.py --snapshot imagenet --phi 0 \
+        --no-evaluation --random-transform \
+        --compute-val-loss --freeze-backbone --step1 \
+        --batch-size 32 --steps 1000 --epochs=40 \
+        --pretrained_model=${data_path}/dataset/weights_file/efficientnet-b0_weights_tf_dim_ordering_tf_kernels_autoaugment_notop.h5 \
+        pascal ${data_path}/dataset/VOCdevkit/VOC2007 > ${print_log} 2>&1
+
+    python3 ./train_sess.py --snapshot checkpoints/pascal_10.h5 --phi 0 \
+        --no-evaluation --random-transform \
+        --compute-val-loss --freeze-bn \
+        --batch-size 128 --steps 10000 --epochs=10 \
+        pascal ${data_path}/dataset/VOCdevkit/VOC2007 > ${print_log} 2>&1
+
+    python3 ./common.py --model_path='checkpoints/pascal_10.h5' \
+        --data_path=${data_path}/dataset/VOCdevkit/VOC2007 >> ${print_log} 2>&1
+fi
+
+# Performance metrics
+StepTime=`grep "ms/step" ${print_log} | awk '{print $5}' | tail -n 1 | tr -d "ms/step"`
+FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}*1000'/'${StepTime}'}'`
+
+# Accuracy metrics
+train_accuracy=`grep "mAP" ${print_log} | awk '{print $2}'`
+# Extract all printed loss values
+grep "step - loss:" ${print_log} | awk '{print $8}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt
+
+
+###########################################################
+######### Do not modify anything below this line ##########
+######### Do not modify anything below this line ##########
+######### Do not modify anything below this line ##########
+###########################################################
+
+# Check whether this run actually used the Ascend NPU
+use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l`
+if [ x"${use_npu_flag}" == x0 ];
+then
+    echo "------------------ ERROR NOTICE START ------------------"
+    echo "ERROR: your task did not use the Ascend NPU, please check your NPU migration."
+    echo "------------------ ERROR NOTICE END ------------------"
+else
+    echo "------------------ INFO NOTICE START ------------------"
+    echo "INFO: your task used the Ascend NPU, please check your result."
+    echo "------------------ INFO NOTICE END ------------------"
+fi
+
+# Get the final casename; keep this, the case file name is ${CaseName}
+get_casename
+
+# Rename the loss file
+if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ];
+then
+    mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt
+fi
+
+# End-to-end training time
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+echo "------------------ Final result ------------------"
+# Print performance: FPS / time per step / end-to-end time
+echo "Final Performance images/sec : $FPS"
+echo "Final Performance ms/step : $StepTime"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Print training accuracy
+echo "Final Train Accuracy : ${train_accuracy}"
+
+# Loss value of the last iteration; no modification needed
+ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`)
+
+# Print key information to ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/tfkeras.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/tfkeras.py
new file mode 100644
index 0000000000000000000000000000000000000000..911f2bb1317e90dc2f2acd38aebeb87bf31db38e
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/tfkeras.py
@@ -0,0 +1,44 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from npu_bridge.npu_init import * +from utils import inject_tfkeras_modules, init_tfkeras_custom_objects +import efficientnet as model + +EfficientNetB0 = inject_tfkeras_modules(model.EfficientNetB0) +EfficientNetB1 = inject_tfkeras_modules(model.EfficientNetB1) +EfficientNetB2 = inject_tfkeras_modules(model.EfficientNetB2) +EfficientNetB3 = inject_tfkeras_modules(model.EfficientNetB3) +EfficientNetB4 = inject_tfkeras_modules(model.EfficientNetB4) +EfficientNetB5 = inject_tfkeras_modules(model.EfficientNetB5) +EfficientNetB6 = inject_tfkeras_modules(model.EfficientNetB6) +EfficientNetB7 = inject_tfkeras_modules(model.EfficientNetB7) + +preprocess_input = inject_tfkeras_modules(model.preprocess_input) + +init_tfkeras_custom_objects() + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/train_sess.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/train_sess.py new file mode 100644 index 0000000000000000000000000000000000000000..3d4275266b576c511ad5298f629b281f6cf9c653 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/train_sess.py @@ -0,0 +1,356 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
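+
+# Training entry point for EfficientDet adapted to the Ascend NPU: the Keras
+# session is rewired through npu_bridge, the Adam optimizer is wrapped in an
+# NPU loss-scale optimizer, and training runs via fit_generator on Pascal VOC,
+# COCO, or CSV datasets.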
+import os +import argparse +from datetime import date +import sys +import tensorflow as tf +from npu_bridge.npu_init import * + + +# import keras +# import keras.preprocessing.image +# import keras.backend as K +# from keras.optimizers import Adam, SGD + +import tensorflow.python.keras as keras +from tensorflow.python.keras import backend as K +from tensorflow.keras.optimizers import Adam, SGD +from augmentor.color import VisualEffect +from augmentor.misc import MiscEffect +from model import efficientdet +from losses import smooth_l1, focal, smooth_l1_quad +from efficientnet import BASE_WEIGHTS_PATH, WEIGHTS_HASHES +from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig + +def makedirs(path): + # Intended behavior: try to create the directory, + # pass if the directory exists already, fails otherwise. + # Meant for Python 2.7/3.n compatibility. + try: + os.makedirs(path) + except OSError: + if not os.path.isdir(path): + raise + + +def create_callbacks(training_model, prediction_model, validation_generator, args): + """ + Creates the callbacks to use during training. + + Args + training_model: The model that is used for training. + prediction_model: The model that should be used for validation. + validation_generator: The generator for creating validation data. + args: parseargs args object. + + Returns: + A list of callbacks used for training. + """ + callbacks = [] + + tensorboard_callback = None + + if args.tensorboard_dir: + if tf.version.VERSION > '2.0.0': + file_writer = tf.summary.create_file_writer(args.tensorboard_dir) + file_writer.set_as_default() + tensorboard_callback = keras.callbacks.TensorBoard( + log_dir=args.tensorboard_dir, + histogram_freq=0, + batch_size=args.batch_size, + write_graph=True, + write_grads=False, + write_images=False, + embeddings_freq=0, + embeddings_layer_names=None, + embeddings_metadata=None + ) + callbacks.append(tensorboard_callback) + + if args.evaluation and validation_generator: + if args.dataset_type == 'coco': + from eval.coco import Evaluate + # use prediction model for evaluation + evaluation = Evaluate(validation_generator, prediction_model, tensorboard=tensorboard_callback) + else: + from eval.pascal import Evaluate + evaluation = Evaluate(validation_generator, prediction_model, tensorboard=tensorboard_callback) + callbacks.append(evaluation) + + # save the model + if args.snapshots: + # ensure directory created first; otherwise h5py will error after epoch. + makedirs(args.snapshot_path) + checkpoint = keras.callbacks.ModelCheckpoint( + os.path.join( + args.snapshot_path, + f'{args.dataset_type}_{{epoch:02d}}.h5' if args.step1 + else f'{args.dataset_type}_ft_{{epoch:02d}}.h5' + ), + verbose=1, + save_weights_only=True, + # save_best_only=True, + # monitor="mAP", + # mode='max' + ) + callbacks.append(checkpoint) + return callbacks + + +def create_generators(args): + """ + Create generators for training and validation. + + Args + args: parseargs object containing configuration for generators. + preprocess_image: Function that preprocesses an image for the network. 
+ """ + common_args = { + 'batch_size': args.batch_size, + 'phi': args.phi, + 'detect_text': args.detect_text, + 'detect_quadrangle': args.detect_quadrangle + } + + # create random transform generator for augmenting training data + if args.random_transform: + misc_effect = MiscEffect() + visual_effect = VisualEffect() + else: + misc_effect = None + visual_effect = None + + if args.dataset_type == 'pascal': + from generators.pascal import PascalVocGenerator + train_generator = PascalVocGenerator( + args.pascal_path, + 'trainval', + skip_difficult=True, + misc_effect=misc_effect, + visual_effect=visual_effect, + **common_args + ) + + validation_generator = PascalVocGenerator( + args.pascal_path, + 'val', + skip_difficult=True, + shuffle_groups=False, + **common_args + ) + elif args.dataset_type == 'csv': + from generators.csv_ import CSVGenerator + train_generator = CSVGenerator( + args.annotations_path, + args.classes_path, + misc_effect=misc_effect, + visual_effect=visual_effect, + **common_args + ) + + if args.val_annotations_path: + validation_generator = CSVGenerator( + args.val_annotations_path, + args.classes_path, + shuffle_groups=False, + **common_args + ) + else: + validation_generator = None + + elif args.dataset_type == 'coco': + # import here to prevent unnecessary dependency on cocoapi + from generators.coco import CocoGenerator + train_generator = CocoGenerator( + args.coco_path, + 'train2017', + misc_effect=misc_effect, + visual_effect=visual_effect, + group_method='random', + **common_args + ) + + validation_generator = CocoGenerator( + args.coco_path, + 'val2017', + shuffle_groups=False, + **common_args + ) + else: + raise ValueError('Invalid data type received: {}'.format(args.dataset_type)) + + return train_generator, validation_generator + + +def parse_args(args): + """ + Parse the arguments. + """ + today = str(date.today()) + parser = argparse.ArgumentParser(description='Simple training script for training a RetinaNet network.') + subparsers = parser.add_subparsers(help='Arguments for specific dataset types.', dest='dataset_type') + subparsers.required = True + + coco_parser = subparsers.add_parser('coco') + coco_parser.add_argument('coco_path', help='Path to dataset directory (ie. /tmp/COCO).') + + pascal_parser = subparsers.add_parser('pascal') + pascal_parser.add_argument('pascal_path', help='Path to dataset directory (ie. 
/tmp/VOCdevkit).')
+
+    csv_parser = subparsers.add_parser('csv')
+    csv_parser.add_argument('annotations_path', help='Path to CSV file containing annotations for training.')
+    csv_parser.add_argument('classes_path', help='Path to a CSV file containing class label mapping.')
+    csv_parser.add_argument('--val-annotations-path',
+                            help='Path to CSV file containing annotations for validation (optional).')
+    parser.add_argument('--detect-quadrangle', help='Whether to detect quadrangles.', action='store_true', default=False)
+    parser.add_argument('--detect-text', help='Whether this is a text detection task.', action='store_true', default=False)
+
+    parser.add_argument('--snapshot', help='Resume training from a snapshot.')
+    parser.add_argument('--freeze-backbone', help='Freeze training of backbone layers.', action='store_true')
+    parser.add_argument('--freeze-bn', help='Freeze training of BatchNormalization layers.', action='store_true')
+    parser.add_argument('--weighted-bifpn', help='Use weighted BiFPN', action='store_true')
+
+    parser.add_argument('--batch-size', help='Size of the batches.', default=1, type=int)
+    parser.add_argument('--phi', help='Hyper parameter phi', default=0, type=int, choices=(0, 1, 2, 3, 4, 5, 6))
+    # parser.add_argument('--gpu', help='Id of the GPU to use (as reported by nvidia-smi).')
+    parser.add_argument('--epochs', help='Number of epochs to train.', type=int, default=50)
+    parser.add_argument('--steps', help='Number of steps per epoch.', type=int, default=10000)
+    parser.add_argument('--snapshot_path',
+                        help='Path to store snapshots of models during training',
+                        default='checkpoints/')
+    parser.add_argument('--tensorboard-dir', help='Log directory for Tensorboard output',
+                        default='logs/{}'.format(today))
+    parser.add_argument('--no-snapshots', help='Disable saving snapshots.', dest='snapshots', action='store_false')
+    parser.add_argument('--no-evaluation', help='Disable per epoch evaluation.', dest='evaluation',
+                        action='store_false')
+    parser.add_argument('--random-transform', help='Randomly transform image and annotations.', action='store_true')
+    parser.add_argument('--compute-val-loss', help='Compute validation loss during training', dest='compute_val_loss',
+                        action='store_true')
+    parser.add_argument('--step1', help='First training stage (affects checkpoint naming).', dest='step1',
+                        action='store_true')
+    # Fit generator arguments
+    parser.add_argument('--multiprocessing', help='Use multiprocessing in fit_generator.', action='store_true')
+    parser.add_argument('--workers', help='Number of generator workers.', type=int, default=1)
+    parser.add_argument('--max-queue-size', help='Queue length for multiprocessing workers in fit_generator.', type=int,
+                        default=10)
+    parser.add_argument('--train_file_pattern', help='Path to tfrecord files.', default='')
+    parser.add_argument('--pretrained_model', help='Path to the pretrained backbone weights (.h5).', default='/home/dingwei/efficientdet/efficientnet-b0_weights_tf_dim_ordering_tf_kernels_autoaugment_notop.h5')
+    # Echo the parsed arguments for debugging.
+    print(vars(parser.parse_args(args)))
+    return parser.parse_args(args)
+    # return check_args(parser.parse_args(args))
+
+
+def main(args=None):
+    # parse arguments
+    if args is None:
+        args = sys.argv[1:]
+    args = parse_args(args)
+
+    # create the generators
+    train_generator, validation_generator = create_generators(args)
+    num_classes = train_generator.num_classes()
+    num_anchors = train_generator.num_anchors
+    model, prediction_model = efficientdet(args.phi,
+                                           num_classes=num_classes,
+                                           num_anchors=num_anchors,
+                                           weighted_bifpn=args.weighted_bifpn,
+                                           freeze_bn=args.freeze_bn,
+                                           
detect_quadrangle=args.detect_quadrangle + ) + + # load pretrained weights + if args.snapshot: + if args.snapshot == 'imagenet': + model.load_weights(args.pretrained_model, by_name=True) + else: + print('Loading model, this may take a second...') + model.load_weights(args.snapshot, by_name=True) + + # freeze backbone layers + if args.freeze_backbone: + # 227, 329, 329, 374, 464, 566, 656 + for i in range(1, [227, 329, 329, 374, 464, 566, 656][args.phi]): + model.layers[i].trainable = False + + loss_scale_manager = ExponentialUpdateLossScaleManager(init_loss_scale=2**32, incr_every_n_steps=1000, decr_every_n_nan_or_inf=2, decr_ratio=0.5) + opt_tmp = npu_tf_optimizer(tf.train.AdamOptimizer(learning_rate=1e-3)) + optimizer = NPULossScaleOptimizer(opt_tmp, loss_scale_manager) + + # compile model + model.compile(optimizer=optimizer, loss={ + 'regression': smooth_l1_quad() if args.detect_quadrangle else smooth_l1(), + 'classification': focal() + }, ) + # create the callbacks + callbacks = create_callbacks( + model, + prediction_model, + validation_generator, + args, + ) + if not args.compute_val_loss: + validation_generator = None + elif args.compute_val_loss and validation_generator is None: + raise ValueError('When you have no validation data, you should not specify --compute-val-loss.') + + # start training + return model.fit_generator( + generator=train_generator, + steps_per_epoch=args.steps, + initial_epoch=0, + epochs=args.epochs, + verbose=1, + callbacks=callbacks, + workers=args.workers, + use_multiprocessing=args.multiprocessing, + max_queue_size=args.max_queue_size, + validation_data=validation_generator + ) + + + + +if __name__ == '__main__': + # NPU setting + K.clear_session() + sess_config = tf.ConfigProto() + custom_op = sess_config.graph_options.rewrite_options.custom_optimizers.add() + #custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("force_fp32") + custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") + custom_op.name = "NpuOptimizer" + custom_op.parameter_map["use_off_line"].b = True + sess_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + sess_config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF + sess = tf.Session(config=sess_config) + K.set_session(sess) + + main() + + sess.close() diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/__init__.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..739f04a2067ea4018fda16f1a78344ba7f8dc742 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/__init__.py @@ -0,0 +1,197 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from npu_bridge.npu_init import * +import functools +import cv2 +import numpy as np + +_KERAS_BACKEND = None +_KERAS_LAYERS = None +_KERAS_MODELS = None +_KERAS_UTILS = None + + +def get_submodules_from_kwargs(kwargs): + backend = kwargs.get('backend', _KERAS_BACKEND) + layers = kwargs.get('layers', _KERAS_LAYERS) + models = kwargs.get('models', _KERAS_MODELS) + utils = kwargs.get('utils', _KERAS_UTILS) + for key in kwargs.keys(): + if key not in ['backend', 'layers', 'models', 'utils']: + raise TypeError('Invalid keyword argument: %s', key) + return backend, layers, models, utils + + +def inject_keras_modules(func): + import keras + @functools.wraps(func) + def wrapper(*args, **kwargs): + kwargs['backend'] = keras.backend + kwargs['layers'] = keras.layers + kwargs['models'] = keras.models + kwargs['utils'] = keras.utils + return func(*args, **kwargs) + + return wrapper + + +def inject_tfkeras_modules(func): + import tensorflow.keras as tfkeras + @functools.wraps(func) + def wrapper(*args, **kwargs): + kwargs['backend'] = tfkeras.backend + kwargs['layers'] = tfkeras.layers + kwargs['models'] = tfkeras.models + kwargs['utils'] = tfkeras.utils + return func(*args, **kwargs) + + return wrapper + + +def init_keras_custom_objects(): + import keras + import efficientnet as model + + custom_objects = { + 'swish': inject_keras_modules(model.get_swish)(), + 'FixedDropout': inject_keras_modules(model.get_dropout)() + } + + keras.utils.generic_utils.get_custom_objects().update(custom_objects) + + +def init_tfkeras_custom_objects(): + import tensorflow.keras as tfkeras + import efficientnet as model + + custom_objects = { + 'swish': inject_tfkeras_modules(model.get_swish)(), + 'FixedDropout': inject_tfkeras_modules(model.get_dropout)() + } + + tfkeras.utils.get_custom_objects().update(custom_objects) + + +def preprocess_image(image, image_size): + # image, RGB + image_height, image_width = image.shape[:2] + if image_height > image_width: + scale = image_size / image_height + resized_height = image_size + resized_width = int(image_width * scale) + else: + scale = image_size / image_width + resized_height = int(image_height * scale) + resized_width = image_size + + image = cv2.resize(image, (resized_width, resized_height)) + image = image.astype(np.float32) + image /= 255. + mean = [0.485, 0.456, 0.406] + std = [0.229, 0.224, 0.225] + image -= mean + image /= std + pad_h = image_size - resized_height + pad_w = image_size - resized_width + image = np.pad(image, [(0, pad_h), (0, pad_w), (0, 0)], mode='constant') + + return image, scale + + +def rotate_image(image): + rotate_degree = np.random.uniform(low=-45, high=45) + h, w = image.shape[:2] + # Compute the rotation matrix. + M = cv2.getRotationMatrix2D(center=(w / 2, h / 2), + angle=rotate_degree, + scale=1) + + # Get the sine and cosine from the rotation matrix. 
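+    # cv2.getRotationMatrix2D returns [[cos, sin, tx], [-sin, cos, ty]] for scale=1,
+    # so |M[0, 0]| = |cos(theta)| and |M[0, 1]| = |sin(theta)|. A w x h image rotated
+    # by theta fits inside a canvas of size (w*|cos| + h*|sin|) x (w*|sin| + h*|cos|).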
+ abs_cos_angle = np.abs(M[0, 0]) + abs_sin_angle = np.abs(M[0, 1]) + + # Compute the new bounding dimensions of the image. + new_w = int(h * abs_sin_angle + w * abs_cos_angle) + new_h = int(h * abs_cos_angle + w * abs_sin_angle) + + # Adjust the rotation matrix to take into account the translation. + M[0, 2] += new_w // 2 - w // 2 + M[1, 2] += new_h // 2 - h // 2 + + # Rotate the image. + image = cv2.warpAffine(image, M=M, dsize=(new_w, new_h), flags=cv2.INTER_CUBIC, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(128, 128, 128)) + + return image + + +def reorder_vertexes(vertexes): + """ + reorder vertexes as the paper shows, (top, right, bottom, left) + Args: + vertexes: np.array (4, 2), should be in clockwise + + Returns: + + """ + assert vertexes.shape == (4, 2) + xmin, ymin = np.min(vertexes, axis=0) + xmax, ymax = np.max(vertexes, axis=0) + + # determine the first point with the smallest y, + # if two vertexes has same y, choose that with smaller x, + ordered_idxes = np.argsort(vertexes, axis=0) + ymin1_idx = ordered_idxes[0, 1] + ymin2_idx = ordered_idxes[1, 1] + if vertexes[ymin1_idx, 1] == vertexes[ymin2_idx, 1]: + if vertexes[ymin1_idx, 0] <= vertexes[ymin2_idx, 0]: + first_vertex_idx = ymin1_idx + else: + first_vertex_idx = ymin2_idx + else: + first_vertex_idx = ymin1_idx + ordered_idxes = [(first_vertex_idx + i) % 4 for i in range(4)] + ordered_vertexes = vertexes[ordered_idxes] + # drag the point to the corresponding edge + ordered_vertexes[0, 1] = ymin + ordered_vertexes[1, 0] = xmax + ordered_vertexes[2, 1] = ymax + ordered_vertexes[3, 0] = xmin + return ordered_vertexes + + +def postprocess_boxes(boxes, scale, height, width): + boxes /= scale + boxes[:, 0] = np.clip(boxes[:, 0], 0, width - 1) + boxes[:, 1] = np.clip(boxes[:, 1], 0, height - 1) + boxes[:, 2] = np.clip(boxes[:, 2], 0, width - 1) + boxes[:, 3] = np.clip(boxes[:, 3], 0, height - 1) + return boxes + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/anchors.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/anchors.py new file mode 100644 index 0000000000000000000000000000000000000000..8b3ef982a9e734a811639751ea0e0338d4e7ed37 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/anchors.py @@ -0,0 +1,404 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+import os
+import numpy as np
+from tensorflow import keras
+# import compute_overlap as compute_overlap
+
+
+def compute_overlap(a, b):
+    # a: [N, 4]
+    # b: [M, 4]
+    area = (b[:, 2] - b[:, 0] + 1) * (b[:, 3] - b[:, 1] + 1)
+    iw = np.minimum(np.expand_dims(a[:, 2], axis=1), b[:, 2]) - np.maximum(np.expand_dims(a[:, 0], axis=1), b[:, 0]) + 1
+    ih = np.minimum(np.expand_dims(a[:, 3], axis=1), b[:, 3]) - np.maximum(np.expand_dims(a[:, 1], axis=1), b[:, 1]) + 1
+    # Suppose a has N boxes and b has M boxes.
+    # np.expand_dims((N,), axis=1) turns (N,) into (N, 1), and
+    # np.minimum((N, 1), (M,)) broadcasts to an (N, M) matrix comparing every a against every b.
+    # Taking the smaller of the x and y extents gives the intersection;
+    # iw and ih are the intersection width and height, both of shape (N, M),
+    # i.e. the intersection between every anchor and every ground-truth box.
+    iw = np.maximum(iw, 0)
+    ih = np.maximum(ih, 0)  # iw and ih must not be negative
+
+    ua = np.expand_dims((a[:, 2] - a[:, 0] + 1) * (a[:, 3] - a[:, 1] + 1), axis=1) + area - iw * ih
+    # Union: S_a + S_b - intersection_ab
+    ua = np.maximum(ua, np.finfo(float).eps)
+
+    intersection = iw * ih
+    return intersection / ua  # (N, M)
+
+
+class AnchorParameters:
+    """
+    The parameters that define how anchors are generated.
+
+    Args
+        sizes : List of sizes to use. Each size corresponds to one feature level.
+        strides : List of strides to use. Each stride corresponds to one feature level.
+        ratios : List of ratios to use per location in a feature map.
+        scales : List of scales to use per location in a feature map.
+    """
+
+    def __init__(self, sizes=(32, 64, 128, 256, 512),
+                 strides=(8, 16, 32, 64, 128),
+                 ratios=(1, 0.5, 2),
+                 scales=(2 ** 0, 2 ** (1. / 3.), 2 ** (2. / 3.))):
+        self.sizes = sizes
+        self.strides = strides
+        self.ratios = np.array(ratios, dtype=keras.backend.floatx())
+        self.scales = np.array(scales, dtype=keras.backend.floatx())
+
+    def num_anchors(self):
+        return len(self.ratios) * len(self.scales)
+
+
+"""
+The default anchor parameters.
+"""
+AnchorParameters.default = AnchorParameters(
+    sizes=[32, 64, 128, 256, 512],
+    strides=[8, 16, 32, 64, 128],
+    # ratio = h / w
+    ratios=np.array([1, 0.5, 2], keras.backend.floatx()),
+    scales=np.array([2 ** 0, 2 ** (1.0 / 3.0), 2 ** (2.0 / 3.0)], keras.backend.floatx()),
+)
+
+
+def anchor_targets_bbox(
+        anchors,
+        image_group,
+        annotations_group,
+        num_classes,
+        negative_overlap=0.4,
+        positive_overlap=0.5,
+        detect_quadrangle=False
+):
+    """
+    Generate anchor targets for bbox detection.
+
+    Args
+        anchors: np.array of annotations of shape (N, 4) for (x1, y1, x2, y2).
+        image_group: List of BGR images.
+        annotations_group: List of annotations (np.array of shape (N, 5) for (x1, y1, x2, y2, label)).
+        num_classes: Number of classes to predict.
+        detect_quadrangle: If True, also regress the quadrangle alphas and ratios (9 + 1 regression channels).
+        negative_overlap: IoU overlap for negative anchors (all anchors with overlap < negative_overlap are negative).
+        positive_overlap: IoU overlap for positive anchors (all anchors with overlap > positive_overlap are positive).
+
+    Returns
+        labels_batch: batch that contains labels & anchor states (np.array of shape (batch_size, N, num_classes + 1),
+            where N is the number of anchors for an image and the last column defines the anchor state
+            (-1 for ignore, 0 for bg, 1 for fg).
+        regression_batch: batch that contains bounding-box regression targets for an image & anchor states
+            (np.array of shape (batch_size, N, 4 + 1), where N is the number of anchors for an image,
+            the first 4 columns define regression targets for (x1, y1, x2, y2) and the last column defines
+            anchor states (-1 for ignore, 0 for bg, 1 for fg).
+    """
+
+    assert (len(image_group) == len(annotations_group)), "The length of the images and annotations need to be equal."
+    assert (len(annotations_group) > 0), "No data received to compute anchor targets for."
+    for annotations in annotations_group:
+        assert ('bboxes' in annotations), "Annotations should contain bboxes."
+        assert ('labels' in annotations), "Annotations should contain labels."
+
+    batch_size = len(image_group)
+
+    if detect_quadrangle:
+        regression_batch = np.zeros((batch_size, anchors.shape[0], 9 + 1), dtype=np.float32)
+    else:
+        regression_batch = np.zeros((batch_size, anchors.shape[0], 4 + 1), dtype=np.float32)
+    labels_batch = np.zeros((batch_size, anchors.shape[0], num_classes + 1), dtype=np.float32)
+
+    # compute labels and regression targets
+    for index, (image, annotations) in enumerate(zip(image_group, annotations_group)):
+        if annotations['bboxes'].shape[0]:
+            # obtain indices of gt annotations with the greatest overlap
+            # argmax_overlaps_inds: id of the ground-truth box with the greatest overlap for each anchor
+            # (N, ), (N, ), (N, ) where N is num_anchors
+            positive_indices, ignore_indices, argmax_overlaps_inds = compute_gt_annotations(anchors,
+                                                                                            annotations['bboxes'],
+                                                                                            negative_overlap,
+                                                                                            positive_overlap)
+            labels_batch[index, ignore_indices, -1] = -1
+            labels_batch[index, positive_indices, -1] = 1
+
+            regression_batch[index, ignore_indices, -1] = -1
+            regression_batch[index, positive_indices, -1] = 1
+
+            # compute target class labels
+            labels_batch[
+                index, positive_indices, annotations['labels'][argmax_overlaps_inds[positive_indices]].astype(int)] = 1
+
+            regression_batch[index, :, :4] = bbox_transform(anchors, annotations['bboxes'][argmax_overlaps_inds, :])
+            if detect_quadrangle:
+                regression_batch[index, :, 4:8] = annotations['alphas'][argmax_overlaps_inds, :]
+                regression_batch[index, :, 8] = annotations['ratios'][argmax_overlaps_inds]
+
+        # ignore anchors outside of image
+        if image.shape:
+            anchors_centers = np.vstack([(anchors[:, 0] + anchors[:, 2]) / 2, (anchors[:, 1] + anchors[:, 3]) / 2]).T
+            indices = np.logical_or(anchors_centers[:, 0] >= image.shape[1], anchors_centers[:, 1] >= image.shape[0])
+
+            labels_batch[index, indices, -1] = -1
+            regression_batch[index, indices, -1] = -1
+
+    return labels_batch, regression_batch
+
+
+def compute_gt_annotations(
+        anchors,
+        annotations,
+        negative_overlap=0.4,
+        positive_overlap=0.5
+):
+    """
+    Obtain indices of gt annotations with the greatest overlap.
+
+    Args
+        anchors: np.array of annotations of shape (N, 4) for (x1, y1, x2, y2).
+        annotations: np.array of shape (K, 5) for (x1, y1, x2, y2, label).
+        negative_overlap: IoU overlap for negative anchors (all anchors with overlap < negative_overlap are negative).
+        positive_overlap: IoU overlap for positive anchors (all anchors with overlap > positive_overlap are positive).
+
+    Returns
+        positive_indices: indices of positive anchors, (N, )
+        ignore_indices: indices of ignored anchors, (N, )
+        argmax_overlaps_inds: for each anchor, the index of the ground-truth box with the highest overlap, (N, )
+    """
+    # (N, K)
+    overlaps = compute_overlap(anchors.astype(np.float64), annotations.astype(np.float64))
+    # (N, )
+    argmax_overlaps_inds = np.argmax(overlaps, axis=1)
+    # (N, )
+    max_overlaps = overlaps[np.arange(overlaps.shape[0]), argmax_overlaps_inds]
+
+    # assign "don't care" labels
+    # (N, )
+    positive_indices = max_overlaps >= positive_overlap
+
+    # adam: in case there are gt boxes that have no matched positive anchors
+    # nonzero_inds = np.nonzero(overlaps == np.max(overlaps, axis=0))
+    # positive_indices[nonzero_inds[0]] = 1
+
+    # (N, )
+    ignore_indices = (max_overlaps > negative_overlap) & ~positive_indices
+
+    return positive_indices, ignore_indices, argmax_overlaps_inds
+
+
+def layer_shapes(image_shape, model):
+    """
+    Compute layer shapes given input image shape and the model.
+
+    Args
+        image_shape: The shape of the image.
+        model: The model to use for computing how the image shape is transformed in the pyramid.
+
+    Returns
+        A dictionary mapping layer names to image shapes.
+    """
+    shape = {
+        model.layers[0].name: (None,) + image_shape,
+    }
+
+    for layer in model.layers[1:]:
+        nodes = layer._inbound_nodes
+        for node in nodes:
+            input_shapes = [shape[inbound_layer.name] for inbound_layer in node.inbound_layers]
+            if not input_shapes:
+                continue
+            shape[layer.name] = layer.compute_output_shape(input_shapes[0] if len(input_shapes) == 1 else input_shapes)
+
+    return shape
+
+
+def make_shapes_callback(model):
+    """
+    Make a function for getting the shape of the pyramid levels.
+    """
+
+    def get_shapes(image_shape, pyramid_levels):
+        shape = layer_shapes(image_shape, model)
+        image_shapes = [shape["P{}".format(level)][1:3] for level in pyramid_levels]
+        return image_shapes
+
+    return get_shapes
+
+
+def guess_shapes(image_shape, pyramid_levels):
+    """
+    Guess shapes based on pyramid levels.
+
+    Args
+        image_shape: The shape of the image.
+        pyramid_levels: A list of what pyramid levels are used.
+
+    Returns
+        A list of image shapes at each pyramid level.
+    """
+    image_shape = np.array(image_shape[:2])
+    image_shapes = [(image_shape + 2 ** x - 1) // (2 ** x) for x in pyramid_levels]
+    return image_shapes
+
+
+def anchors_for_shape(
+        image_shape,
+        pyramid_levels=None,
+        anchor_params=None,
+        shapes_callback=None,
+):
+    """
+    Generate anchors for a given shape.
+
+    Args
+        image_shape: The shape of the image.
+        pyramid_levels: List of ints representing which pyramids to use (defaults to [3, 4, 5, 6, 7]).
+        anchor_params: Struct containing anchor parameters. If None, default values are used.
+        shapes_callback: Function to call for getting the shape of the image at different pyramid levels.
+
+    Returns
+        np.array of shape (N, 4) containing the (x1, y1, x2, y2) coordinates for the anchors.
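+
+    Example (illustrative; with the default 3 ratios x 3 scales there are
+    9 anchors per feature-map cell):
+        anchors = anchors_for_shape((512, 512, 3))
+        # P3..P7 grids are 64x64, 32x32, 16x16, 8x8 and 4x4, so
+        # anchors.shape == (9 * (4096 + 1024 + 256 + 64 + 16), 4) == (49104, 4)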
+ """ + + if pyramid_levels is None: + pyramid_levels = [3, 4, 5, 6, 7] + + if anchor_params is None: + anchor_params = AnchorParameters.default + + if shapes_callback is None: + shapes_callback = guess_shapes + feature_map_shapes = shapes_callback(image_shape, pyramid_levels) + + # compute anchors over all pyramid levels + all_anchors = np.zeros((0, 4), dtype=np.float32) + for idx, p in enumerate(pyramid_levels): + anchors = generate_anchors( + base_size=anchor_params.sizes[idx], + ratios=anchor_params.ratios, + scales=anchor_params.scales + ) + shifted_anchors = shift(feature_map_shapes[idx], anchor_params.strides[idx], anchors) + all_anchors = np.append(all_anchors, shifted_anchors, axis=0) + + return all_anchors.astype(np.float32) + + +def shift(feature_map_shape, stride, anchors): + """ + Produce shifted anchors based on shape of the map and stride size. + + Args + feature_map_shape : Shape to shift the anchors over. + stride : Stride to shift the anchors with over the shape. + anchors: The anchors to apply at each location. + """ + + # create a grid starting from half stride from the top left corner + shift_x = (np.arange(0, feature_map_shape[1]) + 0.5) * stride + shift_y = (np.arange(0, feature_map_shape[0]) + 0.5) * stride + + shift_x, shift_y = np.meshgrid(shift_x, shift_y) + + shifts = np.vstack(( + shift_x.ravel(), shift_y.ravel(), + shift_x.ravel(), shift_y.ravel() + )).transpose() + + A = anchors.shape[0] + K = shifts.shape[0] + all_anchors = (anchors.reshape((1, A, 4)) + shifts.reshape((1, K, 4)).transpose((1, 0, 2))) + all_anchors = all_anchors.reshape((K * A, 4)) + + return all_anchors + + +def generate_anchors(base_size=16, ratios=None, scales=None): + """ + Generate anchor (reference) windows by enumerating aspect ratios X scales w.r.t. a reference window. + + Args: + base_size: + ratios: + scales: + + Returns: + + """ + if ratios is None: + ratios = AnchorParameters.default.ratios + + if scales is None: + scales = AnchorParameters.default.scales + + num_anchors = len(ratios) * len(scales) + + # initialize output anchors + anchors = np.zeros((num_anchors, 4)) + + anchors[:, 2:] = base_size * np.tile(np.repeat(scales, len(ratios))[None], (2, 1)).T + + areas = anchors[:, 2] * anchors[:, 3] + + # correct for ratios + anchors[:, 2] = np.sqrt(areas / np.tile(ratios, len(scales))) + anchors[:, 3] = anchors[:, 2] * np.tile(ratios, len(scales)) + + anchors[:, 0::2] -= np.tile(anchors[:, 2] * 0.5, (2, 1)).T + anchors[:, 1::2] -= np.tile(anchors[:, 3] * 0.5, (2, 1)).T + + return anchors + + +def bbox_transform(anchors, gt_boxes, scale_factors=None): + wa = anchors[:, 2] - anchors[:, 0] + ha = anchors[:, 3] - anchors[:, 1] + cxa = anchors[:, 0] + wa / 2. + cya = anchors[:, 1] + ha / 2. + + w = gt_boxes[:, 2] - gt_boxes[:, 0] + h = gt_boxes[:, 3] - gt_boxes[:, 1] + cx = gt_boxes[:, 0] + w / 2. + cy = gt_boxes[:, 1] + h / 2. + # Avoid NaN in division and log below. 
+    ha += 1e-7
+    wa += 1e-7
+    h += 1e-7
+    w += 1e-7
+    tx = (cx - cxa) / wa
+    ty = (cy - cya) / ha
+    tw = np.log(w / wa)
+    th = np.log(h / ha)
+    if scale_factors:
+        ty /= scale_factors[0]
+        tx /= scale_factors[1]
+        th /= scale_factors[2]
+        tw /= scale_factors[3]
+    targets = np.stack([ty, tx, th, tw], axis=1)
+    return targets
+
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/colors.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/colors.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a92e2df01e9a4d9ee5f46c2d4196615081dd031
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/colors.py
@@ -0,0 +1,141 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+import warnings
+
+
+def label_color(label):
+    """ Return a color from a set of predefined colors. Contains 80 colors in total.
+
+    Args
+        label: The label to get the color for.
+
+    Returns
+        A list of three values representing an RGB color.
+
+        If no color is defined for a certain label, the color green is returned and a warning is printed.
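+
+        Example (illustrative):
+            label_color(0)   # -> [31, 0, 255]
+            label_color(99)  # out of range -> (0, 255, 0) plus a warning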
+ """ + if label < len(colors): + return colors[label] + else: + warnings.warn('Label {} has no color, returning default.'.format(label)) + return (0, 255, 0) + + +""" +Generated using: + +``` +colors = [list((matplotlib.colors.hsv_to_rgb([x, 1.0, 1.0]) * 255).astype(int)) for x in np.arange(0, 1, 1.0 / 80)] +shuffle(colors) +pprint(colors) +``` +""" +colors = [ + [31 , 0 , 255] , + [0 , 159 , 255] , + [255 , 95 , 0] , + [255 , 19 , 0] , + [255 , 0 , 0] , + [255 , 38 , 0] , + [0 , 255 , 25] , + [255 , 0 , 133] , + [255 , 172 , 0] , + [108 , 0 , 255] , + [0 , 82 , 255] , + [0 , 255 , 6] , + [255 , 0 , 152] , + [223 , 0 , 255] , + [12 , 0 , 255] , + [0 , 255 , 178] , + [108 , 255 , 0] , + [184 , 0 , 255] , + [255 , 0 , 76] , + [146 , 255 , 0] , + [51 , 0 , 255] , + [0 , 197 , 255] , + [255 , 248 , 0] , + [255 , 0 , 19] , + [255 , 0 , 38] , + [89 , 255 , 0] , + [127 , 255 , 0] , + [255 , 153 , 0] , + [0 , 255 , 255] , + [0 , 255 , 216] , + [0 , 255 , 121] , + [255 , 0 , 248] , + [70 , 0 , 255] , + [0 , 255 , 159] , + [0 , 216 , 255] , + [0 , 6 , 255] , + [0 , 63 , 255] , + [31 , 255 , 0] , + [255 , 57 , 0] , + [255 , 0 , 210] , + [0 , 255 , 102] , + [242 , 255 , 0] , + [255 , 191 , 0] , + [0 , 255 , 63] , + [255 , 0 , 95] , + [146 , 0 , 255] , + [184 , 255 , 0] , + [255 , 114 , 0] , + [0 , 255 , 235] , + [255 , 229 , 0] , + [0 , 178 , 255] , + [255 , 0 , 114] , + [255 , 0 , 57] , + [0 , 140 , 255] , + [0 , 121 , 255] , + [12 , 255 , 0] , + [255 , 210 , 0] , + [0 , 255 , 44] , + [165 , 255 , 0] , + [0 , 25 , 255] , + [0 , 255 , 140] , + [0 , 101 , 255] , + [0 , 255 , 82] , + [223 , 255 , 0] , + [242 , 0 , 255] , + [89 , 0 , 255] , + [165 , 0 , 255] , + [70 , 255 , 0] , + [255 , 0 , 172] , + [255 , 76 , 0] , + [203 , 255 , 0] , + [204 , 0 , 255] , + [255 , 0 , 229] , + [255 , 133 , 0] , + [127 , 0 , 255] , + [0 , 235 , 255] , + [0 , 255 , 197] , + [255 , 0 , 191] , + [0 , 44 , 255] , + [50 , 255 , 0] +] + diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/compute_overlap.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/compute_overlap.py new file mode 100644 index 0000000000000000000000000000000000000000..d5193be1c0961644997fda8713b9d62d5fcf2b00 --- /dev/null +++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/compute_overlap.py @@ -0,0 +1,49 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import numpy as np
+
+def compute_overlap(a, b):
+    # a: [N, 4]
+    # b: [M, 4]
+    area = (b[:, 2] - b[:, 0] + 1) * (b[:, 3] - b[:, 1] + 1)
+    iw = np.minimum(np.expand_dims(a[:, 2], axis=1), b[:, 2]) - np.maximum(np.expand_dims(a[:, 0], axis=1), b[:, 0]) + 1
+    ih = np.minimum(np.expand_dims(a[:, 3], axis=1), b[:, 3]) - np.maximum(np.expand_dims(a[:, 1], axis=1), b[:, 1]) + 1
+    # Suppose a has N boxes and b has M boxes.
+    # np.expand_dims((N,), axis=1) turns (N,) into (N, 1), and
+    # np.minimum((N, 1), (M,)) broadcasts to an (N, M) matrix comparing every a against every b.
+    # Taking the smaller of the x and y extents gives the intersection;
+    # iw and ih are the intersection width and height, both of shape (N, M),
+    # i.e. the intersection between every anchor and every ground-truth box.
+    iw = np.maximum(iw, 0)
+    ih = np.maximum(ih, 0)  # iw and ih must not be negative
+
+    ua = np.expand_dims((a[:, 2] - a[:, 0] + 1) * (a[:, 3] - a[:, 1] + 1), axis=1) + area - iw * ih
+    # Union: S_a + S_b - intersection_ab
+    ua = np.maximum(ua, np.finfo(float).eps)
+
+    intersection = iw * ih
+    return intersection / ua  # (N, M)
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/draw_boxes.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/draw_boxes.py
new file mode 100644
index 0000000000000000000000000000000000000000..d1fe66a7234c8b09e17a7d1c12f908ae5dc805ed
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/draw_boxes.py
@@ -0,0 +1,46 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+import cv2
+
+
+def draw_boxes(image, boxes, scores, labels, colors, classes):
+    for b, l, s in zip(boxes, labels, scores):
+        class_id = int(l)
+        class_name = classes[class_id]
+
+        xmin, ymin, xmax, ymax = list(map(int, b))
+        score = '{:.4f}'.format(s)
+        color = colors[class_id]
+        label = '-'.join([class_name, score])
+
+        ret, baseline = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1)
+        cv2.rectangle(image, (xmin, ymin), (xmax, ymax), color, 1)
+        cv2.rectangle(image, (xmin, ymax - ret[1] - baseline), (xmin + ret[0], ymax), color, -1)
+        cv2.putText(image, label, (xmin, ymax - baseline), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1)
+
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/image.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/image.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b92454ecff0f111a05a634756b3d5dea29a48c5
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/image.py
@@ -0,0 +1,386 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import division
+from npu_bridge.npu_init import *
+import numpy as np
+import cv2
+from PIL import Image
+
+from .transform import change_transform_origin
+
+
+def read_image_bgr(path):
+    """
+    Read an image in BGR format.
+
+    Args
+        path: Path to the image.
+    """
+    # We deliberately don't use cv2.imread here, since it gives no feedback on errors while reading the image.
+    image = np.asarray(Image.open(path).convert('RGB'))
+    return image[:, :, ::-1].copy()
+
+
+def preprocess_image(x, mode='caffe'):
+    """
+    Preprocess an image by subtracting the ImageNet mean.
+
+    Args
+        x: np.array of shape (None, None, 3) or (3, None, None).
+        mode: One of "caffe" or "tf".
+            - caffe: will zero-center each color channel with
+              respect to the ImageNet dataset, without scaling.
+            - tf: will scale pixels between -1 and 1, sample-wise.
+
+    Returns
+        The input with the ImageNet mean subtracted.
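+
+    Example (illustrative; 'image.jpg' is a hypothetical path):
+        img = read_image_bgr('image.jpg')
+        x = preprocess_image(img, mode='caffe')  # zero-centers the BGR channels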
+ """ + # mostly identical to "https://github.com/keras-team/keras-applications/blob/master/keras_applications/imagenet_utils.py" + # except for converting RGB -> BGR since we assume BGR already + + # covert always to float32 to keep compatibility with opencv + x = x.astype(np.float32) + + if mode == 'tf': + x /= 127.5 + x -= 1. + elif mode == 'caffe': + x[..., 0] -= 103.939 + x[..., 1] -= 116.779 + x[..., 2] -= 123.68 + + return x + + +def adjust_transform_for_image(transform, image, relative_translation): + """ + Adjust a transformation for a specific image. + + The translation of the matrix will be scaled with the size of the image. + The linear part of the transformation will adjusted so that the origin of the transformation will be at the center of the image. + """ + height, width, channels = image.shape + + result = transform + + # Scale the translation with the image size if specified. + if relative_translation: + result[0:2, 2] *= [width, height] + + # Move the origin of transformation. + result = change_transform_origin(transform, (0.5 * width, 0.5 * height)) + + return result + + +class TransformParameters: + """ + Struct holding parameters determining how to apply a transformation to an image. + + Args + fill_mode: One of: 'constant', 'nearest', 'reflect', 'wrap' + interpolation: One of: 'nearest', 'linear', 'cubic', 'area', 'lanczos4' + cval: Fill value to use with fill_mode='constant' + relative_translation: If true (the default), interpret translation as a factor of the image size. + If false, interpret it as absolute pixels. + """ + + def __init__( + self, + fill_mode='nearest', + interpolation='linear', + cval=0, + relative_translation=True, + ): + self.fill_mode = fill_mode + self.cval = cval + self.interpolation = interpolation + self.relative_translation = relative_translation + + def cvBorderMode(self): + if self.fill_mode == 'constant': + return cv2.BORDER_CONSTANT + if self.fill_mode == 'nearest': + return cv2.BORDER_REPLICATE + if self.fill_mode == 'reflect': + return cv2.BORDER_REFLECT_101 + if self.fill_mode == 'wrap': + return cv2.BORDER_WRAP + + def cvInterpolation(self): + if self.interpolation == 'nearest': + return cv2.INTER_NEAREST + if self.interpolation == 'linear': + return cv2.INTER_LINEAR + if self.interpolation == 'cubic': + return cv2.INTER_CUBIC + if self.interpolation == 'area': + return cv2.INTER_AREA + if self.interpolation == 'lanczos4': + return cv2.INTER_LANCZOS4 + + +def apply_transform(matrix, image, params): + """ + Apply a transformation to an image. + + The origin of transformation is at the top left corner of the image. + + The matrix is interpreted such that a point (x, y) on the original image is moved to transform * (x, y) in the generated image. + Mathematically speaking, that means that the matrix is a transformation from the transformed image space to the original image space. + + Args + matrix: A homogeneous 3 by 3 matrix holding representing the transformation to apply. + image: The image to transform. + params: The transform parameters (see TransformParameters) + """ + output = cv2.warpAffine( + image, + matrix[:2, :], + dsize=(image.shape[1], image.shape[0]), + flags=params.cvInterpolation(), + borderMode=params.cvBorderMode(), + borderValue=params.cval, + ) + return output + + +def compute_resize_scale(image_shape, min_side=800, max_side=1333): + """ + Compute an image scale such that the image size is constrained to min_side and max_side. + + Args + min_side: The image's min side will be equal to min_side after resizing. 
+        max_side: If after resizing the image's max side is above max_side, resize until the max side is equal to max_side.
+
+    Returns
+        A resizing scale.
+    """
+    (rows, cols, _) = image_shape
+
+    smallest_side = min(rows, cols)
+
+    # rescale the image so the smallest side is min_side
+    scale = min_side / smallest_side
+
+    # check if the largest side is now greater than max_side, which can happen
+    # when images have a large aspect ratio
+    largest_side = max(rows, cols)
+    if largest_side * scale > max_side:
+        scale = max_side / largest_side
+
+    return scale
+
+
+def resize_image(img, min_side=800, max_side=1333):
+    """
+    Resize an image such that the size is constrained to min_side and max_side.
+
+    Args
+        min_side: The image's min side will be equal to min_side after resizing.
+        max_side: If after resizing the image's max side is above max_side, resize until the max side is equal to max_side.
+
+    Returns
+        A resized image.
+    """
+    # compute scale to resize the image
+    scale = compute_resize_scale(img.shape, min_side=min_side, max_side=max_side)
+
+    # resize the image with the computed scale
+    img = cv2.resize(img, None, fx=scale, fy=scale)
+
+    return img, scale
+
+
+def _uniform(val_range):
+    """
+    Uniformly sample from the given range.
+
+    Args
+        val_range: A pair of lower and upper bound.
+    """
+    return np.random.uniform(val_range[0], val_range[1])
+
+
+def _check_range(val_range, min_val=None, max_val=None):
+    """
+    Check whether the range is a valid range.
+
+    Args
+        val_range: A pair of lower and upper bound.
+        min_val: Minimal value for the lower bound.
+        max_val: Maximal value for the upper bound.
+    """
+    if val_range[0] > val_range[1]:
+        raise ValueError('interval lower bound > upper bound')
+    if min_val is not None and val_range[0] < min_val:
+        raise ValueError('invalid interval lower bound')
+    if max_val is not None and val_range[1] > max_val:
+        raise ValueError('invalid interval upper bound')
+
+
+def _clip(image):
+    """
+    Clip and convert an image to np.uint8.
+
+    Args
+        image: Image to clip.
+    """
+    return np.clip(image, 0, 255).astype(np.uint8)
+
+
+class VisualEffect:
+    """
+    Struct holding parameters and applying image color transformation.
+
+    Args
+        contrast_factor: A factor for adjusting contrast. Should be between 0 and 3.
+        brightness_delta: Brightness offset between -1 and 1 added to the pixel values.
+        hue_delta: Hue offset between -1 and 1 added to the hue channel.
+        saturation_factor: A factor multiplying the saturation values of each pixel.
+    """
+
+    def __init__(
+            self,
+            contrast_factor,
+            brightness_delta,
+            hue_delta,
+            saturation_factor,
+    ):
+        self.contrast_factor = contrast_factor
+        self.brightness_delta = brightness_delta
+        self.hue_delta = hue_delta
+        self.saturation_factor = saturation_factor
+
+    def __call__(self, image):
+        """
+        Apply a visual effect on the image.
+
+        Args
+            image: Image to adjust
+        """
+
+        if self.contrast_factor:
+            image = adjust_contrast(image, self.contrast_factor)
+        if self.brightness_delta:
+            image = adjust_brightness(image, self.brightness_delta)
+
+        if self.hue_delta or self.saturation_factor:
+            image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
+            if self.hue_delta:
+                image = adjust_hue(image, self.hue_delta)
+            if self.saturation_factor:
+                image = adjust_saturation(image, self.saturation_factor)
+
+            image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR)
+
+        return image
+
+
+def random_visual_effect_generator(
+        contrast_range=(0.9, 1.1),
+        brightness_range=(-.1, .1),
+        hue_range=(-0.05, 0.05),
+        saturation_range=(0.95, 1.05)
+):
+    """
+    Generate visual effect parameters uniformly sampled from the given intervals.
+
+    Args
+        contrast_range: A factor interval for adjusting contrast. Should be between 0 and 3.
+        brightness_range: An interval between -1 and 1 for the amount added to the pixels.
+        hue_range: An interval between -1 and 1 for the amount added to the hue channel.
+            The values are rotated if they exceed 180.
+        saturation_range: An interval for the factor multiplying the saturation values of each
+            pixel.
+    """
+    _check_range(contrast_range, 0)
+    _check_range(brightness_range, -1, 1)
+    _check_range(hue_range, -1, 1)
+    _check_range(saturation_range, 0)
+
+    def _generate():
+        while True:
+            yield VisualEffect(
+                contrast_factor=_uniform(contrast_range),
+                brightness_delta=_uniform(brightness_range),
+                hue_delta=_uniform(hue_range),
+                saturation_factor=_uniform(saturation_range),
+            )
+
+    return _generate()
+
+
+def adjust_contrast(image, factor):
+    """
+    Adjust contrast of an image.
+
+    Args
+        image: Image to adjust.
+        factor: A factor for adjusting contrast.
+    """
+    mean = image.mean(axis=0).mean(axis=0)
+    return _clip((image - mean) * factor + mean)
+
+
+def adjust_brightness(image, delta):
+    """
+    Adjust brightness of an image.
+
+    Args
+        image: Image to adjust.
+        delta: Brightness offset between -1 and 1 added to the pixel values.
+    """
+    return _clip(image + delta * 255)
+
+
+def adjust_hue(image, delta):
+    """
+    Adjust hue of an image.
+
+    Args
+        image: Image to adjust.
+        delta: An interval between -1 and 1 for the amount added to the hue channel.
+            The values are rotated if they exceed 180.
+    """
+    image[..., 0] = np.mod(image[..., 0] + delta * 180, 180)
+    return image
+
+
+def adjust_saturation(image, factor):
+    """
+    Adjust saturation of an image.
+
+    Args
+        image: Image to adjust.
+        factor: An interval for the factor multiplying the saturation values of each pixel.
+    """
+    image[..., 1] = np.clip(image[..., 1] * factor, 0, 255)
+    return image
+
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/transform.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/transform.py
new file mode 100644
index 0000000000000000000000000000000000000000..e34b3c1babe68794432fdca422a9c0c857f1db12
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/transform.py
@@ -0,0 +1,333 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+
+import numpy as np
+
+DEFAULT_PRNG = np.random
+
+
+def colvec(*args):
+    """
+    Create a numpy array representing a column vector.
+    """
+    return np.array([args]).T
+
+
+def transform_aabb(transform, aabb):
+    """
+    Apply a transformation to an axis aligned bounding box.
+
+    The result is a new AABB in the same coordinate system as the original AABB.
+    The new AABB contains all corner points of the original AABB after applying the given transformation.
+
+    Args
+        transform: The transformation to apply.
+        aabb: The axis aligned bounding box as tuple (x1, y1, x2, y2), where
+            (x1, y1) is the minimum and (x2, y2) the maximum corner.
+    Returns
+        The new AABB as tuple (x1, y1, x2, y2)
+    """
+    x1, y1, x2, y2 = aabb
+    # Transform all 4 corners of the AABB.
+    points = transform.dot([
+        [x1, x2, x1, x2],
+        [y1, y2, y2, y1],
+        [1, 1, 1, 1],
+    ])
+
+    # Extract the min and max corners again.
+    # (3, ) (min_x, min_y, 1)
+    min_corner = points.min(axis=1)
+    # (3, ) (max_x, max_y, 1)
+    max_corner = points.max(axis=1)
+
+    return [min_corner[0], min_corner[1], max_corner[0], max_corner[1]]
+
+
+def _random_vector(min, max, prng=DEFAULT_PRNG):
+    """
+    Construct a random vector between min and max.
+
+    Args
+        min: the minimum value for each component, (n, )
+        max: the maximum value for each component, (n, )
+    """
+    min = np.array(min)
+    max = np.array(max)
+    assert min.shape == max.shape
+    assert len(min.shape) == 1
+    return prng.uniform(min, max)
+
+
+def rotation(angle):
+    """
+    Construct a homogeneous 2D rotation matrix.
+
+    Args
+        angle: the angle in radians
+    Returns
+        the rotation matrix as 3 by 3 numpy array
+    """
+    return np.array([
+        [np.cos(angle), -np.sin(angle), 0],
+        [np.sin(angle), np.cos(angle), 0],
+        [0, 0, 1]
+    ])
+
+
+def random_rotation(min, max, prng=DEFAULT_PRNG):
+    """
+    Construct a random rotation between min and max.
+
+    Args
+        min: a scalar for the minimum absolute angle in radians
+        max: a scalar for the maximum absolute angle in radians
+        prng: the pseudo-random number generator to use.
+    Returns
+        a homogeneous 3 by 3 rotation matrix
+    """
+    return rotation(prng.uniform(min, max))
+
+
+def translation(translation):
+    """
+    Construct a homogeneous 2D translation matrix.
+
+    Args:
+        translation: the translation 2D vector
+
+    Returns:
+        the translation matrix as 3 by 3 numpy array
+
+    """
+    return np.array([
+        [1, 0, translation[0]],
+        [0, 1, translation[1]],
+        [0, 0, 1]
+    ])
+
+
+def random_translation(min, max, prng=DEFAULT_PRNG):
+    """
+    Construct a random 2D translation between min and max.
+
+    Args
+        min: a 2D vector with the minimum translation for each dimension
+        max: a 2D vector with the maximum translation for each dimension
+        prng: the pseudo-random number generator to use.
+    Returns
+        a homogeneous 3 by 3 translation matrix
+    """
+    return translation(_random_vector(min, max, prng))
+
+
+def shear(angle):
+    """
+    Construct a homogeneous 2D shear matrix.
+
+    Args
+        angle: the shear angle in radians
+    Returns
+        the shear matrix as 3 by 3 numpy array
+    """
+    return np.array([
+        [1, -np.sin(angle), 0],
+        [0, np.cos(angle), 0],
+        [0, 0, 1]
+    ])
+
+
+def random_shear(min, max, prng=DEFAULT_PRNG):
+    """
+    Construct a random 2D shear matrix with shear angle between min and max.
+
+    Args
+        min: the minimum shear angle in radians.
+        max: the maximum shear angle in radians.
+        prng: the pseudo-random number generator to use.
+    Returns
+        a homogeneous 3 by 3 shear matrix
+    """
+    return shear(prng.uniform(min, max))
+
+
+def scaling(factor):
+    """
+    Construct a homogeneous 2D scaling matrix.
+
+    Args
+        factor: a 2D vector for X and Y scaling
+    Returns
+        the zoom matrix as 3 by 3 numpy array
+    """
+
+    return np.array([
+        [factor[0], 0, 0],
+        [0, factor[1], 0],
+        [0, 0, 1]
+    ])
+
+
+def random_scaling(min, max, prng=DEFAULT_PRNG):
+    """
+    Construct a random 2D scale matrix between min and max.
+
+    Args
+        min: a 2D vector containing the minimum scaling factor for X and Y.
+        max: a 2D vector containing the maximum scaling factor for X and Y.
+        prng: the pseudo-random number generator to use.
+    Returns
+        a homogeneous 3 by 3 scaling matrix
+    """
+    return scaling(_random_vector(min, max, prng))
+
+
+def random_flip(flip_x_chance, flip_y_chance, prng=DEFAULT_PRNG):
+    """
+    Construct a transformation randomly containing X/Y flips (or not).
+
+    Args
+        flip_x_chance: The chance that the result will contain a flip along the X axis.
+        flip_y_chance: The chance that the result will contain a flip along the Y axis.
+        prng: The pseudo-random number generator to use.
+    Returns
+        a homogeneous 3 by 3 transformation matrix
+    """
+    flip_x = prng.uniform(0, 1) < flip_x_chance
+    flip_y = prng.uniform(0, 1) < flip_y_chance
+    # 1 - 2 * bool gives 1 for False and -1 for True.
+    return scaling((1 - 2 * flip_x, 1 - 2 * flip_y))
+
+
+def change_transform_origin(transform, center):
+    """
+    Create a new transform representing the same transformation, only with the origin of the linear part changed.
+
+    Args
+        transform: the transformation matrix
+        center: the new origin of the transformation
+    Returns
+        translate(center) * transform * translate(-center)
+    """
+    center = np.array(center)
+    return np.linalg.multi_dot([translation(center), transform, translation(-center)])
+
+
+def random_transform(
+        min_rotation=0,
+        max_rotation=0,
+        min_translation=(0, 0),
+        max_translation=(0, 0),
+        min_shear=0,
+        max_shear=0,
+        min_scaling=(1, 1),
+        max_scaling=(1, 1),
+        flip_x_chance=0,
+        flip_y_chance=0,
+        prng=DEFAULT_PRNG
+):
+    """
+    Create a random transformation.
+
+    The transformation consists of the following operations in this order (from left to right):
+      * rotation
+      * translation
+      * shear
+      * scaling
+      * flip x (if applied)
+      * flip y (if applied)
+
+    Note that by default, the data generators in `keras_retinanet.preprocessing.generators` interpret the translation
+    as factor of the image size. So an X translation of 0.1 would translate the image by 10% of its width.
+    Set `relative_translation` to `False` in the `TransformParameters` of a data generator to have it interpret
+    the translation directly as pixel distances instead.
+
+    Args
+        min_rotation: The minimum rotation in radians for the transform as scalar.
+        max_rotation: The maximum rotation in radians for the transform as scalar.
+        min_translation: The minimum translation for the transform as 2D column vector.
+        max_translation: The maximum translation for the transform as 2D column vector.
+        min_shear: The minimum shear angle for the transform in radians.
+        max_shear: The maximum shear angle for the transform in radians.
+        min_scaling: The minimum scaling for the transform as 2D column vector.
+        max_scaling: The maximum scaling for the transform as 2D column vector.
+        flip_x_chance: The chance (0 to 1) that a transform will contain a flip along the X direction.
+        flip_y_chance: The chance (0 to 1) that a transform will contain a flip along the Y direction.
+        prng: The pseudo-random number generator to use.
+    """
+    return np.linalg.multi_dot([
+        random_rotation(min_rotation, max_rotation, prng),
+        random_translation(min_translation, max_translation, prng),
+        random_shear(min_shear, max_shear, prng),
+        random_scaling(min_scaling, max_scaling, prng),
+        random_flip(flip_x_chance, flip_y_chance, prng)
+    ])
+
+
+def random_transform_generator(prng=None, **kwargs):
+    """
+    Create a random transform generator.
+    Uses a dedicated, newly created, properly seeded PRNG by default instead of the global DEFAULT_PRNG.
+
+    The transformation consists of the following operations in this order (from left to right):
+      * rotation
+      * translation
+      * shear
+      * scaling
+      * flip x (if applied)
+      * flip y (if applied)
+
+    Note that by default, the data generators in `keras_retinanet.preprocessing.generators` interpret the translation
+    as factor of the image size. So an X translation of 0.1 would translate the image by 10% of its width.
+    Set `relative_translation` to `False` in the `TransformParameters` of a data generator to have it interpret
+    the translation directly as pixel distances instead.
+
+    Args
+        min_rotation: The minimum rotation in radians for the transform as scalar.
+        max_rotation: The maximum rotation in radians for the transform as scalar.
+        min_translation: The minimum translation for the transform as 2D column vector.
+        max_translation: The maximum translation for the transform as 2D column vector.
+        min_shear: The minimum shear angle for the transform in radians.
+        max_shear: The maximum shear angle for the transform in radians.
+        min_scaling: The minimum scaling for the transform as 2D column vector.
+        max_scaling: The maximum scaling for the transform as 2D column vector.
+        flip_x_chance: The chance (0 to 1) that a transform will contain a flip along the X direction.
+        flip_y_chance: The chance (0 to 1) that a transform will contain a flip along the Y direction.
+        prng: The pseudo-random number generator to use.
+    """
+
+    if prng is None:
+        # RandomState automatically seeds using the best available method.
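+        # Illustrative usage (each draw yields one homogeneous 3x3 matrix):
+        #   gen = random_transform_generator(min_rotation=-0.1, max_rotation=0.1)
+        #   matrix = next(gen)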
+        prng = np.random.RandomState()
+
+    while True:
+        yield random_transform(prng=prng, **kwargs)
+
diff --git a/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/visualization.py b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/visualization.py
new file mode 100644
index 0000000000000000000000000000000000000000..4743641e822579b32af7488452b5f757c4bda215
--- /dev/null
+++ b/TensorFlow/contrib/cv/EfficientDet_ID0693_for_TensorFlow/utils/visualization.py
@@ -0,0 +1,120 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+
+import cv2
+import numpy as np
+
+from .colors import label_color
+
+
+def draw_box(image, box, color, thickness=2):
+    """ Draws a box on an image with a given color.
+
+    # Arguments
+        image : The image to draw on.
+        box : A list of 4 elements (x1, y1, x2, y2).
+        color : The color of the box.
+        thickness : The thickness of the lines to draw a box with.
+    """
+    b = np.array(box).astype(np.int32)
+    cv2.rectangle(image, (b[0], b[1]), (b[2], b[3]), color, thickness, cv2.LINE_AA)
+
+
+def draw_caption(image, box, caption):
+    """ Draws a caption above the box in an image.
+
+    # Arguments
+        image : The image to draw on.
+        box : A list of 4 elements (x1, y1, x2, y2).
+        caption : String containing the text to draw.
+    """
+    b = np.array(box).astype(int)
+    cv2.putText(image, caption, (b[0], b[1] - 10), cv2.FONT_HERSHEY_PLAIN, 1, (0, 0, 0), 2)
+    cv2.putText(image, caption, (b[0], b[1] - 10), cv2.FONT_HERSHEY_PLAIN, 1, (255, 255, 255), 1)
+
+
+def draw_boxes(image, boxes, color, thickness=2):
+    """ Draws boxes on an image with a given color.
+
+    # Arguments
+        image : The image to draw on.
+        boxes : A [N, 4] matrix (x1, y1, x2, y2).
+        color : The color of the boxes.
+        thickness : The thickness of the lines to draw boxes with.
+    """
+    for b in boxes:
+        draw_box(image, b, color, thickness=thickness)
+
+
+def draw_detections(image, boxes, scores, labels, colors, label_to_name=None, score_threshold=0.5):
+    """ Draws detections in an image.
+
+    # Arguments
+        image : The image to draw on.
+        boxes : A [N, 4] matrix (x1, y1, x2, y2).
+        scores : A list of N classification scores.
+        labels : A list of N labels.
+        colors : The colors of the boxes.
+        label_to_name : (optional) Functor for mapping a label to a name.
+        score_threshold : Threshold used for determining what detections to draw.
+    """
+    selection = np.where(scores > score_threshold)[0]
+
+    for i in selection:
+        c = colors[int(labels[i])]
+        draw_box(image, boxes[i, :], color=c)
+
+        # draw labels
+        caption = (label_to_name(labels[i]) if label_to_name else labels[i]) + ': {0:.2f}'.format(scores[i])
+        draw_caption(image, boxes[i, :], caption)
+
+
+def draw_annotations(image, annotations, color=(0, 255, 0), label_to_name=None):
+    """ Draws annotations in an image.
+
+    # Arguments
+        image : The image to draw on.
+        annotations : A [N, 5] matrix (x1, y1, x2, y2, label) or dictionary containing bboxes (shaped [N, 4]) and labels (shaped [N]).
+        color : The color of the boxes. By default the color from keras_retinanet.utils.colors.label_color will be used.
+        label_to_name : (optional) Functor for mapping a label to a name.
+    """
+    if isinstance(annotations, np.ndarray):
+        annotations = {'bboxes': annotations[:, :4], 'labels': annotations[:, 4]}
+
+    assert('bboxes' in annotations)
+    assert('labels' in annotations)
+    assert(annotations['bboxes'].shape[0] == annotations['labels'].shape[0])
+
+    for i in range(annotations['bboxes'].shape[0]):
+        label = annotations['labels'][i]
+        c = color if color is not None else label_color(label)
+        caption = '{}'.format(label_to_name(label) if label_to_name else label)
+        draw_caption(image, annotations['bboxes'][i], caption)
+        draw_box(image, annotations['bboxes'][i], color=c)
+
diff --git a/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/efficientnet_model.py b/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/efficientnet_model.py
index 569e02c0190f07f2bd76e0b68a315104ac49bbd2..c653e87884793fb54f716fdda3abb5665333f2a0 100644
--- a/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/efficientnet_model.py
+++ b/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/efficientnet_model.py
@@ -367,6 +367,7 @@ class Model(tf.keras.Model):
         kernel_initializer=dense_kernel_initializer)
 
     if self._global_params.dropout_rate > 0:
+      from npu_bridge.estimator.npu import npu_convert_dropout
       self._dropout = tf.keras.layers.Dropout(self._global_params.dropout_rate)
     else:
       self._dropout = None
diff --git a/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/test/train_full_1p.sh
index a354a192bd576fecad509930fe3b4bee2e6cc50e..b83abad12f69b8f344bf8ee605cb0668354fed9a 100644
--- a/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/test/train_full_1p.sh
+++ b/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/test/train_full_1p.sh
@@ -120,7 +120,7 @@ e2e_time=$(( $end_time - $start_time ))
 echo "------------------ Final result ------------------"
 # Output performance (FPS); needs review/adaptation per model
 TrainingTime=`grep 'fps:' $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $14}'`
-FPS=`grep 'fps:' $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk '{print $16}'|awk '{sum+=$1} END {print sum/NR}'`
+FPS=`grep 'fps:' $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk -F "fps:" '{print $2}'| awk '{sum+=$1} END {print "", sum/NR}' | sed s/[[:space:]]//g`
 # Print; no modification needed
 echo "Final Performance TrainingTime : $TrainingTime"
 echo "Final Performance images/sec : $FPS"
diff --git a/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/test/train_performance_1p.sh
index 5cabaa5e6f1b172b5d8a6345fc10d5660eb0567d..75af2ddb23ba7e584876eab38b21ea62d8bcb40b 100644
--- a/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/test/train_performance_1p.sh
+++ b/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/test/train_performance_1p.sh
@@ -104,7 +104,7 @@ fi
 # Run the training script; the arguments passed below need no modification, others should be reviewed per model
 python3 train.py \
         --TMP_DATA_PATH=${data_path}/data_quan \
-        --epochs=1 \
+        --epochs=10 \
         --TMP_WEIGHTS_PATH=${data_path}/weights \
         --image_num=900 > ${cur_path}/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1
 wait
@@ -117,7 +117,7 @@ e2e_time=$(( $end_time - $start_time ))
 echo "------------------ Final result ------------------"
 # Output performance (FPS); needs review/adaptation per model
 TrainingTime=`grep 'fps:' $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $14}'`
-FPS=`grep 'fps:' $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk '{print $16}'|awk '{sum+=$1} END {print sum/NR}'`
+FPS=`grep 'fps:' $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk -F "fps:" '{print $2}'| awk '{sum+=$1} END {print "", sum/NR}' | sed s/[[:space:]]//g`
 # Print; no modification needed
 echo "Final Performance TrainingTime : $TrainingTime"
 echo "Final Performance images/sec : $FPS"
diff --git a/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/train.py b/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/train.py
index 39dcda7b6a91e6db592308f717e2903f1e97a2a6..37a7e394d1a87f5d683747d19f678e84cc2be831 100644
--- a/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/train.py
+++ b/TensorFlow/contrib/cv/EfficientNet/EfficientNet_V2_ID1220_for_TensorFlow/train.py
@@ -156,39 +156,46 @@ def tf_data_list(tf_data_path):
 dataset = tf.data.TFRecordDataset(tf_data_list(FLAGS.TMP_DATA_PATH + "/" +"train_tf"))
-dataset = dataset.map(_parse_read, num_parallel_calls=1)
+dataset = dataset.map(_parse_read, num_parallel_calls=192)
 if FLAGS.is_training:
     dataset = dataset.shuffle(FLAGS.batch_size * 6)
-    dataset = dataset.repeat(FLAGS.epochs)
+    dataset = dataset.repeat()
 else:
     dataset = dataset.repeat(1)
 dataset = dataset.batch(FLAGS.batch_size, drop_remainder=True)
-iterator = dataset.make_one_shot_iterator()
+# iterator = dataset.make_one_shot_iterator()
+iterator = dataset.make_initializable_iterator()
 images_batch, labels_batch = iterator.get_next()
 print(images_batch, labels_batch)
-inputx = tf.placeholder(tf.float32, shape=[FLAGS.batch_size, 224, 224, 3], name="inputx")
-inputy = tf.placeholder(tf.int64, name="inputy")
+# inputx = tf.placeholder(tf.float32, shape=[FLAGS.batch_size, 224, 224, 3], name="inputx")
+# inputy = tf.placeholder(tf.int64, name="inputy")
 out, model_endpoint = efficientnet_builder.build_model(
-    inputx,
+    images_batch,
     model_name="efficientnet-b0",
     training=FLAGS.is_training,
     override_params=None)
-train_op, train_loss, train_val = training_op(out, inputy)
-test_acc = evaluation(out, inputy)
+labels_batch=tf.squeeze(labels_batch)
+
+train_op, train_loss, train_val = training_op(out, labels_batch)
+test_acc = evaluation(out, labels_batch)
+
 config = tf.ConfigProto(allow_soft_placement=True)
 custom_op = config.graph_options.rewrite_options.custom_optimizers.add()
 custom_op.name = "NpuOptimizer"
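+# NPU session configuration: the options below offload training to the Ascend
+# device and sink 10 iterations into each sess.run call (iterations_per_loop),
+# which is why the training loop later strides by 10 and divides the measured
+# step time by 10.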
 custom_op.parameter_map["use_off_line"].b = True # run training on the Ascend AI processor
 custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
+custom_op.parameter_map["enable_data_pre_proc"].b = True # sinking the getnext op is a prerequisite for loop sinking
+custom_op.parameter_map["iterations_per_loop"].i = 10
 config.graph_options.rewrite_options.remapping = RewriterConfig.OFF # disable the remap pass
 sess = tf.Session(config=config)
+train_op = util.set_iteration_per_loop(sess, train_op, 10)
 sess.run(tf.global_variables_initializer())
-
+sess.run(iterator.initializer)
 saver = tf.train.Saver()
 saver.restore(sess, WEIGHTS_MODEL)
@@ -201,22 +208,21 @@ try:
     perf_lsit=[]
     fps_list=[]
     for epoch in range(FLAGS.epochs):
-        for step in range(int(FLAGS.image_num / FLAGS.batch_size)): #900/90=10
+        for step in range(0,int(FLAGS.image_num / FLAGS.batch_size),10): #900/90=10
             star_time = time.time()
-            x_in, y_in = sess.run([images_batch, labels_batch])
-            y_in = np.squeeze(y_in, 1)
-            _, tra_loss, tra_acc = sess.run([train_op, train_loss, train_val],
-                                            feed_dict={inputx: x_in, inputy: y_in})
-            if (step + 1) % 1 == 0:
-                if step > 0: # drop the first, unstable measurement
-                    perf = time.time() - star_time
-                    perf_lsit.append(perf)
-                    perf_ = np.mean(perf_lsit)
-                    fps = FLAGS.batch_size / perf
-                    fps_list.append(fps)
-                    fps_ = np.mean(fps_list)
-                    print('Epoch %d step %d train loss = %.4f train accuracy = %.2f%% time: %.4f fps: %.4f' % (
-                        epoch + 1, step + 1, tra_loss, tra_acc * 100.0, perf_, fps_))
+            # x_in, y_in = sess.run([images_batch, labels_batch])
+            # y_in = np.squeeze(y_in, 1)
+            _, tra_loss, tra_acc = sess.run([train_op, train_loss, train_val])
+            # if (step + 1) % 1 == 0:
+            # if step > 0: # drop the first, unstable measurement
+            perf = (time.time() - star_time) / 10
+            perf_lsit.append(perf)
+            perf_ = np.mean(perf_lsit)
+            fps = FLAGS.batch_size / perf
+            fps_list.append(fps)
+            fps_ = np.mean(fps_list)
+            print('Epoch %d step %d train loss = %.4f train accuracy = %.2f%% time: %.4f fps: %.4f' % (
+                epoch + 1, step + 1, tra_loss, tra_acc * 100.0, perf_, fps_))
     checkpoint_path = os.path.join(FLAGS.TMP_MODEL_PATH, "m.ckpt")
     saver_train.save(sess, checkpoint_path, global_step=epoch)
 except tf.errors.OutOfRangeError:
diff --git a/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/.keep b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/LICENSE b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..11dc1bec0fecd6ecac9a30450f38d22b5bcd7224
--- /dev/null
+++ b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018 Ali Dabouei
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/README.md b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..d69d66bbfed2ee0cf0f9a7b447fa111a608eeb1f
--- /dev/null
+++ b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/README.md
@@ -0,0 +1,72 @@
+# Basic Information:
+Publisher: Huawei
+Application Domain: Landmark-detection
+Version: 1.0
+Framework: TensorFlow 1.15.0
+Model Format: ckpt
+Processor: Ascend 910
+Categories: Research
+Description: TensorFlow training code for a landmark (feature point) detection network
+
+# Overview:
+Factorized performs unsupervised learning of object landmarks through factorized spatial embeddings.
+Reference paper and source code:
+Unsupervised learning of object landmarks by factorized spatial embeddings
+https://github.com/alldbi/Factorized-Spatial-Embeddings
+
+# Default Configuration:
+1. Training dataset preprocessing
+celebA
+2. Test dataset preprocessing
+celebA
+3. Training hyperparameters
+LANDMARK_N = 8
+SAVE_FREQ = 500
+SUMMARY_FREQ = 20
+BATCH_SIZE = 32
+DOWNSAMPLE_M = 4
+DIVERSITY = 500.
+ALIGN = 1.
+LEARNING_RATE = 1.e-4
+MOMENTUM = 0.5
+RANDOM_SEED = 1234
+WEIGHT_DECAY = 0.0005
+SCALE_SIZE = 146
+CROP_SIZE = 146
+MAX_EPOCH = 200
+# Training Environment Setup:
+ascend-share/5.1.rc1.alpha003_tensorflow-ascend910-cp37-euleros2.8-aarch64-training:1.15.0-21.0.2_0317
+
+# Quick Start:
+Dataset preparation:
+Training dataset: celebA.
+
+# Model Training:
+Training entry file: train.py
+
+
+# Model Testing:
+Test entry file: test.py
+
+
+
+# File Description
+
+├── README.md                 // this document
+├── requirement.txt           // dependencies
+├── modelzoo_level.txt        // progress notes
+├── LICENSE                   // license
+├── train.py                  // training entry file
+├── test.py                   // test entry file
+├── utils/warp.py             // helper module
+├── utils/ThinPlateSplineB.py // helper module
+
+
+# Accuracy and Performance Comparison:
+GPU:
+accuracy: loss_align 4-8
+performance: 196.5 image/sec
+NPU:
+accuracy: loss_align 4-8
+performance: 233.1 image/sec
diff --git a/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/modelzoo_level.txt b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/modelzoo_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..1d0ce358c96ac36fc58fd0a15e3da7820321fe44
--- /dev/null
+++ b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/modelzoo_level.txt
@@ -0,0 +1,5 @@
+GPUStatus:OK
+NPUMigrationStatus:OK
+FuncStatus:OK
+PrecisionStatus:OK
+PerfStatus:OK
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/requirement.txt b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/requirement.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/test.py b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..719e31e741565bf640e220ecd85293db99980275
--- /dev/null
+++ b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/test.py
@@ -0,0 +1,783 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + + +from npu_bridge.npu_init import * + +import tensorflow as tf +__all__ = [tf] +import numpy as np +import glob, os, random, math, collections, time, argparse, shutil +from utils.warp import feature_warping2 +from utils.warp import image_warping2 + + +from matplotlib import cm + +# ***************************************Train mode********************************************************* + +# Parameter setting **************************************************************************************************** + + +MODE = "test" + + +INPUT_DIR = 'Factorized-Spatial-Embeddings-master/datasets/celebA-test' #读取数据集路径 +OUTPUT_DIR = 'Factorized-Spatial-Embeddings-master/output' #输出图像路径 +# OUTPUT_DIR = '/fse-test/output/test1/output/V0098/' #输出路径 + +# IMAGE_DIR='' +# checkpoint_dir='' + +image_dir ='Factorized-Spatial-Embeddings-master/output' +#OUTPUT_DIR = '/fse-test/output/test1/output/V0085/' #输出路径 存储模型 以及输出图像 +LANDMARK_N = 8 + + + +#INPUT_DIR = './dataset/celebA-test/' #数据集路径 +#INPUT_DIR = 's3://fse-test/dataset/celebA-test/' #obs的数据集路径 + +#DATA_DIRECTORY = "/celebatest/celebAtest/" #数据集路径(obs桶中) +#OUTPUT_DIR = './output' + +# CHECKPOINT = './backup/model/' +CHECKPOINT = 'Factorized-Spatial-Embeddings-master/backup/model/V0101' + + + +SAVE_FREQ = 500 +SUMMARY_FREQ = 20 +BATCH_SIZE = 32 +DOWNSAMPLE_M = 4 +DIVERSITY = 500. +ALIGN = 1. +LEARNING_RATE = 1.e-4 +MOMENTUM = 0.5 +RANDOM_SEED = 1234 +WEIGHT_DECAY = 0.0005 +SCALE_SIZE = 146 +CROP_SIZE = 146 +MAX_EPOCH = 1000 + + + + +#OUTPUT_DIR = './output' +# OUTPUT_DIR = r'C:\Users\User\PycharmProjects\pythonProject2\Factorized-Spatial-Embeddings-master\output' + +# CHECKPOINT = r'C:\Users\User\PycharmProjects\pythonProject2\Factorized-Spatial-Embeddings-master\backup\model' +#CHECKPOINT = 'backup.model' + + + + +def get_arguments(): + """Parse all the arguments provided from the CLI. + + Returns: + A list of parsed arguments. 
+ """ + parser = argparse.ArgumentParser(description="Factorized Spatial Embeddings", formatter_class=argparse.ArgumentDefaultsHelpFormatter) + + #parser.add_argument("--mode", default=MODE, choices=["train", "test"]) + parser.add_argument("--mode", default=MODE) + parser.add_argument("--input_dir", default=INPUT_DIR, + help="Path to the directory containing the training or testing images.") + parser.add_argument("--K", type=int, default=LANDMARK_N, + help="Number of landmarks.") + + parser.add_argument("--output_dir", default=OUTPUT_DIR, + help="Where to put output files") + + + parser.add_argument("--batch_size", type=int, default=BATCH_SIZE, + help="Number of images sent to the network in one step.") + parser.add_argument("--learning_rate", type=float, default=LEARNING_RATE, + help="Learning rate for adam.") + parser.add_argument("--beta1", type=float, default=MOMENTUM, + help="Momentum component of the optimiser.") + parser.add_argument("--M", type=int, default=DOWNSAMPLE_M, + help="Downsampling value of the diversity loss.") + parser.add_argument("--weight_decay", type=float, default=WEIGHT_DECAY, + help="Regularisation parameter for L2-loss.") + parser.add_argument("--random_seed", type=int, default=RANDOM_SEED, + help="Random seed to have reproducible results.") + parser.add_argument("--diversity_weight", type=float, default=DIVERSITY, + help="Weight on diversity loss.") + parser.add_argument("--align_weight", type=float, default=ALIGN, + help="Weight on align loss.") + parser.add_argument("--scale_size", type=int, default=SCALE_SIZE, + help="Scale images to this size before cropping to CROP_SIZE") + parser.add_argument("--crop_size", type=int, default=CROP_SIZE, + help="CROP images to this size") + parser.add_argument("--max_epochs", type=int, default=MAX_EPOCH, + help="Number of training epochs") + parser.add_argument("--checkpoint", default=CHECKPOINT, + help="Directory with checkpoint to resume training from or use for testing") + + parser.add_argument("--summary_freq", type=int, default=SUMMARY_FREQ, + help="Update summaries every summary_freq steps") + parser.add_argument("--save_freq", type=int, default=SAVE_FREQ, help="Save model every save_freq steps") + + + + #其他参数 + parser.add_argument("--data_url", type=str, ) + parser.add_argument("--train_url", type=str, ) + parser.add_argument("--num_gpus", default=1) + + + return parser.parse_args() + + + + + + + +# def get_arguments(): +# """Parse all the arguments provided from the CLI. +# +# Returns: +# A list of parsed arguments. 
+# """ +# parser = argparse.ArgumentParser(description="Factorized Spatial Embeddings") +# parser.add_argument("--mode", default=MODE, choices=["train", "test"]) +# parser.add_argument("--batch_size", type=int, default=BATCH_SIZE, +# help="Number of images sent to the network in one step.") +# parser.add_argument("--input_dir", type=str, default=DATA_DIRECTORY, +# help="Path to the directory containing the training or testing images.") +# parser.add_argument("--learning_rate", type=float, default=LEARNING_RATE, +# help="Learning rate for adam.") +# parser.add_argument("--beta1", type=float, default=MOMENTUM, +# help="Momentum component of the optimiser.") +# parser.add_argument("--K", type=int, default=LANDMARK_N, +# help="Number of landmarks.") +# parser.add_argument("--M", type=int, default=DOWNSAMPLE_M, +# help="Downsampling value of the diversity loss.") +# parser.add_argument("--weight_decay", type=float, default=WEIGHT_DECAY, +# help="Regularisation parameter for L2-loss.") +# parser.add_argument("--random-seed", type=int, default=RANDOM_SEED, +# help="Random seed to have reproducible results.") +# parser.add_argument("--diversity_weight", type=float, default=DIVERSITY, +# help="Weight on diversity loss.") +# parser.add_argument("--align_weight", type=float, default=ALIGN, +# help="Weight on align loss.") +# parser.add_argument("--scale_size", type=int, default=SCALE_SIZE, +# help="Scale images to this size before cropping to CROP_SIZE") +# parser.add_argument("--crop_size", type=int, default=CROP_SIZE, +# help="CROP images to this size") +# parser.add_argument("--max_epochs", type=int, default=MAX_EPOCH, +# help="Number of training epochs") +# parser.add_argument("--checkpoint", default=CHECKPOINT, +# help="Directory with checkpoint to resume training from or use for testing") +# parser.add_argument("--output_dir", default=OUTPUT_DIR, +# help="Where to put output files") +# parser.add_argument("--summary_freq", type=int, default=SUMMARY_FREQ, +# help="Update summaries every summary_freq steps") +# parser.add_argument("--save_freq", type=int, default=SAVE_FREQ, help="Save model every save_freq steps") +# return parser.parse_args() + +def landmark_colors(n_landmarks): + """Compute landmark colors. + + Returns: + An array of RGB values. + """ + cmap = cm.get_cmap('hsv') + landmark_color = [] + landmark_color.append((0., 0., 0.)) + for i in range(n_landmarks): + landmark_color.append(cmap(i/float(n_landmarks))[0:3]) + landmark_color = np.array(landmark_color) + return landmark_color + + +# Collections definition +Examples = collections.namedtuple("Examples", + "paths, images, images_deformed, deformation, count, steps_per_epoch, shape") +Model = collections.namedtuple("Model", "pos_loss, neg_loss, distance") + +def weight_decay(): + """Compute weight decay loss. + + Returns: + Weight decay loss. 
+ """ + costs = [] + for var in tf.trainable_variables(): + if var.op.name.find('filter')>0: + costs.append(tf.nn.l2_loss(var)) + return tf.add_n(costs) + +def conv(batch_input, out_channels, stride=1): + with tf.variable_scope("conv"): + in_channels = batch_input.get_shape()[3] + filter = tf.get_variable("filter", [5, 5, in_channels, out_channels], dtype=tf.float32, + initializer=tf.random_normal_initializer(0, 0.02)) + conv = tf.nn.conv2d(batch_input, filter, [1, stride, stride, 1], padding="VALID") + return conv + +def save_images(fetches, args, step=None): #存储输出的图像 + #image_dir = os.path.join(args.output_dir, "images") + # image_dir = args.train_url + print("---------输出图像位置", image_dir) + + filesets = [] + for i, in_path in enumerate(fetches["paths"]): + name, _ = os.path.splitext(os.path.basename(in_path.decode("utf8"))) + fileset = {"name": name, "step": step} + filename = name + "-" + "outputs" + ".png" + if step is not None: + filename = "%08d-%s" % (step, filename) + fileset["outputs"] = filename + out_path = os.path.join(image_dir, filename) + contents = fetches["outputs"][i] + with open(out_path, "wb") as f: + f.write(contents) + filesets.append(fileset) + return filesets + + + + + + +# def save_images(fetches, args, step=None): #存储输出的图像 +# #image_dir = os.path.join(args.output_dir, "images") +# image_dir = args.train_url +# print("---------输出图像位置", image_dir) +# +# if not os.path.exists(image_dir): +# print("---------未找到图像位置") +# os.makedirs(image_dir) +# +# filesets = [] +# for i, in_path in enumerate(fetches["paths"]): +# name, _ = os.path.splitext(os.path.basename(in_path.decode("utf8"))) +# fileset = {"name": name, "step": step} +# filename = name + "-" + "outputs" + ".png" +# if step is not None: +# filename = "%08d-%s" % (step, filename) +# fileset["outputs"] = filename +# out_path = os.path.join(image_dir, filename) +# contents = fetches["outputs"][i] +# with open(out_path, "wb") as f: +# f.write(contents) +# filesets.append(fileset) +# return filesets + + +def preprocess(image): + with tf.name_scope("preprocess"): + # [0, 1] => [-1, 1] + return image * 2 - 1 + +def deprocess(image): + with tf.name_scope("deprocess"): + # [-1, 1] => [0, 1] + return (image + 1) / 2 + +def load_examples(args): + """Load all images in the input_dir. 
+ + Returns: + Examples.paths : batch of path of images, + Examples.images : batch of images, + Examples.images_deformed : batch of deformed images, + Examples.deformation : batch of deformation parameters, + """ + + + #if args.input_dir is None or not os.path.exists(args.input_dir): + if args.input_dir is None : + raise Exception("input_dir does not exist") + + decode = tf.image.decode_jpeg + # load distorted pairs address + #input_paths = glob.glob(os.path.join(args.input_dir, "*.png")) + input_paths = glob.glob(os.path.join(args.input_dir, "*.png")) + + + if len(input_paths) == 0: + raise Exception("input_dir contains no image files") + + def get_name(path): + name, _ = os.path.splitext(os.path.basename(path)) + return name + + # if the image names are numbers, sort by the value rather than asciibetically + # having sorted inputs means that the outputs are sorted in test mode + if all(get_name(path).isdigit() for path in input_paths): + input_paths = sorted(input_paths, key=lambda path: int(get_name(path))) + else: + input_paths = sorted(input_paths) + + with tf.name_scope("load_images"): + path_queue = tf.train.string_input_producer(input_paths, shuffle= args.mode == "train") + reader = tf.WholeFileReader() + paths, contents = reader.read(path_queue) + input = decode(contents) + input = tf.image.convert_image_dtype(input, dtype=tf.float32) + assertion = tf.assert_equal(tf.shape(input)[2], 3, message="image does not have required channels") + with tf.control_dependencies([assertion]): + input = tf.identity(input) + + input.set_shape([None, None, 3]) + + images = preprocess(input) + + seed = random.randint(0, 2 ** 31 - 1) + + # scale and crop input image to match 256x256 size + def transform(image): + r = image + r = tf.image.resize_images(r, [args.scale_size, args.scale_size], method=tf.image.ResizeMethod.AREA) + + offset = tf.cast(tf.floor(tf.random_uniform([2], 0, args.scale_size - args.crop_size + 1, seed=seed)), dtype=tf.int32) + if args.scale_size > args.crop_size: + r = tf.image.crop_to_bounding_box(r, offset[0], offset[1], args.crop_size, args.crop_size) + + elif args.scale_size < args.crop_size: + raise Exception("scale size cannot be less than crop size") + return r + + with tf.name_scope("images"): + input_images = transform(images) + + if args.mode=="train": + input_images, _ = image_warping2(input_images, w=0.0) + deformed_images, deformation = image_warping2(input_images, w=0.1) + deformation = tf.squeeze(deformation) + + # crop after warping + input_images = tf.image.crop_to_bounding_box(input_images, 5, 5, 128, 128) + deformed_images = tf.image.crop_to_bounding_box(deformed_images, 5, 5, 128, 128) + + # clip image values + input_images = tf.clip_by_value(input_images, clip_value_min=-1., clip_value_max=1.) + deformed_images = tf.clip_by_value(deformed_images, clip_value_min=-1., clip_value_max=1.) 
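# [Editor's note] Recap of load_examples() up to this point: each image is resized to
# scale_size and randomly cropped to crop_size (both default to 146, so the
# "match 256x256 size" comment above is stale), warped by image_warping2 (w appears to
# set the warp strength: 0.0 for the reference copy, 0.1 for the deformed copy, and the
# returned deformation parameters feed the align loss), cropped from 146 to 128 at
# offset (5, 5) to trim warp border artifacts, and clipped to [-1, 1].
# preprocess()/deprocess() are exact inverses on [0, 1]:
#
#     deprocess(preprocess(x)) = ((x * 2 - 1) + 1) / 2 = x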
+ + paths_batch, images_batch, images_deformed_batch, deformation_batch = tf.train.batch( + [paths, input_images, deformed_images, deformation], batch_size=args.batch_size) + steps_per_epoch = int(math.ceil(len(input_paths) / args.batch_size)) + + return Examples( + paths=paths_batch, + images=images_batch, + images_deformed=images_deformed_batch, + deformation=deformation_batch, + count=len(input_paths), + steps_per_epoch=steps_per_epoch, + shape=input.get_shape() + ) + +def CNN_tower(inputs, n_landmarks, isTrain): + + n_filters = [20, 48, 64, 80, 256, n_landmarks] + with tf.variable_scope("layer_1"): + x = conv(inputs, n_filters[0]) + x = tf.contrib.layers.batch_norm(x, updates_collections=None, decay=0.9, center=True, + scale=True, + activation_fn=tf.nn.relu, is_training=isTrain) + # only the first layer has a 2x2 maxpooling + x = tf.layers.max_pooling2d(inputs=x, pool_size=[2, 2], strides=2) + with tf.variable_scope("layer_2"): + x = conv(x, n_filters[1]) + x = tf.contrib.layers.batch_norm(x, updates_collections=None, decay=0.9, center=True, + scale=True, + activation_fn=tf.nn.relu, is_training=isTrain) + with tf.variable_scope("layer_3"): + x = conv(x, n_filters[2]) + x = tf.contrib.layers.batch_norm(x, updates_collections=None, decay=0.9, center=True, + scale=True, + activation_fn=tf.nn.relu, is_training=isTrain) + with tf.variable_scope("layer_4"): + x = conv(x, n_filters[3]) + x = tf.contrib.layers.batch_norm(x, updates_collections=None, decay=0.9, center=True, + scale=True, + activation_fn=tf.nn.relu, is_training=isTrain) + with tf.variable_scope("layer_5"): + x = conv(x, n_filters[4]) + x = tf.contrib.layers.batch_norm(x, updates_collections=None, decay=0.9, center=True, + scale=True, + activation_fn=tf.nn.relu, is_training=isTrain) + with tf.variable_scope("layer_6"): + x = conv(x, n_filters[5]) + x = tf.contrib.layers.batch_norm(x, updates_collections=None, decay=0.9, center=True, + scale=True, + activation_fn=tf.nn.relu, is_training=isTrain) + + return x + + +def align_loss(predA_deformed, predB, n_landmarks): + + + # compute the mean of landmark locations + + + batch_size = predB.get_shape()[0] + pred_size = predB.get_shape()[1] + index = tf.range(0, tf.cast(pred_size, tf.float32), delta=1, dtype=tf.float32) + index = tf.reshape(index, [pred_size, 1]) + + x_index = tf.tile(index, [1, pred_size]) + + index = tf.transpose(index) + + y_index = tf.tile(index, [pred_size, 1]) + + x_index = tf.expand_dims(x_index, 2) + x_index = tf.expand_dims(x_index, 0) + + y_index = tf.expand_dims(y_index, 2) + y_index = tf.expand_dims(y_index, 0) + + x_index = tf.tile(x_index, [batch_size, 1, 1, n_landmarks]) + y_index = tf.tile(y_index, [batch_size, 1, 1, n_landmarks]) + + + x_index_avg_A = x_index * predA_deformed + y_index_avg_A = y_index * predA_deformed + + x_index_avg_B = x_index * predB + y_index_avg_B = y_index * predB + + + pA_sum = tf.reduce_sum(predA_deformed, axis=[1, 2]) + pB_sum = tf.reduce_sum(predB, axis=[1, 2]) + + + x_index_avg_A = tf.reduce_mean(x_index_avg_A, axis=[1, 2]) + y_index_avg_A = tf.reduce_mean(y_index_avg_A, axis=[1, 2]) + x_index_avg_B = tf.reduce_mean(x_index_avg_B, axis=[1, 2]) + y_index_avg_B = tf.reduce_mean(y_index_avg_B, axis=[1, 2]) + + x_index_avg_A = x_index_avg_A / pA_sum + y_index_avg_A = y_index_avg_A / pA_sum + x_index_avg_B = x_index_avg_B / pB_sum + y_index_avg_B = y_index_avg_B / pB_sum + + # compute align loss + loss = tf.pow(x_index_avg_A-x_index_avg_B, 2.) + tf.pow(y_index_avg_A - y_index_avg_B, 2.) 
+ loss = tf.reduce_mean(loss) + return loss, x_index, y_index + + +def align_loss2(predA, predB, deformation, n_landmarks): + + + # compute the mean of landmark locations + + batch_size = predA.get_shape()[0] + pred_size = predA.get_shape()[1] + index = tf.range(0, tf.cast(pred_size, tf.float32), delta=1, dtype=tf.float32) + index = tf.reshape(index, [pred_size, 1]) + + x_index = tf.tile(index, [1, pred_size]) + + index = tf.transpose(index) + + y_index = tf.tile(index, [pred_size, 1]) + + x_index = tf.expand_dims(x_index, 2) + x_index = tf.expand_dims(x_index, 0) + + y_index = tf.expand_dims(y_index, 2) + y_index = tf.expand_dims(y_index, 0) + + x_index = tf.tile(x_index, [batch_size, 1, 1, n_landmarks]) + y_index = tf.tile(y_index, [batch_size, 1, 1, n_landmarks]) + + + u_norm2 = tf.pow(x_index, 2.) + tf.pow(y_index, 2.) + u_norm2 = u_norm2 * predA + loss_part1 = tf.reduce_sum(u_norm2, axis=[1, 2]) + + x_index_deformed = feature_warping2(x_index, deformation, padding=3) + y_index_defomred = feature_warping2(y_index, deformation, padding=3) + v_norm2 = tf.pow(x_index_deformed, 2.) + tf.pow(y_index_defomred, 2.) + v_norm2 = v_norm2 * predB + loss_part2 = tf.reduce_sum(v_norm2, axis=[1, 2]) + + + loss_part3x = tf.reduce_sum(x_index * predA, axis=[1, 2]) + loss_part3y = tf.reduce_sum(y_index * predA, axis=[1, 2]) + loss_part4x = tf.reduce_sum(x_index_deformed * predB, axis=[1, 2]) + loss_part4y = tf.reduce_sum(y_index_defomred * predB, axis=[1, 2]) + + loss_part3 = loss_part3x * loss_part4x + loss_part3y * loss_part4y + loss = loss_part1 + loss_part2 - 2. * loss_part3 + loss = tf.reduce_mean(loss) + + return loss + + + + +def main(): + + """Create the model and start the training.""" + args = get_arguments() + + tf.set_random_seed(args.random_seed) + examples = load_examples(args) + + print("examples count = %d" % examples.count) + + + + with tf.variable_scope("cnn_tower"): + predA = CNN_tower(examples.images, n_landmarks=args.K, isTrain=args.mode == "train") + + with tf.variable_scope("cnn_tower", reuse=True): + predB = CNN_tower(examples.images_deformed, n_landmarks=args.K, isTrain=args.mode == "train") + + + # apply a spatial softmax to obtain K probability maps + + pred_size = predA.get_shape()[1] + + predA = tf.reshape(predA, [-1, pred_size*pred_size, args.K]) + predB = tf.reshape(predB, [-1, pred_size*pred_size, args.K]) + + predA = tf.nn.softmax(predA, dim=1) #predA = tf.nn.softmax(predA, axis=1) + predB = tf.nn.softmax(predB, dim=1) #predB = tf.nn.softmax(predB, axis=1) + + predA = tf.reshape(predA, [-1, pred_size, pred_size, args.K]) + predB = tf.reshape(predB, [-1, pred_size, pred_size, args.K]) + + + # visualizing landmarks + predA_vis = tf.reduce_mean(predA, axis=3) + predA_vis = tf.expand_dims(predA_vis, axis=3) + + # another visualization + pred_max = tf.reduce_max(predA, axis=[1, 2]) + pred_max = tf.expand_dims(pred_max, axis=1) + pred_max = tf.expand_dims(pred_max, axis=1) + pred_max = tf.equal(predA, pred_max) + pred_max = tf.cast(pred_max, tf.float32) + + mask = tf.range(start=1, limit=args.K+1, delta=1, dtype=tf.float32) + mask = tf.reshape(mask, [1, 1, 1, args.K]) + mask = tf.tile(mask, [args.batch_size, pred_size, pred_size, 1]) + mask = mask * pred_max + mask = tf.reduce_max(mask, axis=3, keep_dims=True) + + landmarks = tf.convert_to_tensor(landmark_colors(args.K), tf.float32) + + mask = tf.reshape(mask, [args.batch_size, pred_size*pred_size]) + mask = tf.cast(mask, tf.int32) + mask = tf.gather(landmarks, mask, axis=0) + mask = tf.reshape(mask, [args.batch_size, 
pred_size, pred_size, 3]) + + pred_max = tf.reduce_max(pred_max, axis=3) + pred_max = tf.expand_dims(pred_max, axis=3) + + # compute the diversity loss + + + def diversity_loss(pred, n_landmark, pool_size): + pred_pool = tf.nn.pool(pred, window_shape=[pool_size, pool_size], strides=[1, 1], pooling_type="AVG", padding="VALID") + # convert avg pool to sum pool + # pred_pool = pred_pool * float(pool_size) * float(pool_size) + pred_max = tf.reduce_max(pred_pool, axis=3) + pred_max_sum = tf.reduce_sum(pred_max, axis=[1, 2]) + pred_max_sum = float(n_landmark) - pred_max_sum + pred_max_sum = tf.reduce_mean(pred_max_sum) + return pred_max_sum + + diversityLoss_predA = diversity_loss(predA, n_landmark=args.K, pool_size=args.M) + diversityLoss_predB = diversity_loss(predB, n_landmark=args.K, pool_size=args.M) + div_loss = diversityLoss_predA + diversityLoss_predB + + # compute the align loss + algn_loss = align_loss2(predA, predB, examples.deformation, n_landmarks= args.K) + + # compute the weight decay loss + decay_loss = weight_decay() * args.weight_decay + + + with tf.name_scope("train"): + optim = tf.train.AdamOptimizer(args.learning_rate, args.beta1) + # grads_and_vars = optim.compute_gradients(loss) + # train = optim.apply_gradients(grads_and_vars) + train_op = optim.minimize(algn_loss*args.align_weight + div_loss*args.diversity_weight + decay_loss ) + # global_step = tf.contrib.framework.get_or_create_global_step() + global_step = tf.train.get_or_create_global_step() + incr_global_step = tf.assign(global_step, global_step + 1) + train = tf.group(train_op, incr_global_step) + + input_images = deprocess(examples.images) + input_deformed = deprocess(examples.images_deformed) + + + # overlay landmarks on the input image + + landmarks_image = pred_max * mask + + pred_max_resized = tf.image.resize_images(pred_max, [128, 128], tf.image.ResizeMethod.AREA) + pred_max_resized = tf.greater(pred_max_resized, 0.) 
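# [Editor's note] Two remarks on the surrounding code:
# (1) diversity_loss above: after the spatial softmax each of the K landmark maps
#     sums to 1 over space, so sum_{x,y} max_k avgpool(P)[x,y,k] can approach K only
#     when the K maps concentrate their mass at distinct locations; subtracting that
#     sum from K therefore penalizes landmarks collapsing onto one another.
# (2) the thresholded, upsampled argmax map being built here acts as an alpha mask in
#     the next lines: input * (1 - m) + m * colors overlays one colored dot per landmark.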
+ pred_max_resized = tf.cast(pred_max_resized, tf.float32) + + mask_resized = tf.image.resize_images(mask, [128, 128]) + + + input_images_landmark = input_images * (1.-pred_max_resized) + pred_max_resized * mask_resized + + + with tf.name_scope("parameter_count"): + parameter_count = tf.reduce_sum([tf.reduce_prod(tf.shape(v)) for v in tf.trainable_variables()]) + + tf.summary.image("Input", input_images) + tf.summary.image("Deformed", input_deformed) + tf.summary.image("PredA", predA_vis) + # tf.summary.image("AApredAmax", mask) + # tf.summary.image("PredB", predB_vis) + tf.summary.image("Landmark", input_images_landmark) + # tf.summary.image("AApredAmax", landmarks_image) + + tf.summary.scalar("loss_align", algn_loss) + tf.summary.scalar("loss_diversity", div_loss) + tf.summary.scalar("loss_decay", decay_loss) + + output_images = tf.image.convert_image_dtype(input_images_landmark, dtype=tf.uint8, saturate=True) + with tf.name_scope("encode_images"): + display_fetches = { + "paths": examples.paths, + "outputs": tf.map_fn(tf.image.encode_png, output_images, dtype=tf.string, name="input_pngs"), + } + + + saver = tf.train.Saver(max_to_keep=1) + + + #print('----------------------args.output_dir:', args.output_dir) + sv = tf.train.Supervisor(logdir=os.path.join(args.output_dir, 'logs'), save_summaries_secs=0, saver=None) + #sv = tf.train.Supervisor(logdir=os.path.join(os.path.join(args.train_url, 'logs')), save_summaries_secs=0,saver=None) + #print('--------------模型log位置 ', args.output_dir) + + + with sv.managed_session() as sess: #logdir中去找checkpoint,如果没有的话,自动执行初始化 + + max_steps = 2 ** 32 + if args.max_epochs is not None: + max_steps = examples.steps_per_epoch * args.max_epochs + print ("max epochs: ", args.max_epochs) + print ("max steps : ", max_steps) + start = time.time() + + print("parameter_count =", sess.run(parameter_count)) + + # print(args.checkpoint) + # if args.checkpoint is not None: + # # print(args.checkpoint) + # print ("loading from checkpoint...") + # checkpoint = tf.train.latest_checkpoint(args.checkpoint) + # print (checkpoint) + # saver.restore(sess, checkpoint) + + if args.checkpoint is not None: + checkpoint = tf.train.latest_checkpoint(args.checkpoint) + + + if checkpoint is not None: + print("------------读取checkpoint...") + saver.restore(sess, checkpoint) + print("------------已恢复checkpoint") + + + if args.mode == "train": + # training + for step in range(max_steps): + def should(freq): + return freq > 0 and ((step + 1) % freq == 0 or step == max_steps - 1) + + fetches = { + "train": train, + "global_step": sv.global_step, + "loss": algn_loss, + "labels": examples.images, + "offset": examples.deformation, + "predA" : predA, + "decay_loss":decay_loss, + "div_loss":div_loss, + + + } + + if should(freq=args.summary_freq): + fetches["summary"] = sv.summary_op + + results = sess.run(fetches) + + if should(freq=args.summary_freq): + sv.summary_writer.add_summary(results["summary"], results["global_step"]) + # global_step will have the correct step count if we resume from a checkpoint + train_epoch = math.ceil(results["global_step"] / examples.steps_per_epoch) + train_step = (results["global_step"] - 1) % examples.steps_per_epoch + 1 + rate = (step + 1) * args.batch_size / (time.time() - start) + remaining = (max_steps - step) * args.batch_size / rate + print("progress epoch %d step %d image/sec %0.1f remaining %dm" % ( + train_epoch, train_step, rate, remaining / 60)) + print ("loss_align", results["loss"]) + print ("loss_diversity", results["div_loss"]) + print 
("loss_decay", results["decay_loss"]) + print ("------------------------------") + + + + if should(freq=args.save_freq): + print("saving model...") + # saver.save(sess, os.path.join(args.output_dir, "model"), global_step=sv.global_step) + + + saver.save(sess, os.path.join(args.train_url), global_step=sv.global_step) + print("---------模型存储位置 args.train_url : ", args.train_url) + + elif args.mode=="test": + # testing + start = time.time() + max_steps = min(examples.steps_per_epoch, max_steps) + for step in range(max_steps): + results = sess.run(display_fetches) + filesets = save_images(results, args) + for i, f in enumerate(filesets): + print("evaluated image", f["name"]) + print("rate", (time.time() - start) / max_steps) + #print("----------图像存储位置 : ", ) + + +if __name__ == '__main__': + main() diff --git a/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/test/.keep b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/test/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/test/train_full_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..eb953c2d2581ad457f2d8ab6be04f19c0e543543 --- /dev/null +++ b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/test/train_full_1p.sh @@ -0,0 +1,173 @@ +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --obs_url # output path in OBS + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --obs_url* ]];then + obs_url=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf 
./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} +cp -r ${data_path}/MA-new-code-04-22-09-36/code/utils ./ +mkdir ./data +mkdir ./workplace + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +batch_size=32 + +python3.7 train.py --input_dir=${data_path}/data10w --output_dir=${output_path} --data_url=./data --train_url=./workplace > ${print_log} 2>&1 + +step=`grep "epoch 200" ${print_log} | awk '{print $5}' | tail -n 1` +# 性能相关数据计算 +FPS=`grep "image/sec" ${print_log} | awk '{print $7}' | tail -n 1` + +# 精度相关数据计算 +loss_diversity=`grep "loss_diversity" ${print_log} | awk '{print $2}' | tail -n 1` +loss_decay=`grep "loss_decay" ${print_log} | awk '{print $2}' | tail -n 1` +# 提取所有loss打印信息 +grep "loss_align" ${print_log} | awk '{print $2}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + +########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." 
+    echo "------------------ INFO NOTICE END------------------"
+fi
+
+# 获取最终的casename,请保留,case文件名为${CaseName}
+get_casename
+
+# 重命名loss文件
+if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ];
+then
+    mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt
+fi
+
+# 训练端到端耗时
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# 最后一个迭代loss值,不需要修改
+ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`)
+
+echo "------------------ Final result ------------------"
+
+# 输出性能FPS/单step耗时/端到端耗时
+echo "progress epoch 200 step $step image/sec $FPS remaining 0m"
+echo "loss_align $ActualLoss"
+echo "loss_diversity $loss_diversity"
+echo "loss_decay $loss_decay"
+
+#关键信息打印到${CaseName}.log中,不需要修改
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/test/train_performance_1p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..a8c4854cf3e564dd5b03ae07c109a13a9de33afd
--- /dev/null
+++ b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/test/train_performance_1p.sh
@@ -0,0 +1,166 @@
+#!/bin/bash
+
+# shell脚本所在路径
+cur_path=`echo $(cd $(dirname $0);pwd)`
+
+# 判断当前shell是否是performance
+perf_flag=`echo $0 | grep performance | wc -l`
+
+# 当前执行网络的名称
+Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'`
+
+export RANK_SIZE=1
+export RANK_ID=0
+export JOB_ID=10087
+
+# 路径参数初始化
+data_path=""
+output_path=""
+
+# 帮助信息,不需要修改
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage:./train_performance_1p.sh "
+    echo " "
+    echo "parameter explain:
+    --data_path            # dataset of training
+    --output_path          # output of training
+    --obs_url              # output path in OBS
+    --train_steps          # max_step for training
+    --train_epochs         # max_epoch for training
+    --batch_size           # batch size
+    -h/--help              show help message
+    "
+    exit 1
+fi
+
+# 参数校验,不需要修改
+for para in $*
+do
+    if [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --output_path* ]];then
+        output_path=`echo ${para#*=}`
+    elif [[ $para == --obs_url* ]];then
+        obs_url=`echo ${para#*=}`
+    elif [[ $para == --train_steps* ]];then
+        train_steps=`echo ${para#*=}`
+    elif [[ $para == --train_epochs* ]];then
+        train_epochs=`echo ${para#*=}`
+    elif [[ $para == --batch_size* ]];then
+        batch_size=`echo ${para#*=}`
+    fi
+done
+
+# 校验是否传入data_path,不需要修改
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+
+# 校验是否传入output_path,不需要修改
+if [[ $output_path == "" ]];then
+    output_path="./test/output/${ASCEND_DEVICE_ID}"
+fi
+
+# 设置打屏日志文件名,请保留,文件名为${print_log}
+print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log"
+
+CaseName=""
+function get_casename()
+{
+    if [ x"${perf_flag}" =
x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} +cp -r ${data_path}/MA-new-code-04-22-09-36/code/utils ./ +mkdir ./data +mkdir ./workplace + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +batch_size=32 +epochs=1 +python3.7 train.py --input_dir=${data_path}/data10w --output_dir=${output_path} --data_url=./data --train_url=./workplace --max_epochs=${epochs} > ${print_log} 2>&1 + +step=`grep "epoch 200" ${print_log} | awk '{print $5}' | tail -n 1` +# 性能相关数据计算 +FPS=`grep "image/sec" ${print_log} | awk '{print $7}' | tail -n 1` + +# 精度相关数据计算 +loss_diversity=`grep "loss_diversity" ${print_log} | awk '{print $2}' | tail -n 1` +loss_decay=`grep "loss_decay" ${print_log} | awk '{print $2}' | tail -n 1` +# 提取所有loss打印信息 +grep "loss_align" ${print_log} | awk '{print $2}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + +########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." 
+ echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +echo "------------------ Final result ------------------" + +# 输出性能FPS/单step耗时/端到端耗时 +echo "progress epoch 200 step $step image/sec $FPS remaining 0m" +echo "loss_align $ActualLoss" +echo "loss_diversity $loss_diversity" +echo "loss_decay $loss_decay" + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/train.py b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/train.py new file mode 100644 index 0000000000000000000000000000000000000000..083929587425d6871b8e9d48cece29639991589a --- /dev/null +++ b/TensorFlow/contrib/cv/Factorized_ID1301_for_TensorFlow/train.py @@ -0,0 +1,791 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
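# [Editor's note] Relative to test.py above, this train.py mainly swaps the queue-based
# input (tf.train.string_input_producer + tf.WholeFileReader) for a tf.data pipeline;
# drop_remainder=True plus the explicit set_shape() calls below keep every batch shape
# static, which suits graph compilation on the NPU (an inference, not stated in the
# diff). A minimal sketch of the pattern used in load_examples(), where parse_fn stands
# in for the parse() helper defined there:
#
#     dataset = tf.data.Dataset.from_tensor_slices(input_paths)
#     dataset = dataset.shuffle(len(input_paths)).map(parse_fn)
#     dataset = dataset.batch(batch_size, drop_remainder=True).repeat(max_epochs)
#     batch = dataset.make_one_shot_iterator().get_next()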
+from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + + +from npu_bridge.npu_init import * +import cv2 +import tensorflow as tf +__all__ = [tf] +import numpy as np +import glob, os, random, math, collections, time, argparse, shutil +from utils.warp import feature_warping2 +from utils.warp import image_warping2 +#import moxing as mox + +from matplotlib import cm + +# ***************************************Train mode********************************************************* + +# Parameter setting **************************************************************************************************** + + +MODE = "train" + + +#INPUT_DIR = 'Factorized-Spatial-Embeddings-master/datasets/img_align_celeba_png_10w' #读取数据集路径 + + +#OUTPUT_DIR = 'Factorized-Spatial-Embeddings-master/output' #输出路径 +#OUTPUT_DIR = '/fse-test/output/test1/output/V0085/' #输出路径 存储模型 以及输出图像 +#OUTPUT_DIR = 'Factorized-Spatial-Embeddings-master/output' #输出路径 +LANDMARK_N = 8 + + +#data_dir = "/cache/dataset" +#os.makedirs(data_dir) + +#model_dir = "/cache/result" +#os.makedirs(model_dir) + +#INPUT_DIR = './dataset/celebA-test/' #数据集路径 +#INPUT_DIR = 's3://fse-test/dataset/celebA-test/' #obs的数据集路径 + +#DATA_DIRECTORY = "/celebatest/celebAtest/" #数据集路径(obs桶中) +#OUTPUT_DIR = './output' +#INPUT_DIR = '/cache/dataset' +#OUTPUT_DIR = '/cache/result' + +#CHECKPOINT = './backup/model/' +CHECKPOINT = None + + +SAVE_FREQ = 500 +SUMMARY_FREQ = 20 +BATCH_SIZE = 32 +DOWNSAMPLE_M = 4 +DIVERSITY = 500. +ALIGN = 1. +LEARNING_RATE = 1.e-4 +MOMENTUM = 0.5 +RANDOM_SEED = 1234 +WEIGHT_DECAY = 0.0005 +SCALE_SIZE = 146 +CROP_SIZE = 146 +MAX_EPOCH = 200 + + + + + +#OUTPUT_DIR = './output' +#OUTPUT_DIR = r'C:\Users\User\PycharmProjects\pythonProject2\Factorized-Spatial-Embeddings-master\output' + +#CHECKPOINT = r'C:\Users\User\PycharmProjects\pythonProject2\Factorized-Spatial-Embeddings-master\backup\model' +#CHECKPOINT = 'backup.model' + + + + +def get_arguments(): + """Parse all the arguments provided from the CLI. + + Returns: + A list of parsed arguments. 
+ """ + parser = argparse.ArgumentParser(description="Factorized Spatial Embeddings") + parser.add_argument("--data_url", type=str, default="/fse-1/data/") + parser.add_argument("--train_url", type=str, default="/fse-1/workplace/") + #parser.add_argument("--mode", default=MODE, choices=["train", "test"]) + parser.add_argument("--mode", default=MODE) + parser.add_argument("--input_dir", + help="Path to the directory containing the training or testing images.") + parser.add_argument("--K", type=int, default=LANDMARK_N, + help="Number of landmarks.") + + parser.add_argument("--output_dir", + help="Where to put output files") + + + parser.add_argument("--batch_size", type=int, default=BATCH_SIZE, + help="Number of images sent to the network in one step.") + parser.add_argument("--learning_rate", type=float, default=LEARNING_RATE, + help="Learning rate for adam.") + parser.add_argument("--beta1", type=float, default=MOMENTUM, + help="Momentum component of the optimiser.") + parser.add_argument("--M", type=int, default=DOWNSAMPLE_M, + help="Downsampling value of the diversity loss.") + parser.add_argument("--weight_decay", type=float, default=WEIGHT_DECAY, + help="Regularisation parameter for L2-loss.") + parser.add_argument("--random_seed", type=int, default=RANDOM_SEED, + help="Random seed to have reproducible results.") + parser.add_argument("--diversity_weight", type=float, default=DIVERSITY, + help="Weight on diversity loss.") + parser.add_argument("--align_weight", type=float, default=ALIGN, + help="Weight on align loss.") + parser.add_argument("--scale_size", type=int, default=SCALE_SIZE, + help="Scale images to this size before cropping to CROP_SIZE") + parser.add_argument("--crop_size", type=int, default=CROP_SIZE, + help="CROP images to this size") + parser.add_argument("--max_epochs", type=int, default=MAX_EPOCH, + help="Number of training epochs") + parser.add_argument("--checkpoint", default=CHECKPOINT, + help="Directory with checkpoint to resume training from or use for testing") + + parser.add_argument("--summary_freq", type=int, default=SUMMARY_FREQ, + help="Update summaries every summary_freq steps") + parser.add_argument("--save_freq", type=int, default=SAVE_FREQ, help="Save model every save_freq steps") + + + + #其他参数 + #parser.add_argument("--data_url", default=0) + #parser.add_argument("--train_url", default=0) + parser.add_argument("--num_gpus", default=1) + + return parser.parse_args() + + + + + + + +# def get_arguments(): +# """Parse all the arguments provided from the CLI. +# +# Returns: +# A list of parsed arguments. 
+# """ +# parser = argparse.ArgumentParser(description="Factorized Spatial Embeddings") +# parser.add_argument("--mode", default=MODE, choices=["train", "test"]) +# parser.add_argument("--batch_size", type=int, default=BATCH_SIZE, +# help="Number of images sent to the network in one step.") +# parser.add_argument("--input_dir", type=str, default=DATA_DIRECTORY, +# help="Path to the directory containing the training or testing images.") +# parser.add_argument("--learning_rate", type=float, default=LEARNING_RATE, +# help="Learning rate for adam.") +# parser.add_argument("--beta1", type=float, default=MOMENTUM, +# help="Momentum component of the optimiser.") +# parser.add_argument("--K", type=int, default=LANDMARK_N, +# help="Number of landmarks.") +# parser.add_argument("--M", type=int, default=DOWNSAMPLE_M, +# help="Downsampling value of the diversity loss.") +# parser.add_argument("--weight_decay", type=float, default=WEIGHT_DECAY, +# help="Regularisation parameter for L2-loss.") +# parser.add_argument("--random-seed", type=int, default=RANDOM_SEED, +# help="Random seed to have reproducible results.") +# parser.add_argument("--diversity_weight", type=float, default=DIVERSITY, +# help="Weight on diversity loss.") +# parser.add_argument("--align_weight", type=float, default=ALIGN, +# help="Weight on align loss.") +# parser.add_argument("--scale_size", type=int, default=SCALE_SIZE, +# help="Scale images to this size before cropping to CROP_SIZE") +# parser.add_argument("--crop_size", type=int, default=CROP_SIZE, +# help="CROP images to this size") +# parser.add_argument("--max_epochs", type=int, default=MAX_EPOCH, +# help="Number of training epochs") +# parser.add_argument("--checkpoint", default=CHECKPOINT, +# help="Directory with checkpoint to resume training from or use for testing") +# parser.add_argument("--output_dir", default=OUTPUT_DIR, +# help="Where to put output files") +# parser.add_argument("--summary_freq", type=int, default=SUMMARY_FREQ, +# help="Update summaries every summary_freq steps") +# parser.add_argument("--save_freq", type=int, default=SAVE_FREQ, help="Save model every save_freq steps") +# return parser.parse_args() + +def landmark_colors(n_landmarks): + """Compute landmark colors. + + Returns: + An array of RGB values. + """ + cmap = cm.get_cmap('hsv') + landmark_color = [] + landmark_color.append((0., 0., 0.)) + for i in range(n_landmarks): + landmark_color.append(cmap(i/float(n_landmarks))[0:3]) + landmark_color = np.array(landmark_color) + return landmark_color + + +# Collections definition +Examples = collections.namedtuple("Examples", + "paths, images, images_deformed, deformation, count, steps_per_epoch, shape") +Model = collections.namedtuple("Model", "pos_loss, neg_loss, distance") + +def weight_decay(): + """Compute weight decay loss. + + Returns: + Weight decay loss. 
+ """ + costs = [] + for var in tf.trainable_variables(): + if var.op.name.find('filter')>0: + costs.append(tf.nn.l2_loss(var)) + return tf.add_n(costs) + +def conv(batch_input, out_channels, stride=1): + with tf.variable_scope("conv"): + in_channels = batch_input.get_shape()[3] + filter = tf.get_variable("filter", [5, 5, in_channels, out_channels], dtype=tf.float32, + initializer=tf.random_normal_initializer(0, 0.02)) + conv = tf.nn.conv2d(batch_input, filter, [1, stride, stride, 1], padding="VALID") + return conv + + +def save_images(fetches, args, step=None): + image_dir = os.path.join(args.output_dir, "images") + if not os.path.exists(image_dir): + os.makedirs(image_dir) + + filesets = [] + for i, in_path in enumerate(fetches["paths"]): + name, _ = os.path.splitext(os.path.basename(in_path.decode("utf8"))) + fileset = {"name": name, "step": step} + filename = name + "-" + "outputs" + ".jpg" + if step is not None: + filename = "%08d-%s" % (step, filename) + fileset["outputs"] = filename + out_path = os.path.join(image_dir, filename) + contents = fetches["outputs"][i] + with open(out_path, "wb") as f: + f.write(contents) + filesets.append(fileset) + return filesets + + +def preprocess(image): + with tf.name_scope("preprocess"): + # [0, 1] => [-1, 1] + return image * 2 - 1 + +def deprocess(image): + with tf.name_scope("deprocess"): + # [-1, 1] => [0, 1] + return (image + 1) / 2 + +def load_examples(args): + """Load all images in the input_dir. + + Returns: + Examples.paths : batch of path of images, + Examples.images : batch of images, + Examples.images_deformed : batch of deformed images, + Examples.deformation : batch of deformation parameters, + """ + + + #if args.input_dir is None or not os.path.exists(args.input_dir): + if args.input_dir is None : + raise Exception("input_dir does not exist") + + decode = tf.image.decode_jpeg + # load distorted pairs address + #input_paths = glob.glob(os.path.join(args.input_dir, "*.png")) + input_paths = glob.glob(os.path.join(args.input_dir, "*.jpg")) + + + if len(input_paths) == 0: + raise Exception("input_dir contains no image files") + + def get_name(path): + name, _ = os.path.splitext(os.path.basename(path)) + return name + + # if the image names are numbers, sort by the value rather than asciibetically + # having sorted inputs means that the outputs are sorted in test mode + if all(get_name(path).isdigit() for path in input_paths): + input_paths = sorted(input_paths, key=lambda path: int(get_name(path))) + else: + input_paths = sorted(input_paths) + + def parse(paths): + with tf.name_scope("load_images"): + # path_queue = tf.train.string_input_producer(input_paths, shuffle= args.mode == "train") + # reader = tf.WholeFileReader() + # paths, contents = reader.read(path_queue) + # input = decode(contents) + + contents = tf.read_file(paths) + input = tf.image.decode_jpeg(contents) + + input = tf.image.convert_image_dtype(input, dtype=tf.float32) + assertion = tf.assert_equal(tf.shape(input)[2], 3, message="image does not have required channels") + with tf.control_dependencies([assertion]): + input = tf.identity(input) + + input.set_shape([None, None, 3]) + + images = preprocess(input) + + seed = random.randint(0, 2 ** 31 - 1) + + # scale and crop input image to match 256x256 size + def transform(image): + r = image + r = tf.image.resize_images(r, [args.scale_size, args.scale_size], method=tf.image.ResizeMethod.AREA) + + offset = tf.cast(tf.floor(tf.random_uniform([2], 0, args.scale_size - args.crop_size + 1, seed=seed)), dtype=tf.int32) + if 
args.scale_size > args.crop_size: + r = tf.image.crop_to_bounding_box(r, offset[0], offset[1], args.crop_size, args.crop_size) + + elif args.scale_size < args.crop_size: + raise Exception("scale size cannot be less than crop size") + return r + + with tf.name_scope("images"): + input_images = transform(images) + if args.mode=="train": + input_images, _ = image_warping2(input_images, w=0.0) + deformed_images, deformation = image_warping2(input_images, w=0.1) + deformation = tf.squeeze(deformation) + + # crop after warping + input_images = tf.image.crop_to_bounding_box(input_images, 5, 5, 128, 128) + deformed_images = tf.image.crop_to_bounding_box(deformed_images, 5, 5, 128, 128) + + # clip image values + input_images = tf.clip_by_value(input_images, clip_value_min=-1., clip_value_max=1.) + deformed_images = tf.clip_by_value(deformed_images, clip_value_min=-1., clip_value_max=1.) + + return input, paths, input_images, deformed_images, deformation + + # paths_batch, images_batch, images_deformed_batch, deformation_batch = tf.train.batch( + # [paths, input_images, deformed_images, deformation], batch_size=args.batch_size) + + dataset = tf.data.Dataset.from_tensor_slices(input_paths) + if args.mode == "train": + dataset = dataset.shuffle(buffer_size=len(input_paths)) + dataset = dataset.map(parse).batch(args.batch_size, drop_remainder=True).repeat(args.max_epochs) + iterator = dataset.make_one_shot_iterator() + input, paths_batch, images_batch, images_deformed_batch, deformation_batch = iterator.get_next() + + paths_batch.set_shape(args.batch_size + paths_batch.shape[1:]) + images_batch.set_shape(args.batch_size + images_batch.shape[1:]) + images_deformed_batch.set_shape(args.batch_size + images_deformed_batch.shape[1:]) + deformation_batch.set_shape(args.batch_size + deformation_batch.shape[1:]) + + steps_per_epoch = int(math.ceil(len(input_paths) / args.batch_size)) + + return Examples( + paths=paths_batch, + images=images_batch, + images_deformed=images_deformed_batch, + deformation=deformation_batch, + count=len(input_paths), + steps_per_epoch=steps_per_epoch, + shape=input.get_shape()[1:] + ) + +def CNN_tower(inputs, n_landmarks, isTrain): + + n_filters = [20, 48, 64, 80, 256, n_landmarks] + with tf.variable_scope("layer_1"): + x = conv(inputs, n_filters[0]) + x = tf.contrib.layers.batch_norm(x, updates_collections=None, decay=0.9, center=True, + scale=True, + activation_fn=tf.nn.relu, is_training=isTrain) + # only the first layer has a 2x2 maxpooling + x = tf.layers.max_pooling2d(inputs=x, pool_size=[2, 2], strides=2) + with tf.variable_scope("layer_2"): + x = conv(x, n_filters[1]) + x = tf.contrib.layers.batch_norm(x, updates_collections=None, decay=0.9, center=True, + scale=True, + activation_fn=tf.nn.relu, is_training=isTrain) + with tf.variable_scope("layer_3"): + x = conv(x, n_filters[2]) + x = tf.contrib.layers.batch_norm(x, updates_collections=None, decay=0.9, center=True, + scale=True, + activation_fn=tf.nn.relu, is_training=isTrain) + with tf.variable_scope("layer_4"): + x = conv(x, n_filters[3]) + x = tf.contrib.layers.batch_norm(x, updates_collections=None, decay=0.9, center=True, + scale=True, + activation_fn=tf.nn.relu, is_training=isTrain) + with tf.variable_scope("layer_5"): + x = conv(x, n_filters[4]) + x = tf.contrib.layers.batch_norm(x, updates_collections=None, decay=0.9, center=True, + scale=True, + activation_fn=tf.nn.relu, is_training=isTrain) + with tf.variable_scope("layer_6"): + x = conv(x, n_filters[5]) + x = tf.contrib.layers.batch_norm(x, 
updates_collections=None, decay=0.9, center=True, + scale=True, + activation_fn=tf.nn.relu, is_training=isTrain) + + return x + + +def align_loss(predA_deformed, predB, n_landmarks): + + + # compute the mean of landmark locations + + + batch_size = predB.get_shape()[0] + pred_size = predB.get_shape()[1] + index = tf.range(0, tf.cast(pred_size, tf.float32), delta=1, dtype=tf.float32) + index = tf.reshape(index, [pred_size, 1]) + + x_index = tf.tile(index, [1, pred_size]) + + index = tf.transpose(index) + + y_index = tf.tile(index, [pred_size, 1]) + + x_index = tf.expand_dims(x_index, 2) + x_index = tf.expand_dims(x_index, 0) + + y_index = tf.expand_dims(y_index, 2) + y_index = tf.expand_dims(y_index, 0) + + x_index = tf.tile(x_index, [batch_size, 1, 1, n_landmarks]) + y_index = tf.tile(y_index, [batch_size, 1, 1, n_landmarks]) + + + x_index_avg_A = x_index * predA_deformed + y_index_avg_A = y_index * predA_deformed + + x_index_avg_B = x_index * predB + y_index_avg_B = y_index * predB + + + pA_sum = tf.reduce_sum(predA_deformed, axis=[1, 2]) + pB_sum = tf.reduce_sum(predB, axis=[1, 2]) + + + x_index_avg_A = tf.reduce_mean(x_index_avg_A, axis=[1, 2]) + y_index_avg_A = tf.reduce_mean(y_index_avg_A, axis=[1, 2]) + x_index_avg_B = tf.reduce_mean(x_index_avg_B, axis=[1, 2]) + y_index_avg_B = tf.reduce_mean(y_index_avg_B, axis=[1, 2]) + + x_index_avg_A = x_index_avg_A / pA_sum + y_index_avg_A = y_index_avg_A / pA_sum + x_index_avg_B = x_index_avg_B / pB_sum + y_index_avg_B = y_index_avg_B / pB_sum + + # compute align loss + loss = tf.pow(x_index_avg_A-x_index_avg_B, 2.) + tf.pow(y_index_avg_A - y_index_avg_B, 2.) + loss = tf.reduce_mean(loss) + return loss, x_index, y_index + + +def align_loss2(predA, predB, deformation, n_landmarks): + + + # compute the mean of landmark locations + + batch_size = predA.get_shape()[0] + pred_size = predA.get_shape()[1] + index = tf.range(0, tf.cast(pred_size, tf.float32), delta=1, dtype=tf.float32) + index = tf.reshape(index, [pred_size, 1]) + + x_index = tf.tile(index, [1, pred_size]) + + index = tf.transpose(index) + + y_index = tf.tile(index, [pred_size, 1]) + + x_index = tf.expand_dims(x_index, 2) + x_index = tf.expand_dims(x_index, 0) + + y_index = tf.expand_dims(y_index, 2) + y_index = tf.expand_dims(y_index, 0) + + x_index = tf.tile(x_index, [batch_size, 1, 1, n_landmarks]) + y_index = tf.tile(y_index, [batch_size, 1, 1, n_landmarks]) + + + u_norm2 = tf.pow(x_index, 2.) + tf.pow(y_index, 2.) + u_norm2 = u_norm2 * predA + loss_part1 = tf.reduce_sum(u_norm2, axis=[1, 2]) + + x_index_deformed = feature_warping2(x_index, deformation, padding=3) + y_index_defomred = feature_warping2(y_index, deformation, padding=3) + v_norm2 = tf.pow(x_index_deformed, 2.) + tf.pow(y_index_defomred, 2.) + v_norm2 = v_norm2 * predB + loss_part2 = tf.reduce_sum(v_norm2, axis=[1, 2]) + + + loss_part3x = tf.reduce_sum(x_index * predA, axis=[1, 2]) + loss_part3y = tf.reduce_sum(y_index * predA, axis=[1, 2]) + loss_part4x = tf.reduce_sum(x_index_deformed * predB, axis=[1, 2]) + loss_part4y = tf.reduce_sum(y_index_defomred * predB, axis=[1, 2]) + + loss_part3 = loss_part3x * loss_part4x + loss_part3y * loss_part4y + loss = loss_part1 + loss_part2 - 2. 
* loss_part3 + loss = tf.reduce_mean(loss) + + return loss + + + + +def main(): + + """Create the model and start the training.""" + args = get_arguments() + + #mox.file.copy_parallel(args.data_url, data_dir) + + tf.set_random_seed(args.random_seed) + examples = load_examples(args) + + print("----------------------examples count = %d" % examples.count) + + + + with tf.variable_scope("cnn_tower"): + predA = CNN_tower(examples.images, n_landmarks=args.K, isTrain=args.mode == "train") + + with tf.variable_scope("cnn_tower", reuse=True): + predB = CNN_tower(examples.images_deformed, n_landmarks=args.K, isTrain=args.mode == "train") + + + # apply a spatial softmax to obtain K probability maps + + pred_size = predA.get_shape()[1] + + predA = tf.reshape(predA, [-1, pred_size*pred_size, args.K]) + predB = tf.reshape(predB, [-1, pred_size*pred_size, args.K]) + + predA = tf.nn.softmax(predA, dim=1) #predA = tf.nn.softmax(predA, axis=1) + predB = tf.nn.softmax(predB, dim=1) #predB = tf.nn.softmax(predB, axis=1) + + predA = tf.reshape(predA, [-1, pred_size, pred_size, args.K]) + predB = tf.reshape(predB, [-1, pred_size, pred_size, args.K]) + + + # visualizing landmarks + predA_vis = tf.reduce_mean(predA, axis=3) + predA_vis = tf.expand_dims(predA_vis, axis=3) + + # another visualization + pred_max = tf.reduce_max(predA, axis=[1, 2]) + pred_max = tf.expand_dims(pred_max, axis=1) + pred_max = tf.expand_dims(pred_max, axis=1) + pred_max = tf.equal(predA, pred_max) + pred_max = tf.cast(pred_max, tf.float32) + + mask = tf.range(start=1, limit=args.K+1, delta=1, dtype=tf.float32) + mask = tf.reshape(mask, [1, 1, 1, args.K]) + mask = tf.tile(mask, [args.batch_size, pred_size, pred_size, 1]) + mask = mask * pred_max + mask = tf.reduce_max(mask, axis=3, keep_dims=True) + + landmarks = tf.convert_to_tensor(landmark_colors(args.K), tf.float32) + + mask = tf.reshape(mask, [args.batch_size, pred_size*pred_size]) + mask = tf.cast(mask, tf.int32) + mask = tf.gather(landmarks, mask, axis=0) + mask = tf.reshape(mask, [args.batch_size, pred_size, pred_size, 3]) + + pred_max = tf.reduce_max(pred_max, axis=3) + pred_max = tf.expand_dims(pred_max, axis=3) + + # compute the diversity loss + + + def diversity_loss(pred, n_landmark, pool_size): + pred_pool = tf.nn.pool(pred, window_shape=[pool_size, pool_size], strides=[1, 1], pooling_type="AVG", padding="VALID") + # convert avg pool to sum pool + # pred_pool = pred_pool * float(pool_size) * float(pool_size) + pred_max = tf.reduce_max(pred_pool, axis=3) + pred_max_sum = tf.reduce_sum(pred_max, axis=[1, 2]) + pred_max_sum = float(n_landmark) - pred_max_sum + pred_max_sum = tf.reduce_mean(pred_max_sum) + return pred_max_sum + + diversityLoss_predA = diversity_loss(predA, n_landmark=args.K, pool_size=args.M) + diversityLoss_predB = diversity_loss(predB, n_landmark=args.K, pool_size=args.M) + div_loss = diversityLoss_predA + diversityLoss_predB + + # compute the align loss + algn_loss = align_loss2(predA, predB, examples.deformation, n_landmarks= args.K) + + # compute the weight decay loss + decay_loss = weight_decay() * args.weight_decay + + + with tf.name_scope("train"): + optim = tf.train.AdamOptimizer(args.learning_rate, args.beta1) + # grads_and_vars = optim.compute_gradients(loss) + # train = optim.apply_gradients(grads_and_vars) + train_op = optim.minimize(algn_loss*args.align_weight + div_loss*args.diversity_weight + decay_loss ) + # global_step = tf.contrib.framework.get_or_create_global_step() + global_step = tf.train.get_or_create_global_step() + incr_global_step = 
tf.assign(global_step, global_step + 1)
+        train = tf.group(train_op, incr_global_step)
+
+    input_images = deprocess(examples.images)
+    input_deformed = deprocess(examples.images_deformed)
+
+    # overlay landmarks on the input image
+    landmarks_image = pred_max * mask
+
+    pred_max_resized = tf.image.resize_images(pred_max, [128, 128], tf.image.ResizeMethod.AREA)
+    pred_max_resized = tf.greater(pred_max_resized, 0.)
+    pred_max_resized = tf.cast(pred_max_resized, tf.float32)
+
+    mask_resized = tf.image.resize_images(mask, [128, 128])
+
+    input_images_landmark = input_images * (1.-pred_max_resized) + pred_max_resized * mask_resized
+
+    with tf.name_scope("parameter_count"):
+        parameter_count = tf.reduce_sum([tf.reduce_prod(tf.shape(v)) for v in tf.trainable_variables()])
+
+    tf.summary.image("Input", input_images)
+    tf.summary.image("Deformed", input_deformed)
+    tf.summary.image("PredA", predA_vis)
+    # tf.summary.image("AApredAmax", mask)
+    # tf.summary.image("PredB", predB_vis)
+    tf.summary.image("Landmark", input_images_landmark)
+    # tf.summary.image("AApredAmax", landmarks_image)
+
+    tf.summary.scalar("loss_align", algn_loss)
+    tf.summary.scalar("loss_diversity", div_loss)
+    tf.summary.scalar("loss_decay", decay_loss)
+
+    output_images = tf.image.convert_image_dtype(input_images_landmark, dtype=tf.uint8, saturate=True)
+    with tf.name_scope("encode_images"):
+        display_fetches = {
+            "paths": examples.paths,
+            "outputs": tf.map_fn(tf.image.encode_jpeg, output_images, dtype=tf.string, name="input_pngs"),
+        }
+
+    saver = tf.train.Saver(max_to_keep=1)
+
+    config = tf.ConfigProto()
+    custom_op = config.graph_options.rewrite_options.custom_optimizers.add()
+    custom_op.name = "NpuOptimizer"
+
+    custom_op.parameter_map["use_off_line"].b = True
+    custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
+
+    config.graph_options.rewrite_options.remapping = RewriterConfig.OFF  # remapping must be disabled explicitly
+    config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF
+
+    sv = tf.train.Supervisor(logdir=os.path.join(args.output_dir, 'logs'), save_summaries_secs=0, saver=None)
+    #sv = tf.train.Supervisor(logdir=os.path.join(os.path.join(args.train_url, 'logs')), save_summaries_secs=0,saver=None)
+    print('-------------- model log directory ', args.output_dir)
+
+    with sv.managed_session(config=config) as sess:  # look for a checkpoint in logdir; if there is none, run the initializers automatically
+
+        max_steps = 2 ** 32
+        if args.max_epochs is not None:
+            max_steps = examples.steps_per_epoch * args.max_epochs
+            print("max epochs: ", args.max_epochs)
+            print("max steps : ", max_steps)
+        start = time.time()
+
+        print("parameter_count =", sess.run(parameter_count))
+
+        # print(args.checkpoint)
+        # if args.checkpoint is not None:
+        #     # print(args.checkpoint)
+        #     print("loading from checkpoint...")
+        #     checkpoint = tf.train.latest_checkpoint(args.checkpoint)
+        #     print(checkpoint)
+        #     saver.restore(sess, checkpoint)
+
+        if args.checkpoint is not None:
+            checkpoint = tf.train.latest_checkpoint(args.checkpoint)
+            if checkpoint is not None:
+                print("loading from checkpoint...")
+                saver.restore(sess, checkpoint)
+                print("restore checkpoint --completed")
+
+        if args.mode == "train":
+            # training
+            for step in range(max_steps):
+                def should(freq):
+                    return freq > 0 and ((step + 1) % freq == 0 or step == max_steps - 1)
+
+                fetches = {
+                    "train": train,
+                    "global_step": sv.global_step,
+                    "loss": algn_loss,
+                    "labels": examples.images,
+                    "offset": examples.deformation,
+                    "predA": predA,
+                    "decay_loss": decay_loss,
"div_loss":div_loss, + + + } + + if should(freq=args.summary_freq): + fetches["summary"] = sv.summary_op + + results = sess.run(fetches) + + if should(freq=args.summary_freq): + sv.summary_writer.add_summary(results["summary"], results["global_step"]) + # global_step will have the correct step count if we resume from a checkpoint + train_epoch = math.ceil(results["global_step"] / examples.steps_per_epoch) + + train_step = (results["global_step"] - 1) % examples.steps_per_epoch + 1 + rate = (step + 1) * args.batch_size / (time.time() - start) + remaining = (max_steps - step) * args.batch_size / rate + print("progress epoch %d step %d image/sec %0.1f remaining %dm" % ( + train_epoch, train_step, rate, remaining / 60)) + print ("loss_align", results["loss"]) + print ("loss_diversity", results["div_loss"]) + print ("loss_decay", results["decay_loss"]) + print ("------------------------------") + + + + if should(freq=args.save_freq): + print("saving model...") + # saver.save(sess, os.path.join(args.output_dir, "model"), global_step=sv.global_step) + + saver.save(sess, os.path.join(args.train_url), global_step=sv.global_step) + print("---------模型存储位置 args.train_url : ", args.train_url) + + elif args.mode=="test": + # testing + start = time.time() + max_steps = min(examples.steps_per_epoch, max_steps) + for step in range(max_steps): + results = sess.run(display_fetches) + filesets = save_images(results, args) + for i, f in enumerate(filesets): + print("evaluated image", f["name"]) + print("rate", (time.time() - start) / max_steps) + print("----------图像存储位置 : ", ) + #mox.file.copy_parallel(model_dir, args.train_url) + +if __name__ == '__main__': + main() diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/LICENSE b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..ea754b00e424d7d35f371c971001a3b865de0535 --- /dev/null +++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Li Chen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/Network.png b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/Network.png
new file mode 100644
index 0000000000000000000000000000000000000000..91666b9e308b4ba03f57d0777d508f462d57133f
Binary files /dev/null and b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/Network.png differ
diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/README.md b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..9bcdabab731e8f3dcb686b3afeb8ee5a87b9f694
--- /dev/null
+++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/README.md
@@ -0,0 +1,345 @@
+- [Basic Information](#基本信息.md)
+- [Overview](#概述.md)
+- [Environment Setup](#训练环境准备.md)
+- [Quick Start](#快速上手.md)
+- [Transfer Learning Guide](#迁移学习指导.md)
+- [Advanced Reference](#高级参考.md)
+
+<h2 id="基本信息.md">Basic Information</h2>
+
+**Publisher: Huawei**
+
+**Application Domain: cv**
+
+**Version: 1.1**
+
+**Modified: 2021.12.14**
+
+**Size: 249M**
+
+**Framework: TensorFlow 1.15.0**
+
+**Model Format: ckpt**
+
+**Precision: Mixed**
+
+**Processor: Ascend 910**
+
+**Categories: Benchmark**
+
+**Description: A TensorFlow network that generates multiple feasible 3D pose hypotheses with a multimodal mixture density network**
+
+<h2 id="概述.md">Overview</h2>
+
+- GMH-MDN: a network that generates multiple feasible 3D pose hypotheses based on a multimodal mixture density network
+
+- Reference paper:
+
+  ```
+  https://arxiv.org/pdf/1904.05547.pdf
+  ```
+
+- Reference implementation:
+
+  ```
+  https://github.com/chaneyddtt/Generating-Multiple-Hypotheses-for-3D-Human-Pose-Estimation-with-Mixture-Density-Network
+  ```
+
+- Implementation adapted to the Ascend AI processor:
+  ```
+  https://gitee.com/ascend/modelzoo/tree/master/built-in/TensorFlow/Benchmark/cv/image_classification/Shufflenet_ID0645_for_TensorFlow
+  branch=master
+  commit_id= 477b07a1e95a35885b3a9a569b1c8ccb9ad5d7af
+  ```
+
+- To obtain the code at the corresponding commit_id via Git:
+
+  ```
+  git clone {repository_url}    # clone the repository
+  cd {repository_name}          # enter the model's repository directory
+  git checkout {branch}         # switch to the corresponding branch
+  git reset --hard {commit_id}  # reset the code to the corresponding commit_id
+  cd {code_path}                # enter the model's code path; if the repository contains only this model, this step is unnecessary
+  ```
+
+## Default configuration
+- Network settings
+  - Initial learning rate of 0.001, with exponential decay applied to learning_rate (a minimal sketch of this schedule follows at the end of this section).
+  - Optimizer: ADAM
+  - Learning rate decay steps decay_steps: 100000
+  - Learning rate decay factor decay_rate: 0.96
+  - Single-card batch size: 64
+  - Total epochs: 200
+  - dropout: 0.5
+
+- Training hyperparameters (single card):
+  - Batch size: 64
+  - LR scheduler: exponential decay
+  - Learning rate (LR): 0.001
+  - Train epochs: 200
+  - dropout: 0.5
+  - linear_size: 1024 \# size of each layer (number of neurons per layer)
+
+## Supported features
+
+| Feature list | Supported |
+| ---------- | -------- |
+| Distributed training | No |
+| Mixed precision | Yes |
+| Data parallelism | No |
+
+## Mixed precision training
+
+The Ascend 910 AI processor provides automatic mixed precision: following a built-in optimization strategy, it automatically lowers selected float32 operators across the network to float16, improving system performance and reducing memory usage with very little loss of precision.
+
+## Enabling mixed precision
+A related code example follows.
+
+```
+config = tf.ConfigProto()
+custom_op = config.graph_options.rewrite_options.custom_optimizers.add()
+custom_op.name = "NpuOptimizer"
+custom_op.parameter_map["use_off_line"].b = True
+custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
+custom_op.parameter_map["modify_mixlist"].s = tf.compat.as_bytes("/home/test/ops_info.json")
+config.graph_options.rewrite_options.remapping = RewriterConfig.OFF
+config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF
+with tf.Session(config=config) as sess:
+    print(sess.run(cost))
+```
+
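+The exponential learning-rate schedule listed under "Default configuration" can be written as the following minimal TensorFlow 1.15 sketch. It only illustrates the documented defaults (initial LR 0.001, decay_steps 100000, decay_rate 0.96, ADAM); the variable names are placeholders rather than the model's actual code, which lives in predict_3dpose_mdm.py.
+
+```
+import tensorflow as tf
+
+# Illustrative sketch, not the model source: decay the learning rate by
+# 0.96 every 100000 steps starting from 0.001, then hand it to ADAM.
+global_step = tf.train.get_or_create_global_step()
+learning_rate = tf.train.exponential_decay(
+    learning_rate=0.001,    # initial learning rate (default)
+    global_step=global_step,
+    decay_steps=100000,     # decay_steps (default)
+    decay_rate=0.96)        # decay_rate (default)
+optimizer = tf.train.AdamOptimizer(learning_rate)
+```
+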
+<h2 id="训练环境准备.md">Environment Setup</h2>
+
+1. For hardware environment setup, see the hardware product documentation "[Driver and Firmware Installation and Upgrade Guide](https://support.huawei.com/enterprise/zh/category/ai-computing-platform-pid-1557196528909)". The firmware and driver matching your CANN version must be installed on the hardware device.
+2. Install Docker on the host and log in to the [Ascend Hub](https://ascendhub.huawei.com/#/detail?name=ascend-tensorflow-arm) to obtain the image.
+
+   The images supported by the current model are listed in [Table 1](#zh-cn_topic_0000001074498056_table1519011227314).
+
+   **Table 1** Image list
+
+   | Image name | Image version | Compatible CANN version |
+   | ---------- | ------------- | ----------------------- |
+   | ascend-tensorflow-arm | 20.2.0 | 20.2 |
+
+<h2 id="快速上手.md">Quick Start</h2>
+
+### Dataset preparation
+
+1. Pre-training uses the [Human3.6M] dataset, for which users must apply themselves. Because approval is slow, it can also be downloaded [here](https://github.com/MendyD/human36m).
+
+2. After downloading, place the dataset under the model directory and point the training scripts at its path; it can then be used directly.
+
+### Model training
+- Download the training scripts.
+
+- Start training.
+
+  1. Before launching training, configure the environment variables required by the program.
+
+     For the environment variable settings, see:
+     [Environment variable setup for the Ascend 910 training platform](https://gitee.com/ascend/modelzoo/wikis/Ascend%20910%E8%AE%AD%E7%BB%83%E5%B9%B3%E5%8F%B0%E7%8E%AF%E5%A2%83%E5%8F%98%E9%87%8F%E8%AE%BE%E7%BD%AE?sort_id=3148819)
+
+  2. Single-card training
+
+  2.1 Set the single-card training parameters (script at ./GMH—MDN_ID1225_for_TensorFlow/test/train_full_1p.sh); an example follows. Make sure "data_dir, batch_size, epochs" in the example below are changed to your own dataset path and settings.
+
+        data_dir="../data/h36m/"
+        batch_size=64
+        epochs=200
+
+  2.2 Single-card training command (script at ./GMH—MDN_ID1225_for_TensorFlow/test/train_performance_1p.sh); a fuller illustrative launch follows at the end of this section.
+
+        bash train_performance_1p.sh --train_dir
+
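+For reference, a complete single-card launch might look like the lines below. The paths are only the illustrative defaults used elsewhere in this README, not required locations:
+
+    cd test
+    bash train_full_1p.sh --data_dir=../data/h36m/ --train_dir=../experiments/test_git/
+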
+<h2 id="开始测试.md">Testing</h2>
+
+- Pretrained model download
+
+  [Link](https://drive.google.com/open?id=1ndJyuVL-7fbhw-G654m5U8tHogcQIftT)
+- Parameter configuration
+
+  1. Modify the script launch parameters (script at test/train_full_1p.sh) and set test to True, as shown below:
+
+        data_dir="../data/h36m/"
+        batch_size=64
+        epochs=200
+
+  2. Add the checkpoint path, configured according to the actual location of your checkpoints; use either the pretrained model or a model you trained yourself.
+
+        checkpoint_dir=../Models/mdm_5_prior/
+
+- Run the test command
+
+  1. After the files above have been modified, run the test command:
+
+        bash test/train_performance.sh
+
+<h2 id="迁移学习指导.md">Transfer Learning Guide</h2>
+
+- Dataset preparation.
+
+  1. Obtain the data.
+  See "Dataset preparation" under "Quick Start".
+
+  2. The data directory is organized as follows:
+
+        human36m/
+        ├── h36m/
+            ├── cameras.h5
+            ├── S1/
+            ├── S11/
+            ├── S5/
+            ├── S6/
+            ├── S7/
+            ├── S8/
+            ├── S9/
+            └── logging.conf/
+- Modify the training script.
+
+  1. Load the pretrained model.
+  Change the path in **load_dir** as well as the load parameter, where **load** is the numeric part 4874200 of checkpoint-4874200.index (an illustrative command follows at the end of this section).
+
+- Model training.
+
+  See the "Quick Start" chapter.
+
+- Model evaluation.
+
+  Follow the training steps under "Model training".
+
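+As an illustration only (the directory and step number are the examples used above; substitute those of your own checkpoint), loading a pretrained model looks like:
+
+    python predict_3dpose_mdm.py --load_dir ../Models/mdm_5_prior/ --load 4874200
+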
+<h2 id="高级参考.md">Advanced Reference</h2>
+
+### Scripts and sample code
+
+    ├── README.md                                // documentation
+    ├── requirements.txt                         // dependencies
+    ├── LICENSE
+    ├── Models
+    ├── experiments
+    ├── src_npu_20211208155957
+    │   ├── cameras.py
+    │   ├── data_utils.py
+    │   ├── logging.conf
+    │   ├── mix_den_model.py
+    │   ├── predict_3dpose_mdm.py
+    │   ├── procrustes.py
+    │   └── viz.py
+
+
+### Script parameters
+
+```
+--learning_rate       Learning rate                                         default:0.001
+--dropout             Dropout keep probability 1 means no dropout          default:0.5
+--batch_size          batch size to use during training                    default:64
+--epochs              How many epochs we should train for                  default:200
+--camera_frame        Convert 3d poses to camera coordinates               default:TRUE
+--max_norm            Apply maxnorm constraint to the weights              default:TRUE
+--batch_norm          Use batch_normalization                              default:TRUE
+# Data loading
+--predict_14          predict 14 joints                                    default:FALSE
+--use_sh              Use 2d pose predictions from StackedHourglass        default:TRUE
+--action              The action to train on; 'All' means all the actions  default:All
+# Architecture
+--linear_size         Size of each model layer                             default:1024
+--num_layers          Number of layers in the model                        default:2
+--residual            Whether to add a residual connection every 2 layers  default:TRUE
+# Evaluation
+--procrustes          Apply procrustes analysis at test time               default:FALSE
+--evaluateActionWise  Whether to evaluate each action separately           default:TRUE
+# Directories
+--cameras_path        Directory to load camera parameters                  default:/data/h36m/cameras.h5
+--data_dir            Data directory                                       default:/data/h36m/
+--train_dir           Training directory                                   default:/experiments/test_git/
+--load_dir            Specify the directory to load trained model          default:/Models/mdm_5_prior/
+# Train or load
+--sample              Set to True for sampling                             default:FALSE
+--test                Set to True for testing                              default:FALSE
+--use_cpu             Whether to use the CPU                               default:FALSE
+--load                Try to load a previous checkpoint                    default:0
+--miss_num            Specify how many missing joints                      default:1
+```
+
+## Training process
+
+Start single-card training with the training commands under "Model training", i.e. by running the scripts.
+Set data_dir in the training script (test/train_full_1p.sh) to the path of the training dataset. For the detailed procedure, see the example under "Model training".
+The model is stored under {train_dir}, including the training logs and the checkpoint files. Taking single-card training as an example, the loss information is written to {train_dir}/log/log.txt, as shown below.
+
+```
+Epoch: 1
+Global step: 48742
+Learning rate: 9.80e-04
+Train loss avg: 10.5440
+=============================
+2021-12-10 08:44:00,731 [INFO] root - ===Action=== ==mm==
+2021-12-10 08:44:14,404 [INFO] root - Directions 67.16
+2021-12-10 08:44:41,598 [INFO] root - Discussion 69.08
+2021-12-10 08:44:58,180 [INFO] root - Eating 64.17
+2021-12-10 08:45:11,033 [INFO] root - Greeting 70.90
+2021-12-10 08:45:34,378 [INFO] root - Phoning 84.17
+2021-12-10 08:45:46,680 [INFO] root - Photo 86.36
+2021-12-10 08:45:57,517 [INFO] root - Posing 63.92
+2021-12-10 08:46:05,577 [INFO] root - Purchases 68.64
+2021-12-10 08:46:22,047 [INFO] root - Sitting 82.77
+2021-12-10 08:46:35,970 [INFO] root - SittingDown 107.23
+2021-12-10 08:46:59,066 [INFO] root - Smoking 75.12
+2021-12-10 08:47:14,754 [INFO] root - Waiting 71.51
+2021-12-10 08:47:26,528 [INFO] root - WalkDog 78.11
+2021-12-10 08:47:38,442 [INFO] root - Walking 59.05
+2021-12-10 08:47:49,315 [INFO] root - WalkTogether 63.24
+2021-12-10 08:47:49,323 [INFO] root - Average 74.09
+2021-12-10 08:47:49,325 [INFO] root - ===================
+```
+
+
+### Inference/validation process
+
+#### Inference validation
+
+After the 200-epoch training has finished, follow the test procedure under "Model training": modify the script launch parameters (script at test/train_performance.sh) to set test to True, and change the load_dir path and the load parameter, where load_dir is the model ckpt directory and load is the numeric part 4874200 of the ckpt file checkpoint-4874200.index; then run the script.
+
+`bash train_full_1p.sh --test=True`
+
+The script runs the validation flow automatically. If you want the validation results written to a separate results file, modify the launch script parameters; otherwise they are written to the default log file (./experiments/test_git/log/log.txt).
+
+```
+2021-12-10 07:29:31,061 [INFO] root - Logs will be written to ../experiments/test_git/log
+2021-12-10 07:32:14,597 [INFO] root - ===Action=== ==mm==
+2021-12-10 07:32:33,258 [INFO] root - Directions 50.76
+2021-12-10 07:32:59,096 [INFO] root - Discussion 61.78
+2021-12-10 07:33:14,707 [INFO] root - Eating 56.20
+2021-12-10 07:33:26,797 [INFO] root - Greeting 60.24
+2021-12-10 07:33:49,975 [INFO] root - Phoning 78.02
+2021-12-10 07:34:02,201 [INFO] root - Photo 74.15
+2021-12-10 07:34:13,259 [INFO] root - Posing 52.02
+2021-12-10 07:34:21,237 [INFO] root - Purchases 67.17
+2021-12-10 07:34:37,670 [INFO] root - Sitting 78.90
+2021-12-10 07:34:50,829 [INFO] root - SittingDown 101.50
+2021-12-10 07:35:13,391 [INFO] root - Smoking 66.54
+2021-12-10 07:35:28,320 [INFO] root - Waiting 60.78
+2021-12-10 07:35:39,677 [INFO] root - WalkDog 68.80
+2021-12-10 07:35:51,568 [INFO] root - Walking 52.74
+2021-12-10 07:36:02,067 [INFO] root - WalkTogether 57.69
+2021-12-10 07:36:02,660 [INFO] root - Average 65.82
+2021-12-10 07:36:02,671 [INFO] root - ===================
+```
+ 
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/README_ori.md b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/README_ori.md
new file mode 100644
index 0000000000000000000000000000000000000000..6f0ca3c6033eca7263c895feb5ac26de744f4b42
--- /dev/null
+++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/README_ori.md
@@ -0,0 +1,73 @@
+# Generating-Multiple-Hypotheses-for-3D-Human-Pose-Estimation-with-Mixture-Density-Network
+
+**About**
+
+This is the source code for the paper
+
+Chen Li, Gim Hee Lee. Generating Multiple Hypotheses for 3D Human Pose Estimation with Mixture Density Network. In CVPR2019.
+
+We argue that 3D human pose estimation from a monocular/2D-joint input is an inverse problem where multiple solutions can exist.
+![Problem illustration](problem_illustration.png)
+
+We use a two-stage approach to generate multiple 3D pose hypotheses: the 2D joints are detected from the input images in the first stage, followed by a feature extractor and hypotheses generator that produce the 3D pose hypotheses.
+
+![Network architecture](Network.png)
+
+For more details, please refer to our paper on [arXiv](https://arxiv.org/pdf/1904.05547.pdf).
+
+**Bibtex:**
+```
+@InProceedings{Li_2019_CVPR,
+author = {Li, Chen and Lee, Gim Hee},
+title = {Generating Multiple Hypotheses for 3D Human Pose Estimation With Mixture Density Network},
+booktitle = {The IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
+month = {June},
+year = {2019}
+}
+```
+
+**Dependencies**
+1. h5py--to read data
+2. Tensorflow 1.8
+
+**Train**
+
+Get this code:
+```
+git clone https://github.com/chaneyddtt/Generating-Multiple-Hypotheses-for-3D-Human-Pose-Estimation-with-Mixture-Density-Network.git
+```
+Download the 2D detections of the [Human3.6 dataset](https://github.com/una-dinosauria/3d-pose-baseline).
+
+Run:
+```
+python predict_3dpose_mdm.py --train_dir
+```
+You can also change the arguments during training. For example, you can train with one or two missing joint(s) randomly selected from the limbs by running:
+```
+python predict_3dpose_mdm.py --miss_num
+```
+You can also change other arguments in the predict_3dpose_mdm.py in a similar way.
+
+**Test**
+
+Download our [pretrained model](https://drive.google.com/open?id=1ndJyuVL-7fbhw-G654m5U8tHogcQIftT)
+
+To test our pretrained model, run:
+```
+python predict_3dpose_mdm.py --test True --load 4338038 --load_dir ../Models/mdm_5_prior/ (model with the dirichlet conjugate prior)
+```
+or run:
+```
+python predict_3dpose_mdm.py --test True --load 4679232 --load_dir ../Models/mdm_5/ (model without the dirichlet conjugate prior)
+```
+**Visualize**
+
+To visualize all five 3D pose hypotheses generated by our model, run:
+```
+python predict_3dpose_mdm.py --sample True --load 4338038 --load_dir ../Models/mdm_5_prior/
+```
+
+
+**Acknowledgments**
+
+The pre-processed human3.6 dataset and the feature extractor of our model were ported or adapted from the code by [@una-dinosauria](https://github.com/una-dinosauria/3d-pose-baseline).
diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/modelarts_entry_acc.py b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/modelarts_entry_acc.py
new file mode 100644
index 0000000000000000000000000000000000000000..ee3060aab202f6ef6c354b4bfd7be193b7ae82f9
--- /dev/null
+++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/modelarts_entry_acc.py
@@ -0,0 +1,63 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import argparse
+import sys
+
+# Parse the input arguments data_url and train_url
+parser = argparse.ArgumentParser()
+parser.add_argument("--data_url", type=str, default="/home/ma-user/modelarts/inputs/data_url_0")
+parser.add_argument("--train_url", type=str, default="/home/ma-user/modelarts/outputs/train_url_0/")
+config = parser.parse_args()
+
+print("[CANN-Modelzoo] code_dir path is [%s]" % (sys.path[0]))
+code_dir = sys.path[0]
+os.chdir(code_dir)
+print("[CANN-Modelzoo] work_dir path is [%s]" % (os.getcwd()))
+
+print("[CANN-Modelzoo] before train - list my run files:")
+os.system("ls -al /usr/local/Ascend/ascend-toolkit/")
+
+print("[CANN-Modelzoo] before train - list my dataset files:")
+os.system("ls -al %s" % config.data_url)
+
+print("[CANN-Modelzoo] start run train shell")
+# Convert the shell scripts to executable Linux line endings
+os.system("dos2unix ./test/*")
+
+# Run train_full_1p.sh or train_performance_1p.sh; the user must specify which one
+# Difference between full and performance: performance runs only a few steps (kept within 15 minutes) and focuses mainly on performance (FPS)
+os.system("bash ./test/train_full_1p.sh --data_dir=%s --train_dir=%s " % (config.data_url, config.train_url))
+
+print("[CANN-Modelzoo] finish run train shell")
+
+# Copy all files in the current working directory to the OBS output for backup
+print("[CANN-Modelzoo] after train - list my output files:")
+os.system("cp -r %s %s " % (code_dir, config.train_url))
+os.system("ls -al %s" % config.train_url)
diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/modelarts_entry_perf.py b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/modelarts_entry_perf.py
new file mode 100644
index 0000000000000000000000000000000000000000..f64068bb904b18b3ed68d458e5cda3b926b15f51
--- /dev/null
+++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/modelarts_entry_perf.py
@@ -0,0 +1,64 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import argparse
+import sys
+
+# Parse the input arguments data_url and train_url
+parser = argparse.ArgumentParser()
+parser.add_argument("--data_url", type=str, default="/home/ma-user/modelarts/inputs/data_url_0")
+parser.add_argument("--train_url", type=str, default="/home/ma-user/modelarts/outputs/train_url_0/")
+config = parser.parse_args()
+
+print("[CANN-Modelzoo] code_dir path is [%s]" % (sys.path[0]))
+code_dir = sys.path[0]
+os.chdir(code_dir)
+print("[CANN-Modelzoo] work_dir path is [%s]" % (os.getcwd()))
+
+print("[CANN-Modelzoo] before train - list my run files:")
+os.system("ls -al /usr/local/Ascend/ascend-toolkit/")
+
+print("[CANN-Modelzoo] before train - list my dataset files:")
+os.system("ls -al %s" % config.data_url)
+
+print("[CANN-Modelzoo] start run train shell")
+# Convert the shell scripts to executable Linux line endings
+os.system("dos2unix ./test/*")
+
+# Run train_full_1p.sh or train_performance_1p.sh; the user must specify which one
+# Difference between full and performance: performance runs only a few steps (kept within 15 minutes) and focuses mainly on performance (FPS)
+print("-----",config.data_url,"----",config.train_url)
+os.system("bash ./test/train_performance_1p.sh --data_dir=%s --train_dir=%s " % (config.data_url, config.train_url))
+
+print("[CANN-Modelzoo] finish run train shell")
+
+# Copy all files in the current working directory to the OBS output for backup
+print("[CANN-Modelzoo] after train - list my output files:")
+os.system("cp -r %s %s " % (code_dir, config.train_url))
+os.system("ls -al %s" % config.train_url)
diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/modelzoo_level.txt b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/modelzoo_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..7eeb8d729d7fb2dd94b91dcf79f8eabd5cfc5b77
--- /dev/null
+++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/modelzoo_level.txt
@@ -0,0 +1,3 @@
+FuncStatus:OK
+PerfStatus:OK
+PrecisionStatus:OK
diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/problem_illustration.png b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/problem_illustration.png
new file mode 100644
index 0000000000000000000000000000000000000000..62b35ce90da94d1135516e9a86a92932e223255f
Binary files /dev/null and b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/problem_illustration.png differ
diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/requirements.txt b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..91f9d6fb793e30b725e8c7cc40a49d08e9ff5eba
--- /dev/null
+++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/requirements.txt
@@ -0,0 +1,2 @@
+h5py
+tensorflow==1.15.0
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/cameras.py b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/cameras.py
new file mode 100644
index 0000000000000000000000000000000000000000..60a2608ca0e4641457c9f28534027cea4219a7a8
--- /dev/null
+++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/cameras.py
@@ -0,0 +1,166 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities to deal with the cameras of human3.6m""" + +from __future__ import division + +import h5py +import numpy as np +import matplotlib.pyplot as plt +import matplotlib.image as mpimg +import data_utils +import viz + +def project_point_radial( P, R, T, f, c, k, p ): + """ + Project points from 3d to 2d using camera parameters + including radial and tangential distortion + + Args + P: Nx3 points in world coordinates + R: 3x3 Camera rotation matrix + T: 3x1 Camera translation parameters + f: (scalar) Camera focal length + c: 2x1 Camera center + k: 3x1 Camera radial distortion coefficients + p: 2x1 Camera tangential distortion coefficients + Returns + Proj: Nx2 points in pixel space + D: 1xN depth of each point in camera space + radial: 1xN radial distortion per point + tan: 1xN tangential distortion per point + r2: 1xN squared radius of the projected points before distortion + """ + + # P is a matrix of 3-dimensional points + assert len(P.shape) == 2 + assert P.shape[1] == 3 + + N = P.shape[0] + X = R.dot( P.T - T ) # rotate and translate + XX = X[:2,:] / X[2,:] + r2 = XX[0,:]**2 + XX[1,:]**2 + + radial = 1 + np.einsum( 'ij,ij->j', np.tile(k,(1, N)), np.array([r2, r2**2, r2**3]) ); + tan = p[0]*XX[1,:] + p[1]*XX[0,:] + + XXX = XX * np.tile(radial+tan,(2,1)) + np.outer(np.array([p[1], p[0]]).reshape(-1), r2 ) + + Proj = (f * XXX) + c + Proj = Proj.T + + D = X[2,] + + return Proj, D, radial, tan, r2 + +def world_to_camera_frame(P, R, T): + """ + Convert points from world to camera coordinates + + Args + P: Nx3 3d points in world coordinates + R: 3x3 Camera rotation matrix + T: 3x1 Camera translation parameters + Returns + X_cam: Nx3 3d points in camera coordinates + """ + + assert len(P.shape) == 2 + assert P.shape[1] == 3 + + X_cam = R.dot( P.T - T ) # rotate and translate + + return X_cam.T + +def camera_to_world_frame(P, R, T): + """Inverse of world_to_camera_frame + + Args + P: Nx3 points in camera coordinates + R: 3x3 Camera rotation matrix + T: 3x1 Camera translation parameters + Returns + X_cam: Nx3 points in world coordinates + """ + + assert len(P.shape) == 2 + assert P.shape[1] == 3 + + X_cam = R.T.dot( P.T ) + T # rotate and translate + + return X_cam.T + +def load_camera_params( hf, path ): + """Load h36m camera parameters + + Args + hf: hdf5 open file with h36m cameras data + path: path or key inside hf to the camera we are interested in + Returns + R: 3x3 Camera rotation matrix + T: 3x1 Camera translation parameters + f: (scalar) Camera focal length + c: 2x1 Camera center + k: 3x1 Camera radial distortion coefficients + p: 2x1 Camera tangential distortion coefficients + name: String with camera id + """ + + R = hf[ path.format('R') ][:] + R = R.T + + T = hf[ path.format('T') ][:] + f = hf[ 
path.format('f') ][:] + c = hf[ path.format('c') ][:] + k = hf[ path.format('k') ][:] + p = hf[ path.format('p') ][:] + + name = hf[ path.format('Name') ][:] + name = "".join( [chr(item) for item in name] ) + + return R, T, f, c, k, p, name + +def load_cameras( bpath='cameras.h5', subjects=[1,5,6,7,8,9,11] ): + """Loads the cameras of h36m + + Args + bpath: path to hdf5 file with h36m camera data + subjects: List of ints representing the subject IDs for which cameras are requested + Returns + rcams: dictionary of 4 tuples per subject ID containing its camera parameters for the 4 h36m cams + """ + rcams = {} + + with h5py.File(bpath,'r') as hf: + for s in subjects: + for c in range(4): # There are 4 cameras in human3.6m + rcams[(s, c+1)] = load_camera_params(hf, 'subject%d/camera%d/{0}' % (s,c+1) ) + + return rcams + diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/data_utils.py b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/data_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..76c7f39f43259e81ef9d79ffdf356c82577b542e --- /dev/null +++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/data_utils.py @@ -0,0 +1,757 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utility functions for dealing with human3.6m data.""" + +from __future__ import division + +import os +import numpy as np +import cameras +import h5py +import glob +import copy +import random + + +# Human3.6m IDs for training and testing +TRAIN_SUBJECTS = [1,5,6,7,8] +TEST_SUBJECTS = [9,11] + +# Joints in H3.6M -- data has 32 joints, but only 17 that move; these are the indices. +H36M_NAMES = ['']*32 +H36M_NAMES[0] = 'Hip' +H36M_NAMES[1] = 'RHip' +H36M_NAMES[2] = 'RKnee' +H36M_NAMES[3] = 'RFoot' +H36M_NAMES[6] = 'LHip' +H36M_NAMES[7] = 'LKnee' +H36M_NAMES[8] = 'LFoot' +H36M_NAMES[12] = 'Spine' +H36M_NAMES[13] = 'Thorax' +H36M_NAMES[14] = 'Neck/Nose' +H36M_NAMES[15] = 'Head' +H36M_NAMES[17] = 'LShoulder' +H36M_NAMES[18] = 'LElbow' +H36M_NAMES[19] = 'LWrist' +H36M_NAMES[25] = 'RShoulder' +H36M_NAMES[26] = 'RElbow' +H36M_NAMES[27] = 'RWrist' + +# Stacked Hourglass produces 16 joints. These are the names. 
+SH_NAMES = ['']*16
+SH_NAMES[0] = 'RFoot'
+SH_NAMES[1] = 'RKnee'
+SH_NAMES[2] = 'RHip'
+SH_NAMES[3] = 'LHip'
+SH_NAMES[4] = 'LKnee'
+SH_NAMES[5] = 'LFoot'
+SH_NAMES[6] = 'Hip'
+SH_NAMES[7] = 'Spine'
+SH_NAMES[8] = 'Thorax'
+SH_NAMES[9] = 'Head'
+SH_NAMES[10] = 'RWrist'
+SH_NAMES[11] = 'RElbow'
+SH_NAMES[12] = 'RShoulder'
+SH_NAMES[13] = 'LShoulder'
+SH_NAMES[14] = 'LElbow'
+SH_NAMES[15] = 'LWrist'
+
+def load_data( bpath, subjects, actions, dim=3 ):
+  """
+  Loads 2d ground truth from disk, and puts it in an easy-to-access dictionary
+
+  Args
+    bpath: String. Path where to load the data from
+    subjects: List of integers. Subjects whose data will be loaded
+    actions: List of strings. The actions to load
+    dim: Integer={2,3}. Load 2 or 3-dimensional data
+  Returns:
+    data: Dictionary with keys k=(subject, action, seqname)
+      values v=(nx(32*2) matrix of 2d ground truth)
+      There will be 2 entries per subject/action if loading 3d data
+      There will be 8 entries per subject/action if loading 2d data
+  """
+
+  if not dim in [2,3]:
+    raise(ValueError, 'dim must be 2 or 3')
+
+  data = {}
+
+  for subj in subjects:
+    for action in actions:
+
+      print('Reading subject {0}, action {1}'.format(subj, action))
+
+      dpath = os.path.join( bpath, 'S{0}'.format(subj), 'MyPoses/{0}D_positions'.format(dim), '{0}*.h5'.format(action) )
+      print( dpath )
+
+      fnames = glob.glob( dpath )
+
+      loaded_seqs = 0
+      for fname in fnames:
+        seqname = os.path.basename( fname )
+
+        # This rule makes sure SittingDown is not loaded when Sitting is requested
+        if action == "Sitting" and seqname.startswith( "SittingDown" ):
+          continue
+
+        # This rule makes sure that WalkDog and WalkTogether are not loaded when
+        # Walking is requested.
+        if seqname.startswith( action ):
+          print( fname )
+          loaded_seqs = loaded_seqs + 1
+
+          with h5py.File( fname, 'r' ) as h5f:
+            poses = h5f['{0}D_positions'.format(dim)][:]
+
+          poses = poses.T
+          data[ (subj, action, seqname) ] = poses
+
+      if dim == 2:
+        assert loaded_seqs == 8, "Expecting 8 sequences, found {0} instead".format( loaded_seqs )
+      else:
+        assert loaded_seqs == 2, "Expecting 2 sequences, found {0} instead".format( loaded_seqs )
+
+  return data
+
+
+def load_stacked_hourglass(data_dir, subjects, actions):
+  """
+  Load 2d detections from disk, and put it in an easy-to-access dictionary.
+
+  Args
+    data_dir: string. Directory where to load the data from,
+    subjects: list of integers. Subjects whose data will be loaded.
+    actions: list of strings. The actions to load.
+  Returns
+    data: dictionary with keys k=(subject, action, seqname)
+      values v=(nx(32*2) matrix of 2d stacked hourglass detections)
+      There will be 2 entries per subject/action if loading 3d data
+      There will be 8 entries per subject/action if loading 2d data
+  """
+  # Permutation that goes from SH detections to H36M ordering.
+  SH_TO_GT_PERM = np.array([SH_NAMES.index( h ) for h in H36M_NAMES if h != '' and h in SH_NAMES])
+  assert np.all( SH_TO_GT_PERM == np.array([6,2,1,0,3,4,5,7,8,9,13,14,15,12,11,10]) )
+
+  data = {}
+
+  for subj in subjects:
+    for action in actions:
+
+      print('Reading subject {0}, action {1}'.format(subj, action))
+
+      dpath = os.path.join( data_dir, 'S{0}'.format(subj), 'StackedHourglass/{0}*.h5'.format(action) )
+      print( dpath )
+
+      fnames = glob.glob( dpath )
+
+      loaded_seqs = 0
+      for fname in fnames:
+        seqname = os.path.basename( fname )
+        seqname = seqname.replace('_',' ')
+
+        # This rule makes sure SittingDown is not loaded when Sitting is requested
+        if action == "Sitting" and seqname.startswith( "SittingDown" ):
+          continue
+
+        # This rule makes sure that WalkDog and WalkTogether are not loaded when
+        # Walking is requested.
+        if seqname.startswith( action ):
+          print( fname )
+          loaded_seqs = loaded_seqs + 1
+
+          # Load the poses from the .h5 file
+          with h5py.File( fname, 'r' ) as h5f:
+            poses = h5f['poses'][:]
+
+          # Permute the loaded data to make it compatible with H36M
+          poses = poses[:,SH_TO_GT_PERM,:]
+
+          # Reshape into n x (32*2) matrix
+          poses = np.reshape(poses,[poses.shape[0], -1])
+          poses_final = np.zeros([poses.shape[0], len(H36M_NAMES)*2])
+
+          dim_to_use_x = np.where(np.array([x != '' and x != 'Neck/Nose' for x in H36M_NAMES]))[0] * 2
+          dim_to_use_y = dim_to_use_x+1
+
+          dim_to_use = np.zeros(len(SH_NAMES)*2,dtype=np.int32)
+          dim_to_use[0::2] = dim_to_use_x
+          dim_to_use[1::2] = dim_to_use_y
+          poses_final[:,dim_to_use] = poses
+          seqname = seqname+'-sh'
+          data[ (subj, action, seqname) ] = poses_final
+
+      # Make sure we loaded 8 sequences
+      if (subj == 11 and action == 'Directions'): # <-- this video is damaged
+        assert loaded_seqs == 7, "Expecting 7 sequences, found {0} instead. S:{1} {2}".format(loaded_seqs, subj, action )
+      else:
+        assert loaded_seqs == 8, "Expecting 8 sequences, found {0} instead. S:{1} {2}".format(loaded_seqs, subj, action )
+
+  return data
+
+
+def normalization_stats(complete_data, dim, predict_14=False ):
+  """
+  Computes normalization statistics: mean and stdev, dimensions used and ignored
+
+  Args
+    complete_data: nxd np array with poses
+    dim. integer={2,3} dimensionality of the data
+    predict_14. boolean. Whether to use only 14 joints
+  Returns
+    data_mean: np vector with the mean of the data
+    data_std: np vector with the standard deviation of the data
+    dimensions_to_ignore: list of dimensions not used in the model
+    dimensions_to_use: list of dimensions used in the model
+  """
+  if not dim in [2,3]:
+    raise(ValueError, 'dim must be 2 or 3')
+
+  data_mean = np.mean(complete_data, axis=0)
+  data_std = np.std(complete_data, axis=0)
+
+  # Encodes which 17 (or 14) 2d-3d pairs we are predicting
+  dimensions_to_ignore = []
+  if dim == 2:
+    dimensions_to_use = np.where(np.array([x != '' and x != 'Neck/Nose' for x in H36M_NAMES]))[0]
+    dimensions_to_use = np.sort( np.hstack( (dimensions_to_use*2, dimensions_to_use*2+1)))
+    dimensions_to_ignore = np.delete( np.arange(len(H36M_NAMES)*2), dimensions_to_use )
+  else: # dim == 3
+    dimensions_to_use = np.where(np.array([x != '' for x in H36M_NAMES]))[0]
+    dimensions_to_use = np.delete( dimensions_to_use, [0,7,9] if predict_14 else 0 )
+
+    dimensions_to_use = np.sort( np.hstack( (dimensions_to_use*3,
+                                             dimensions_to_use*3+1,
+                                             dimensions_to_use*3+2)))
+    dimensions_to_ignore = np.delete( np.arange(len(H36M_NAMES)*3), dimensions_to_use )
+
+  return data_mean, data_std, dimensions_to_ignore, dimensions_to_use
+
+
+def transform_world_to_camera(poses_set, cams, ncams=4 ):
+  """
+  Project 3d poses from world coordinate to camera coordinate system
+  Args
+    poses_set: dictionary with 3d poses
+    cams: dictionary with cameras
+    ncams: number of cameras per subject
+  Return:
+    t3d_camera: dictionary with 3d poses in camera coordinate
+  """
+  t3d_camera = {}
+  for t3dk in sorted( poses_set.keys() ):
+
+    subj, action, seqname = t3dk
+    t3d_world = poses_set[ t3dk ]
+
+    # use a separate loop variable so the unpacked camera centre c does not clobber it
+    for cam in range( ncams ):
+      R, T, f, c, k, p, name = cams[ (subj, cam+1) ]
+      camera_coord = cameras.world_to_camera_frame( np.reshape(t3d_world, [-1, 3]), R, T)
+      camera_coord = np.reshape( camera_coord, [-1, len(H36M_NAMES)*3] )
+
+      sname = seqname[:-3]+"."+name+".h5" # e.g.: Waiting 1.58860488.h5
+      t3d_camera[ (subj, action, sname) ] = camera_coord
+
+  return t3d_camera
+
+
+def normalize_data(data, data_mean, data_std, dim_to_use ):
+  """
+  Normalizes a dictionary of poses
+
+  Args
+    data: dictionary where values are
+    data_mean: np vector with the mean of the data
+    data_std: np vector with the standard deviation of the data
+    dim_to_use: list of dimensions to keep in the data
+  Returns
+    data_out: dictionary with same keys as data, but values have been normalized
+  """
+  data_out = {}
+
+  for key in data.keys():
+    data[ key ] = data[ key ][ :, dim_to_use ]
+    mu = data_mean[dim_to_use]
+    stddev = data_std[dim_to_use]
+    data_out[ key ] = np.divide( (data[key] - mu), stddev )
+
+  return data_out
+
+
+def unNormalizeData(normalized_data, data_mean, data_std, dimensions_to_ignore):
+  """
+  Un-normalizes a matrix whose mean has been subtracted and that has been divided by
+  standard deviation.
Some dimensions might also be missing + + Args + normalized_data: nxd matrix to unnormalize + data_mean: np vector with the mean of the data + data_std: np vector with the standard deviation of the data + dimensions_to_ignore: list of dimensions that were removed from the original data + Returns + orig_data: the input normalized_data, but unnormalized + """ + T = normalized_data.shape[0] # Batch size + D = data_mean.shape[0] # Dimensionality + + orig_data = np.zeros((T, D), dtype=np.float32) + dimensions_to_use = np.array([dim for dim in range(D) + if dim not in dimensions_to_ignore]) + + orig_data[:, dimensions_to_use] = normalized_data + + # Multiply times stdev and add the mean + stdMat = data_std.reshape((1, D)) + stdMat = np.repeat(stdMat, T, axis=0) + meanMat = data_mean.reshape((1, D)) + meanMat = np.repeat(meanMat, T, axis=0) + orig_data = np.multiply(orig_data, stdMat) + meanMat + return orig_data + + +def define_actions( action ): + """ + Given an action string, returns a list of corresponding actions. + + Args + action: String. either "all" or one of the h36m actions + Returns + actions: List of strings. Actions to use. + Raises + ValueError: if the action is not a valid action in Human 3.6M + """ + actions = ["Directions","Discussion","Eating","Greeting", + "Phoning","Photo","Posing","Purchases", + "Sitting","SittingDown","Smoking","Waiting", + "WalkDog","Walking","WalkTogether"] + + if action == "All" or action == "all": + return actions + + if not action in actions: + raise( ValueError, "Unrecognized action: %s" % action ) + + return [action] + + +def project_to_cameras( poses_set, cams, ncams=4 ): + """ + Project 3d poses using camera parameters + + Args + poses_set: dictionary with 3d poses + cams: dictionary with camera parameters + ncams: number of cameras per subject + Returns + t2d: dictionary with 2d poses + """ + t2d = {} + + for t3dk in sorted( poses_set.keys() ): + subj, a, seqname = t3dk + t3d = poses_set[ t3dk ] + + for cam in range( ncams ): + R, T, f, c, k, p, name = cams[ (subj, cam+1) ] + pts2d, _, _, _, _ = cameras.project_point_radial( np.reshape(t3d, [-1, 3]), R, T, f, c, k, p ) + + pts2d = np.reshape( pts2d, [-1, len(H36M_NAMES)*2] ) + sname = seqname[:-3]+"."+name+".h5" # e.g.: Waiting 1.58860488.h5 + t2d[ (subj, a, sname) ] = pts2d + + return t2d + + +def read_2d_predictions( actions, data_dir ): + """ + Loads 2d data from precomputed Stacked Hourglass detections + + Args + actions: list of strings. Actions to load + data_dir: string. 
Directory where the data can be loaded from + Returns + train_set: dictionary with loaded 2d stacked hourglass detections for training + test_set: dictionary with loaded 2d stacked hourglass detections for testing + data_mean: vector with the mean of the 2d training data + data_std: vector with the standard deviation of the 2d training data + dim_to_ignore: list with the dimensions to not predict + dim_to_use: list with the dimensions to predict + """ + + train_set = load_stacked_hourglass( data_dir, TRAIN_SUBJECTS, actions) + test_set = load_stacked_hourglass( data_dir, TEST_SUBJECTS, actions) + + complete_train = copy.deepcopy( np.vstack( train_set.values() )) + data_mean, data_std, dim_to_ignore, dim_to_use = normalization_stats( complete_train, dim=2 ) + + train_set = normalize_data( train_set, data_mean, data_std, dim_to_use ) + test_set = normalize_data( test_set, data_mean, data_std, dim_to_use ) + + return train_set, test_set, data_mean, data_std, dim_to_ignore, dim_to_use + + +def create_2d_data( actions, data_dir, rcams ): + """ + Creates 2d poses by projecting 3d poses with the corresponding camera + parameters. Also normalizes the 2d poses + + Args + actions: list of strings. Actions to load + data_dir: string. Directory where the data can be loaded from + rcams: dictionary with camera parameters + Returns + train_set: dictionary with projected 2d poses for training + test_set: dictionary with projected 2d poses for testing + data_mean: vector with the mean of the 2d training data + data_std: vector with the standard deviation of the 2d training data + dim_to_ignore: list with the dimensions to not predict + dim_to_use: list with the dimensions to predict + """ + + # Load 3d data + train_set = load_data( data_dir, TRAIN_SUBJECTS, actions, dim=3 ) + test_set = load_data( data_dir, TEST_SUBJECTS, actions, dim=3 ) + + train_set = project_to_cameras( train_set, rcams ) + test_set = project_to_cameras( test_set, rcams ) + + + # Compute normalization statistics. + complete_train = copy.deepcopy( np.vstack( train_set.values() )) + data_mean, data_std, dim_to_ignore, dim_to_use = normalization_stats( complete_train, dim=2 ) + + # Divide every dimension independently + train_set = normalize_data( train_set, data_mean, data_std, dim_to_use ) + test_set = normalize_data( test_set, data_mean, data_std, dim_to_use ) + + return train_set, test_set, data_mean, data_std, dim_to_ignore, dim_to_use + + +def read_3d_data( actions, data_dir, camera_frame, rcams, predict_14=False ): + """ + Loads 3d poses, zero-centres and normalizes them + + Args + actions: list of strings. Actions to load + data_dir: string. Directory where the data can be loaded from + camera_frame: boolean. Whether to convert the data to camera coordinates + rcams: dictionary with camera parameters + predict_14: boolean. 
Whether to predict only 14 joints + Returns + train_set: dictionary with loaded 3d poses for training + test_set: dictionary with loaded 3d poses for testing + data_mean: vector with the mean of the 3d training data + data_std: vector with the standard deviation of the 3d training data + dim_to_ignore: list with the dimensions to not predict + dim_to_use: list with the dimensions to predict + train_root_positions: dictionary with the 3d positions of the root in train + test_root_positions: dictionary with the 3d positions of the root in test + """ + # Load 3d data + train_set = load_data( data_dir, TRAIN_SUBJECTS, actions, dim=3 ) + test_set = load_data( data_dir, TEST_SUBJECTS, actions, dim=3 ) + + + if camera_frame: + train_set = transform_world_to_camera( train_set, rcams ) + test_set = transform_world_to_camera( test_set, rcams ) + + # Apply 3d post-processing (centering around root) + train_set, train_root_positions = postprocess_3d( train_set ) + test_set, test_root_positions = postprocess_3d( test_set ) + + # Compute normalization statistics + complete_train = copy.deepcopy( np.vstack( train_set.values() )) + data_mean, data_std, dim_to_ignore, dim_to_use = normalization_stats( complete_train, dim=3, predict_14=predict_14 ) + + # Divide every dimension independently + train_set = normalize_data( train_set, data_mean, data_std, dim_to_use ) + test_set = normalize_data( test_set, data_mean, data_std, dim_to_use ) + + return train_set, test_set, data_mean, data_std, dim_to_ignore, dim_to_use, train_root_positions, test_root_positions + + +def postprocess_3d( poses_set ): + """ + Center 3d points around root + + Args + poses_set: dictionary with 3d data + Returns + poses_set: dictionary with 3d data centred around root (center hip) joint + root_positions: dictionary with the original 3d position of each pose + """ + root_positions = {} + for k in poses_set.keys(): + # Keep track of the global position + root_positions[k] = copy.deepcopy(poses_set[k][:,:3]) + + # Remove the root from the 3d position + poses = poses_set[k] + poses = poses - np.tile( poses[:,:3], [1, len(H36M_NAMES)] ) + poses_set[k] = poses + + return poses_set, root_positions + + + +def create_2d_mpii_test(dataset, Debug=False): + + ''' + Create 2d pose data as the input of the stage two. 
+  For the mpii dataset, we use the output of the hourglass network
+  For the mpi dataset, we use the 2d joints provided by the dataset
+  Args:
+    dataset: specify which dataset to use, either 'mpi' or 'mpii'
+
+  '''
+
+  mpii_to_human36 = np.array([6, 2, 1, 0, 3, 4, 5, 7, 8, 9, 13, 14, 15, 12, 11, 10])
+
+  if dataset == 'mpi':
+    input_file = '../data/mpi/annotVal_outdoor.h5' # the mpi dataset has three scenarios: green background, normal indoor, and outdoor
+    annot_train = getData(input_file)
+    joints = annot_train['annot_2d']
+
+  else:
+    input_file = '../data/mpii/mpii_preds.h5'
+    annot_train = getData(input_file)
+    joints = annot_train['part']
+
+  joints = joints[:, mpii_to_human36, :] # only use the corresponding joints
+
+  dim_to_use_x = np.where(np.array([x != '' and x != 'Neck/Nose' for x in H36M_NAMES]))[0] * 2
+  dim_to_use_y = dim_to_use_x + 1
+
+  dim_to_use = np.zeros(len(SH_NAMES) * 2, dtype=np.int32)
+  dim_to_use[0::2] = dim_to_use_x
+  dim_to_use[1::2] = dim_to_use_y
+
+  dimensions_to_ignore = np.delete(np.arange(len(H36M_NAMES) * 2), dim_to_use)
+  poses = np.reshape(joints, [joints.shape[0], -1])
+
+  poses_final = np.zeros([poses.shape[0], len(H36M_NAMES) * 2])
+  poses_final[:, dim_to_use] = poses
+
+  print('{} left from {} after filter'.format(poses_final.shape[0], poses.shape[0]))
+  complete_train = copy.deepcopy(poses_final)
+  test_set_2d, data_mean, data_std = normalize_data_mpii(complete_train, dim_to_use)
+
+  # if Debug:
+  #
+  #   data = unNormalizeData(train_set, data_mean, data_std, dimensions_to_ignore)
+  #   for i in range(data.shape[0]):
+  #     pose = data[i, dim_to_use].reshape(16,2)
+  #     human36_to_mpii = np.argsort(mpii_to_human36)
+  #     pose_mpii = pose[human36_to_mpii]
+  #     name = names[i][:13]
+  #     imgpath = '/home/lichen/pose_estimation/images_2d/'
+  #     img = cv2.imread(os.path.join(imgpath, name))
+  #     c = (255, 0, 0)
+  #
+  #     for j in range(pose_mpii.shape[0]):
+  #       cv2.circle(img, (int(pose_mpii[j, 0]), int(pose_mpii[j, 1])), 3, c, -1)
+  #     cv2.imshow('img', img)
+  #     cv2.waitKey()
+
+  return test_set_2d, data_mean, data_std, dim_to_use
+
+def get_all_batches_mpii(data, batch_size):
+  '''
+  Split the data into batches
+
+  Args
+    data: all the data to use
+    batch_size: batch size for the model
+  Returns
+    encoder_inputs: the data, split into batches
+  '''
+
+  data = np.array(data)
+  n = data.shape[0]
+  n_extra = np.int32(n % batch_size)
+  n_batches = np.int32(n // batch_size)
+  if n_extra > 0:
+    encoder_inputs = np.split(data[:-n_extra, :], n_batches)
+  else:
+    encoder_inputs = np.split(data, n_batches)
+  return encoder_inputs
+
+
+def normalize_data_mpii(data, dim_to_use):
+  """
+  Normalize the 2d pose data
+  Args
+    data: matrix with the 2d poses to normalize
+    dim_to_use: list of dimensions to keep in the 2d pose data
+  Returns
+    data_out: the normalized data (kept dimensions only)
+    data_mean: vector with the mean of the data
+    data_std: vector with the standard deviation of the data
+  """
+
+  data_mean = np.mean(data, axis=0)
+  data_std = np.std(data, axis=0)
+  data_out = np.divide((data[:, dim_to_use] - data_mean[dim_to_use]), data_std[dim_to_use])
+
+  return data_out, data_mean, data_std
+
+def unnormalize_data_mpii(normalized_data, data_mean, data_std, dimensions_to_use):
+
+  '''
+  Unnormalize the 2d pose data
+  '''
+
+  T = normalized_data.shape[0]  # Batch size
+  D = data_mean.shape[0]  # Dimensionality
+
+  orig_data = np.zeros((T, D), dtype=np.float32)
+  orig_data[:, dimensions_to_use] = normalized_data
+
+  # Multiply by the stdev and add the mean
+  stdMat = data_std.reshape((1, D))
+  stdMat = np.repeat(stdMat, T, axis=0)
+  meanMat = data_mean.reshape((1, D))
+  meanMat = np.repeat(meanMat, T, axis=0)
+  orig_data = np.multiply(orig_data, stdMat) + meanMat
+
+  return orig_data
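+
+# (Editor's note) Added illustrative sketch, not part of the original file: a
+# quick self-check that normalize_data_mpii / unnormalize_data_mpii round-trip
+# on the kept dimensions, using synthetic data instead of the real .h5 inputs.
+# The dimensions in `keep` are hypothetical; any subset would do.
+def _check_mpii_normalization_roundtrip():
+  dummy = np.random.randn(4, len(H36M_NAMES) * 2)  # 4 fake 2d poses
+  keep = np.arange(10)                             # hypothetical dims to keep
+  normed, mu, std = normalize_data_mpii(dummy, keep)
+  recovered = unnormalize_data_mpii(normed, mu, std, keep)
+  # unnormalizing recovers the original values on the kept dimensions
+  assert np.allclose(recovered[:, keep], dummy[:, keep])
+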
+def h36_to_mpii(pose):
+  h36_to_mpii_permu = np.array([3, 2, 1, 4, 5, 6, 0, 7, 8, 9, 15, 14, 13, 10, 11, 12]) # joint indexes differ between the mpii dataset and h36
+  pose = np.reshape(pose, [pose.shape[0], len(SH_NAMES), -1])
+  pose = pose[:, h36_to_mpii_permu]
+
+  return pose
+
+def create_3d_mpi_test():
+
+  '''
+  Create 3d pose data for the mpi dataset
+  '''
+
+  mpii_to_human36 = np.array([6, 2, 1, 0, 3, 4, 5, 7, 8, 9, 13, 14, 15, 12, 11, 10]) # to make the joint indices consistent
+  input_file = '../data/mpi/annotVal_outdoor.h5'
+  data = h5py.File(input_file, 'r')
+  joints_3d = data['annot_3d'].value
+  img_name = data['annot_image'].value
+
+  poses = joints_3d[:, mpii_to_human36, :] # mpi has no annotation for the 'neck' joint; approximate it by the average of throat and head
+  pose_neck = (poses[:, 8, :] + poses[:, 10, :]) / 2
+  poses_17 = np.insert(poses, 9, pose_neck, axis=1)
+  poses_mpi_to_h36 = copy.deepcopy(poses_17)
+
+  poses_17 = np.reshape(poses_17, [poses_17.shape[0], -1])
+  poses_final = np.zeros([poses.shape[0], len(H36M_NAMES) * 3])
+
+  dim_to_use_x = np.where(np.array([x != '' for x in H36M_NAMES]))[0] * 3
+  dim_to_use_y = dim_to_use_x + 1
+  dim_to_use_z = dim_to_use_x + 2
+
+  dim_to_use = np.zeros(17 * 3, dtype=np.int32)
+  dim_to_use[0::3] = dim_to_use_x
+  dim_to_use[1::3] = dim_to_use_y
+  dim_to_use[2::3] = dim_to_use_z
+  poses_final[:, dim_to_use] = poses_17
+
+  test_set, test_root_positions = postprocess_3d_mpi(poses_final)
+  complete_test = copy.deepcopy(np.vstack(test_set))
+  data_mean, data_std, dim_to_ignore, dim_to_use_ = normalization_stats(complete_test, dim=3)
+
+  # Divide every dimension independently
+  test_set = normalize_data_mpi(test_set, data_mean, data_std, dim_to_use_)
+  return test_set, data_mean, data_std, dim_to_ignore, dim_to_use_, test_root_positions, poses_mpi_to_h36, img_name
+
+def postprocess_3d_mpi(pose_3d):
+  '''
+  Center the 3d pose data around the root joint.
+  We regress relative rather than absolute coordinates.
+  '''
+
+  root_position = copy.deepcopy(pose_3d[:, :3])
+
+  pose_3d_root = pose_3d - np.tile(root_position, [1, len(H36M_NAMES)])
+
+  return pose_3d_root, root_position
+
+def normalize_data_mpi(data, mean, std, dim_to_use):
+
+  '''
+  Normalize the 3d pose data in the mpi dataset
+  '''
+
+  data = data[:, dim_to_use]
+  mean = mean[dim_to_use]
+  std = std[dim_to_use]
+  data_out = np.divide((data - mean), std + 0.0000001)
+
+  return data_out
+
+def getData(tmpFile):
+  '''
+  Read data from a .h5 file
+  '''
+  data = h5py.File(tmpFile, 'r')
+  d = {}
+  for k, v in data.items():
+    d[k] = np.asarray(data[k])
+  data.close()
+  return d
+
+
+def generage_missing_data(enc_in, mis_number):
+  '''
+  Randomly zero out joints to simulate missing detections.
+
+  enc_in: input 2d pose data
+  mis_number: the number of missing joints
+  return: 2d poses with missing joints randomly selected from the limbs
+  '''
+
+  joints_missing = [2, 3, 5, 6, 11, 12, 14, 15] # only delete joints from limbs
+  for i in range(enc_in.shape[0]):
+    if mis_number == 1:
+      missing_index = random.randint(0, 7)
+      missing_dim = np.array([joints_missing[missing_index]*2, joints_missing[missing_index]*2+1])
+    else:
+      missing_index = random.sample(range(8), 2)
+      missing_dim = np.array([joints_missing[missing_index[0]] * 2, joints_missing[missing_index[0]] * 2 + 1,
+                              joints_missing[missing_index[1]] * 2, joints_missing[missing_index[1]] * 2 + 1])
+
+    enc_in[i, missing_dim] = 0.0 # mark joints as missing by setting the corresponding values to 0
+
+  return enc_in
+
+
+
+
diff --git
a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/logging.conf b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/logging.conf new file mode 100644 index 0000000000000000000000000000000000000000..837f760c0fbd1cafc7e51635b276016cf1ef426e --- /dev/null +++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/logging.conf @@ -0,0 +1,28 @@ +[loggers] +keys=root,simpleExample + +[handlers] +keys=consoleHandler + +[formatters] +keys=simpleFormatter + +[logger_root] +level=DEBUG +handlers=consoleHandler + +[logger_simpleExample] +level=DEBUG +handlers=consoleHandler +qualname=simpleExample +propagate=0 + +[handler_consoleHandler] +class=StreamHandler +level=DEBUG +formatter=simpleFormatter +args=(sys.stdout,) + +[formatter_simpleFormatter] +format=%(asctime)s [%(levelname)s] %(name)s - %(message)s +datefmt= \ No newline at end of file diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/mix_den_model.py b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/mix_den_model.py new file mode 100644 index 0000000000000000000000000000000000000000..863214c79db197fced782994cdbc7d78059ff2d2 --- /dev/null +++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/mix_den_model.py @@ -0,0 +1,432 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Simple model to regress 3d human poses from 2d joint locations""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from tensorflow.python.ops import variable_scope as vs + +import os +import numpy as np +from six.moves import xrange # pylint: disable=redefined-builtin +import tensorflow as tf +import data_utils +import cameras as cam +from npu_bridge.npu_init import * + +def kaiming(shape, dtype, partition_info=None): + """Kaiming initialization as described in https://arxiv.org/pdf/1502.01852.pdf + + Args + shape: dimensions of the tf array to initialize + dtype: data type of the array + partition_info: (Optional) info about how the variable is partitioned. + See https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/ops/init_ops.py#L26 + Needed to be used as an initializer. 
+ Returns + Tensorflow array with initial weights + """ + return(tf.truncated_normal(shape, dtype=dtype)*tf.sqrt(2/float(shape[0]))) + +class LinearModel(object): + """ A simple Linear+RELU model """ + + def __init__(self, + linear_size, + num_layers, + residual, + batch_norm, + max_norm, + batch_size, + learning_rate, + summaries_dir, + predict_14=False, + dtype=tf.float32): + """Creates the linear + relu model + + Args + linear_size: integer. number of units in each layer of the model + num_layers: integer. number of bilinear blocks in the model + residual: boolean. Whether to add residual connections + batch_norm: boolean. Whether to use batch normalization + max_norm: boolean. Whether to clip weights to a norm of 1 + batch_size: integer. The size of the batches used during training + learning_rate: float. Learning rate to start with + summaries_dir: String. Directory where to log progress + predict_14: boolean. Whether to predict 14 instead of 17 joints + dtype: the data type to use to store internal variables + """ + + # There are in total 17 joints in H3.6M and 16 in MPII (and therefore in stacked + # hourglass detections). We settled with 16 joints in 2d just to make models + # compatible (e.g. you can train on ground truth 2d and test on SH detections). + # This does not seem to have an effect on prediction performance. + self.HUMAN_2D_SIZE = 16 * 2 + + # In 3d all the predictions are zero-centered around the root (hip) joint, so + # we actually predict only 16 joints. The error is still computed over 17 joints, + # because if one uses, e.g. Procrustes alignment, there is still error in the + # hip to account for! + # There is also an option to predict only 14 joints, which makes our results + # directly comparable to those in https://arxiv.org/pdf/1611.09010.pdf + self.HUMAN_3D_SIZE = 14 * 3 if predict_14 else 16 * 3 + + self.input_size = self.HUMAN_2D_SIZE + self.output_size = self.HUMAN_3D_SIZE + + self.isTraining = tf.placeholder(tf.bool,name="isTrainingflag") + self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob") + + # Summary writers for train and test runs + self.train_writer = tf.summary.FileWriter( os.path.join(summaries_dir, 'train' )) + self.test_writer = tf.summary.FileWriter( os.path.join(summaries_dir, 'test' )) + + self.linear_size = linear_size + self.batch_size = batch_size + self.learning_rate = tf.Variable( float(learning_rate), trainable=False, dtype=dtype, name="learning_rate") + self.global_step = tf.Variable(0, trainable=False, name="global_step") + decay_steps = 100000 # empirical + decay_rate = 0.96 # empirical + self.learning_rate = tf.train.exponential_decay(self.learning_rate, self.global_step, decay_steps, decay_rate) + self.num_models = 5 # specify the number of gaussian kernels in the mixture model + + + # === Transform the inputs === + with vs.variable_scope("inputs"): + + # === fix the batch size in order to introdoce uncertainty into loss ===# + + enc_in = tf.placeholder(dtype, shape=[None, self.input_size], name="enc_in") + dec_out = tf.placeholder(dtype, shape=[None, self.output_size], name="dec_out") + + + self.encoder_inputs = enc_in + self.decoder_outputs = dec_out + + # === Create the linear + relu combos === + with vs.variable_scope( "linear_model" ): + + # === First layer, brings dimensionality up to linear_size === + w1 = tf.get_variable( name="w1", initializer=kaiming, shape=[self.HUMAN_2D_SIZE, linear_size], dtype=dtype ) + b1 = tf.get_variable( name="b1", initializer=kaiming, shape=[linear_size], dtype=dtype ) + 
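+      # (added note) when max_norm is enabled, tf.clip_by_norm returns a copy of
+      # the weight matrix rescaled so that its L2 norm is at most 1; this is the
+      # "max_norm" constraint from the flags, applied again inside two_linear().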
+      w1 = tf.clip_by_norm(w1, 1) if max_norm else w1
+      y3 = tf.matmul(enc_in, w1) + b1
+
+      if batch_norm:
+        y3 = tf.layers.batch_normalization(y3, training=self.isTraining, name="batch_normalization")
+      y3 = tf.nn.relu(y3)
+      y3 = tf.nn.dropout(y3, self.dropout_keep_prob)
+
+      # === Create multiple bi-linear layers ===
+      for idx in range(num_layers):
+        y3 = self.two_linear(y3, linear_size, residual, self.dropout_keep_prob, max_norm, batch_norm, dtype, idx)
+
+      # === Last linear layer has HUMAN_3D_SIZE in output ===
+      w4 = tf.get_variable(name="w4", initializer=kaiming, shape=[linear_size, self.HUMAN_3D_SIZE*self.num_models], dtype=dtype)
+      b4 = tf.get_variable(name="b4", initializer=kaiming, shape=[self.HUMAN_3D_SIZE*self.num_models], dtype=dtype)
+      w4 = tf.clip_by_norm(w4, 1) if max_norm else w4
+      y_mu = tf.matmul(y3, w4) + b4
+
+      w5 = tf.get_variable(name="w5", initializer=kaiming, shape=[linear_size, self.num_models], dtype=dtype)
+      b5 = tf.get_variable(name="b5", initializer=kaiming, shape=[self.num_models], dtype=dtype)
+      w5 = tf.clip_by_norm(w5, 1) if max_norm else w5
+      y_sigma = tf.matmul(y3, w5) + b5
+      y_sigma = tf.nn.elu(y_sigma) + 1
+
+      w6 = tf.get_variable(name="w6", initializer=kaiming, shape=[linear_size, self.num_models], dtype=dtype)
+      b6 = tf.get_variable(name="b6", initializer=kaiming, shape=[self.num_models], dtype=dtype)
+      y_alpha = tf.matmul(y3, w6) + b6
+      y_alpha = tf.nn.softmax(y_alpha, dim=1)
+
+      # === End linear model ===
+
+      components = tf.concat([y_mu, y_sigma, y_alpha], axis=1)
+      self.outputs = components
+
+      # add a Dirichlet conjugate prior to the mixing coefficients
+      prior = tf.constant([2.0, 2.0, 2.0, 2.0, 2.0], dtype=tf.float32)
+      loss_prior = Dirichlet_loss(components, self.HUMAN_3D_SIZE, self.num_models, prior)
+
+    with vs.variable_scope('loss'):
+
+      loss_gaussian = mean_log_Gaussian_like(dec_out, components, self.HUMAN_3D_SIZE, self.num_models) # mixture density network based on Gaussian kernels
+      self.loss = loss_gaussian + loss_prior
+
+      tf.summary.scalar('loss', self.loss, collections=['train', 'test'])
+      self.loss_summary = tf.summary.merge_all('train')
+
+    # To keep track of the loss in mm
+    self.err_mm = tf.placeholder(tf.float32, name="error_mm")
+    self.err_mm_summary = tf.summary.scalar("loss/error_mm", self.err_mm)
+
+    # Gradients and update operation for training the model.
+    opt = tf.train.AdamOptimizer(self.learning_rate)
+    loss_scale_manager = ExponentialUpdateLossScaleManager(init_loss_scale=2**32, incr_every_n_steps=1000, decr_every_n_nan_or_inf=2, decr_ratio=0.5)
+    opt = NPULossScaleOptimizer(opt, loss_scale_manager)
+    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
+
+    with tf.control_dependencies(update_ops):
+
+      # Update all the trainable parameters
+      gradients = opt.compute_gradients(self.loss)
+      self.gradients = [[] if i is None else i for i in gradients]
+      self.updates = opt.apply_gradients(gradients, global_step=self.global_step)
+
+    # Keep track of the learning rate
+    self.learning_rate_summary = tf.summary.scalar('learning_rate/learning_rate', self.learning_rate)
+
+    # To save the model
+    self.saver = tf.train.Saver(tf.global_variables(), max_to_keep=None)
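+
+  # (added note) self.outputs packs the mixture parameters per example as
+  # [mu (HUMAN_3D_SIZE*num_models) | sigma (num_models) | alpha (num_models)];
+  # mean_log_Gaussian_like reshapes this to [-1, HUMAN_3D_SIZE + 2, num_models]
+  # to recover the individual kernels, so the two layouts must stay in sync.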
+
+  def two_linear(self, xin, linear_size, residual, dropout_keep_prob, max_norm, batch_norm, dtype, idx):
+    """
+    Make a bi-linear block with an optional residual connection
+
+    Args
+      xin: the batch that enters the block
+      linear_size: integer. The size of the linear units
+      residual: boolean. Whether to add a residual connection
+      dropout_keep_prob: float in (0,1]. Keep probability for dropout
+      max_norm: boolean. Whether to clip the weights to unit L2 norm
+      batch_norm: boolean. Whether to do batch normalization
+      dtype: type of the weights. Usually tf.float32
+      idx: integer. Index of the layer (for naming/scoping)
+    Returns
+      y: the batch after it leaves the block
+    """
+
+    with vs.variable_scope("two_linear_" + str(idx)) as scope:
+
+      input_size = int(xin.get_shape()[1])
+
+      # Linear 1
+      w2 = tf.get_variable(name="w2_" + str(idx), initializer=kaiming, shape=[input_size, linear_size], dtype=dtype)
+      b2 = tf.get_variable(name="b2_" + str(idx), initializer=kaiming, shape=[linear_size], dtype=dtype)
+      w2 = tf.clip_by_norm(w2, 1) if max_norm else w2
+      y = tf.matmul(xin, w2) + b2
+      if batch_norm:
+        y = tf.layers.batch_normalization(y, training=self.isTraining, name="batch_normalization1" + str(idx))
+
+      y = tf.nn.relu(y)
+      y = tf.nn.dropout(y, dropout_keep_prob)
+
+      # Linear 2
+      w3 = tf.get_variable(name="w3_" + str(idx), initializer=kaiming, shape=[linear_size, linear_size], dtype=dtype)
+      b3 = tf.get_variable(name="b3_" + str(idx), initializer=kaiming, shape=[linear_size], dtype=dtype)
+      w3 = tf.clip_by_norm(w3, 1) if max_norm else w3
+      y = tf.matmul(y, w3) + b3
+
+      if batch_norm:
+        y = tf.layers.batch_normalization(y, training=self.isTraining, name="batch_normalization2" + str(idx))
+
+      y = tf.nn.relu(y)
+      y = tf.nn.dropout(y, dropout_keep_prob)
+
+      # Residual every 2 blocks
+      y = (xin + y) if residual else y
+
+    return y
+
+  def step(self, session, encoder_inputs, decoder_outputs, dropout_keep_prob, isTraining=True):
+    """Run a step of the model feeding the given inputs.
+
+    Args
+      session: tensorflow session to use
+      encoder_inputs: list of numpy vectors to feed as encoder inputs
+      decoder_outputs: list of numpy vectors that are the expected decoder outputs
+      dropout_keep_prob: (0,1] dropout keep probability
+      isTraining: whether to do the backward step or only forward
+
+    Returns
+      if isTraining is True, a 4-tuple
+        loss: the computed loss of this batch
+        loss_summary: tf summary of this batch loss, to log on tensorboard
+        learning_rate_summary: tf summary of the learning rate, to log on tensorboard
+        outputs: predicted 3d poses
+      if isTraining is False, a 3-tuple
+        (loss, loss_summary, outputs) same as above
+    """
+
+    input_feed = {self.encoder_inputs: encoder_inputs,
+                  self.decoder_outputs: decoder_outputs,
+                  self.isTraining: isTraining,
+                  self.dropout_keep_prob: dropout_keep_prob}
+
+    # Output feed: depends on whether we do a backward step or not.
+    if isTraining:
+      output_feed = [self.updates,  # Update Op that does SGD
+                     self.loss,
+                     self.loss_summary,
+                     self.learning_rate_summary,
+                     self.outputs]
+
+      outputs = session.run(output_feed, input_feed)
+      return outputs[1], outputs[2], outputs[3], outputs[4]
+
+    else:
+      output_feed = [self.loss,  # Loss for this batch.
+                     self.loss_summary,
+                     self.outputs]
+
+      outputs = session.run(output_feed, input_feed)
+      return outputs[0], outputs[1], outputs[2]  # No gradient norm
+
+  def get_all_batches(self, data_x, data_y, camera_frame, training=True):
+    """
+    Obtain a list of all the batches, randomly permuted
+    Args
+      data_x: dictionary with 2d inputs
+      data_y: dictionary with 3d expected outputs
+      camera_frame: whether the 3d data is in camera coordinates
+      training: True if this is a training batch. False otherwise.
+    Returns
+      encoder_inputs: list of 2d batches
+      decoder_outputs: list of 3d batches
+    """
+
+    # Figure out how many frames we have
+    n = 0
+    repre = {}
+
+    for key2d in sorted(data_x.keys()):
+      n2d, _ = data_x[key2d].shape
+      n = n + n2d
+      repre[key2d] = n2d
+
+    encoder_inputs = np.zeros((n, self.HUMAN_2D_SIZE), dtype=float)
+    decoder_outputs = np.zeros((n, self.HUMAN_3D_SIZE), dtype=float)
+
+    # Put all the data into big arrays
+    idx = 0
+    for key2d in sorted(data_x.keys()):
+      (subj, b, fname) = key2d
+      # keys should be the same if 3d is in camera coordinates
+      key3d = key2d if (camera_frame) else (subj, b, '{0}.h5'.format(fname.split('.')[0]))
+      key3d = (subj, b, fname[:-3]) if fname.endswith('-sh') and camera_frame else key3d
+
+      n2d, _ = data_x[key2d].shape
+      encoder_inputs[idx:idx + n2d, :] = data_x[key2d]
+      decoder_outputs[idx:idx + n2d, :] = data_y[key3d]
+      idx = idx + n2d
+
+    if training:
+      # Randomly permute everything
+      idx = np.random.permutation(n)
+      encoder_inputs = encoder_inputs[idx, :]
+      decoder_outputs = decoder_outputs[idx, :]
+
+    # Make the number of examples a multiple of the batch size
+    n_extra = n % self.batch_size
+    if n_extra > 0:  # Otherwise examples are already a multiple of batch size
+      encoder_inputs = encoder_inputs[:-n_extra, :]
+      decoder_outputs = decoder_outputs[:-n_extra, :]
+
+    n_batches = n // self.batch_size
+    encoder_inputs = np.split(encoder_inputs, n_batches)
+    decoder_outputs = np.split(decoder_outputs, n_batches)
+    repre[sorted(data_x.keys())[-1]] = repre[sorted(data_x.keys())[-1]] - n_extra  # track how many frames are used in each video
+
+    return encoder_inputs, decoder_outputs, repre
+
+
+def mean_log_Gaussian_like(y_true, parameters, c, m):
+  """Mean log-likelihood under a Gaussian mixture
+
+  y_true: ground truth 3d pose
+  parameters: output of the hypothesis generator, packing the mean, variance and mixture coefficient of each kernel
+  c: dimension of the 3d pose
+  m: number of kernels
+  """
+  components = tf.reshape(parameters, [-1, c + 2, m])
+  mu = components[:, :c, :]
+  sigma = components[:, c, :]
+  sigma = tf.clip_by_value(sigma, 1e-15, 1e15)
+  alpha = components[:, c + 1, :]
+  alpha = tf.clip_by_value(alpha, 1e-8, 1.)
+
+  exponent = tf.log(alpha) - 0.5 * c * tf.log(2 * np.pi) \
+    - c * tf.log(sigma) \
+    - tf.reduce_sum((tf.expand_dims(y_true, 2) - mu) ** 2, axis=1) / (2.0 * (sigma) ** 2.0)
+
+  log_gauss = log_sum_exp(exponent, axis=1)
+  res = -tf.reduce_mean(log_gauss)
+  return res
+
+
+def Dirichlet_loss(parameters, c, m, prior):
+  '''
+  Add a Dirichlet conjugate prior to the loss function, to prevent all the data from collapsing onto a single kernel
+  '''
+
+  components = tf.reshape(parameters, [-1, c + 2, m])
+  alpha = components[:, c + 1, :]
+  alpha = tf.clip_by_value(alpha, 1e-8, 1.)
+
+  loss = tf.reduce_sum((prior - 1.0) * tf.log(alpha), axis=1)
+  res = -tf.reduce_mean(loss)
+  return res
+
+
+def log_sum_exp(x, axis=None):
+  """Log-sum-exp trick implementation"""
+  x_max = tf.reduce_max(x, axis=axis, keep_dims=True)
+  return tf.log(tf.reduce_sum(tf.exp(x - x_max),
+                              axis=axis, keep_dims=True)) + x_max
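+
+
+# (Editor's note) A minimal NumPy mirror of mean_log_Gaussian_like, added as an
+# illustrative sanity check of the parameter layout; it is not part of the
+# original model. It assumes `parameters` follows the same [mu | sigma | alpha]
+# packing that reshapes to [-1, c + 2, m].
+def np_mixture_nll_reference(y_true, parameters, c, m):
+  comps = np.reshape(parameters, [-1, c + 2, m])
+  mu = comps[:, :c, :]
+  sigma = np.clip(comps[:, c, :], 1e-15, 1e15)
+  alpha = np.clip(comps[:, c + 1, :], 1e-8, 1.)
+  # per-kernel log( alpha_k * N(y | mu_k, sigma_k^2 I) )
+  exponent = np.log(alpha) - 0.5 * c * np.log(2 * np.pi) \
+    - c * np.log(sigma) \
+    - np.sum((y_true[:, :, None] - mu) ** 2, axis=1) / (2.0 * sigma ** 2)
+  # log-sum-exp over the kernels, then the mean negative log-likelihood
+  x_max = np.max(exponent, axis=1, keepdims=True)
+  log_gauss = np.log(np.sum(np.exp(exponent - x_max), axis=1, keepdims=True)) + x_max
+  return -np.mean(log_gauss)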
+
+
+def mean_log_LaPlace_like(y_true, parameters, c, m):
+  """Mean log-likelihood under a Laplace mixture
+
+  The parameters follow the same layout as in mean_log_Gaussian_like
+  """
+  components = tf.reshape(parameters, [-1, c + 2, m])
+  mu = components[:, :c, :]
+  sigma = components[:, c, :]
+  sigma = tf.clip_by_value(sigma, 1e-15, 1e15)
+  alpha = components[:, c + 1, :]
+  alpha = tf.clip_by_value(alpha, 1e-8, 1.)
+
+  exponent = tf.log(alpha) - c * tf.log(2.0 * sigma) \
+    - tf.reduce_sum(tf.abs(tf.expand_dims(y_true, 2) - mu), axis=1) / (sigma)
+
+  # log_sum_exp returns a single tensor, so there is no tuple to unpack here
+  log_gauss = log_sum_exp(exponent, axis=1)
+  res = -tf.reduce_mean(log_gauss)
+  return res
diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/predict_3dpose_mdm.py b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/predict_3dpose_mdm.py
new file mode 100644
index 0000000000000000000000000000000000000000..d56b9086a0a6a3e427650658c0fcbbd9bdb4deea
--- /dev/null
+++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/predict_3dpose_mdm.py
@@ -0,0 +1,690 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Predicting 3d poses from 2d joints"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from npu_bridge.npu_init import *
+
+import math
+import os
+import random
+import sys
+import time
+import h5py
+import copy
+
+import matplotlib.pyplot as plt
+import matplotlib.gridspec as gridspec
+import numpy as np
+from six.moves import xrange  # pylint: disable=redefined-builtin
+import tensorflow as tf
+import procrustes
+
+import viz
+import cameras
+import data_utils
+import mix_den_model
+import logging, logging.config
+
+
+tf.app.flags.DEFINE_float("learning_rate", 1e-3, "Learning rate")
+tf.app.flags.DEFINE_float("dropout", 0.5, "Dropout keep probability. 1 means no dropout")
+tf.app.flags.DEFINE_integer("batch_size", 64, "batch size to use during training")
+tf.app.flags.DEFINE_integer("epochs", 200, "How many epochs we should train for")
+tf.app.flags.DEFINE_boolean("camera_frame", True, "Convert 3d poses to camera coordinates")
+tf.app.flags.DEFINE_boolean("max_norm", True, "Apply maxnorm constraint to the weights")
+tf.app.flags.DEFINE_boolean("batch_norm", True, "Use batch_normalization")
+
+# Data loading
+tf.app.flags.DEFINE_boolean("predict_14", False, "predict 14 joints")
+tf.app.flags.DEFINE_boolean("use_sh", True, "Use 2d pose predictions from StackedHourglass")
+tf.app.flags.DEFINE_string("action", "All", "The action to train on. 'All' means all the actions")
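+
+# (Editor's note) Illustrative invocations, added for clarity; they assume the
+# default paths above, and the checkpoint step is the mdm_5 number noted below:
+#   python3 predict_3dpose_mdm.py                                 # train from scratch
+#   python3 predict_3dpose_mdm.py --test=True --load=4679232      # evaluate a checkpoint
+#   python3 predict_3dpose_mdm.py --sample=True --load=4679232    # visualize hypotheses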
+
+# Architecture
+tf.app.flags.DEFINE_integer("linear_size", 1024, "Size of each model layer.")
+tf.app.flags.DEFINE_integer("num_layers", 2, "Number of layers in the model.")
+tf.app.flags.DEFINE_boolean("residual", True, "Whether to add a residual connection every 2 layers")
+
+# Evaluation
+tf.app.flags.DEFINE_boolean("procrustes", False, "Apply procrustes analysis at test time")
+tf.app.flags.DEFINE_boolean("evaluateActionWise", True, "Whether to evaluate the error for each action separately")
+
+# Directories
+tf.app.flags.DEFINE_string("cameras_path", "../data/h36m/cameras.h5", "Directory to load camera parameters")
+tf.app.flags.DEFINE_string("data_dir", "../data/h36m/", "Data directory")
+tf.app.flags.DEFINE_string("train_dir", "../experiments/test_git/", "Training directory.")
+tf.app.flags.DEFINE_string("load_dir", "../Models/mdm_5_prior/", "Specify the directory to load the trained model from")
+
+# Train or load
+tf.app.flags.DEFINE_boolean("sample", False, "Set to True for sampling.")
+tf.app.flags.DEFINE_boolean("test", False, "Set to True for testing.")
+tf.app.flags.DEFINE_boolean("use_cpu", False, "Whether to use the CPU")
+tf.app.flags.DEFINE_integer("load", 0, "Try to load a previous checkpoint.")
+tf.app.flags.DEFINE_integer("miss_num", 1, "Specify how many missing joints.")
+
+### 4679232 for mdm_5
+### 4338038 for mdm prior
+
+# Misc
+tf.app.flags.DEFINE_boolean("use_fp16", False, "Train using fp16 instead of fp32.")
+
+FLAGS = tf.app.flags.FLAGS
+
+def make_dir_if_not_exist(path):
+  try:
+    os.makedirs(path)
+  except OSError:
+    if not os.path.isdir(path):
+      raise
+
+train_dir = FLAGS.train_dir
+load_dir = FLAGS.load_dir
+summaries_dir = os.path.join(train_dir, "summary")
+logdir = os.path.join(train_dir, "log")
+os.system('mkdir -p {}'.format(summaries_dir))
+make_dir_if_not_exist(logdir)
+
+logging.config.fileConfig('./logging.conf')
+logger = logging.getLogger()
+fileHandler = logging.FileHandler("{0}/log.txt".format(logdir))
+logFormatter = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s - %(message)s")
+fileHandler.setFormatter(logFormatter)
+logger.addHandler(fileHandler)
+logger.info("Logs will be written to %s" % logdir)
+
+
+def create_model(session, actions, batch_size):
+  """
+  Create a model and initialize it, or load its parameters, in a session
+
+  Args
+    session: tensorflow session
+    actions: list of string. Actions to train/test on
+    batch_size: integer. Number of examples in each batch
+  Returns
+    model: The created (or loaded) model
+  Raises
+    ValueError if asked to load a model, but the checkpoint specified by
+    FLAGS.load cannot be found.
+ """ + + model = mix_den_model.LinearModel( + FLAGS.linear_size, + FLAGS.num_layers, + FLAGS.residual, + FLAGS.batch_norm, + FLAGS.max_norm, + batch_size, + FLAGS.learning_rate, + summaries_dir, + FLAGS.predict_14, + dtype=tf.float16 if FLAGS.use_fp16 else tf.float32) + + if FLAGS.load <= 0: + # Create a new model from scratch + print("Creating model with fresh parameters.") + session.run( tf.global_variables_initializer() ) + return model + + # Load a previously saved model + ckpt = tf.train.get_checkpoint_state( load_dir, latest_filename="checkpoint") + print( "train_dir", load_dir ) + + if ckpt and ckpt.model_checkpoint_path: + # Check if the specific checkpoint exists + if FLAGS.load > 0: + if os.path.isfile(os.path.join(load_dir,"checkpoint-{0}.index".format(FLAGS.load))): + ckpt_name = os.path.join( os.path.join(load_dir,"checkpoint-{0}".format(FLAGS.load)) ) + else: + raise ValueError("Asked to load checkpoint {0}, but it does not seem to exist".format(FLAGS.load)) + else: + ckpt_name = os.path.basename( ckpt.model_checkpoint_path ) + + print("Loading model {0}".format( ckpt_name )) + model.saver.restore( session, ckpt_name ) + return model + else: + print("Could not find checkpoint. Aborting.") + raise( ValueError, "Checkpoint {0} does not seem to exist".format( ckpt.model_checkpoint_path ) ) + + +def train(): + """Train a linear model for 3d pose estimation""" + + actions = data_utils.define_actions( FLAGS.action ) + + # Load camera parameters + SUBJECT_IDS = [1,5,6,7,8,9,11] + rcams = cameras.load_cameras(FLAGS.cameras_path, SUBJECT_IDS) + + # Load 3d data and load (or create) 2d projections + train_set_3d, test_set_3d, data_mean_3d, data_std_3d, dim_to_ignore_3d, dim_to_use_3d, train_root_positions, test_root_positions = data_utils.read_3d_data( + actions, FLAGS.data_dir, FLAGS.camera_frame, rcams, FLAGS.predict_14 ) + + # Read stacked hourglass 2D predictions if use_sh, otherwise use groundtruth 2D projections + if FLAGS.use_sh: + train_set_2d, test_set_2d, data_mean_2d, data_std_2d, dim_to_ignore_2d, dim_to_use_2d = data_utils.read_2d_predictions(actions, FLAGS.data_dir) + else: + train_set_2d, test_set_2d, data_mean_2d, data_std_2d, dim_to_ignore_2d, dim_to_use_2d = data_utils.create_2d_data( actions, FLAGS.data_dir, rcams ) + + + # Avoid using the GPU if requested + #device_count = {"GPU": 0} if FLAGS.use_cpu else {"GPU": 1} + config = tf.ConfigProto(allow_soft_placement=True) + custom_op = config.graph_options.rewrite_options.custom_optimizers.add() + custom_op.name = "NpuOptimizer" + custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") + config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF + with tf.Session(config=config) as sess: + + # === Create the model === + print("Creating %d bi-layers of %d units." 
% (FLAGS.num_layers, FLAGS.linear_size)) + model = create_model( sess, actions, FLAGS.batch_size ) + model.train_writer.add_graph( sess.graph ) + + + #=== This is the training loop === + step_time, loss, val_loss = 0.0, 0.0, 0.0 + current_step = 0 if FLAGS.load <= 0 else FLAGS.load + 1 + + + current_epoch = 0 + log_every_n_batches = 100 + + + for epoch in xrange( FLAGS.epochs ): + current_epoch = current_epoch + 1 + + # === Load training batches for one epoch === + encoder_inputs, decoder_outputs, _ = model.get_all_batches( train_set_2d, train_set_3d, FLAGS.camera_frame, training=True ) + nbatches = len( encoder_inputs ) + start_time, loss = time.time(), 0. + + # === Loop through all the training batches === + for i in range( nbatches): + + if (i+1) % log_every_n_batches == 0: + # Print progress every log_every_n_batches batches + print("Working on epoch {0}, batch {1} / {2}... ".format( current_epoch, i+1, nbatches), end="" ) + + enc_in, dec_out = encoder_inputs[i], decoder_outputs[i] + # enc_in = data_utils.generage_missing_data(enc_in, FLAGS.miss_num) + step_loss, loss_summary, lr_summary, comp = model.step( sess, enc_in, dec_out, FLAGS.dropout, isTraining=True ) + + + if (i+1) % log_every_n_batches == 0: + + # Log and print progress every log_every_n_batches batches + + model.train_writer.add_summary( loss_summary, current_step ) + model.train_writer.add_summary( lr_summary, current_step ) + step_time = (time.time() - start_time) + start_time = time.time() + print("done in {0:.2f} ms".format( 1000*step_time / log_every_n_batches ) ) + + loss += step_loss + current_step += 1 + # === end looping through training batches === + + loss = loss / nbatches + + logger.info("=============================\n" + "Epoch: %d\n" + "Global step: %d\n" + "Learning rate: %.2e\n" + "Train loss avg: %.4f\n" + "=============================" % (epoch, model.global_step.eval(), + model.learning_rate.eval(), loss) ) + # === End training for an epoch === + + # === Testing after this epoch === + + if FLAGS.evaluateActionWise: + + logger.info("{0:=^12} {1:=^6}".format("Action", "mm")) # line of 30 equal signs + + cum_err = 0 # select the mixture model which has mininum error + for action in actions: + + + # Get 2d and 3d testing data for this action + action_test_set_2d = get_action_subset( test_set_2d, action ) + action_test_set_3d = get_action_subset( test_set_3d, action ) + encoder_inputs, decoder_outputs, repro_info = model.get_all_batches( action_test_set_2d, action_test_set_3d, FLAGS.camera_frame, training=False) + + act_err, step_time, loss = evaluate_batches( sess, model, + data_mean_3d, data_std_3d, dim_to_use_3d, dim_to_ignore_3d, + data_mean_2d, data_std_2d, dim_to_use_2d, dim_to_ignore_2d, + current_step, encoder_inputs, decoder_outputs) + + cum_err = cum_err + act_err + logger.info('{0:<12} {1:>6.2f}'.format(action, act_err)) + + summaries = sess.run( model.err_mm_summary, {model.err_mm: float(cum_err/float(len(actions)))} ) + model.test_writer.add_summary( summaries, current_step ) + + logger.info('{0:<12} {1:>6.2f}'.format("Average", cum_err/float(len(actions)))) + + logger.info('{0:=^19}'.format('')) + + # Save the model + print( "Saving the model... 
", end="" ) + start_time = time.time() + if cum_err/float(len(actions))<60.66: + model.saver.save(sess, os.path.join(train_dir, 'checkpoint'), global_step=current_step) + print( "done in {0:.2f} ms".format(1000*(time.time() - start_time)) ) + + # Reset global time and loss + step_time, loss = 0, 0 + + sys.stdout.flush() + + +def get_action_subset( poses_set, action ): + """ + Given a preloaded dictionary of poses, load the subset of a particular action + + Args + poses_set: dictionary with keys k=(subject, action, seqname), + values v=(nxd matrix of poses) + action: string. The action that we want to filter out + Returns + poses_subset: dictionary with same structure as poses_set, but only with the + specified action. + """ + return {k:v for k, v in poses_set.items() if k[1] == action} + + +def evaluate_batches( sess, model, + data_mean_3d, data_std_3d, dim_to_use_3d, dim_to_ignore_3d, + data_mean_2d, data_std_2d, dim_to_use_2d, dim_to_ignore_2d, + current_step, encoder_inputs, decoder_outputs, current_epoch=0 ): + """ + Generic method that evaluates performance of a list of batches. + May be used to evaluate all actions or a single action. + + Args + sess + model + data_mean_3d + data_std_3d + dim_to_use_3d + dim_to_ignore_3d + data_mean_2d + data_std_2d + dim_to_use_2d + dim_to_ignore_2d + current_step + encoder_inputs + decoder_outputs + current_epoch + Returns + + total_err + joint_err + step_time + loss + """ + + n_joints = 17 if not(FLAGS.predict_14) else 14 + nbatches = len( encoder_inputs ) + + + # Loop through test examples + all_dists, start_time, loss = [], time.time(), 0. + log_every_n_batches = 100 + all_poses_3d = [] + all_enc_in =[] + + for i in range(nbatches): + + if current_epoch > 0 and (i+1) % log_every_n_batches == 0: + print("Working on test epoch {0}, batch {1} / {2}".format( current_epoch, i+1, nbatches) ) + + enc_in, dec_out = encoder_inputs[i], decoder_outputs[i] + # enc_in = data_utils.generage_missing_data(enc_in, FLAGS.miss_num) + dp = 1.0 # dropout keep probability is always 1 at test time + step_loss, loss_summary, out_all_components_ori = model.step( sess, enc_in, dec_out, dp, isTraining=False ) + loss += step_loss + + out_all_components = np.reshape(out_all_components_ori,[-1, model.HUMAN_3D_SIZE+2, model.num_models]) + out_mean = out_all_components[:, : model.HUMAN_3D_SIZE, :] + + + # denormalize + enc_in = data_utils.unNormalizeData( enc_in, data_mean_2d, data_std_2d, dim_to_ignore_2d ) + enc_in_ = copy.deepcopy(enc_in) + all_enc_in.append(enc_in_) + dec_out = data_utils.unNormalizeData( dec_out, data_mean_3d, data_std_3d, dim_to_ignore_3d ) + pose_3d = np.zeros((enc_in.shape[0],96, out_mean.shape[-1])) + + for j in range(out_mean.shape[-1]): + pose_3d[:, :, j] = data_utils.unNormalizeData( out_mean[:, :, j], data_mean_3d, data_std_3d, dim_to_ignore_3d ) + + pose_3d_ = copy.deepcopy(pose_3d) + all_poses_3d.append(pose_3d_) + + # Keep only the relevant dimensions + dtu3d = np.hstack( (np.arange(3), dim_to_use_3d) ) if not(FLAGS.predict_14) else dim_to_use_3d + + dec_out = dec_out[:, dtu3d] + pose_3d = pose_3d[:, dtu3d,:] + + assert dec_out.shape[0] == FLAGS.batch_size + assert pose_3d.shape[0] == FLAGS.batch_size + + if FLAGS.procrustes: + # Apply per-frame procrustes alignment if asked to do so + for j in range(FLAGS.batch_size): + for k in range(model.num_models): + gt = np.reshape(dec_out[j,:],[-1,3]) + out = np.reshape(pose_3d[j,:, k],[-1,3]) + _, Z, T, b, c = procrustes.compute_similarity_transform(gt,out,compute_optimal_scale=True) + out = 
(b*out.dot(T))+c + + pose_3d[j, :, k] = np.reshape(out,[-1,17*3] ) if not(FLAGS.predict_14) else np.reshape(pose_3d[j,:, k],[-1,14*3] ) + + # Compute Euclidean distance error per joint + sqerr = (pose_3d - np.expand_dims(dec_out,axis=2))**2 # Squared error between prediction and expected output + dists = np.zeros((sqerr.shape[0], n_joints, sqerr.shape[2])) # Array with L2 error per joint in mm + + for m in range(dists.shape[-1]): + dist_idx = 0 + for k in np.arange(0, n_joints*3, 3): + # Sum across X,Y, and Z dimenstions to obtain L2 distance + dists[:,dist_idx, m] = np.sqrt( np.sum( sqerr[:, k:k+3,m], axis=1 )) + + dist_idx = dist_idx + 1 + + all_dists.append(dists) + assert sqerr.shape[0] == FLAGS.batch_size + + step_time = (time.time() - start_time) / nbatches + loss = loss / nbatches + + all_dists = np.vstack( all_dists ) + aver_minerr = np.mean(np.min(np.sum( all_dists, axis=1),axis=1))/n_joints + + return aver_minerr, step_time, loss + + +def test(): + + actions = data_utils.define_actions( FLAGS.action ) + + # Load camera parameters + SUBJECT_IDS = [1,5,6,7,8,9,11] + rcams = cameras.load_cameras(FLAGS.cameras_path, SUBJECT_IDS) + + # Load 3d data and load (or create) 2d projections + train_set_3d, test_set_3d, data_mean_3d, data_std_3d, dim_to_ignore_3d, dim_to_use_3d, train_root_positions, test_root_positions = data_utils.read_3d_data( + actions, FLAGS.data_dir, FLAGS.camera_frame, rcams, FLAGS.predict_14 ) + + # Read stacked hourglass 2D predictions if use_sh, otherwise use groundtruth 2D projections + if FLAGS.use_sh: + train_set_2d, test_set_2d, data_mean_2d, data_std_2d, dim_to_ignore_2d, dim_to_use_2d = data_utils.read_2d_predictions(actions, FLAGS.data_dir) + else: + train_set_2d, test_set_2d, data_mean_2d, data_std_2d, dim_to_ignore_2d, dim_to_use_2d = data_utils.create_2d_data( actions, FLAGS.data_dir, rcams ) + + + # Avoid using the GPU if requested + config = tf.ConfigProto(allow_soft_placement=True) + custom_op = config.graph_options.rewrite_options.custom_optimizers.add() + custom_op.name = "NpuOptimizer" + custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") + config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF + with tf.Session(config=config) as sess: + + # === Create the model === + print("Creating %d bi-layers of %d units." 
% (FLAGS.num_layers, FLAGS.linear_size)) + model = create_model( sess, actions, FLAGS.batch_size ) + model.train_writer.add_graph( sess.graph ) + + current_step = 0 if FLAGS.load <= 0 else FLAGS.load + 1 + + if FLAGS.evaluateActionWise: + + logger.info("{0:=^12} {1:=^6}".format("Action", "mm")) # line of 30 equal signs + + cum_err = 0 # select the mixture model which has mininum error + for action in actions: + + + # Get 2d and 3d testing data for this action + action_test_set_2d = get_action_subset( test_set_2d, action ) + action_test_set_3d = get_action_subset( test_set_3d, action ) + encoder_inputs, decoder_outputs, repro_info = model.get_all_batches( action_test_set_2d, action_test_set_3d, FLAGS.camera_frame, training=False) + + act_err, step_time, loss = evaluate_batches( sess, model, + data_mean_3d, data_std_3d, dim_to_use_3d, dim_to_ignore_3d, + data_mean_2d, data_std_2d, dim_to_use_2d, dim_to_ignore_2d, + current_step, encoder_inputs, decoder_outputs) + + cum_err = cum_err + act_err + logger.info('{0:<12} {1:>6.2f}'.format(action, act_err)) + + summaries = sess.run( model.err_mm_summary, {model.err_mm: float(cum_err/float(len(actions)))} ) + model.test_writer.add_summary( summaries, current_step ) + + logger.info('{0:<12} {1:>6.2f}'.format("Average", cum_err/float(len(actions)))) + + logger.info('{0:=^19}'.format('')) + + +def sample(): + + """Get samples from a model and visualize them""" + path = '{}/samples_sh'.format(FLAGS.train_dir) + if not os.path.exists(path): + os.makedirs(path) + actions = data_utils.define_actions( FLAGS.action ) + + # Load camera parameters + SUBJECT_IDS = [1,5,6,7,8,9,11] + rcams = cameras.load_cameras(FLAGS.cameras_path, SUBJECT_IDS) + n_joints = 17 if not (FLAGS.predict_14) else 14 + + # Load 3d data and load (or create) 2d projections + train_set_3d, test_set_3d, data_mean_3d, data_std_3d, dim_to_ignore_3d, dim_to_use_3d, train_root_positions, test_root_positions = data_utils.read_3d_data( + actions, FLAGS.data_dir, FLAGS.camera_frame, rcams, FLAGS.predict_14 ) + + if FLAGS.use_sh: + train_set_2d, test_set_2d, data_mean_2d, data_std_2d, dim_to_ignore_2d, dim_to_use_2d = data_utils.read_2d_predictions(actions, FLAGS.data_dir) + else: + train_set_2d, test_set_2d, data_mean_2d, data_std_2d, dim_to_ignore_2d, dim_to_use_2d, _ = data_utils.create_2d_data( actions, FLAGS.data_dir, rcams ) + + config = tf.ConfigProto(allow_soft_placement=True) + custom_op = config.graph_options.rewrite_options.custom_optimizers.add() + custom_op.name = "NpuOptimizer" + custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") + config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF + with tf.Session(config=config) as sess: + # === Create the model === + + batch_size = 128 + model = create_model(sess, actions, batch_size) + print("Model loaded") + + + for key2d in test_set_2d.keys(): + + (subj, b, fname) = key2d + + # choose SittingDown action to visualize + if b == 'SittingDown': + print( "Subject: {}, action: {}, fname: {}".format(subj, b, fname) ) + + # keys should be the same if 3d is in camera coordinates + key3d = key2d if FLAGS.camera_frame else (subj, b, '{0}.h5'.format(fname.split('.')[0])) + key3d = (subj, b, fname[:-3]) if (fname.endswith('-sh')) and FLAGS.camera_frame else key3d + + enc_in = test_set_2d[ key2d ] + n2d, _ = enc_in.shape + dec_out = test_set_3d[ key3d ] + n3d, _ = dec_out.shape + assert n2d == n3d + + # Split into 
about-same-size batches + + enc_in = np.array_split( enc_in, n2d // batch_size ) + dec_out = np.array_split( dec_out, n3d // batch_size ) + + # store all pose hypotheses in a list + pose_3d_mdm = [[], [], [], [], []] + + for bidx in range( len(enc_in) ): + + # Dropout probability 0 (keep probability 1) for sampling + dp = 1.0 + loss, _, out_all_components = model.step(sess, enc_in[bidx], dec_out[bidx], dp, isTraining=False) + + # denormalize the input 2d pose, ground truth 3d pose as well as 3d pose hypotheses from mdm + out_all_components = np.reshape(out_all_components, [-1, model.HUMAN_3D_SIZE + 2, model.num_models]) + out_mean = out_all_components[:, : model.HUMAN_3D_SIZE, :] + + enc_in[bidx] = data_utils.unNormalizeData( enc_in[bidx], data_mean_2d, data_std_2d, dim_to_ignore_2d ) + dec_out[bidx] = data_utils.unNormalizeData( dec_out[bidx], data_mean_3d, data_std_3d, dim_to_ignore_3d ) + poses3d = np.zeros((out_mean.shape[0], 96, out_mean.shape[-1])) + for j in range(out_mean.shape[-1]): + poses3d[:, :, j] = data_utils.unNormalizeData( out_mean[:, :, j], data_mean_3d, data_std_3d, dim_to_ignore_3d ) + + # extract the 17 joints + dtu3d = np.hstack((np.arange(3), dim_to_use_3d)) if not (FLAGS.predict_14) else dim_to_use_3d + dec_out_17 = dec_out[bidx][: , dtu3d] + pose_3d_17 = poses3d[:, dtu3d, :] + sqerr = (pose_3d_17 - np.expand_dims(dec_out_17, axis=2)) ** 2 + dists = np.zeros((sqerr.shape[0], n_joints, sqerr.shape[2])) + for m in range(dists.shape[-1]): + dist_idx = 0 + for k in np.arange(0, n_joints * 3, 3): + dists[:, dist_idx, m] = np.sqrt(np.sum(sqerr[:, k:k + 3, m], axis=1)) + dist_idx = dist_idx + 1 + + [pose_3d_mdm[i].append(poses3d[:, :, i]) for i in range(poses3d.shape[-1])] + + # Put all the poses together + enc_in, dec_out= map(np.vstack,[enc_in, dec_out]) + for i in range(poses3d.shape[-1]): + pose_3d_mdm[i] = np.vstack(pose_3d_mdm[i]) + + # Convert back to world coordinates + if FLAGS.camera_frame: + N_CAMERAS = 4 + N_JOINTS_H36M = 32 + + # Add global position back + dec_out = dec_out + np.tile( test_root_positions[ key3d ], [1,N_JOINTS_H36M] ) + for i in range(poses3d.shape[-1]): + pose_3d_mdm[i] = pose_3d_mdm[i] + np.tile(test_root_positions[key3d], [1, N_JOINTS_H36M]) + + + # Load the appropriate camera + subj, action, sname = key3d + + cname = sname.split('.')[1] # <-- camera name + scams = {(subj,c+1): rcams[(subj,c+1)] for c in range(N_CAMERAS)} # cams of this subject + scam_idx = [scams[(subj,c+1)][-1] for c in range(N_CAMERAS)].index( cname ) # index of camera used + the_cam = scams[(subj, scam_idx+1)] # <-- the camera used + R, T, f, c, k, p, name = the_cam + assert name == cname + + def cam2world_centered(data_3d_camframe): + data_3d_worldframe = cameras.camera_to_world_frame(data_3d_camframe.reshape((-1, 3)), R, T) + data_3d_worldframe = data_3d_worldframe.reshape((-1, N_JOINTS_H36M*3)) + # subtract root translation + return data_3d_worldframe - np.tile( data_3d_worldframe[:,:3], (1,N_JOINTS_H36M) ) + + # Apply inverse rotation and translation + dec_out = cam2world_centered(dec_out) + for i in range(poses3d.shape[-1]): + pose_3d_mdm[i] = cam2world_centered(pose_3d_mdm[i]) + + # sample some results to visualize + np.random.seed(42) + idx = np.random.permutation(enc_in.shape[0]) + enc_in, dec_out = enc_in[idx, :], dec_out[idx,:] + for i in range(poses3d.shape[-1]): + pose_3d_mdm[i] = pose_3d_mdm[i][idx, :] + + exidx = 1 + nsamples = 20 + + for i in np.arange(nsamples): + fig = plt.figure(figsize=(20, 5)) + + subplot_idx = 1 + gs1 = gridspec.GridSpec(1, 7) # 5 
rows, 9 columns + gs1.update(wspace=-0.00, hspace=0.05) # set the spacing between axes. + plt.axis('off') + + # Plot 2d pose + ax1 = plt.subplot(gs1[subplot_idx - 1]) + p2d = enc_in[exidx, :] + viz.show2Dpose(p2d, ax1) + ax1.invert_yaxis() + + # Plot 3d gt + ax2 = plt.subplot(gs1[subplot_idx], projection='3d') + p3d = dec_out[exidx, :] + viz.show3Dpose(p3d, ax2) + + # Plot 3d pose hypotheses + + for i in range(poses3d.shape[-1]): + ax3 = plt.subplot(gs1[subplot_idx + i + 1], projection='3d') + p3d = pose_3d_mdm[i][exidx] + viz.show3Dpose(p3d, ax3, lcolor="#9b59b6", rcolor="#2ecc71") + # plt.show() + plt.savefig('{}/sample_{}_{}_{}_{}.png'.format(path, subj, action, scam_idx, exidx)) + plt.close(fig) + exidx = exidx + 1 + + +def main(_): + if FLAGS.sample: + sample() + elif FLAGS.test: + test() + else: + train() + +if __name__ == "__main__": + + tf.app.run() diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/procrustes.py b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/procrustes.py new file mode 100644 index 0000000000000000000000000000000000000000..31eb2ef7b2d725b52b572462121fd1188c093f69 --- /dev/null +++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/procrustes.py @@ -0,0 +1,91 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +def compute_similarity_transform(X, Y, compute_optimal_scale=False): + """ + A port of MATLAB's `procrustes` function to Numpy. 
+ Adapted from http://stackoverflow.com/a/18927641/1884420 + + Args + X: array NxM of targets, with N number of points and M point dimensionality + Y: array NxM of inputs + compute_optimal_scale: whether we compute optimal scale or force it to be 1 + + Returns: + d: squared error after transformation + Z: transformed Y + T: computed rotation + b: scaling + c: translation + """ + import numpy as np + + muX = X.mean(0) + muY = Y.mean(0) + + X0 = X - muX + Y0 = Y - muY + + ssX = (X0**2.).sum() + ssY = (Y0**2.).sum() + + # centred Frobenius norm + normX = np.sqrt(ssX) + normY = np.sqrt(ssY) + + # scale to equal (unit) norm + X0 = X0 / normX + Y0 = Y0 / normY + + # optimum rotation matrix of Y + A = np.dot(X0.T, Y0) + U,s,Vt = np.linalg.svd(A,full_matrices=False) + V = Vt.T + T = np.dot(V, U.T) + + # Make sure we have a rotation + detT = np.linalg.det(T) + V[:,-1] *= np.sign( detT ) + s[-1] *= np.sign( detT ) + T = np.dot(V, U.T) + + traceTA = s.sum() + + if compute_optimal_scale: # Compute optimum scaling of Y. + b = traceTA * normX / normY + d = 1 - traceTA**2 + Z = normX*traceTA*np.dot(Y0, T) + muX + else: # If no scaling allowed + b = 1 + d = 1 + ssY/ssX - 2 * traceTA * normY / normX + Z = normY*np.dot(Y0, T) + muX + + c = muX - b*np.dot(muY, T) + + return d, Z, T, b, c + diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/viz.py b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/viz.py new file mode 100644 index 0000000000000000000000000000000000000000..a383d07fc26a80b973cbbdca0a012917fb8c37ff --- /dev/null +++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/src/viz.py @@ -0,0 +1,200 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Functions to visualize human poses""" + +import matplotlib.pyplot as plt +import data_utils +import numpy as np +import h5py +import os +from mpl_toolkits.mplot3d import Axes3D + +def show3Dpose(channels, ax, lcolor="#3498db", rcolor="#e74c3c", add_labels=False): # blue, orange + """ + Visualize the ground truth 3d skeleton + + Args + channels: 96x1 vector. The pose to plot. + ax: matplotlib 3d axis to draw on + lcolor: color for left part of the body + rcolor: color for right part of the body + add_labels: whether to add coordinate labels + Returns + Nothing. Draws on ax. 
+ """ + + assert channels.size == len(data_utils.H36M_NAMES)*3, "channels should have 96 entries, it has %d instead" % channels.size + vals = np.reshape( channels, (len(data_utils.H36M_NAMES), -1) ) + + I = np.array([1,2,3,1,7,8,1, 13,14,15,14,18,19,14,26,27])-1 # start points + J = np.array([2,3,4,7,8,9,13,14,15,16,18,19,20,26,27,28])-1 # end points + LR = np.array([1,1,1,0,0,0,0, 0, 0, 0, 0, 0, 0, 1, 1, 1], dtype=bool) + + # Make connection matrix + for i in np.arange( len(I) ): + x, y, z = [np.array( [vals[I[i], j], vals[J[i], j]] ) for j in range(3)] + # ax.plot(x, y, z, lw=3, marker = 'o', markersize = 5, c=lcolor if LR[i] else rcolor, markeredgecolor = lcolor) + ax.plot(x, y, z, lw=2, c=lcolor if LR[i] else rcolor) + + RADIUS = 750 # space around the subject + xroot, yroot, zroot = vals[0,0], vals[0,1], vals[0,2] + ax.set_xlim3d([-RADIUS+xroot, RADIUS+xroot]) + ax.set_zlim3d([-RADIUS+zroot, RADIUS+zroot]) + ax.set_ylim3d([-RADIUS+yroot, RADIUS+yroot]) + + # ax.set_xlim3d([np.min(vals[:, 0]), np.max(vals[:, 0])]) + # ax.set_zlim3d([np.min(vals[:, 2]), np.max(vals[:, 2])]) + # ax.set_ylim3d([np.min(vals[:, 1]), np.max(vals[:, 1])]) + + if add_labels: + ax.set_xlabel("x") + ax.set_ylabel("y") + ax.set_zlabel("z") + + # Get rid of the ticks and tick labels + + ax.set_xticks([]) + ax.set_yticks([]) + ax.set_zticks([]) + + ax.get_xaxis().set_ticklabels([]) + ax.get_yaxis().set_ticklabels([]) + ax.set_zticklabels([]) + ax.set_aspect('equal') + + # Get rid of the panes (actually, make them white) + white = (1.0, 1.0, 1.0, 0.0) + ax.w_xaxis.set_pane_color(white) + # ax.w_zaxis.set_pane_color(white) + ax.w_yaxis.set_pane_color(white) + # # Keep z pane + # + # # Get rid of the lines in 3d + ax.w_xaxis.line.set_color(white) + ax.w_yaxis.line.set_color(white) + ax.w_zaxis.line.set_color(white) + + + ax.view_init(azim=129, elev=10) + + + +def show2Dpose(channels, ax, lcolor="#3498db", rcolor="#e74c3c", add_labels=False): + """ + Visualize a 2d skeleton with 32 joints + + Args + channels: 64x1 vector. The pose to plot. + ax: matplotlib axis to draw on + lcolor: color for left part of the body + rcolor: color for right part of the body + add_labels: whether to add coordinate labels + Returns + Nothing. Draws on ax. 
+ """ + + assert channels.size == len(data_utils.H36M_NAMES)*2, "channels should have 64 entries, it has %d instead" % channels.size + vals = np.reshape( channels, (len(data_utils.H36M_NAMES), -1) ) + + I = np.array([1,2,3,1,7,8,1, 13,14,14,18,19,14,26,27])-1 # start points + J = np.array([2,3,4,7,8,9,13,14,16,18,19,20,26,27,28])-1 # end points + LR = np.array([1,1,1,0,0,0,0, 0, 0, 0, 0, 0, 1, 1, 1], dtype=bool) + + # Make connection matrix + for i in np.arange( len(I) ): + x, y = [np.array( [vals[I[i], j], vals[J[i], j]] ) for j in range(2)] + ax.plot(x, y, lw=2, c=lcolor if LR[i] else rcolor) + + # Get rid of the ticks + ax.set_xticks([]) + ax.set_yticks([]) + + # Get rid of tick labels + ax.get_xaxis().set_ticklabels([]) + ax.get_yaxis().set_ticklabels([]) + + RADIUS = 300 # space around the subject + xroot, yroot = vals[0,0], vals[0,1] + ax.set_xlim([-RADIUS+xroot, RADIUS+xroot]) + ax.set_ylim([-RADIUS+yroot, RADIUS+yroot]) + if add_labels: + ax.set_xlabel("x") + ax.set_ylabel("z") + + ax.set_aspect('equal') + + + + +def show2Dpose_mdm(channels, ax, lcolor="#3498db", rcolor="#e74c3c", add_labels=False): + """ + Visualize 2d reprojections of all 3d pose hypotheses in one fig in order to show the similarity between them + + Args + channels: 64 * 5, 2d reprojections of all 3d pose hypotheses + ax: matplotlib axis to draw on + lcolor: color for left part of the body + rcolor: color for right part of the body. Note that we do not really use lcolor and rcolor in this function. + In stead, we define a color for each hypotheses to show the overlap between them. + add_labels: whether to add coordinate labels + Returns + Nothing. Draws on ax. + """ + + + + + I = np.array([1,2,3,1,7,8,1, 13,14,14,18,19,14,26,27])-1 # start points + J = np.array([2,3,4,7,8,9,13,14,16,18,19,20,26,27,28])-1 # end points + LR = np.array([1,1,1,0,0,0,0, 0, 0, 0, 0, 0, 1, 1, 1], dtype=bool) + colors = ['#FF8000', '#4169E1', '#308014', '#000080', '#FF83FA'] # color used for 2d reprejection from each 3d pose hypotheses + for m in range(channels.shape[-1]): + vals = np.reshape(channels[:,m], [len(data_utils.H36M_NAMES), -1]) + for i in np.arange( len(I) ): + x, y = [np.array( [vals[I[i], j], vals[J[i], j]] ) for j in range(2)] + ax.plot(x, y, lw=2, c=colors[m]) + + # Get rid of the ticks + ax.set_xticks([]) + ax.set_yticks([]) + + # Get rid of tick labels + ax.get_xaxis().set_ticklabels([]) + ax.get_yaxis().set_ticklabels([]) + + RADIUS = 300 # space around the subject + xroot, yroot = vals[0,0], vals[0,1] + ax.set_xlim([-RADIUS+xroot, RADIUS+xroot]) + ax.set_ylim([-RADIUS+yroot, RADIUS+yroot]) + if add_labels: + ax.set_xlabel("x") + ax.set_ylabel("z") + + ax.set_aspect('equal') + diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/test/env.sh b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/test/env.sh new file mode 100644 index 0000000000000000000000000000000000000000..1193ce4826e1553b109047f49d13032ec89220c7 --- /dev/null +++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/test/env.sh @@ -0,0 +1,14 @@ +#!/bin/bash +cur_path=`pwd`/../ +export install_path=/usr/local/Ascend +export LD_LIBRARY_PATH=/usr/local/Ascend/driver/lib64/common/:/usr/local/Ascend/driver/lib64/driver:$LD_LIBRARY_PATH # 仅容器训练场景配置 +export PATH=${install_path}/fwkacllib/ccec_compiler/bin:${install_path}/fwkacllib/bin:$PATH +export LD_LIBRARY_PATH=${install_path}/fwkacllib/lib64:$LD_LIBRARY_PATH +export PYTHONPATH=${install_path}/fwkacllib/python/site-packages:$PYTHONPATH +export 
PYTHONPATH=/usr/local/python3.7.5/lib/python3.7/site-packages:${install_path}/tfplugin/python/site-packages:$PYTHONPATH +export ASCEND_OPP_PATH=${install_path}/opp +export ASCEND_AICPU_PATH=${install_path} +export PYTHONPATH=$cur_path/models/research:$cur_path/models/research/slim:$PYTHONPATH +export JOB_ID=10087 +export ASCEND_GLOBAL_LOG_LEVEL=3 +export ASCEND_DEVICE_ID=0 \ No newline at end of file diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/test/train_full_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..c9e39294107a014e648794945df2b302c4e659f5 --- /dev/null +++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/test/train_full_1p.sh @@ -0,0 +1,180 @@ +#!/bin/bash + +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +mkdir -p ../experiments/test_git + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` +# 当前执行网络的名称 +Network="GMH-MDN_ID1225_for_TensorFlow" +#失败用例打屏 +export ASCEND_SLOG_PRINT_TO_STDOUT=0 +#基础参数,需要模型审视修改 +#batch Size +batch_size=64 +#当前是否为测试,默认为False,即训练模式 +test="False" +#网络名称,同目录名称 +#Device数量,单卡默认为1 +RankSize=1 +#训练epoch,可选 +epochs=200 +#学习率 +learning_rate='1e-3' +#参数配置 +data_path="" +output_path="" +cameras_path=${data_path}/human36m-master/h36m/cameras.h5 +data_dir=${data_path}/human36m-master/h36m +train_dir=$cur_path/../experiments/test_git +load_dir="" +load=0 + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == --h ]];then + echo "usage:./train_performance_1p.sh " + + echo "" + echo "parameter explain: + --test #Set to True for sampling + --learning_rate #Learning rate + --batch_size #batch size to use during training + --epochs #How many epochs we should train for + --cameras_path #Directory to load camera parameters + --data_dir #Data directory + --train_dir #Training directory + --load_dir #Specify the directory to load trained model + --load #Try to load a previous checkpoint + -h/--help #Show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +echo "### get your log here : ${print_log}" + +cameras_path=${data_path}/human36m-master/h36m/cameras.h5 +data_dir=${data_path}/human36m-master/h36m +train_dir=$cur_path/../experiments/test_git + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} +touch ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt +cd ${cur_path}/../src + +echo ${cameras_path} +start=$(date +%s) +python3 ./predict_3dpose_mdm.py \ + --cameras_path ${cameras_path} \ + --data_dir ${data_dir} \ + 
--train_dir ${train_dir} \ + --load_dir ${load_dir} \ + --test ${test} \ + --load ${load} \ + --batch_size ${batch_size} \ + --epochs ${epochs} \ + --learning_rate ${learning_rate} >${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 +wait +end=$(date +%s) +e2e_time=$(( $end - $start )) + +#输出性能FPS,需要模型审视修改 +StepTime=`grep "done in" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | grep -v 'Saving the model' | awk '{print $11}' | tail -n 10 | awk '{sum+=$1} END {print sum/NR/1000}'` +#打印,不需要修改 +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' /'${StepTime}'}'` + +#输出训练精度,需要模型审视修改 +train_accuracy=`grep "root - Average" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $7}'` + +# 提取所有loss打印信息 +grep "Train loss avg:" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk '{print $4}' > $cur_path/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "tf_adapter" ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." + echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f $cur_path/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv $cur_path/output/${ASCEND_DEVICE_ID}/my_output_loss.txt $cur_path/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/test/train_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..833fba4bc0933b0cce4900aa6509d1aa103b3b64 --- /dev/null +++ b/TensorFlow/contrib/cv/GMH-MDN_ID1225_for_TensorFlow/test/train_performance_1p.sh @@ -0,0 +1,180 @@ 
+#!/bin/bash + +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +mkdir -p ../experiments/test_git + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` +# 当前执行网络的名称 +Network="GMH-MDN_ID1225_for_TensorFlow" +#失败用例打屏 +export ASCEND_SLOG_PRINT_TO_STDOUT=0 +#基础参数,需要模型审视修改 +#batch Size +batch_size=64 +#当前是否为测试,默认为False,即训练模式 +test="False" +#网络名称,同目录名称 +#Device数量,单卡默认为1 +RankSize=1 +#训练epoch,可选 +epochs=1 +#学习率 +learning_rate='1e-3' +#参数配置 +data_path="" +output_path="" +cameras_path=${data_path}/human36m-master/h36m/cameras.h5 +data_dir=${data_path}/human36m-master/h36m +train_dir=$cur_path/../experiments/test_git +load_dir="" +load=0 + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == --h ]];then + echo "usage:./train_performance_1p.sh " + + echo "" + echo "parameter explain: + --test #Set to True for sampling + --learning_rate #Learning rate + --batch_size #batch size to use during training + --epochs #How many epochs we should train for + --cameras_path #Directory to load camera parameters + --data_dir #Data directory + --train_dir #Training directory + --load_dir #Specify the directory to load trained model + --load #Try to load a previous checkpoint + -h/--help #Show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +echo "### get your log here : ${print_log}" + +cameras_path=${data_path}/human36m-master/h36m/cameras.h5 +data_dir=${data_path}/human36m-master/h36m +train_dir=$cur_path/../experiments/test_git + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} +touch ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt +cd ${cur_path}/../src + +echo ${cameras_path} +start=$(date +%s) +python3 ./predict_3dpose_mdm.py \ + --cameras_path ${cameras_path} \ + --data_dir ${data_dir} \ + --train_dir ${train_dir} \ + --load_dir ${load_dir} \ + --test ${test} \ + --load ${load} \ + --batch_size ${batch_size} \ + --epochs ${epochs} \ + --learning_rate ${learning_rate} >${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 +wait +end=$(date +%s) +e2e_time=$(( $end - $start )) + +#输出性能FPS,需要模型审视修改 +StepTime=`grep "done in" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | grep -v 'Saving the model' | awk '{print $11}' | tail -n 10 | awk '{sum+=$1} END {print sum/NR/1000}'` +#打印,不需要修改 +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' /'${StepTime}'}'` + +#输出训练精度,需要模型审视修改 +train_accuracy=`grep "root - Average" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk 'END {print $7}'` + +# 提取所有loss打印信息 +grep "Train loss avg:" 
$cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk '{print $4}' > $cur_path/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "tf_adapter" ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." + echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f $cur_path/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv $cur_path/output/${ASCEND_DEVICE_ID}/my_output_loss.txt $cur_path/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/README.md b/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/README.md index 92e845849b9780eba95842718ad18ea018f3607e..112da71761560f0416ab7ba82c9dc929f7f46f89 100644 --- a/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/README.md +++ b/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/README.md @@ -68,21 +68,23 @@ GitLoss ├─data 存放数据集文件夹 ├─test ├─output 存放模型运行日志文件夹 - ├─run_1p.sh 代码运行脚本 - ├─gitloss.py 模型定义及主函数 + ├─train_full_1p.sh 训练及验证,验证精度 + ├─train_performance_1p.sh 仅训练,验证性能 + ├─gitloss.py 模型定义及主函数(训练及验证) + ├─gitloss_perf.py 模型定义及主函数(仅训练) ``` ## Running the code ### Run command #### Use bash ``` -bash ./test/run_1p.sh -``` -#### Run directly +1. train_full_1p +bash ./test/train_full_1p.sh +2. 
train_performance_1p +bash ./test/train_performance_1p.sh ``` -python gitloss.py -``` + 参数注释: ``` update_centers: numbers of steps after which update the centers, default is 1000 @@ -98,8 +100,22 @@ steps: The train steps, default is 8000 #### 训练性能分析 | 平台| 性能 | |--|--| -| GPU(V100)| 10ms/step | -| NPU(Ascend910)| 25.5ms/step | +| GPU(V100)| 1.7013s/epoch | +| NPU(Ascend910)| 1.71s/epoch | + +#### 打屏信息 +``` +Device ID: +------------------ INFO NOTICE START------------------ +INFO, your task have used Ascend NPU, please check your result. +------------------ INFO NOTICE END------------------ +------------------ Final result ------------------ +Final Performance sec/epoch : 1.71 +E2E Training Duration sec : 154 +Final Train Accuracy : 1.0000 +ActualLoss : 0.5325 +``` + #### 精度结果 ##### GPU结果 ``` diff --git a/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/gitloss_perf.py b/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/gitloss_perf.py new file mode 100644 index 0000000000000000000000000000000000000000..ffceb51cce797ba59a4fe24fbe5d3a4db61201bc --- /dev/null +++ b/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/gitloss_perf.py @@ -0,0 +1,315 @@ +# MIT License +# +# Copyright (c) 2018 Kamran Janjua + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
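+
+# Sketch of the objective built below (see get_git_loss): the network is trained
+# with total_loss = softmax_loss + lambda_c * center_loss + lambda_g * git_loss,
+# where the center loss pulls features towards their class centers and the git
+# loss pushes features away from other classes' centers; lambda_c and lambda_g
+# are the command-line flags defined below.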
+ +from npu_bridge.npu_init import * + +import os +import numpy as np +import tensorflow as tf +import tflearn +from tensorflow.examples.tutorials.mnist import input_data +import matplotlib.pyplot as plt +import itertools, math +import pathlib +import tensorflow.contrib.layers as initializers +from scipy.spatial import distance +import time + +CENTER_LOSS_ALPHA = 0.5 +NUM_CLASSES = 10 +plt_range = 5 + +distArr = [] +avgArr = [] + +threshold = 0.4 +range_val = 2 +slim = tf.contrib.slim + +tf.app.flags.DEFINE_integer('update_centers', 1000, 'numbers of steps after which update the centers.') +tf.app.flags.DEFINE_float('lambda_c', 1.0, 'The weight of the center loss') +tf.app.flags.DEFINE_float('lambda_g', 1.0, 'The weight of the git loss') +tf.app.flags.DEFINE_integer('steps', 8000, 'The train steps') +tf.app.flags.DEFINE_string('exp_save_dir', "./test/output", 'The train save') +FLAGS = tf.app.flags.FLAGS + + +epoch = 0 +counter = 0 + + +def get_centers(feat_list, label_list): + centers_list = [] + for idx in range(10): + list_of_indices = [n for n, x in enumerate(label_list) if x == idx] + + items_of_class = [] + for item in list_of_indices: + got_feat = [float(i) for i in feat_list[item]] + items_of_class.append(got_feat) + + mean = np.mean(items_of_class, axis=0) + centers_list.append(mean) + return np.asarray(centers_list) + + +def get_intra_class_distance(feat_lst, label_lst, centers): + distances_list = [] + for idx in range(10): + list_of_indices = [n for n, x in enumerate(label_lst) if x == idx] + + list_for_class = [] + for item in list_of_indices: + got_feat = [float(i) for i in feat_lst[item]] + list_for_class.append(got_feat) + + distance_feat_from_center = [] + for item in list_for_class: + distance_feat_from_center.append(distance.euclidean(item, centers[idx])) + intraclass_distance = np.mean(distance_feat_from_center, axis=0) + distances_list.append(intraclass_distance) + return distances_list + + +with tf.name_scope('input'): + input_images = tf.placeholder(tf.float32, shape=(None, 28, 28, 1), name='input_images') + labels = tf.placeholder(tf.int64, shape=(None), name='labels') + +global_step = tf.Variable(0, trainable=False, name='global_step') + +def get_distances(features, labels, num_classes): + len_features = features.get_shape()[1] + centers = tf.get_variable('centers', [num_classes, len_features], dtype=tf.float32, + initializer=tf.constant_initializer(0), trainable=False) + labels = tf.reshape(labels, [-1]) + centers_batch = tf.gather(centers, labels) + + diff = centers_batch - features + unique_label, unique_idx, unique_count = tf.unique_with_counts(labels) + appear_times = tf.gather(unique_count, unique_idx) + appear_times = tf.reshape(appear_times, [-1, 1]) + + diff = tf.divide(diff, tf.cast((1 + appear_times), tf.float32)) + + return diff + + +def get_git_loss(features, labels, num_classes): + len_features = features.get_shape()[1] + centers = tf.get_variable('centers', [num_classes, len_features], dtype=tf.float32, + initializer=tf.constant_initializer(0), trainable=False) + labels = tf.reshape(labels, [-1]) + centers_batch = tf.gather(centers, labels) + + loss = tf.reduce_mean(tf.square(features - centers_batch)) + + # Pairwise differences + diffs = (features[:, tf.newaxis] - centers_batch[tf.newaxis, :]) + diffs_shape = tf.shape(diffs) + + # Mask diagonal (where i == j) + mask = 1 - tf.eye(diffs_shape[0], diffs_shape[1], dtype=diffs.dtype) + diffs = diffs * mask[:, :, tf.newaxis] + + # combinaton of two losses + loss2 = tf.reduce_mean(tf.divide(1, 1 + 
tf.square(diffs))) + + diff = centers_batch - features + unique_label, unique_idx, unique_count = tf.unique_with_counts(labels) + appear_times = tf.gather(unique_count, unique_idx) + appear_times = tf.reshape(appear_times, [-1, 1]) + + diff = tf.divide(diff, tf.cast((1 + appear_times), tf.float32)) + diff = CENTER_LOSS_ALPHA * diff + + centers_update_op = tf.scatter_sub(centers, labels, diff) # diff is used to get updated centers. + + # combo_loss = value_factor * loss + new_factor * loss2 + combo_loss = FLAGS.lambda_c * loss + FLAGS.lambda_g * loss2 + + return combo_loss, centers_update_op + + +def inference(input_images): + with slim.arg_scope([slim.conv2d], kernel_size=3, padding='SAME'): + with slim.arg_scope([slim.max_pool2d], kernel_size=2): + x = slim.conv2d(input_images, num_outputs=32, weights_initializer=initializers.xavier_initializer(), + scope='conv1_1') + x = slim.conv2d(x, num_outputs=32, weights_initializer=initializers.xavier_initializer(), scope='conv1_2') + x = slim.max_pool2d(x, scope='pool1') + x = slim.conv2d(x, num_outputs=64, weights_initializer=initializers.xavier_initializer(), scope='conv2_1') + x = slim.conv2d(x, num_outputs=64, weights_initializer=initializers.xavier_initializer(), scope='conv2_2') + x = slim.max_pool2d(x, scope='pool2') + x = slim.conv2d(x, num_outputs=128, weights_initializer=initializers.xavier_initializer(), scope='conv3_1') + x = slim.conv2d(x, num_outputs=128, weights_initializer=initializers.xavier_initializer(), scope='conv3_2') + x = slim.max_pool2d(x, scope='pool3') + x = slim.flatten(x, scope='flatten') + feature = slim.fully_connected(x, num_outputs=2, activation_fn=None, scope='fc1') + x = tflearn.prelu(feature) + x = slim.fully_connected(x, num_outputs=10, activation_fn=None, scope='fc2') + return x, feature + + +def build_network(input_images, labels): + logits, features = inference(input_images) + + with tf.variable_scope('loss') as scope: + + with tf.name_scope('git_loss'): + git_loss, centers_update_op_int = get_git_loss(features, labels, NUM_CLASSES) + scope.reuse_variables() + with tf.name_scope('softmax_loss'): + softmax_loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=logits)) + with tf.name_scope('total_loss'): + total_loss = softmax_loss + git_loss + + with tf.name_scope('acc'): + accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.arg_max(logits, 1), labels), tf.float32)) + + with tf.name_scope('loss/'): + + tf.summary.scalar('SoftmaxLoss', softmax_loss) + tf.summary.scalar('TotalLoss', total_loss) + + with tf.name_scope('dist'): + distances_op = get_distances(features, labels, NUM_CLASSES) + + return logits, features, total_loss, accuracy, centers_update_op_int, distances_op # returns total loss + + + +logits, features, total_loss, accuracy, centers_update_op, distances_op = build_network(input_images, labels) +mnist = input_data.read_data_sets('./data/mnist', reshape=False) +optimizer = tf.train.AdamOptimizer(0.001) # learning rate. 
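+# Note: minimize() below only updates the network weights; the class centers are
+# refreshed by fetching centers_update_op together with train_op in the training
+# loop further down.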
+train_op = optimizer.minimize(total_loss, global_step=global_step) + +summary_op = tf.summary.merge_all() +# sess = tf.Session(config=npu_config_proto()) +config = tf.ConfigProto() + +custom_op = config.graph_options.rewrite_options.custom_optimizers.add() +custom_op.name = "NpuOptimizer" +custom_op.parameter_map["use_off_line"].b = True +custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") +config.graph_options.rewrite_options.remapping = RewriterConfig.OFF +config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF + +sess = tf.Session(config=config) +sess.run(tf.global_variables_initializer()) + +mean_data = np.mean(mnist.train.images, axis=0) +step = sess.run(global_step) + 1 + + +exp_save_dir = FLAGS.exp_save_dir +pathlib.Path(exp_save_dir).mkdir(parents=True, exist_ok=True) +batch_size = 128 +intra_cls_dist = 0 +vali_acc = 0 +inter_cls_dist = 0 +with open(exp_save_dir + "/loss+perf_gpu.txt", "w") as text_file: + while step < FLAGS.steps: + + batch_images, batch_labels = mnist.train.next_batch(batch_size) +# print(batch_images.shape) + _, summary_str, train_acc, train_loss, updated_centers = sess.run( + [train_op, summary_op, accuracy, total_loss, centers_update_op], + feed_dict={ + input_images: batch_images - mean_data, + labels: batch_labels, + }) + + step += 1 + + if step % FLAGS.update_centers == 0: + + num_train_samples = mnist.train.num_examples + print('========num_train_samples=======',num_train_samples) + num_of_batches = num_train_samples // batch_size + print('========num_of_batches=======',num_of_batches) + centers = np.zeros([NUM_CLASSES, 2]) + all_features = [] + all_labels = [] + start_time = time.time() + for b in range(num_of_batches): + batch_images, batch_labels = mnist.train.next_batch(batch_size, shuffle=False) + feat2 = sess.run(features, feed_dict={input_images: batch_images - mean_data}) + all_features.extend(feat2) + all_labels.extend(batch_labels) + c = get_centers(feat2, batch_labels) + centers = np.sum(np.array([centers, c]), axis=0) + end_time = time.time() - start_time + + centers = centers / num_of_batches + + d = get_intra_class_distance(all_features, all_labels, centers) + # print(d) + intra_cls_dist = np.mean(np.asarray(d)) + # print("intra class distance %f" % intra_cls_dist) + + for i, j in itertools.combinations(centers, 2): + distance1 = math.sqrt(((i[0] - j[0]) ** 2) + ((i[1] - j[1]) ** 2)) + distArr.append(distance1) + inter_cls_dist = float(sum(distArr)) / len(distArr) + avgArr.append(inter_cls_dist) + # print("The average distance between two centers is: ", inter_cls_dist) + + # print(("Step: {}, Loss: {:.4f}".format(step, train_loss))) # prints training loss and steps. + epoch += 1 + # vali_image = mnist.validation.images - mean_data + + # vali_acc, vali_loss = sess.run( + # [accuracy, total_loss], + # feed_dict={ + # input_images: vali_image, + # labels: mnist.validation.labels + # }) + + + print(("Step: {}, Epoch: {}, Train_Loss: {:.4f} , Train_Acc: {:.4f} , inter_cls_dist: {:.4f} , intra_cls_dist: {:.4f} , train_time: {:.4f}". + format(step, epoch, train_loss, train_acc, inter_cls_dist, intra_cls_dist, end_time))) + + text_file.write( + ( + "Step:\t{}, Epoch: {}, Train_Loss:\t{:.4f}, Train_Acc:\t{:.4f}, inter_cls_dist:\t{:.4f}, intra_cls_dist:\t{:.4f}\n , train_time:\t{:.4f}\n". 
+ format(step, epoch, train_loss, train_acc, inter_cls_dist, intra_cls_dist, end_time))) + + + if step == FLAGS.steps - 1: + tf.train.Saver().save(sess, "ckpt_npu/model.ckpt") + tf.io.write_graph(sess.graph, './ckpt_npu', 'graph.pbtxt', as_text=True) + + diff --git a/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/test/run_1p.sh b/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/test/run_1p.sh deleted file mode 100644 index 1aa948d349c1a853da701294535ea36d28b1487a..0000000000000000000000000000000000000000 --- a/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/test/run_1p.sh +++ /dev/null @@ -1,8 +0,0 @@ -#/bin/bash - -# source activate /home/ma-user/miniconda3/envs/TensorFlow-1.15.0 -# pip install tflearn - -cd ../ - -python3 gitloss.py --update_centers=1000 --lambda_c=1.0 --lambda_g=1.0 --steps=8000 \ No newline at end of file diff --git a/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/test/train_performance_1p.sh index 3732084c692e950b512acfe342a2ad1958f0fe64..0c36a548acf2421f471d7c378950b53db0857610 100644 --- a/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/test/train_performance_1p.sh +++ b/TensorFlow/contrib/cv/Gitloss_ID1277_for_TensorFlow/test/train_performance_1p.sh @@ -14,8 +14,12 @@ RANK_ID_START=0 data_path='' ckpt_path='' +# 设置环境 +# source ~/env.sh + #设置默认日志级别,不需要修改 export ASCEND_GLOBAL_LOG_LEVEL=3 +export ASCEND_GLOBAL_EVENT_ENABLE=0 #export ASCEND_DEVICE_ID=3 #基础参数,需要模型审视修改 @@ -123,16 +127,16 @@ do #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path - sed -i "s|"./data/mnist"|"${data_path}"|g" gitloss.py + sed -i "s|"./data/mnist"|"${data_path}"|g" gitloss_perf.py - python3 gitloss.py \ - --update_centers=10 \ + python3 gitloss_perf.py \ + --update_centers=1000 \ --lambda_c=1.0 \ --lambda_g=1.0 \ - --steps=100 \ + --steps=8000 \ --exp_save_dir $cur_path/test/output/${ASCEND_DEVICE_ID} > $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 - sed -i "s|"${data_path}"|"./data/mnist"|g" gitloss.py + sed -i "s|"${data_path}"|"./data/mnist"|g" gitloss_perf.py @@ -143,13 +147,18 @@ wait end_time=$(date +%s) e2e_time=$(( $end_time - $start_time )) +echo "------------------ INFO NOTICE START------------------" +echo "INFO, your task have used Ascend NPU, please check your result." 
+echo "------------------ INFO NOTICE END------------------" + #结果打印,不需要修改 echo "------------------ Final result ------------------" #输出性能FPS,需要模型审视修改 -grep "train_time" $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log |awk '{print $22}'|tail -n +2 > $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}_traintime.txt +grep "train_time" $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log |awk '{print $18}'|tail -n +2 > $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}_traintime.txt cat $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}_traintime.txt |awk '{sum+=$1} END {print "Avg = ",sum/NR}' > $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}_traintime_avg.txt TrainingTime=`grep 'Avg' $cur_path/test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}_traintime_avg.txt |awk '{print $3}'` +echo "Final Performance sec/epoch : $TrainingTime" #输出训练精度,需要模型审视修改 echo "E2E Training Duration sec : $e2e_time" @@ -163,9 +172,16 @@ CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' #吞吐量 ActualFPS=`awk 'BEGIN{printf "%.3f\n", 128/'${TrainingTime}'}'` +#最后一个迭代acc值,不需要修改 +grep 'Train_Acc:' $cur_path/test/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print $9}' |tail -n +2 > $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_acc.txt + +#最后一个迭代acc值,不需要修改 +ActualAcc=`awk 'END {print}' $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_acc.txt` + +echo "Final Train Accuracy : $ActualAcc" #最后一个迭代loss值,不需要修改 -grep 'Train_Loss:' $cur_path/test/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print $4}' |tail -n +2 > $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +grep 'Train_Loss:' $cur_path/test/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print $6}' |tail -n +2 > $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt #最后一个迭代loss值,不需要修改 ActualLoss=`awk 'END {print}' $cur_path/test/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` @@ -181,4 +197,5 @@ echo "CaseName = ${CaseName}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseN echo "ActualFPS = ${ActualFPS}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "TrainingTime = ${TrainingTime}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualLoss = ${ActualLoss}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualAcc = ${ActualAcc}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log diff --git a/TensorFlow/contrib/cv/LEARNING-TO-SEE-IN-THE-DARK_ID2069_for_TensorFlow/README.md b/TensorFlow/contrib/cv/LEARNING-TO-SEE-IN-THE-DARK_ID2069_for_TensorFlow/README.md index 92942d21b95e7728d998f373a7d4e643fc4332ab..ac10e8f35ebe454d318bfa7ea2f67d2ef31385de 100644 --- a/TensorFlow/contrib/cv/LEARNING-TO-SEE-IN-THE-DARK_ID2069_for_TensorFlow/README.md +++ b/TensorFlow/contrib/cv/LEARNING-TO-SEE-IN-THE-DARK_ID2069_for_TensorFlow/README.md @@ -36,7 +36,6 @@ https://github.com/cchen156/Learning-to-See-in-the-Dark * depth: 512 * width: 512 * epochs: 4000 - * batch-size: 2 * lr: 前2000epoch: 0.0001 后2000epoch: 0.00001 # 支持特性 @@ -73,7 +72,12 @@ scipy==1.2.1 ``` # 快速上手 ## 数据集准备 -1. 
用户需自行下载SID数据集,已上传至obs中,obs路径如下:obs://sid-obs/ModelArts_SID/dataset。 +用户需自行下载SID数据集,已上传至obs中,obs路径如下:obs://sid-obs/ModelArts_SID/dataset。 +训练集的训练目录Sony_train_list.txt和测试集的测试目录Sony_test_list.txt已经给出,文件中每行写出了一张图片短曝光图片的路径以及其对应的长曝光图片的路径。 +数据集中每张图片的命名包含信息: +第一个数字表示对应的数据集("0"属于训练集,"1"属于测试集),第2到第5个数字表示图片ID。 + + ## 模型训练 * 单击“立即下载”,并选择合适的下载方式下载源码包。 * 开始训练 @@ -89,13 +93,24 @@ scipy==1.2.1 * 模型评估。 参考“模型训练”中验证步骤。 # 训练过程及结果 -1. 执行train_Sony_mix.py文件。 +1. 执行train_Sony.py文件,开始训练所有图片名第一个数字为"0"的短曝光图片。训练过程中在屏幕中打印每个epoch的loss和训练时间,通过观察发现在1000 epoch时,loss收敛,停止训练。 +``` +python3.7 train_Sony.py --epochs=1001 +``` 2. 将训练得到的result_Sony中的checkpoint文件放入checkpoint文件夹。 -3. 在GPU复现中,由于自行编写的脚本与原论文中不同,评估结果也有一定差异。 +3. 执行test_Sony.py文件,用训练得到的checkpoint得到测试集的输出结果,测试所有图片名第一个数字为"1"的短曝光图片。测试结果为短曝光图片网络训练后的png格式图片,以及其对应的真值长曝光png格式图片。 +``` +python3.7 test_Sony.py +``` +4. 执行eval.py文件,对测试结果进行精度计算。计算训练结果图片与其真值图片的PSNR以及SSIM。 +``` +python3.7 eval.py +``` +在GPU复现中,由于自行编写的脚本与原论文中不同,评估结果也有一定差异。 以下为复现者自行编写后的评估结果: - | | PSNR | SSIM | - | -------- | -----: | :----: | - | 原论文 | 28.88 | 0.78| - | GPU复现 | 27.63 | 0.719| - | NPU复现 | 28.26 | 0.715| + | | PSNR | SSIM | 性能 | + | -------- | -----: | -----: | :----: | + | 原论文 | 28.88 | 0.78| | + | GPU复现 | 27.63 | 0.719| 0.055 s/step | + | NPU复现 | 28.26 | 0.715| 0.052 s/step | diff --git a/TensorFlow/contrib/cv/META-SGD_ID1236_for_TensorFlow/main_npu.py b/TensorFlow/contrib/cv/META-SGD_ID1236_for_TensorFlow/main_npu.py index 8633e2ab35bf7866c0e80c2409a95cb1be85844c..3b450c626aa2c7e21bcd8b720f50df39a21b1890 100644 --- a/TensorFlow/contrib/cv/META-SGD_ID1236_for_TensorFlow/main_npu.py +++ b/TensorFlow/contrib/cv/META-SGD_ID1236_for_TensorFlow/main_npu.py @@ -269,7 +269,11 @@ def main(): config = tf.ConfigProto(allow_soft_placement=True) custom_op = config.graph_options.rewrite_options.custom_optimizers.add() custom_op.name = "NpuOptimizer" - sess = tf.InteractiveSession(config=npu_config_proto(config_proto=config)) + custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision") + config.graph_options.rewrite_options.remapping = RewriterConfig.OFF + config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF + + sess = tf.InteractiveSession(config=config) if FLAGS.datasource == 'sinusoid': if FLAGS.train: diff --git a/TensorFlow/contrib/cv/MT-NET_ID1283_for_TensorFlow/freeze_graph.py b/TensorFlow/contrib/cv/MT-NET_ID1283_for_TensorFlow/freeze_graph.py new file mode 100644 index 0000000000000000000000000000000000000000..6ff8ed5112d25410cd6d3612d6bd9af22b2e8acb --- /dev/null +++ b/TensorFlow/contrib/cv/MT-NET_ID1283_for_TensorFlow/freeze_graph.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from tensorflow.python.tools import freeze_graph +import argparse +import logging +import tensorflow as tf + +from maml_freeze import MAML + +logging.basicConfig(level=logging.INFO) +LOG = logging.getLogger('main') +parser = argparse.ArgumentParser() +parser.add_argument('--ckpt_path',type=str, default='/npu/ID1283/task00613907/MT-NET_ID1283_for_TensorFlow/mt-net-0419/ckpt/model59999',help='The path of checkpoint') + +#running function +def run(args): + ckpt_path = args.ckpt_path + + tf.reset_default_graph() + + inputa = tf.placeholder(tf.float32, shape=(4, 5, 1), name="inputa") + inputb = tf.placeholder(tf.float32, shape=(4, 5, 1), name="inputb") + labela = tf.placeholder(tf.float32, shape=(4, 5, 1), name="inputc") + labelb = tf.placeholder(tf.float32, shape=(4, 5, 1), name="inputd") + metaval_input_tensors = {'inputa': inputa, 'inputb': inputb, 'labela': labela, 'labelb': labelb} + + model = MAML(dim_input=1, dim_output=1, test_num_updates=1) + model.construct_model(input_tensors=metaval_input_tensors, prefix='metaval_') + + logits = model.metaval_total_loss1 + tf.identity(logits, name="output") + + with tf.Session() as sess: + tf.train.write_graph(sess.graph_def, './pb_model', 'output.pb') # save pb file with output node + freeze_graph.freeze_graph( + input_graph='./pb_model/output.pb', # the pb file with output node + input_saver='', + input_binary=False, + input_checkpoint=ckpt_path, # input checkpoint file path + output_node_names='output', # the name of output node in pb file + restore_op_name='save/restore_all', + filename_tensor_name='save/Const:0', + output_graph='./pb_model/mt-net.pb', # path of output graph + clear_devices=False, + initializer_nodes='') + logging.info('done') + + +if __name__ == "__main__": + args = parser.parse_args() + run(args) + + + diff --git a/TensorFlow/contrib/cv/MT-NET_ID1283_for_TensorFlow/maml_freeze.py b/TensorFlow/contrib/cv/MT-NET_ID1283_for_TensorFlow/maml_freeze.py new file mode 100644 index 0000000000000000000000000000000000000000..62a245d094f8c9e002c03ed52654c99bd5940e7d --- /dev/null +++ b/TensorFlow/contrib/cv/MT-NET_ID1283_for_TensorFlow/maml_freeze.py @@ -0,0 +1,531 @@ +# +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +""" Code for the MAML algorithm and network definitions. """ +from npu_bridge.npu_init import * +import numpy as np + +try: + import special_grads +except KeyError as e: + print ('WARNING: Cannot define MaxPoolGrad, likely already defined for this version of TensorFlow:', e) +import tensorflow as tf + +from tensorflow.python.platform import flags +from utils import mse, xent, conv_block, normalize + +FLAGS = flags.FLAGS + +## chip options +flags.DEFINE_string('chip', 'npu', "run on which chip, (npu or gpu or cpu)") +flags.DEFINE_string('platform', 'linux', 'runtime platform, linux or modelarts') +flags.DEFINE_string("obs_dir", '', 'obs result path, not need on gpu and apulis platform') +flags.DEFINE_boolean("profiling", False, "profiling for performance or not") + +## Dataset/method options + +flags.DEFINE_string('datasource', 'sinusoid', 'sinusoid or omniglot or miniimagenet') +flags.DEFINE_integer('num_classes', 5, 'number of classes used in classification (e.g. 5-way classification).') +flags.DEFINE_integer('num_train_classes', -1, 'number of classes to train on (-1 for all).') +# oracle means task id is input (only suitable for sinusoid) +flags.DEFINE_string('baseline', None, 'oracle, or None') + +## Training options +flags.DEFINE_integer('pretrain_iterations', 0, 'number of pre-training iterations.') +flags.DEFINE_integer('metatrain_iterations', 40000, 'number of metatraining iterations.') # 15k for omniglot, 50k for sinusoid +flags.DEFINE_integer('meta_batch_size', 1, 'number of tasks sampled per meta-update') +flags.DEFINE_float('meta_lr', 0.001, 'the base learning rate of the generator') +flags.DEFINE_integer('update_batch_size', 1, 'number of examples used for inner gradient update (K for K-shot learning).') +flags.DEFINE_float('update_lr', .01, 'step size alpha for inner gradient update.') # 0.1 for omniglot +flags.DEFINE_integer('num_updates', 1, 'number of inner gradient updates during training.') +flags.DEFINE_integer('poly_order', 1, 'order of polynomial to generate') + +## Model options +#flags.DEFINE_string('mod', '', 'modifications to original paper. 
None, split, both') +flags.DEFINE_bool('use_T', True, 'whether or not to use transformation matrix T') +flags.DEFINE_bool('use_M', True, 'whether or not to use mask M') +flags.DEFINE_bool('share_M', True, 'only effective if use_M is true, whether or not to ' + 'share masks between weights' + 'that contribute to the same activation') +flags.DEFINE_float('temp', 1, 'temperature for gumbel-softmax') +flags.DEFINE_float('logit_init', 0, 'initial logit') +flags.DEFINE_string('norm', 'None', 'batch_norm, layer_norm, or None') +flags.DEFINE_integer('dim_hidden', 40, 'dimension of fc layer') +flags.DEFINE_integer('num_filters', 64, 'number of filters for conv nets -- use 32 for ' + 'miniimagenet, 64 for omiglot.') +flags.DEFINE_bool('conv', True, 'whether or not to use a convolutional network, only applicable in some cases') +flags.DEFINE_bool('max_pool', True, 'Whether or not to use max pooling rather than strided convolutions') +flags.DEFINE_bool('stop_grad', False, 'if True, do not use second derivatives in meta-optimization (for speed)') + +## Logging, saving, and testing options +flags.DEFINE_bool('log', True, 'if false, do not log summaries, for debugging code.') +flags.DEFINE_string('logdir', 'logs/omniglot20way', 'directory for summaries and checkpoints.') +flags.DEFINE_bool('debug', False, 'debug mode. uses less data for fast evaluation.') +flags.DEFINE_bool('resume', True, 'resume training if there is a model available') +flags.DEFINE_bool('train', False, 'True to train, False to test.') +flags.DEFINE_integer('test_iter', -1, 'iteration to load model (-1 for latest model)') +flags.DEFINE_bool('test_set', False, 'Set to true to test on the the test set, False for the validation set.') +flags.DEFINE_integer('train_update_batch_size', -1, 'number of examples used for gradient update during training (use if you want to test with a different number).') +flags.DEFINE_float('train_update_lr', -1, 'value of inner gradient step step during training. (use if you want to test with a different value)') # 0.1 for omniglot + + +class MAML: + def __init__(self, dim_input=1, dim_output=1, test_num_updates=5): + """ must call construct_model() after initializing MAML! 
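+
+        A minimal construction sketch (mirroring the usage in freeze_graph.py above):
+            model = MAML(dim_input=1, dim_output=1, test_num_updates=1)
+            model.construct_model(input_tensors=metaval_input_tensors, prefix='metaval_')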
""" + self.dim_input = dim_input + self.dim_output = dim_output + self.update_lr = FLAGS.update_lr + self.meta_lr = tf.placeholder_with_default(FLAGS.meta_lr, ()) + self.classification = False + self.test_num_updates = test_num_updates + if FLAGS.datasource in ['sinusoid', 'polynomial']: + self.dim_hidden = [FLAGS.dim_hidden, FLAGS.dim_hidden] + if FLAGS.use_T: + self.forward = self.forward_fc_withT + else: + self.forward = self.forward_fc + self.construct_weights = self.construct_fc_weights + self.loss_func = mse + elif FLAGS.datasource == 'omniglot' or FLAGS.datasource == 'miniimagenet': + self.loss_func = xent + self.classification = True + if FLAGS.conv: + self.dim_hidden = FLAGS.num_filters + if FLAGS.use_T: + self.forward = self.forward_conv_withT + else: + self.forward = self.forward_conv + self.construct_weights = self.construct_conv_weights + else: + self.dim_hidden = [256, 128, 64, 64] + self.forward = self.forward_fc + self.construct_weights = self.construct_fc_weights + if FLAGS.datasource == 'miniimagenet': + self.channels = 3 + else: + self.channels = 1 + self.img_size = int(np.sqrt(self.dim_input / self.channels)) + else: + raise ValueError('Unrecognized data source.') + + def construct_model(self, input_tensors=None, prefix='metatrain_'): + # a: training data for inner gradient, b: test data for meta gradient + self.inputa = input_tensors['inputa'] + self.inputb = input_tensors['inputb'] + self.labela = input_tensors['labela'] + self.labelb = input_tensors['labelb'] + + with tf.variable_scope('model', reuse=None) as training_scope: + self.dropout_probs = {} + if 'weights' in dir(self): + training_scope.reuse_variables() + weights = self.weights + else: + # Define the weights + self.weights = weights = self.construct_weights() + + # outputbs[i] and lossesb[i] is the output and loss after i+1 gradient updates + lossesa, outputas, lossesb, outputbs = [], [], [], [] + accuraciesa, accuraciesb = [], [] + num_updates = max(self.test_num_updates, FLAGS.num_updates) + outputbs = [[]] * num_updates + lossesb = [[]] * num_updates + accuraciesb = [[]] * num_updates + + def task_metalearn(inp, reuse=True): + """ Perform gradient descent for one task in the meta-batch. 
""" + inputa, inputb, labela, labelb = inp + task_outputbs, task_lossesb = [], [] + mse_lossesb = [] + + if self.classification: + task_accuraciesb = [] + + train_keys = list(weights.keys()) + if FLAGS.use_M and FLAGS.share_M: + def make_shared_mask(key): + temperature = FLAGS.temp + logits = weights[key+'_prob'] + logits = tf.stack([logits, tf.zeros(logits.shape)], 1) + U = tf.random_uniform(logits.shape, minval=0, maxval=1) + gumbel = -tf.log(-tf.log(U + 1e-20) + 1e-20) + y = logits + gumbel + gumbel_softmax = tf.nn.softmax(y / temperature) + gumbel_hard = tf.cast(tf.equal(gumbel_softmax, tf.reduce_max(gumbel_softmax, 1, keep_dims=True)), tf.float32) + mask = tf.stop_gradient(gumbel_hard - gumbel_softmax) + gumbel_softmax + return mask[:, 0] + + def get_mask(masks, name): + mask = masks[[k for k in masks.keys() if name[-1] in k][0]] + if 'conv' in name: # Conv + mask = tf.reshape(mask, [1, 1, 1, -1]) + tile_size = weights[name].shape.as_list()[:3] + [1] + mask = tf.tile(mask, tile_size) + elif 'w' in name: # FC + mask = tf.reshape(mask, [1, -1]) + tile_size = weights[name].shape.as_list()[:1] + [1] + mask = tf.tile(mask, tile_size) + elif 'b' in name: # Bias + mask = tf.reshape(mask, [-1]) + return mask + if self.classification: + masks = {k: make_shared_mask(k) for k in ['conv1', 'conv2', 'conv3', 'conv4', 'w5']} + else: + masks = {k: make_shared_mask(k) for k in ['w1', 'w2', 'w3']} + + if FLAGS.use_M and not FLAGS.share_M: + def get_mask_noshare(key): + temperature = FLAGS.temp + logits = weights[key + '_prob'] + logits = tf.stack([logits, tf.zeros(logits.shape)], 1) + U = tf.random_uniform(logits.shape, minval=0, maxval=1) + gumbel = -tf.log(-tf.log(U + 1e-20) + 1e-20) + y = logits + gumbel + gumbel_softmax = tf.nn.softmax(y / temperature) + gumbel_hard = tf.cast(tf.equal(gumbel_softmax, tf.reduce_max(gumbel_softmax, 1, keep_dims=True)), tf.float32) + out = tf.stop_gradient(gumbel_hard - gumbel_softmax) + gumbel_softmax + return tf.reshape(out[:, 0], weights[key].shape) + + train_keys = [k for k in weights.keys() if 'prob' not in k and 'f' not in k] + train_weights = [weights[k] for k in train_keys] + task_outputa = self.forward(inputa, weights, reuse=reuse) # only reuse on the first iter + self.task_outputa = task_outputa + task_lossa = self.loss_func(task_outputa, labela) + grads = tf.gradients(task_lossa, train_weights) + if FLAGS.stop_grad: + grads = [tf.stop_gradient(grad) for grad in grads] + gradients = dict(zip(train_keys, grads)) + + fast_weights = dict(zip(weights.keys(), [weights[key] for key in weights.keys()])) + + def compute_weights(key): + prev_weights = fast_weights[key] + if key not in train_keys: + return prev_weights + if FLAGS.use_M and FLAGS.share_M: + mask = get_mask(masks, key) + new_weights = prev_weights - self.update_lr * mask * gradients[key] + elif FLAGS.use_M and not FLAGS.share_M: + mask = get_mask_noshare(key) + new_weights = prev_weights - self.update_lr * mask * gradients[key] + else: + new_weights = prev_weights - self.update_lr * gradients[key] + return new_weights + + fast_weights = dict(zip( + weights.keys(), [compute_weights(key) for key in weights.keys()])) + + output = self.forward(inputb, fast_weights, reuse=True) + task_outputbs.append(output) + loss = self.loss_func(output, labelb) + task_lossesb.append(loss) + + for j in range(num_updates - 1): + output = self.forward(inputa, fast_weights, reuse=True) + loss = self.loss_func(output, labela) + train_weights = [fast_weights[k] for k in train_keys] + grads = tf.gradients(loss, train_weights) + 
if FLAGS.stop_grad:
+                        grads = [tf.stop_gradient(grad) for grad in grads]
+                    gradients = dict(zip(train_keys, grads))
+
+                    fast_weights = dict(zip(
+                        weights.keys(), [compute_weights(key) for key in weights.keys()]))
+
+                    output = self.forward(inputb, fast_weights, reuse=True)
+                    task_outputbs.append(output)
+                    loss = self.loss_func(output, labelb)
+                    task_lossesb.append(loss)
+
+                task_output = [task_outputa, task_outputbs, task_lossa, task_lossesb]
+
+                if self.classification:
+                    task_accuracya = tf.contrib.metrics.accuracy(tf.argmax(tf.nn.softmax(task_outputa), 1),
+                                                                 tf.argmax(labela, 1))
+                    for j in range(num_updates):
+                        task_accuraciesb.append(
+                            tf.contrib.metrics.accuracy(tf.argmax(tf.nn.softmax(task_outputbs[j]), 1),
+                                                        tf.argmax(labelb, 1)))
+                    task_output.extend([task_accuracya, task_accuraciesb])
+
+                return task_output
+
+            out_dtype = [tf.float32, [tf.float32] * num_updates, tf.float32, [tf.float32] * num_updates]
+            if self.classification:
+                out_dtype.extend([tf.float32, [tf.float32] * num_updates])
+
+            if FLAGS.chip == 'npu':
+                if self.classification:
+                    outputas, outputbs, lossesa, lossesb, accuraciesa, accuraciesb = [], [], [], [], [], []
+                    for i in range(FLAGS.meta_batch_size):
+                        each_input = self.inputa[i], self.inputb[i], self.labela[i], self.labelb[i]
+                        each_outputas, each_outputbs, each_lossesa, each_lossesb, each_accuraciesa, each_accuraciesb = task_metalearn(
+                            each_input)
+                        outputas.append(each_outputas)
+                        outputbs.append(each_outputbs)
+                        lossesa.append(each_lossesa)
+                        lossesb.append(each_lossesb)
+                        accuraciesa.append(each_accuraciesa)
+                        accuraciesb.append(each_accuraciesb)
+                    outputas = tf.stack(outputas)
+                    outputbs = tf.unstack(tf.stack(outputbs), axis=1)
+                    lossesa = tf.stack(lossesa)
+                    lossesb = tf.unstack(tf.stack(lossesb), axis=1)
+                    accuraciesa = tf.stack(accuraciesa)
+                    accuraciesb = tf.unstack(tf.stack(accuraciesb), axis=1)
+                else:
+                    outputas, outputbs, lossesa, lossesb = [], [], [], []
+                    for i in range(FLAGS.meta_batch_size):
+                        each_input = self.inputa[i], self.inputb[i], self.labela[i], self.labelb[i]
+                        each_outputas, each_outputbs, each_lossesa, each_lossesb = task_metalearn(
+                            each_input)
+                        outputas.append(each_outputas)
+                        outputbs.append(each_outputbs)
+                        lossesa.append(each_lossesa)
+                        lossesb.append(each_lossesb)
+                    outputas = tf.stack(outputas)
+                    # Regroup the per-update outputs across the meta-batch,
+                    # matching the classification branch above.
+                    outputbs = tf.unstack(tf.stack(outputbs), axis=1)
+                    lossesa = tf.stack(lossesa)
+                    lossesb = tf.unstack(tf.stack(lossesb), axis=1)
+
+        logit_keys = sorted([k for k in weights.keys() if 'prob' in k])
+        logit_weights = [-weights[k] for k in logit_keys]
+        probs = [tf.exp(w) / (1 + tf.exp(w)) for w in logit_weights]
+        self.total_probs = [tf.reduce_mean(p) for p in probs]
+
+        ## Performance & Optimization
+        self.metaval_total_loss1 = total_loss1 = tf.reduce_sum(lossesa) / tf.to_float(FLAGS.meta_batch_size)
+        self.metaval_total_losses2 = total_losses2 = [tf.reduce_sum(lossesb[j]) / tf.to_float(FLAGS.meta_batch_size)
+                                                      for j in range(num_updates)]
+        if self.classification:
+            self.metaval_total_accuracy1 = total_accuracy1 = tf.reduce_sum(accuraciesa) / tf.to_float(
+                FLAGS.meta_batch_size)
+            self.metaval_total_accuracies2 = total_accuracies2 = [
+                tf.reduce_sum(accuraciesb[j]) / tf.to_float(FLAGS.meta_batch_size) for j in range(num_updates)]
+
+        ## Summaries
+        tf.summary.scalar(prefix + 'change probs', tf.reduce_mean(self.total_probs))
+        tf.summary.scalar(prefix + 'Pre-update loss', total_loss1)
+        if self.classification:
+            tf.summary.scalar(prefix + 'Pre-update accuracy', total_accuracy1)
+
+        for j in 
range(num_updates): + tf.summary.scalar(prefix + 'Post-update loss, step ' + str(j + 1), total_losses2[j]) + if self.classification: + tf.summary.scalar(prefix + 'Post-update accuracy, step ' + str(j + 1), total_accuracies2[j]) + + for k, v in weights.items(): + tf.summary.histogram(k, v) + if 'prob' in k: + tf.summary.histogram('prob_'+k, tf.nn.softmax(tf.stack([v, tf.zeros(v.shape)], 1))[:, 0]) + + ### Network construction functions (fc networks and conv networks) + def construct_fc_weights(self): + weights = {} + weights['w1'] = tf.Variable(tf.truncated_normal([self.dim_input, self.dim_hidden[0]], stddev=0.01)) + weights['b1'] = tf.Variable(tf.zeros([self.dim_hidden[0]])) + for i in range(1, len(self.dim_hidden)): + weights['w' + str(i + 1)] = tf.Variable( + tf.truncated_normal([self.dim_hidden[i - 1], self.dim_hidden[i]], stddev=0.01)) + weights['b' + str(i + 1)] = tf.Variable(tf.zeros([self.dim_hidden[i]])) + weights['w' + str(len(self.dim_hidden) + 1)] = tf.Variable( + tf.truncated_normal([self.dim_hidden[-1], self.dim_output], stddev=0.01)) + weights['b' + str(len(self.dim_hidden) + 1)] = tf.Variable(tf.zeros([self.dim_output])) + + if FLAGS.use_M and not FLAGS.share_M: + weights['w1_prob'] = tf.Variable(tf.truncated_normal([self.dim_input * self.dim_hidden[0]], stddev=.1)) + weights['b1_prob'] = tf.Variable(tf.truncated_normal([self.dim_hidden[0]], stddev=.1)) + for i in range(1, len(self.dim_hidden)): + weights['w' + str(i + 1) + '_prob'] = tf.Variable( + tf.truncated_normal([self.dim_hidden[i - 1] * self.dim_hidden[i]], stddev=.1)) + weights['b' + str(i + 1) + '_prob'] = tf.Variable( + tf.truncated_normal([self.dim_hidden[i]], stddev=.1)) + weights['w' + str(len(self.dim_hidden) + 1) + '_prob'] = tf.Variable( + tf.truncated_normal([self.dim_hidden[-1] * self.dim_output], stddev=0.1)) + weights['b' + str(len(self.dim_hidden) + 1) + '_prob'] = tf.Variable( + tf.truncated_normal([self.dim_output], stddev=.1)) + elif FLAGS.use_M and FLAGS.share_M: + weights['w1_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden[0]])) + for i in range(1, len(self.dim_hidden)): + weights['w' + str(i + 1) + '_prob'] = tf.Variable( + FLAGS.logit_init * tf.ones([self.dim_hidden[i]])) + weights['w' + str(len(self.dim_hidden) + 1) + '_prob'] = tf.Variable( + FLAGS.logit_init * tf.ones([self.dim_output])) + + if FLAGS.use_T: + weights['w1_f'] = tf.Variable(tf.eye(self.dim_hidden[0])) + weights['w2_f'] = tf.Variable(tf.eye(self.dim_hidden[1])) + weights['w3_f'] = tf.Variable(tf.eye(self.dim_output)) + return weights + + def forward_fc(self, inp, weights, reuse=False): + hidden = normalize(tf.matmul(inp, weights['w1']) + weights['b1'], + activation=tf.nn.relu, reuse=reuse, scope='0') + for i in range(1, len(self.dim_hidden)): + hidden = normalize(tf.matmul(hidden, weights['w' + str(i + 1)]) + weights['b' + str(i + 1)], + activation=tf.nn.relu, reuse=reuse, scope=str(i + 1)) + return tf.matmul(hidden, weights['w' + str(len(self.dim_hidden) + 1)]) + \ + weights['b' + str(len(self.dim_hidden) + 1)] + + def forward_fc_withT(self, inp, weights, reuse=False): + hidden = tf.matmul(tf.matmul(inp, weights['w1']) + weights['b1'], weights['w1_f']) + hidden = normalize(hidden, activation=tf.nn.relu, reuse=reuse, scope='1') + hidden = tf.matmul(tf.matmul(hidden, weights['w2']) + weights['b2'], weights['w2_f']) + hidden = normalize(hidden, activation=tf.nn.relu, reuse=reuse, scope='2') + hidden = tf.matmul(tf.matmul(hidden, weights['w3']) + weights['b3'], weights['w3_f']) + return hidden + + def 
construct_conv_weights(self): + weights = {} + dtype = tf.float32 + conv_initializer = tf.contrib.layers.xavier_initializer_conv2d(dtype=dtype) + fc_initializer = tf.contrib.layers.xavier_initializer(dtype=dtype) + k = 3 + channels = self.channels + dim_hidden = self.dim_hidden + + def get_conv(name, shape): + return tf.get_variable(name, shape, initializer=conv_initializer, dtype=dtype) + + def get_identity(dim, conv=True): + return tf.Variable(tf.eye(dim, batch_shape=[1,1])) if conv \ + else tf.Variable(tf.eye(dim)) + + weights['conv1'] = get_conv('conv1', [k, k, channels, self.dim_hidden]) + weights['b1'] = tf.Variable(tf.zeros([self.dim_hidden])) + weights['conv2'] = get_conv('conv2', [k, k, dim_hidden, self.dim_hidden]) + weights['b2'] = tf.Variable(tf.zeros([self.dim_hidden])) + weights['conv3'] = get_conv('conv3', [k, k, dim_hidden, self.dim_hidden]) + weights['b3'] = tf.Variable(tf.zeros([self.dim_hidden])) + weights['conv4'] = get_conv('conv4', [k, k, dim_hidden, self.dim_hidden]) + weights['b4'] = tf.Variable(tf.zeros([self.dim_hidden])) + if FLAGS.datasource == 'miniimagenet': + # assumes max pooling + assert FLAGS.max_pool + weights['w5'] = tf.get_variable('w5', [self.dim_hidden * 5 * 5, self.dim_output], + initializer=fc_initializer) + weights['b5'] = tf.Variable(tf.zeros([self.dim_output]), name='b5') + + if FLAGS.use_M and not FLAGS.share_M: + weights['conv1_prob'] = tf.Variable(tf.truncated_normal([k * k * channels * self.dim_hidden], stddev=.01)) + weights['b1_prob'] = tf.Variable(tf.truncated_normal([self.dim_hidden], stddev=.01)) + weights['conv2_prob'] = tf.Variable(tf.truncated_normal([k * k * dim_hidden * self.dim_hidden], stddev=.01)) + weights['b2_prob'] = tf.Variable(tf.truncated_normal([self.dim_hidden], stddev=.01)) + weights['conv3_prob'] = tf.Variable(tf.truncated_normal([k * k * dim_hidden * self.dim_hidden], stddev=.01)) + weights['b3_prob'] = tf.Variable(tf.truncated_normal([self.dim_hidden], stddev=.01)) + weights['conv4_prob'] = tf.Variable(tf.truncated_normal([k * k * dim_hidden * self.dim_hidden], stddev=.01)) + weights['b4_prob'] = tf.Variable(tf.truncated_normal([self.dim_hidden], stddev=.01)) + weights['w5_prob'] = tf.Variable(tf.truncated_normal([dim_hidden *5*5* self.dim_output], stddev=.01)) + weights['b5_prob'] = tf.Variable(tf.truncated_normal([self.dim_output], stddev=.01)) + if FLAGS.use_M and FLAGS.share_M: + weights['conv1_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['conv2_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['conv3_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['conv4_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['w5_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_output])) + + if FLAGS.use_T: + weights['conv1_f'] = get_identity(self.dim_hidden, conv=True) + weights['conv2_f'] = get_identity(self.dim_hidden, conv=True) + weights['conv3_f'] = get_identity(self.dim_hidden, conv=True) + weights['conv4_f'] = get_identity(self.dim_hidden, conv=True) + weights['w5_f'] = get_identity(self.dim_output, conv=False) + else: + weights['w5'] = tf.Variable(tf.random_normal([dim_hidden, self.dim_output]), name='w5') + weights['b5'] = tf.Variable(tf.zeros([self.dim_output]), name='b5') + if FLAGS.use_M and not FLAGS.share_M: + weights['conv1_prob'] = tf.Variable(tf.truncated_normal([k * k * channels * self.dim_hidden], stddev=.01)) + weights['conv2_prob'] = tf.Variable(tf.truncated_normal([k * k * dim_hidden * 
self.dim_hidden], stddev=.01)) + weights['conv3_prob'] = tf.Variable(tf.truncated_normal([k * k * dim_hidden * self.dim_hidden], stddev=.01)) + weights['conv4_prob'] = tf.Variable(tf.truncated_normal([k * k * dim_hidden * self.dim_hidden], stddev=.01)) + weights['w5_prob'] = tf.Variable(tf.truncated_normal([dim_hidden * self.dim_output], stddev=.01)) + if FLAGS.use_M and FLAGS.share_M: + weights['conv1_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['conv2_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['conv3_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['conv4_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_hidden])) + weights['w5_prob'] = tf.Variable(FLAGS.logit_init * tf.ones([self.dim_output])) + + if FLAGS.use_T: + weights['conv1_f'] = get_identity(self.dim_hidden, conv=True) + weights['conv2_f'] = get_identity(self.dim_hidden, conv=True) + weights['conv3_f'] = get_identity(self.dim_hidden, conv=True) + weights['conv4_f'] = get_identity(self.dim_hidden, conv=True) + weights['w5_f'] = get_identity(self.dim_output, conv=False) + return weights + + def forward_conv(self, inp, weights, reuse=False, scope=''): + # reuse is for the normalization parameters. + channels = self.channels + inp = tf.reshape(inp, [-1, self.img_size, self.img_size, channels]) + hidden1 = conv_block(inp, weights['conv1'], weights['b1'], reuse, scope + '0') + hidden2 = conv_block(hidden1, weights['conv2'], weights['b2'], reuse, scope + '1') + hidden3 = conv_block(hidden2, weights['conv3'], weights['b3'], reuse, scope + '2') + hidden4 = conv_block(hidden3, weights['conv4'], weights['b4'], reuse, scope + '3') + + if FLAGS.datasource == 'miniimagenet': + # last hidden layer is 6x6x64-ish, reshape to a vector + hidden4 = tf.reshape(hidden4, [-1, np.prod([int(dim) for dim in hidden4.get_shape()[1:]])]) + else: + hidden4 = tf.reduce_mean(hidden4, [1, 2]) + return tf.matmul(hidden4, weights['w5']) + weights['b5'] + + def forward_conv_withT(self, inp, weights, reuse=False, scope=''): + # reuse is for the normalization parameters. 
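+        # conv_tout mirrors conv_block from forward_conv, but composes every
+        # convolution with its learned transformation kernel (the identity-
+        # initialized *_f variables) through an extra 1x1 convolution before
+        # normalization, so at initialization the transformed network computes
+        # the same function as the untransformed one.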
+ def conv_tout(inp, cweight, bweight, rweight, reuse, scope, activation=tf.nn.relu, max_pool_pad='VALID', + residual=False): + stride, no_stride = [1, 2, 2, 1], [1, 1, 1, 1] + if FLAGS.max_pool: + conv_output = tf.nn.conv2d(inp, cweight, no_stride, 'SAME') + bweight + else: + conv_output = tf.nn.conv2d(inp, cweight, stride, 'SAME') + bweight + conv_output = tf.nn.conv2d(conv_output, rweight, no_stride, 'SAME') + normed = normalize(conv_output, activation, reuse, scope) + if FLAGS.max_pool: + normed = tf.nn.max_pool(normed, stride, stride, max_pool_pad) + return normed + + channels = self.channels + inp = tf.reshape(inp, [-1, self.img_size, self.img_size, channels]) + hidden1 = conv_tout(inp, weights['conv1'], weights['b1'], weights['conv1_f'], reuse, scope + '0') + hidden2 = conv_tout(hidden1, weights['conv2'], weights['b2'], weights['conv2_f'], reuse, scope + '1') + hidden3 = conv_tout(hidden2, weights['conv3'], weights['b3'], weights['conv3_f'], reuse, scope + '2') + hidden4 = conv_tout(hidden3, weights['conv4'], weights['b4'], weights['conv4_f'], reuse, scope + '3') + + if FLAGS.datasource == 'miniimagenet': + # last hidden layer is 6x6x64-ish, reshape to a vector + hidden4 = tf.reshape(hidden4, [-1, np.prod([int(dim) for dim in hidden4.get_shape()[1:]])]) + else: + hidden4 = tf.reduce_mean(hidden4, [1, 2]) + hidden5 = tf.matmul(hidden4, weights['w5']) + weights['b5'] + return tf.matmul(hidden5, weights['w5_f']) + diff --git a/TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow/author.txt b/TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow/author.txt new file mode 100644 index 0000000000000000000000000000000000000000..833dc4c8131d782d9c3767d8ee46b75b54896cd8 --- /dev/null +++ b/TensorFlow/contrib/cv/Pix2pose_ID1164_for_TensorFlow/author.txt @@ -0,0 +1,4 @@ +Shiyuan Ma, Lei Xie +Nanjing University +Nanjing, Jiangsu, China +mashiyuan@smail.nju.edu.cn, lxie@nju.edu.cn \ No newline at end of file diff --git a/TensorFlow/contrib/cv/RANDLA-NET_ID0850_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/RANDLA-NET_ID0850_for_TensorFlow/test/train_full_1p.sh index d89e020bbdebe9be2fbc66566b953c5344e85637..31c24b677841c564ecdcffa533984c0b9f112277 100644 --- a/TensorFlow/contrib/cv/RANDLA-NET_ID0850_for_TensorFlow/test/train_full_1p.sh +++ b/TensorFlow/contrib/cv/RANDLA-NET_ID0850_for_TensorFlow/test/train_full_1p.sh @@ -160,7 +160,7 @@ echo "E2E Training Duration sec : $e2e_time" #训练用例信息,不需要修改 BatchSize=${batch_size} DeviceType=`uname -m` -CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' ##获取性能数据,不需要修改 #吞吐量 diff --git a/TensorFlow/contrib/cv/STNet_ID2360_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/STNet_ID2360_for_TensorFlow/test/train_performance_1p.sh index 408baf968c3a49ab59f2759daf07c25905428bae..68b7a0d5da9d16e59d3232ca82350cbe825dd9a7 100644 --- a/TensorFlow/contrib/cv/STNet_ID2360_for_TensorFlow/test/train_performance_1p.sh +++ b/TensorFlow/contrib/cv/STNet_ID2360_for_TensorFlow/test/train_performance_1p.sh @@ -120,7 +120,7 @@ else fi # 性能相关数据计算 -StepTime=`grep "sec/step :" ${print_log} | tail -n 10 | awk '{print $NF}' | awk '{sum+=$1} END {print sum/NR}'` +StepTime=`grep "sec/step :" ${print_log} | tail -n 10 | awk -F':' '{print $5}' |cut -b -15 | awk '{sum+=$1} END {print sum/NR}'` FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` # 精度相关数据计算 diff --git a/TensorFlow/contrib/cv/TNT_ID1233_for_TensorFlow/test/train_full_1p.sh 
b/TensorFlow/contrib/cv/TNT_ID1233_for_TensorFlow/test/train_full_1p.sh index 8887efad62217575bbb0007888e3615390ba7e63..0369ce5ba7edab69bf692c1a254790b46fe27729 100644 --- a/TensorFlow/contrib/cv/TNT_ID1233_for_TensorFlow/test/train_full_1p.sh +++ b/TensorFlow/contrib/cv/TNT_ID1233_for_TensorFlow/test/train_full_1p.sh @@ -118,6 +118,7 @@ then --triplet_model ${data_path}/dataset/model_data/20211209-124102/ \ --save_dir ${output_path}model_data/traj/ \ --max_step 15 \ + --train_epoch=2000000 \ --output_path ${output_path} >${print_log} 2>&1 @@ -129,6 +130,7 @@ else --triplet_model ${data_path}/dataset/model_data/20211209-124102/ \ --save_dir ${output_path}model_data/traj/ \ --max_step 15 \ + --train_epoch=2000000 \ --output_path ${output_path} >${print_log} 2>&1 fi diff --git a/TensorFlow/contrib/cv/TNT_ID1233_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/TNT_ID1233_for_TensorFlow/test/train_performance_1p.sh index 73af1fb0a74148599814f570cec836c3b103438c..5a8eb551faa62c3e6e58ac9a7933ae30459c4730 100644 --- a/TensorFlow/contrib/cv/TNT_ID1233_for_TensorFlow/test/train_performance_1p.sh +++ b/TensorFlow/contrib/cv/TNT_ID1233_for_TensorFlow/test/train_performance_1p.sh @@ -30,7 +30,7 @@ if [[ $1 == --help || $1 == -h ]];then --data_path # dataset of training --output_path # output of training --train_steps # max_step for training - --train_epochs # max_epoch for training + --train_epochs # max_epoch for training --batch_size # batch size -h/--help show help message " @@ -46,7 +46,7 @@ do output_path=`echo ${para#*=}` elif [[ $para == --train_steps* ]];then train_steps=`echo ${para#*=}` - elif [[ $para == --train_epochs* ]];then + elif [[ $para == --train_epochs* ]];then train_epochs=`echo ${para#*=}` elif [[ $para == --batch_size* ]];then batch_size=`echo ${para#*=}` @@ -58,7 +58,6 @@ if [[ $data_path == "" ]];then echo "[Error] para \"data_path\" must be config" exit 1 fi - # 校验是否传入output_path,不需要修改 if [[ $output_path == "" ]];then output_path="./test/output/${ASCEND_DEVICE_ID}" @@ -108,25 +107,30 @@ start_time=$(date +%s) # 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 # 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 # 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 - +batch_size=32 if [ x"${modelarts_flag}" != x ]; then python3.7 ./train_cnn_trajectory_2d.py \ - --MAT_folder ${data_path}original_data/MOT17Det/mat/ \ - --temp_folder ${output_path}temp/ \ - --triplet_model ${data_path}model_data/20211209-124102/ \ - --save_dir ${output_path}model_data/traj/model.ckpt \ - --max_step 15 \ - --output_path ${output_path} + --MAT_folder ${data_path}/dataset/original_data/MOT17Det/mat/ \ + --img_folder ${data_path}/dataset/original_data/MOT17Det/train \ + --temp_folder ${output_path}/temp/ \ + --triplet_model ${data_path}/dataset/model_data/20211209-124102/ \ + --save_dir ${output_path}/model_data/traj/model.ckpt \ + --max_step 3 \ + --train_epoch=1 \ + --output_path ${output_path} >${print_log} 2>&1 else python3.7 ./train_cnn_trajectory_2d.py \ - --MAT_folder ${data_path}original_data/MOT17Det/mat/ \ + --MAT_folder ${data_path}/dataset/original_data/MOT17Det/mat \ + --img_folder ${data_path}/dataset/original_data/MOT17Det/train \ --temp_folder ${output_path}temp/ \ - --triplet_model ${data_path}model_data/20211209-124102/ \ - --save_dir ${output_path}model_data/traj/ \ - --max_step 15 \ - --output_path ${output_path} + --triplet_model ${data_path}/dataset/model_data/20211209-124102/ \ + --save_dir ${output_path}model_data/traj/model.ckpt \ + --max_step 3 \ + --train_epoch=1 \ + --output_path 
${output_path} >${print_log} 2>&1 + fi # 性能相关数据计算 @@ -134,7 +138,7 @@ StepTime=`grep "sec/step :" ${print_log} | tail -n 10 | awk '{print $NF}' | awk FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` # 精度相关数据计算 -train_accuracy=`grep "Final Accuracy accuracy" ${print_log} | awk '{print $NF}'` +#train_accuracy=`grep "Final Accuracy accuracy" ${print_log} | awk '{print $NF}'` # 提取所有loss打印信息 grep "loss :" ${print_log} | awk -F ":" '{print $4}' | awk -F "-" '{print $1}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt @@ -160,7 +164,6 @@ fi # 获取最终的casename,请保留,case文件名为${CaseName} get_casename - # 重命名loss文件 if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; then @@ -178,7 +181,7 @@ echo "Final Performance sec/step : $StepTime" echo "E2E Training Duration sec : $e2e_time" # 输出训练精度 -echo "Final Train Accuracy : ${train_accuracy}" +#echo "Final Train Accuracy : ${train_accuracy}" # 最后一个迭代loss值,不需要修改 ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) @@ -192,4 +195,4 @@ echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}. echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log diff --git a/TensorFlow/contrib/cv/TNT_ID1233_for_TensorFlow/train_cnn_trajectory_2d.py b/TensorFlow/contrib/cv/TNT_ID1233_for_TensorFlow/train_cnn_trajectory_2d.py index efbeef22adef8ee97bcf722cc74ddd995aabdcc0..49c94d30bb330e4a5da6167a9d9d4599467f0b5d 100644 --- a/TensorFlow/contrib/cv/TNT_ID1233_for_TensorFlow/train_cnn_trajectory_2d.py +++ b/TensorFlow/contrib/cv/TNT_ID1233_for_TensorFlow/train_cnn_trajectory_2d.py @@ -105,6 +105,7 @@ def main(args): triplet_model = args.triplet_model save_dir = args.save_dir max_step = args.max_step + train_epoch = args.train_epoch # In[4]: batch_X_x = tf.placeholder(tf.float32, [None, 1, max_length, 1]) @@ -144,7 +145,7 @@ def main(args): cnt = 0 # - for i in range(2000000): + for i in range(train_epoch): start_time = time.time() total_batch_x, total_batch_y = generate_data(feature_size, max_length, batch_size * 10, MAT_folder, img_folder,triplet_model, temp_folder) total_batch_x = interp_batch(total_batch_x) @@ -174,6 +175,9 @@ def main(args): # shuffle 4 times acc = [] step_time = 0 + + iter_start = time.time() + for kk in range(num_batch): temp_batch_size = batch_size if kk == num_batch - 1: @@ -239,8 +243,12 @@ def main(args): batch_Y: batch_y, keep_prob: 0.75}) #print('step %d, training accuracy %g' % (cnt, train_accuracy)) - step_time = time.time() - start_time + step_time = time.time() - iter_start print("epoch : {}----step : {}----loss : {}----sec/step : {:.3f}".format(i, cnt, 1-train_accuracy,step_time)) + + iter_end = time.time() + print("\n ----> iter duration = {} \n".format(iter_end - iter_start), flush=True) + acc = np.array(acc) print('accuracy : {}'.format(np.mean(acc))) @@ -917,6 +925,7 @@ def parse_arguments(argv): parser.add_argument('--triplet_model', type=str, default='/home/ma-user/modelarts/outputs/train_url_0/model_data/20211209-124102/ ') parser.add_argument('--max_step', type=int,default='2000000') + parser.add_argument('--train_epoch', 
type=int,default='2000000') parser.add_argument('--save_dir', type=str, default='/home/ma-user/modelarts/outputs/train_url_0/models/result/model.ckpt') parser.add_argument('--output_path', type=str, diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/.keep b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/LICENSE b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..5ea8a5f7b6ae91ebb12b7f2fa71a5432bb89de63 --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/LICENSE @@ -0,0 +1,284 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +------------------ +Files: third_party/compute_library/... + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +------------------ +Files: ACKNOWLEDGEMENTS +LICENSE + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR + ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------------------ +Files: third_party/hexagon + +Copyright (c) 2016-2019, The Linux Foundation. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the +disclaimer below) provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of The Linux Foundation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE +GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT +HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/README.md b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5051ed3d10229537ccb0cd75abd206bb211ffabe --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/README.md @@ -0,0 +1,165 @@ +- [基本信息](#基本信息.md) +- [概述](#概述.md) +- [训练环境准备](#训练环境准备.md) +- [快速上手](#快速上手.md) +- [迁移学习指导](#迁移学习指导.md) +- [高级参考](#高级参考.md) +

+<h2 id="基本信息.md">基本信息</h2>

+
+**发布者(Publisher):Huawei**
+
+**应用领域(Application Domain):Image Processing**
+
+**框架(Framework):TensorFlow 1.15.0**
+
+**模型格式(Model Format):ckpt**
+
+**精度(Precision):Mixed**
+
+**处理器(Processor):昇腾910**
+
+**应用级别(Categories):Research**
+
+**描述(Description): The paper proposes
+TVNet, a novel end-to-end trainable neural network, to learn
+optical-flow-like features from data.**
+

+<h2 id="概述.md">概述</h2>

+
+  Despite the recent success of end-to-end learned representations, hand-crafted optical flow features are still widely used in video analysis tasks. To fill this gap, we propose TVNet, a novel end-to-end trainable neural network, to learn optical-flow-like features from data.
+
+- 参考论文:
+
+  https://openaccess.thecvf.com/content_cvpr_2018/papers/Fan_End-to-End_Learning_of_CVPR_2018_paper.pdf
+
+- 适配昇腾 AI 处理器的实现:
+
+  https://gitee.com/myd-git/ModelZoo-TensorFlow/tree/master/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow
+
+- 通过Git获取对应commit\_id的代码方法如下:
+
+  ```
+  git clone {repository_url}        # 克隆仓库的代码
+  cd {repository_name}              # 切换到模型的代码仓目录
+  git checkout {branch}             # 切换到对应分支
+  git reset --hard {commit_id}      # 代码设置到对应的commit_id
+  cd {code_path}                    # 切换到模型代码所在路径,若仓库下只有该模型,则无需切换
+  ```
+
+## 默认配置
+
+- 训练数据集预处理:
+
+  - 图像的输入尺寸为1024*436
+  - 图像输入格式:png
+
+- 测试数据集预处理:
+
+  - 图像的输入尺寸为1024*436
+  - 图像输入格式:png
+
+- 训练超参:
+
+  - scale: Number of scales in TVNet (default: 1)
+  - warp: Number of warpings in TVNet (default: 1)
+  - iteration: Number of iterations in TVNet (default: 50)
+  - Train step: 10000
+  - gpu: the gpu to run on (0-indexed, -1 for CPU)
+
+## 支持特性
+
+| 特性列表 | 是否支持 |
+|-------|------|
+| 分布式训练 | 否 |
+| 混合精度 | 是 |
+| 并行数据 | 否 |
+
+## 混合精度训练
+
+昇腾910 AI处理器提供自动混合精度功能,可以针对全网中float32数据类型的算子,按照内置的优化策略,自动将部分float32的算子降低精度到float16,从而在精度损失很小的情况下提升系统性能并减少内存使用。
+
+## 开启混合精度
+
+脚本已默认开启混合精度,设置precision_mode参数的脚本参考如下。
+
+  ```
+  custom_op = session_config.graph_options.rewrite_options.custom_optimizers.add()
+  custom_op.name = 'NpuOptimizer'
+  custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes(str(args.precision_mode))
+  ```
+
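For reference, the snippet above expanded into a minimal, self-contained session setup. The value `allow_mix_precision` and the companion `remapping = RewriterConfig.OFF` line follow common CANN migration conventions and are assumptions here, not settings taken from this repository's scripts:

```
import tensorflow as tf
from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig

session_config = tf.ConfigProto()
custom_op = session_config.graph_options.rewrite_options.custom_optimizers.add()
custom_op.name = 'NpuOptimizer'
# Lower eligible float32 ops to float16 according to the built-in policy.
custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
# Keep grappler's remapper from interfering with the NPU optimizer (assumed setting).
session_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF

with tf.Session(config=session_config) as sess:
    pass  # build and run the TVNet training graph here
```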

+<h2 id="快速上手.md">快速上手</h2>

+
+- 数据集准备
+1. 数据集下载链接:obs://cann-id0951/dataset/
+
+## 模型训练
+
+- 单卡训练
+
+1. 配置训练参数
+2. 启动训练
+```
+bash train_full_1p.sh
+```
+

+<h2 id="训练结果.md">训练结果</h2>

+
+- 精度结果比对
+
+|精度指标项|GPU实测|NPU实测|
+|---|---|---|
+|LOSS|8.7241955|8.390178|
+
+- 性能结果比对
+
+|性能指标项|GPU实测|NPU实测|
+|---|---|---|
+|FPS|0.9856|1.0684|
+

+<h2 id="结果测试.md">结果测试</h2>

+- 在test_sintel.py文件中修改测试的图片和指定模型的路径,然后运行该文件。
+```
+python test_sintel.py
+```
+- 用visualize(可视化脚本集合)执行得到的.mat文件,得到如下的结果:
+
+![Result](Result.png)
+
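The visualize scripts themselves are not part of this patch. As a stand-in, a minimal sketch for previewing the exported flow, assuming the .mat file stores the horizontal and vertical components under keys `u` and `v` (the key names are a guess; list the real ones first with `scipy.io.whosmat`):

```
import numpy as np
import scipy.io as sio
import matplotlib.pyplot as plt

mat = sio.loadmat('result.mat')    # file written by test_sintel.py; path is a placeholder
u, v = mat['u'], mat['v']          # assumed key names for the two flow components
magnitude = np.sqrt(u.astype(np.float64) ** 2 + v.astype(np.float64) ** 2)

plt.imshow(magnitude, cmap='viridis')   # brighter = larger apparent motion
plt.colorbar(label='flow magnitude (pixels)')
plt.savefig('Result_preview.png', dpi=150)
```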

+<h2 id="高级参考.md">高级参考</h2>

+ +## 脚本和示例代码 + +``` +├── tvnet.py //模型搭建文件 +├── train_epe_sintel.py //读取数据进行训练 +├── test_sintel.py //测试模型结果 +├── spatial_transformer.py //Spatial Transformer Layer +├── template.py //一些模板参数的设置 +├── README.md //代码说明文档 +``` + +## 脚本参数 + + + + +``` +--data_path +--output_path +--Batch size: 1 +--Learning rate(LR): 1e-4 +--Optimizer: Adam +--Train steps:10000 +``` + +## 训练过程 + +1. 通过“模型训练”中的训练指令启动单卡卡训练。 + +2. 参考脚本的模型存储路径为train_url diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/Result.png b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/Result.png new file mode 100644 index 0000000000000000000000000000000000000000..7f8d8e7dc41b7b071ca1ce48ac17566640c20e21 Binary files /dev/null and b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/Result.png differ diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/loss perf_npu.txt b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/loss perf_npu.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/modelarts_entry_acc.py b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/modelarts_entry_acc.py new file mode 100644 index 0000000000000000000000000000000000000000..13077b10e660de32d6f7861257a50e1a01ede9ba --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/modelarts_entry_acc.py @@ -0,0 +1,63 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
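+# ModelArts entry point for the accuracy run: it pins the working directory to
+# the code directory, converts the test/ shell scripts to Unix line endings with
+# dos2unix, launches train_full_1p.sh against the mounted data/output paths, and
+# finally copies the code directory back to the OBS output for inspection.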
+ +import os +import argparse +import sys + +# 解析输入参数data_url +parser = argparse.ArgumentParser() +parser.add_argument("--data_url", type=str, default="/home/ma-user/modelarts/inputs/data_url_0") +parser.add_argument("--train_url", type=str, default="/home/ma-user/modelarts/outputs/train_url_0/") +config = parser.parse_args() + +print("[CANN-Modelzoo] code_dir path is [%s]" % (sys.path[0])) +code_dir = sys.path[0] +os.chdir(code_dir) +print("[CANN-Modelzoo] work_dir path is [%s]" % (os.getcwd())) + +print("[CANN-Modelzoo] before train - list my run files:") +os.system("ls -al /usr/local/Ascend/ascend-toolkit/") + +print("[CANN-Modelzoo] before train - list my dataset files:") +os.system("ls -al %s" % config.data_url) + +print("[CANN-Modelzoo] start run train shell") +# 设置sh文件格式为linux可执行 +os.system("dos2unix ./test/*") + +# 执行train_full_1p.sh或者train_performance_1p.sh,需要用户自己指定 +# full和performance的差异,performance只需要执行很少的step,控制在15分钟以内,主要关注性能FPS +os.system("bash ./test/train_full_1p.sh --data_path=%s --output_path=%s " % (config.data_url, config.train_url)) + +print("[CANN-Modelzoo] finish run train shell") + +# 将当前执行目录所有文件拷贝到obs的output进行备份 +print("[CANN-Modelzoo] after train - list my output files:") +os.system("cp -r %s %s " % (code_dir, config.train_url)) +os.system("ls -al %s" % config.train_url) diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/modelarts_entry_perf.py b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/modelarts_entry_perf.py new file mode 100644 index 0000000000000000000000000000000000000000..14384e227a0fa90a514254590aef5078c62ff700 --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/modelarts_entry_perf.py @@ -0,0 +1,63 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
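+# Performance variant of the entry point above: identical plumbing, but it
+# launches train_performance_1p.sh, which runs only a few steps and focuses on FPS.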
+ +import os +import argparse +import sys + +# 解析输入参数data_url +parser = argparse.ArgumentParser() +parser.add_argument("--data_url", type=str, default="/home/ma-user/modelarts/inputs/data_url_0") +parser.add_argument("--train_url", type=str, default="/home/ma-user/modelarts/outputs/train_url_0/") +config = parser.parse_args() + +print("[CANN-Modelzoo] code_dir path is [%s]" % (sys.path[0])) +code_dir = sys.path[0] +os.chdir(code_dir) +print("[CANN-Modelzoo] work_dir path is [%s]" % (os.getcwd())) + +print("[CANN-Modelzoo] before train - list my run files:") +os.system("ls -al /usr/local/Ascend/ascend-toolkit/") + +print("[CANN-Modelzoo] before train - list my dataset files:") +os.system("ls -al %s" % config.data_url) + +print("[CANN-Modelzoo] start run train shell") +# 设置sh文件格式为linux可执行 +os.system("dos2unix ./test/*") + +# 执行train_full_1p.sh或者train_performance_1p.sh,需要用户自己指定 +# full和performance的差异,performance只需要执行很少的step,控制在15分钟以内,主要关注性能FPS +os.system("bash ./test/train_performance_1p.sh --data_path=%s --output_path=%s " % (config.data_url, config.train_url)) + +print("[CANN-Modelzoo] finish run train shell") + +# 将当前执行目录所有文件拷贝到obs的output进行备份 +print("[CANN-Modelzoo] after train - list my output files:") +os.system("cp -r %s %s " % (code_dir, config.train_url)) +os.system("ls -al %s" % config.train_url) diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/modelzoo_level.txt b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/modelzoo_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..012b8ef4f5a74d86a8555d86dacf4b10f807e129 --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/modelzoo_level.txt @@ -0,0 +1,5 @@ +GPUStatus:OK +NPUMigrationStatus:OK +FuncStatus:OK +PerfStatus:OK +PrecisionStatus:OK \ No newline at end of file diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/requirements.txt b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..ea314a5fa4a9e9a0a14b65d1dbb78d1412cae422 --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/requirements.txt @@ -0,0 +1,8 @@ +python==3.6.0 +tensorflow==1.15.0 +scikit-learn==0.20 +matplotlib=3.3.4 +pandas==0.20.2 +numpy=1.19.2 +h5py +scipy=1.5.2 \ No newline at end of file diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/spatial_transformer.py b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/spatial_transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..f06b514cac283ec58225945d3c32c57e995f8fbb --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/spatial_transformer.py @@ -0,0 +1,220 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import tensorflow as tf + + +def transformer(U, theta, out_size, name='SpatialTransformer', **kwargs): + """Spatial Transformer Layer + + Implements a spatial transformer layer as described in [1]_. + Based on [2]_ and edited by David Dao for Tensorflow. + + Parameters + ---------- + U : float + The output of a convolutional net should have the + shape [num_batch, height, width, num_channels]. + theta: float + The output of the + localisation network should be [num_batch, 6]. + out_size: tuple of two ints + The size of the output of the network (height, width) + + References + ---------- + .. [1] Spatial Transformer Networks + Max Jaderberg, Karen Simonyan, Andrew Zisserman, Koray Kavukcuoglu + Submitted on 5 Jun 2015 + .. [2] https://github.com/skaae/transformer_network/blob/master/transformerlayer.py + + Notes + ----- + To initialize the network to the identity transform init + ``theta`` to : + identity = np.array([[1., 0., 0.], + [0., 1., 0.]]) + identity = identity.flatten() + theta = tf.Variable(initial_value=identity) + + """ + + def _repeat(x, n_repeats): + with tf.variable_scope('_repeat'): + rep = tf.transpose( + tf.expand_dims(tf.ones(shape=tf.stack([n_repeats, ])), 1), [1, 0]) + rep = tf.cast(rep, 'int32') + x = tf.matmul(tf.reshape(x, (-1, 1)), rep) + return tf.reshape(x, [-1]) + + def _interpolate(im, x, y, out_size): + with tf.variable_scope('_interpolate'): + # constants + num_batch = tf.shape(im)[0] + height = tf.shape(im)[1] + width = tf.shape(im)[2] + channels = tf.shape(im)[3] + + x = tf.cast(x, 'float32') + y = tf.cast(y, 'float32') + height_f = tf.cast(height, 'float32') + width_f = tf.cast(width, 'float32') + out_height = out_size[0] + out_width = out_size[1] + zero = tf.zeros([], dtype='int32') + max_y = tf.cast(tf.shape(im)[1] - 1, 'int32') + max_x = tf.cast(tf.shape(im)[2] - 1, 'int32') + + # scale indices from [-1, 1] to [0, width/height-1] + x = (x + 1.0) * (width_f - 1) / 2.0 + y = (y + 1.0) * (height_f - 1) / 2.0 + + # do sampling + x0 = tf.cast(tf.floor(x), 'int32') + x1 = x0 + 1 + y0 = tf.cast(tf.floor(y), 'int32') + y1 = y0 + 1 + + x0 = tf.clip_by_value(x0, zero, max_x - 1) + x1 = tf.clip_by_value(x1, zero, max_x) + y0 = tf.clip_by_value(y0, zero, max_y - 1) + y1 = tf.clip_by_value(y1, zero, max_y) + dim2 = width + dim1 = width * height + base = _repeat(tf.range(num_batch) * dim1, out_height * out_width) + base_y0 = base + y0 * dim2 + base_y1 = base + y1 * dim2 + idx_a = base_y0 + x0 + idx_b = base_y1 + x0 + idx_c = base_y0 + x1 + idx_d = base_y1 + x1 + + # use indices to lookup pixels in the flat image and restore + # channels dim + im_flat = tf.reshape(im, tf.stack([-1, channels])) + im_flat = tf.cast(im_flat, 'float32') + Ia = tf.gather(im_flat, idx_a) + Ib = tf.gather(im_flat, idx_b) + Ic = tf.gather(im_flat, idx_c) + Id = tf.gather(im_flat, idx_d) + + # and finally calculate 
interpolated values + x0_f = tf.cast(x0, 'float32') + x1_f = tf.cast(x1, 'float32') + y0_f = tf.cast(y0, 'float32') + y1_f = tf.cast(y1, 'float32') + wa = tf.expand_dims(((x1_f - x) * (y1_f - y)), 1) + wb = tf.expand_dims(((x1_f - x) * (y - y0_f)), 1) + wc = tf.expand_dims(((x - x0_f) * (y1_f - y)), 1) + wd = tf.expand_dims(((x - x0_f) * (y - y0_f)), 1) + output = tf.add_n([wa * Ia, wb * Ib, wc * Ic, wd * Id]) + return output + + def _meshgrid(height, width): + with tf.variable_scope('_meshgrid'): + # This should be equivalent to: + # x_t, y_t = np.meshgrid(np.linspace(-1, 1, width), + # np.linspace(-1, 1, height)) + # ones = np.ones(np.prod(x_t.shape)) + # grid = np.vstack([x_t.flatten(), y_t.flatten(), ones]) + x_t = tf.matmul(tf.ones(shape=tf.stack([height, 1])), + tf.transpose(tf.expand_dims(tf.linspace(-1.0, 1.0, width), 1), [1, 0])) + y_t = tf.matmul(tf.expand_dims(tf.linspace(-1.0, 1.0, height), 1), + tf.ones(shape=tf.stack([1, width]))) + + x_t_flat = tf.reshape(x_t, (1, -1)) + y_t_flat = tf.reshape(y_t, (1, -1)) + + # ones = tf.ones_like(x_t_flat) + # grid = tf.concat(axis=0, values=[x_t_flat, y_t_flat, ones]) + grid = tf.concat(axis=0, values=[x_t_flat, y_t_flat]) + return grid + + def _transform(theta, input_dim, out_size): + with tf.variable_scope('_transform'): + num_batch = tf.shape(input_dim)[0] + height = tf.shape(input_dim)[1] + width = tf.shape(input_dim)[2] + num_channels = tf.shape(input_dim)[3] + # theta = tf.reshape(theta, (-1, 2, 3)) + theta = tf.cast(theta, 'float32') + + # grid of (x_t, y_t, 1), eq (1) in ref [1] + height_f = tf.cast(height, 'float32') + width_f = tf.cast(width, 'float32') + out_height = out_size[0] + out_width = out_size[1] + grid = _meshgrid(out_height, out_width) + grid = tf.expand_dims(grid, 0) + grid = tf.reshape(grid, [-1]) + grid = tf.tile(grid, tf.stack([num_batch])) + # grid = tf.reshape(grid, tf.stack([num_batch, 3, -1])) + grid = tf.reshape(grid, tf.stack([num_batch, 2, -1])) + + # Transform A x (x_t, y_t, 1)^T -> (x_s, y_s) + # T_g = tf.matmul(theta, grid) + T_g = theta + grid + x_s = tf.slice(T_g, [0, 0, 0], [-1, 1, -1]) + y_s = tf.slice(T_g, [0, 1, 0], [-1, 1, -1]) + x_s_flat = tf.reshape(x_s, [-1]) + y_s_flat = tf.reshape(y_s, [-1]) + + input_transformed = _interpolate( + input_dim, x_s_flat, y_s_flat, + out_size) + + output = tf.reshape( + input_transformed, tf.stack([num_batch, out_height, out_width, num_channels])) + + return output + + with tf.variable_scope(name): + output = _transform(theta, U, out_size) + return output + + +def batch_transformer(U, thetas, out_size, name='BatchSpatialTransformer'): + """Batch Spatial Transformer Layer + + Parameters + ---------- + + U : float + tensor of inputs [num_batch,height,width,num_channels] + thetas : float + a set of transformations for each input [num_batch,num_transforms,6] + out_size : int + the size of the output [out_height,out_width] + + Returns: float + Tensor of size [num_batch*num_transforms,out_height,out_width,num_channels] + """ + with tf.variable_scope(name): + num_batch, num_transforms = map(int, thetas.get_shape().as_list()[:2]) + indices = [[i] * num_transforms for i in range(num_batch)] + input_repeated = tf.gather(U, tf.reshape(indices, [-1])) + return transformer(input_repeated, thetas, out_size) diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/test/train_full_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..8917afd3cdf2778761304d2d822544867643f218 --- 
/dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/test/train_full_1p.sh @@ -0,0 +1,183 @@ +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +etp_flag=${etp_running_flag} +if [ x"${etp_flag}" != xtrue ]; +then + echo "running without etp..." 
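+    # Outside the ETP environment, fall back to the ModelArts proc-rank log as
+    # the training log, so the metric extraction below still has output to parse.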
+ print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` + print_log="/home/ma-user/modelarts/log/${print_log_name}" +fi +echo ${print_log} + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +batch_size=1 + +if [ x"${etp_flag}" != xtrue ]; +then + python3.7 ./train_epe_sintel.py --data_path=${data_path}/dataset --output_path=${output_path} +else + python3.7 ./train_epe_sintel.py --data_path=${data_path}/dataset --output_path=${output_path} > ${print_log} 2>&1 +fi + +# 性能相关数据计算 +StepTime=`grep "sec/step :" ${print_log} | awk '{print $3}'` +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}' ` + +# 精度相关数据计算 +train_accuracy='' +# 提取所有loss打印信息 +grep " loss =" ${print_log} | awk '{print $5}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + + +########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." 
+ echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=`grep "Average epeLoss:" ${print_log} | awk '{print $3}' | tr -d ';'` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/test/train_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..214a6728a0685e7bd249f6bb6cae33839fd63358 --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/test/train_performance_1p.sh @@ -0,0 +1,184 @@ +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then 
+ echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +etp_flag=${etp_running_flag} +if [ x"${etp_flag}" != xtrue ]; +then + echo "running without etp..." + print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` + print_log="/home/ma-user/modelarts/log/${print_log_name}" +fi +echo ${print_log} + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +batch_size=1 +train_epochs=2 +train_steps=100 + +if [ x"${etp_flag}" != xtrue ]; +then + python3.7 ./train_epe_sintel.py --data_path=${data_path}/dataset --output_path=${output_path} --steps=${train_steps} +else + python3.7 ./train_epe_sintel.py --data_path=${data_path}/dataset --output_path=${output_path} --steps=${train_steps} > ${print_log} 2>&1 +fi + +# 性能相关数据计算 +StepTime=`cat ${print_log} | grep "sec/batch" | tail -n +2 | awk '{print $8}' | awk '{sum+=$1} END {print sum/NR}'` +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` + +# 精度相关数据计算 +train_accuracy='' +# 提取所有loss打印信息 +grep " loss =" ${print_log} | awk '{print $5}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + + +########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." 
+ echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=`grep "Average epeLoss:" ${print_log} | awk '{print $3}' | tr -d ';'` + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/test_sintel.py b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/test_sintel.py new file mode 100644 index 0000000000000000000000000000000000000000..e307efb48e4dfef873bbb7251aa62bb9dc0cd21a --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/test_sintel.py @@ -0,0 +1,102 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os + +import cv2 +import numpy as np +import scipy.io as sio +import tensorflow as tf +from npu_bridge.npu_init import * + +from tvnet import TVNet + +flags = tf.app.flags +scale = 1 +warp = 1 +iteration = 50 + +# 设置npu服务器上的路径 +data_path = '/home/ma-user/modelarts/inputs/data_url_0/' +output_path = '/home/ma-user/modelarts/outputs/train_url_0/' +print('data_url :' + data_path) +print('output_url :' + output_path) + + +eval_data = os.listdir(output_path) # 返回data_path下包含的文件或文件夹的名字的列表 +print('输出目录下的文件:') +for name in eval_data: + print(name) + +# load image ,指定测试的图片对 +img1 = cv2.imread(data_path + 'MPISintel_test/temple_2/frame_0001.png') +img2 = cv2.imread(data_path + 'MPISintel_test/temple_2/frame_0002.png') + +h, w, c = img1.shape + +# model construct +x1 = tf.placeholder(shape=[1, h, w, 3], dtype=tf.float32) +x2 = tf.placeholder(shape=[1, h, w, 3], dtype=tf.float32) +tvnet = TVNet() +u1, u2, rho = tvnet.tvnet_flow(x1, x2, max_scales=scale, + warps=warp, + max_iterations=iteration) +# init npu +# 变量初始化 +init = tf.global_variables_initializer() +# 创建session +config = tf.ConfigProto() +custom_op = config.graph_options.rewrite_options.custom_optimizers.add() +custom_op.name = "NpuOptimizer" +config.graph_options.rewrite_options.remapping = RewriterConfig.OFF # 必须显式关闭 +config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF # 必须显式关闭 +sess = tf.Session(config=config) + +saver = tf.train.Saver() +saver = tf.train.import_meta_graph(output_path + 'ckpt_gpu_epe1/nn_model_gpu_epe.ckpt.meta') # 加载模型结构 +saver.restore(sess, tf.train.latest_checkpoint(output_path + 'ckpt_gpu_epe1/')) # 只需要指定目录就可以恢复所有变量信息 +all_var = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES) # 从一个集合中取出变量 +# run model +u1_np, u2_np = sess.run([u1, u2], feed_dict={x1: img1[np.newaxis, ...], x2: img2[np.newaxis, ...]}) + +u1_np = np.squeeze(u1_np) +u2_np = np.squeeze(u2_np) +flow_mat = np.zeros([h, w, 2]) +flow_mat[:, :, 0] = u1_np +flow_mat[:, :, 1] = u2_np + + + +if not os.path.exists(output_path + 'result'): + os.mkdir(output_path + 'result') +res_path = os.path.join(output_path + 'result', '1.mat') +sio.savemat(res_path, {'flow': flow_mat}) +print("Extracting Flow finished!") + +# 关闭sess +sess.close() + diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/train_epe_sintel.py b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/train_epe_sintel.py new file mode 100644 index 0000000000000000000000000000000000000000..b025c3c63df26205a3b29f7e9eb0b2e73d584536 --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/train_epe_sintel.py @@ -0,0 +1,211 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import datetime +import os +import time +import cv2 +import numpy as np +import tensorflow as tf +from npu_bridge.npu_init import * +import random +from tvnet import TVNet, batch_size +import argparse +import sys + +flags = tf.app.flags +scale = 1 +warp = 1 +iteration = 50 +print('TVNet Params:\n scale: %d\n warp: %d\n iteration: %d' \ + % (scale, warp, iteration)) + + +def get_config(args): + parser = argparse.ArgumentParser(description='Experiment parameters') + parser.add_argument('--data_path', default='./dataset', help='training input data path.') + parser.add_argument('--output_path', default='./output', help='prepocess result path.') + parser.add_argument('--steps', default='10000', help='train steps.') + parsed_args, unknown_args = parser.parse_known_args(args) + return parsed_args + + +def readflo(file_name): # 读取光流文件 + with open(file_name, 'rb') as f: + magic = np.fromfile(f, np.float32, count=1) + if 202021.25 != magic: + print('Magic number incorrect. Invalid .flo file') + else: + w = np.fromfile(f, np.int32, count=1) + h = np.fromfile(f, np.int32, count=1) + # print 'Reading %d x %d flo file' % (w, h) + data = np.array(np.fromfile(f, np.float32, count=2 * int(w) * int(h))) + # Reshape data into 3D array (columns, rows, bands) + # data2D = np.ndarray.reshape(data, (w, h, 2)) + data2D = data.reshape(int(h), int(w), 2) + return data2D + + +# other_data = os.listdir("./other_data/") + +def loadSintelData(data_url): # 加载训练数据 + data_path = os.path.join(data_url, "MPISintel_train/") + # print(data_url + '\n') + eval_data = os.listdir(data_path) # 返回data_path下包含的文件或文件夹的名字的列表 + # for name in eval_data: + # print(name + '/n') + img1 = np.zeros((batch_size, 436, 1024, 3)) + img2 = np.zeros((batch_size, 436, 1024, 3)) + label = np.zeros((batch_size, 436, 1024, 2)) + lod_folder = random.sample(eval_data, 1)[0] + train_dir = data_path + lod_folder + for j in range(batch_size): + i = random.randint(1, 49) + img1[j, :] = cv2.imread(train_dir + "/frame_" + str(i).zfill(4) + ".png") + img2[j, :] = cv2.imread(train_dir + "/frame_" + str(i + 1).zfill(4) + ".png") + label[j, :] = readflo(train_dir + "/frame_" + str(i).zfill(4) + ".flo") + # print("===>>>Flow File: "+ train_dir + "/frame_" + str(i).zfill(4) + ".flo") + return img1, img2, label + + +def calculate_epe(pr_u1, pr_u2, gt_u): + pr_u1 = tf.squeeze(pr_u1) + pr_u2 = tf.squeeze(pr_u2) + return tf.reduce_mean(tf.sqrt(tf.square(pr_u1 - gt_u[:, :, 0]) + tf.square(pr_u2 - gt_u[:, :, 1]))) + + +def calculate_loss(u1, u2, y): + loss = 0 + for j in range(batch_size): + y_1 = u1[j, :] + + y_2 = u2[j, :] + + gt = y[j, :] + + loss += calculate_epe(y_1, y_2, gt) + + return loss / batch_size + + +x1 = tf.placeholder(shape=[batch_size, 436, 1024, 3], dtype=tf.float32) # 函数作为一种占位符用于定义过程,可以理解为形参,在执行的时候再赋具体的值 +x2 = tf.placeholder(shape=[batch_size, 436, 1024, 3], dtype=tf.float32) +y = tf.placeholder(shape=[batch_size, 436, 1024, 2], dtype=tf.float32) +tf.summary.image('input', [x1, x2]) # 形成一张名为input的图像 + +loss_list = [] + +tvnet = TVNet() # 初始化TVnet类 + +u1_p, u2_p, rho = tvnet.tvnet_flow(x1, x2, max_scales=scale, + warps=warp, + 
max_iterations=iteration) + +loss = calculate_loss(u1_p, u2_p, y) # 计算loss +train_op = tf.train.AdamOptimizer(1e-4).minimize(loss) # 设置优化器 + +# 设置npu服务器上的路径 +args = get_config(sys.argv[1:]) +max_steps = int(args.steps) +print('data_url :' + args.data_path) +print('output_url :' + args.output_path) +print('steps:' + args.steps) +eval_data = os.listdir(args.data_path) # 返回data_url下包含的文件或文件夹的名字的列表 +for name in eval_data: + print(name) + +# init npu +# 变量初始化 +init = tf.global_variables_initializer() +# 创建session +config = tf.ConfigProto() +custom_op = config.graph_options.rewrite_options.custom_optimizers.add() +custom_op.name = "NpuOptimizer" +config.graph_options.rewrite_options.remapping = RewriterConfig.OFF # 必须显式关闭 +config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF # 必须显式关闭 +sess = tf.Session(config=config) +sess.run(init) + +saver = tf.train.Saver(tf.global_variables()) # 模型的保存和加载 + +start = datetime.datetime.now() +for step in range(max_steps): # 开始训练 + start_time = time.time() + img1, img2, label = loadSintelData(args.data_path) + # img1, img2 = loadData(batch_size) + _, loss_value = sess.run([train_op, loss], feed_dict={x1: img1, x2: img2, y: label}) # 带入具体的值 + duration = time.time() - start_time + loss_list.append(loss_value) + if step % 5 == 0: + examples_per_sec = batch_size / duration + sec_per_batch = float(duration) + format_str = 'step %d, loss = %.2f (%.1f examples/sec; %.3f sec/batch)' + print(format_str % (step, loss_value, examples_per_sec, sec_per_batch)) +# Total time +# 按照格式输出单步训练的时间 +end = datetime.datetime.now() +timefortrain = (end - start).total_seconds() +cost_time = timefortrain / max_steps +print("sec/step : {}".format(cost_time)) +print("use second") +print(timefortrain) +strtime = '%dh%dm%ds' % (timefortrain / 3600, timefortrain % 3600 / 60, timefortrain % 3600 % 60) +print("===>>>Total train Time: " + strtime) # 输出训练时间 + +ckpt_path = os.path.join(args.output_path, 'ckpt_gpu_epe1') # 模型最终保存路径:./output/ckpt_gpu_epe1/ +if not os.path.exists(ckpt_path): # 判断模型保存的路径是否存在 + os.mkdir(ckpt_path) +checkpoint_path = os.path.join(ckpt_path, "nn_model_gpu_epe.ckpt") +print("===>>>checkpoint_path: " + checkpoint_path) +saver.save(sess, checkpoint_path) # 保存模型 +# 关闭sess +sess.close() + +loss_path = os.path.join(args.output_path, "loss_Sintel_gpu.log") # loss_log最终保存路径:./output +print("===>>>loss_path: " + loss_path) + +# Average and minal value of loss list +avg_loss = np.mean(loss_list) +min_loss = np.min(loss_list) +print("Average epeLoss: " + str(avg_loss) + "; Minimam Loss: " + str(min_loss)) + +# 开始写入loss +loss_list1 = [] +file = open(loss_path, 'w') +file.write("Total_train_Time: " + strtime) +file.write("Average_epeLoss: " + str(avg_loss) + "; Minimam_Loss: " + str(min_loss)) +for i in range(len(loss_list)): + loss_list1.append(np.mean(loss_list[0:i])) + file.write(str(loss_list[i])) + file.write("\n") +file.close() +loss_path1 = os.path.join(args.output_path, "epeloss1_Sintel_gpu.log") +print("===>>>loss1_path: " + loss_path1) + + + + diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/tvnet.py b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/tvnet.py new file mode 100644 index 0000000000000000000000000000000000000000..96d6fdce11581efd5de9c2deeae5ff7c197ecfae --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/tvnet.py @@ -0,0 +1,360 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import numpy as np +import tensorflow as tf +import spatial_transformer + +# from train import batch_size +batch_size = 1 + + +def zoom_image(x, new_height, new_width): + assert len(x.shape) == 4 + delta = tf.zeros((tf.shape(x)[0], 2, new_height * new_width)) + zoomed_x = spatial_transformer.transformer(x, delta, (new_height, new_width)) + return tf.reshape(zoomed_x, [tf.shape(x)[0], new_height, new_width, x.shape[-1].value]) + + +class TVNet(object): + GRAD_IS_ZERO = 1e-12 + + def __init__(self): + pass + + def variable_with_weight_loss(self, shape, stddev): + var = tf.Variable(tf.truncated_normal(shape, stddev=stddev)) # tensorflow::ops::TruncatedNormal + return var + + def grey_scale_image(self, x): + assert len(x.shape) == 4 + assert x.shape[-1].value == 3, 'number of channels must be 3 (i.e. RGB)' + + ker_init = tf.constant_initializer([[0.114], [0.587], [0.299]]) + grey_x = tf.layers.conv2d(x, 1, [1, 1], padding='same', + kernel_initializer=ker_init, use_bias=False, trainable=False) + + return tf.floor(grey_x) + + def normalize_images(self, x1, x2): + reduction_axes = [i for i in range(1, len(x1.shape))] + min_x1 = tf.reduce_min(x1, axis=reduction_axes) + max_x1 = tf.reduce_max(x1, axis=reduction_axes) + + min_x2 = tf.reduce_min(x2, axis=reduction_axes) + max_x2 = tf.reduce_max(x2, axis=reduction_axes) + + min_val = tf.minimum(min_x1, min_x2) + max_val = tf.maximum(max_x1, max_x2) + + den = max_val - min_val + + expand_dims = [-1 if i == 0 else 1 for i in range(len(x1.shape))] + min_val_ex = tf.reshape(min_val, expand_dims) + den_ex = tf.reshape(den, expand_dims) + + x1_norm = tf.where(den > 0, 255. * (x1 - min_val_ex) / den_ex, x1) + x2_norm = tf.where(den > 0, 255. 
* (x2 - min_val_ex) / den_ex, x2) + + return x1_norm, x2_norm + + def gaussian_smooth(self, x): + assert len(x.shape) == 4 + ker_init = tf.constant_initializer([[0.000874, 0.006976, 0.01386, 0.006976, 0.000874], + [0.006976, 0.0557, 0.110656, 0.0557, 0.006976], + [0.01386, 0.110656, 0.219833, 0.110656, 0.01386], + [0.006976, 0.0557, 0.110656, 0.0557, 0.006976], + [0.000874, 0.006976, 0.01386, 0.006976, 0.000874]]) + smooth_x = tf.layers.conv2d(x, x.shape[-1].value, [5, 5], padding='same', + kernel_initializer=ker_init, use_bias=False, trainable=False) + + return smooth_x + + def warp_image(self, x, u, v): + assert len(x.shape) == 4 + assert len(u.shape) == 3 + assert len(v.shape) == 3 + u = u / x.shape[2].value * 2 + v = v / x.shape[1].value * 2 + + delta = tf.concat(axis=1, values=[u, v]) + return spatial_transformer.transformer(x, delta, (x.shape[-3].value, x.shape[-2].value)) + + def centered_gradient(self, x, name): + assert len(x.shape) == 4 + + with tf.variable_scope('centered_gradient'): + x_ker_init = tf.constant_initializer([-0.5, 0, 0.5]) # tf.constant_initializer([-0.5,0, 0.5]) + diff_x = tf.layers.conv2d(x, x.shape[-1].value, [1, 3], padding='same', + kernel_initializer=x_ker_init, use_bias=False, name=name + '_diff_x', + trainable=False) + + y_ker_init = tf.constant_initializer([[-0.5], [0], [0.5]]) + diff_y = tf.layers.conv2d(x, x.shape[-1].value, [3, 1], padding='same', + kernel_initializer=y_ker_init, use_bias=False, name=name + '_diff_y', + trainable=False) + # refine the boundary + + # x_ker_init = self.variable_with_weight_loss(shape=[1, 3, 1, 1], stddev=5e-2) + # diff_x = tf.nn.conv2d(x, x_ker_init, [1, 2, 2, 1], padding='SAME', name=name + '_diff_y') + + test1 = tf.slice(x, [0, 0, 1, 0], [batch_size, x.shape[1].value, 1, x.shape[3].value]) + test2 = tf.slice(x, [0, 0, 0, 0], [batch_size, x.shape[1].value, 1, x.shape[3].value]) + first_col = 0.5 * (tf.slice(x, [0, 0, 1, 0], [batch_size, x.shape[1].value, 1, x.shape[3].value]) - + tf.slice(x, [0, 0, 0, 0], [batch_size, x.shape[1].value, 1, x.shape[3].value])) + + last_col = 0.5 * ( + tf.slice(x, [0, 0, x.shape[2].value - 1, 0], [batch_size, x.shape[1].value, 1, x.shape[3].value]) - + tf.slice(x, [0, 0, x.shape[2].value - 2, 0], [batch_size, x.shape[1].value, 1, x.shape[3].value])) + diff_x_valid = tf.slice(diff_x, begin=[0, 0, 1, 0], + size=[batch_size, x.shape[1].value, x.shape[2].value - 2, x.shape[3].value]) + diff_x = tf.concat(axis=2, values=[first_col, diff_x_valid, last_col]) + + first_row = 0.5 * (tf.slice(x, [0, 1, 0, 0], [batch_size, 1, x.shape[2].value, x.shape[3].value]) - + tf.slice(x, [0, 0, 0, 0], [batch_size, 1, x.shape[2].value, x.shape[3].value])) + last_row = 0.5 * ( + tf.slice(x, [0, x.shape[1].value - 1, 0, 0], [batch_size, 1, x.shape[2].value, x.shape[3].value]) - + tf.slice(x, [0, x.shape[1].value - 2, 0, 0], [batch_size, 1, x.shape[2].value, x.shape[3].value])) + diff_y_valid = tf.slice(diff_y, begin=[0, 1, 0, 0], + size=[batch_size, x.shape[1].value - 2, x.shape[2].value, x.shape[3].value]) + diff_y = tf.concat(axis=1, values=[first_row, diff_y_valid, last_row]) + + return diff_x, diff_y + + def forward_gradient(self, x, name): + assert len(x.shape) == 4 + + with tf.variable_scope('forward_gradient'): + x_ker_init = tf.constant_initializer([[-1, 1]]) + diff_x = tf.layers.conv2d(x, x.shape[-1].value, [1, 2], padding='same', + kernel_initializer=x_ker_init, use_bias=True, name=name + '_diff_x', + trainable=True) + + y_ker_init = tf.constant_initializer([[-1], [1]]) + diff_y = tf.layers.conv2d(x, 
x.shape[-1].value, [2, 1], padding='same', + kernel_initializer=y_ker_init, use_bias=True, name=name + '_diff_y', + trainable=True) + # refine the boundary + diff_x_valid = tf.slice(diff_x, begin=[0, 0, 0, 0], + size=[batch_size, x.shape[1].value, x.shape[2].value - 1, x.shape[3].value]) + last_col = tf.zeros([tf.shape(x)[0], x.shape[1].value, 1, x.shape[3].value], dtype=tf.float32) + diff_x = tf.concat(axis=2, values=[diff_x_valid, last_col]) + + diff_y_valid = tf.slice(diff_y, begin=[0, 0, 0, 0], + size=[batch_size, x.shape[1].value - 1, x.shape[2].value, x.shape[3].value]) + last_row = tf.zeros([tf.shape(x)[0], 1, x.shape[2].value, x.shape[3].value], dtype=tf.float32) + diff_y = tf.concat(axis=1, values=[diff_y_valid, last_row]) + + return diff_x, diff_y + + def divergence(self, x, y, name): + assert len(x.shape) == 4 + + with tf.variable_scope('divergence'): + x_valid = tf.slice(x, begin=[0, 0, 0, 0], + size=[batch_size, x.shape[1].value, x.shape[2].value - 1, x.shape[3].value]) + first_col = tf.zeros([tf.shape(x)[0], x.shape[1].value, 1, x.shape[3].value], dtype=tf.float32) + x_pad = tf.concat(axis=2, values=[first_col, x_valid]) + + y_valid = tf.slice(y, begin=[0, 0, 0, 0], + size=[batch_size, y.shape[1].value - 1, y.shape[2].value, y.shape[3].value]) + first_row = tf.zeros([tf.shape(y)[0], 1, y.shape[2].value, y.shape[3].value], dtype=tf.float32) + y_pad = tf.concat(axis=1, values=[first_row, y_valid]) + + x_ker_init = tf.constant_initializer([[-1, 1]]) + diff_x = tf.layers.conv2d(x_pad, x.shape[-1].value, [1, 2], padding='same', + kernel_initializer=x_ker_init, use_bias=True, name=name + '_diff_x', + trainable=True) + + y_ker_init = tf.constant_initializer([[-1], [1]]) + diff_y = tf.layers.conv2d(y_pad, y.shape[-1].value, [2, 1], padding='same', + kernel_initializer=y_ker_init, use_bias=True, name=name + '_diff_y', + trainable=True) + + div = diff_x + diff_y + return div + + def zoom_size(self, height, width, factor): + new_height = int(float(height) * factor + 0.5) + new_width = int(float(width) * factor + 0.5) + + return new_height, new_width + + def dual_tvl1_optic_flow(self, x1, x2, u1, u2, + tau=0.25, # time step + lbda=0.15, # weight parameter for the data term + theta=0.3, # weight parameter for (u - v)^2 + warps=5, # number of warpings per scale + max_iterations=50 # maximum number of iterations for optimization + ): + l_t = lbda * theta + taut = tau / theta + diff2_x, diff2_y = self.centered_gradient(x2, 'x2') # conv and slice + p11 = p12 = p21 = p22 = tf.zeros_like(x1) + for warpings in range(warps): # 1 + with tf.variable_scope('warping%d' % (warpings,)): + u1_flat = tf.reshape(u1, (tf.shape(x2)[0], 1, x2.shape[1].value * x2.shape[2].value)) + u2_flat = tf.reshape(u2, (tf.shape(x2)[0], 1, x2.shape[1].value * x2.shape[2].value)) + + x2_warp = self.warp_image(x2, u1_flat, u2_flat) + x2_warp = tf.reshape(x2_warp, tf.shape(x2)) + + diff2_x_warp = self.warp_image(diff2_x, u1_flat, u2_flat) + diff2_x_warp = tf.reshape(diff2_x_warp, tf.shape(diff2_x)) + + diff2_y_warp = self.warp_image(diff2_y, u1_flat, u2_flat) + diff2_y_warp = tf.reshape(diff2_y_warp, tf.shape(diff2_y)) + + diff2_x_sq = tf.square(diff2_x_warp) # square mat + diff2_y_sq = tf.square(diff2_y_warp) + + grad = diff2_x_sq + diff2_y_sq + self.GRAD_IS_ZERO + + rho_c = x2_warp - diff2_x_warp * u1 - diff2_y_warp * u2 - x1 + + for ii in range(max_iterations): # 50 + with tf.variable_scope('iter%d' % (ii,)): + rho = rho_c + diff2_x_warp * u1 + diff2_y_warp * u2 + self.GRAD_IS_ZERO + + masks1 = rho < -l_t * grad + d1_1 = 
tf.where(masks1, l_t * diff2_x_warp, tf.zeros_like(diff2_x_warp)) + d2_1 = tf.where(masks1, l_t * diff2_y_warp, tf.zeros_like(diff2_y_warp)) + + masks2 = rho > l_t * grad + d1_2 = tf.where(masks2, -l_t * diff2_x_warp, tf.zeros_like(diff2_x_warp)) + d2_2 = tf.where(masks2, -l_t * diff2_y_warp, tf.zeros_like(diff2_y_warp)) + masks3 = (~masks1) & (~masks2) & (grad > self.GRAD_IS_ZERO) + d1_3 = tf.where(masks3, -rho / grad * diff2_x_warp, tf.zeros_like(diff2_x_warp)) + d2_3 = tf.where(masks3, -rho / grad * diff2_y_warp, tf.zeros_like(diff2_y_warp)) + + v1 = d1_1 + d1_2 + d1_3 + u1 + v2 = d2_1 + d2_2 + d2_3 + u2 + + u1 = v1 + theta * self.divergence(p11, p12, 'div_p1') # slice->concat->conv->add + u2 = v2 + theta * self.divergence(p21, p22, 'div_p2') + + u1x, u1y = self.forward_gradient(u1, 'u1') + u2x, u2y = self.forward_gradient(u2, 'u2') + + p11 = (p11 + taut * u1x) / ( + 1.0 + taut * tf.sqrt(tf.square(u1x) + tf.square(u1y) + self.GRAD_IS_ZERO)) + p12 = (p12 + taut * u1y) / ( + 1.0 + taut * tf.sqrt(tf.square(u1x) + tf.square(u1y) + self.GRAD_IS_ZERO)) + p21 = (p21 + taut * u2x) / ( + 1.0 + taut * tf.sqrt(tf.square(u2x) + tf.square(u2y) + self.GRAD_IS_ZERO)) + p22 = (p22 + taut * u2y) / ( + 1.0 + taut * tf.sqrt(tf.square(u2x) + tf.square(u2y) + self.GRAD_IS_ZERO)) + return u1, u2, rho + + def tvnet_flow(self, x1, x2, + tau=0.25, # time step + lbda=0.15, # weight parameter for the data term + theta=0.3, # weight parameter for (u - v)^2 + warps=5, # number of warpings per scale + zfactor=0.5, # factor for building the image piramid + max_scales=5, # maximum number of scales for image piramid + max_iterations=50 # maximum number of iterations for optimization + ): + + for i in range(len(x1.shape)): + assert x1.shape[i].value == x2.shape[i].value + + zfactor = np.float32(zfactor) + + height = x1.shape[-3].value + width = x1.shape[-2].value + + n_scales = 1 + np.log(np.sqrt(height ** 2 + width ** 2) / 4.0) / np.log(1 / zfactor); # calculate n_scales + n_scales = min(n_scales, max_scales) + # n_scales = 1 + with tf.variable_scope('tvl1_flow'): + + grey_x1 = self.grey_scale_image(x1) # conv and floor + grey_x2 = self.grey_scale_image(x2) + + norm_imgs = self.normalize_images(grey_x1, grey_x2) # normalize to 0-255 + + smooth_x1 = self.gaussian_smooth(norm_imgs[0]) # conv + smooth_x2 = self.gaussian_smooth(norm_imgs[1]) + + for ss in range(n_scales - 1, -1, -1): + with tf.variable_scope('scale%d' % ss): + down_sample_factor = zfactor ** ss + down_height, down_width = self.zoom_size(height, width, down_sample_factor) + + if ss == n_scales - 1: + u1 = tf.get_variable('u1', shape=[1, down_height, down_width, 1], dtype=tf.float32, + initializer=tf.zeros_initializer) + u2 = tf.get_variable('u2', shape=[1, down_height, down_width, 1], dtype=tf.float32, + initializer=tf.zeros_initializer) + # print ([tf.shape(smooth_x1)[0], 1, 1, 1]) + + u1 = tf.tile(u1, [tf.shape(smooth_x1)[0], 1, 1, 1]) + u2 = tf.tile(u2, [tf.shape(smooth_x1)[0], 1, 1, 1]) + + down_x1 = zoom_image(smooth_x1, down_height, down_width) + down_x2 = zoom_image(smooth_x2, down_height, down_width) + + u1, u2, rho = self.dual_tvl1_optic_flow(down_x1, down_x2, u1, u2, + tau=tau, lbda=lbda, theta=theta, warps=warps, + max_iterations=max_iterations) + + if ss == 0: + return u1, u2, rho + + up_sample_factor = zfactor ** (ss - 1) + up_height, up_width = self.zoom_size(height, width, up_sample_factor) + u1 = zoom_image(u1, up_height, up_width) / zfactor + u2 = zoom_image(u2, up_height, up_width) / zfactor + + def get_loss(self, x1, x2, + tau=0.25, # 
time step + lbda=0.15, # weight parameter for the data term + theta=0.3, # weight parameter for (u - v)^2 + warps=5, # number of warpings per scale + zfactor=0.5, # factor for building the image piramid + max_scales=5, # maximum number of scales for image piramid + max_iterations=50 # maximum number of iterations for optimization + ): + + u1, u2, rho = self.tvnet_flow(x1, x2, + tau=tau, lbda=lbda, theta=theta, warps=warps, + zfactor=zfactor, max_scales=max_scales, + max_iterations=max_iterations) + + # computing loss + u1x, u1y = self.forward_gradient(u1, 'u1') + u2x, u2y = self.forward_gradient(u2, 'u2') + + u1_flat = tf.reshape(u1, (tf.shape(x2)[0], 1, x2.shape[1].value * x2.shape[2].value)) + u2_flat = tf.reshape(u2, (tf.shape(x2)[0], 1, x2.shape[1].value * x2.shape[2].value)) + + x2_warp = self.warp_image(x2, u1_flat, u2_flat) + x2_warp = tf.reshape(x2_warp, tf.shape(x2)) + loss = lbda * tf.reduce_mean(tf.abs(x2_warp - x1)) + tf.reduce_mean( + tf.abs(u1x) + tf.abs(u1y) + tf.abs(u2x) + tf.abs(u2y)) + return loss, u1, u2 diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/README.txt b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/README.txt new file mode 100644 index 0000000000000000000000000000000000000000..79ff23ed3092c998fafef546655beb2968ea44e2 --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/README.txt @@ -0,0 +1,9 @@ +Some utilities for reading, writing, and color-coding .flo images. + +Written according to the c++ source code of Daniel Scharstein + +Deqing Sun, 11/03/07 + +see colorTest for visualizing the encoding scheme, reading and writing .flo files. + +Run visualize.m to visualize the result generated by TVNet. \ No newline at end of file diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/colorTest.m b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/colorTest.m new file mode 100644 index 0000000000000000000000000000000000000000..626099c7e4c8812b25a717a204fed57753251860 --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/colorTest.m @@ -0,0 +1,84 @@ +function colorTest() + +% colorTest() creates a test image showing the color encoding scheme + +% According to the c++ source code of Daniel Scharstein +% Contact: schar@middlebury.edu + +% Author: Deqing Sun, Department of Computer Science, Brown University +% Contact: dqsun@cs.brown.edu +% $Date: 2007-10-31 20:22:10 (Wed, 31 Oct 2006) $ + +% Copyright 2007, Deqing Sun. +% +% All Rights Reserved +% +% Permission to use, copy, modify, and distribute this software and its +% documentation for any purpose other than its incorporation into a +% commercial product is hereby granted without fee, provided that the +% above copyright notice appear in all copies and that both that +% copyright notice and this permission notice appear in supporting +% documentation, and that the name of the author and Brown University not be used in +% advertising or publicity pertaining to distribution of the software +% without specific, written prior permission. +% +% THE AUTHOR AND BROWN UNIVERSITY DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +% INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY +% PARTICULAR PURPOSE. 
IN NO EVENT SHALL THE AUTHOR OR BROWN UNIVERSITY BE LIABLE FOR +% ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% test color pattern of Daniel's c++ code + +truerange = 1; +height = 151; +width = 151; +range = truerange * 1.04; + +s2 = round(height/2); + +[x y] = meshgrid(1:width, 1:height); + +u = x*range/s2 - range; +v = y*range/s2 - range; + +img = computeColor(u/truerange, v/truerange); + +img(s2,:,:) = 0; +img(:,s2,:) = 0; + +figure; +imshow(img); +title('test color pattern'); +pause; close; + +% test read and write flow +F(:,:,1) = u; +F(:,:,2) = v; +writeFlowFile(F, 'colorTest.flo'); +F2 = readFlowFile('colorTest.flo'); + +u2 = F2(:,:,1); +v2 = F2(:,:,2); + +img2 = computeColor(u2/truerange, v2/truerange); + +img2(s2,:,:) = 0; +img2(:,s2,:) = 0; + +figure; imshow(img2); +title('saved and reloaded test color pattern'); +pause; close; + +% color encoding scheme for optical flow +img = computeColor(u/range/sqrt(2), v/range/sqrt(2)); + +img(s2,:,:) = 0; +img(:,s2,:) = 0; + +figure; +imshow(img); +title('optical flow color encoding scheme'); +pause; close; diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/computeColor.m b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/computeColor.m new file mode 100644 index 0000000000000000000000000000000000000000..3566f4e08792e02a2328b9c64d2a0773b3f3d44d --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/computeColor.m @@ -0,0 +1,115 @@ +function img = computeColor(u,v) + +% computeColor color codes flow field U, V + +% According to the c++ source code of Daniel Scharstein +% Contact: schar@middlebury.edu + +% Author: Deqing Sun, Department of Computer Science, Brown University +% Contact: dqsun@cs.brown.edu +% $Date: 2007-10-31 21:20:30 (Wed, 31 Oct 2006) $ + +% Copyright 2007, Deqing Sun. +% +% All Rights Reserved +% +% Permission to use, copy, modify, and distribute this software and its +% documentation for any purpose other than its incorporation into a +% commercial product is hereby granted without fee, provided that the +% above copyright notice appear in all copies and that both that +% copyright notice and this permission notice appear in supporting +% documentation, and that the name of the author and Brown University not be used in +% advertising or publicity pertaining to distribution of the software +% without specific, written prior permission. +% +% THE AUTHOR AND BROWN UNIVERSITY DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +% INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY +% PARTICULAR PURPOSE. IN NO EVENT SHALL THE AUTHOR OR BROWN UNIVERSITY BE LIABLE FOR +% ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +nanIdx = isnan(u) | isnan(v); +u(nanIdx) = 0; +v(nanIdx) = 0; + +colorwheel = makeColorwheel(); +ncols = size(colorwheel, 1); + +rad = sqrt(u.^2+v.^2); + +a = atan2(-v, -u)/pi; + +fk = (a+1) /2 * (ncols-1) + 1; % -1~1 maped to 1~ncols + +k0 = floor(fk); % 1, 2, ..., ncols + +k1 = k0+1; +k1(k1==ncols+1) = 1; + +f = fk - k0; + +for i = 1:size(colorwheel,2) + tmp = colorwheel(:,i); + col0 = tmp(k0)/255; + col1 = tmp(k1)/255; + col = (1-f).*col0 + f.*col1; + + idx = rad <= 1; + col(idx) = 1-rad(idx).*(1-col(idx)); % increase saturation with radius + + col(~idx) = col(~idx)*0.75; % out of range + + img(:,:, i) = uint8(floor(255*col.*(1-nanIdx))); +end; + +%% +function colorwheel = makeColorwheel() + +% color encoding scheme + +% adapted from the color circle idea described at +% http://members.shaw.ca/quadibloc/other/colint.htm + + +RY = 15; +YG = 6; +GC = 4; +CB = 11; +BM = 13; +MR = 6; + +ncols = RY + YG + GC + CB + BM + MR; + +colorwheel = zeros(ncols, 3); % r g b + +col = 0; +%RY +colorwheel(1:RY, 1) = 255; +colorwheel(1:RY, 2) = floor(255*(0:RY-1)/RY)'; +col = col+RY; + +%YG +colorwheel(col+(1:YG), 1) = 255 - floor(255*(0:YG-1)/YG)'; +colorwheel(col+(1:YG), 2) = 255; +col = col+YG; + +%GC +colorwheel(col+(1:GC), 2) = 255; +colorwheel(col+(1:GC), 3) = floor(255*(0:GC-1)/GC)'; +col = col+GC; + +%CB +colorwheel(col+(1:CB), 2) = 255 - floor(255*(0:CB-1)/CB)'; +colorwheel(col+(1:CB), 3) = 255; +col = col+CB; + +%BM +colorwheel(col+(1:BM), 3) = 255; +colorwheel(col+(1:BM), 1) = floor(255*(0:BM-1)/BM)'; +col = col+BM; + +%MR +colorwheel(col+(1:MR), 3) = 255 - floor(255*(0:MR-1)/MR)'; +colorwheel(col+(1:MR), 1) = 255; \ No newline at end of file diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/flowToColor.m b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/flowToColor.m new file mode 100644 index 0000000000000000000000000000000000000000..3e39e8e42068509afaf077af572d18f4bedac0ea --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/flowToColor.m @@ -0,0 +1,90 @@ +function img = flowToColor(flow, varargin) + +% flowToColor(flow, maxFlow) flowToColor color codes flow field, normalize +% based on specified value, +% +% flowToColor(flow) flowToColor color codes flow field, normalize +% based on maximum flow present otherwise + +% According to the c++ source code of Daniel Scharstein +% Contact: schar@middlebury.edu + +% Author: Deqing Sun, Department of Computer Science, Brown University +% Contact: dqsun@cs.brown.edu +% $Date: 2007-10-31 18:33:30 (Wed, 31 Oct 2006) $ + +% Copyright 2007, Deqing Sun. +% +% All Rights Reserved +% +% Permission to use, copy, modify, and distribute this software and its +% documentation for any purpose other than its incorporation into a +% commercial product is hereby granted without fee, provided that the +% above copyright notice appear in all copies and that both that +% copyright notice and this permission notice appear in supporting +% documentation, and that the name of the author and Brown University not be used in +% advertising or publicity pertaining to distribution of the software +% without specific, written prior permission. +% +% THE AUTHOR AND BROWN UNIVERSITY DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +% INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY +% PARTICULAR PURPOSE. 
IN NO EVENT SHALL THE AUTHOR OR BROWN UNIVERSITY BE LIABLE FOR +% ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +UNKNOWN_FLOW_THRESH = 1e9; +UNKNOWN_FLOW = 1e10; % + +[height widht nBands] = size(flow); + +if nBands ~= 2 + error('flowToColor: image must have two bands'); +end; + +u = flow(:,:,1); +v = flow(:,:,2); + +maxu = -999; +maxv = -999; + +minu = 999; +minv = 999; +maxrad = -1; + + +% fix unknown flow +idxUnknown = (abs(u)> UNKNOWN_FLOW_THRESH) | (abs(v)> UNKNOWN_FLOW_THRESH) ; + +u(idxUnknown) = 0; +v(idxUnknown) = 0; +max(u(:)) +maxu = max(maxu, max(u(:))); +minu = min(minu, min(u(:))); + +maxv = max(maxv, max(v(:))); +minv = min(minv, min(v(:))); + +rad = sqrt(u.^2+v.^2); +maxrad = max(maxrad, max(rad(:))); + +fprintf('max flow: %.4f flow range: u = %.3f .. %.3f; v = %.3f .. %.3f\n', maxrad, minu, maxu, minv, maxv); + +if isempty(varargin) ==0 + maxFlow = varargin{1}; + if maxFlow > 0 + maxrad = maxFlow; + end; +end; + +u = u/(maxrad+eps); +v = v/(maxrad+eps); + +% compute color + +img = computeColor(u, v); + +% unknown flow +IDX = repmat(idxUnknown, [1 1 3]); +img(IDX) = 0; \ No newline at end of file diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/readFlowFile.m b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/readFlowFile.m new file mode 100644 index 0000000000000000000000000000000000000000..568d9b6296b958a3ef94b73482eaccaf5fdc6ebd --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/readFlowFile.m @@ -0,0 +1,84 @@ +function img = readFlowFile(filename) + +% readFlowFile read a flow file FILENAME into 2-band image IMG + +% According to the c++ source code of Daniel Scharstein +% Contact: schar@middlebury.edu + +% Author: Deqing Sun, Department of Computer Science, Brown University +% Contact: dqsun@cs.brown.edu +% $Date: 2007-10-31 16:45:40 (Wed, 31 Oct 2006) $ + +% Copyright 2007, Deqing Sun. +% +% All Rights Reserved +% +% Permission to use, copy, modify, and distribute this software and its +% documentation for any purpose other than its incorporation into a +% commercial product is hereby granted without fee, provided that the +% above copyright notice appear in all copies and that both that +% copyright notice and this permission notice appear in supporting +% documentation, and that the name of the author and Brown University not be used in +% advertising or publicity pertaining to distribution of the software +% without specific, written prior permission. +% +% THE AUTHOR AND BROWN UNIVERSITY DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +% INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY +% PARTICULAR PURPOSE. IN NO EVENT SHALL THE AUTHOR OR BROWN UNIVERSITY BE LIABLE FOR +% ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
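+% Editor's note: a typical round trip with the utilities in this folder
+% (a sketch only, assuming they are on the MATLAB path):
+%   flow = readFlowFile('colorTest.flo');   % h x w x 2 array of (u, v)
+%   img  = flowToColor(flow);               % color-coded visualization
+%   imshow(img);
+%   writeFlowFile(flow, 'copy.flo');        % write the field back out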
+ +TAG_FLOAT = 202021.25; % check for this when READING the file + +% sanity check +if isempty(filename) == 1 + error('readFlowFile: empty filename'); +end; + +idx = findstr(filename, '.'); +idx = idx(end); + +if length(filename(idx:end)) == 1 + error('readFlowFile: extension required in filename %s', filename); +end; + +if strcmp(filename(idx:end), '.flo') ~= 1 + error('readFlowFile: filename %s should have extension ''.flo''', filename); +end; + +fid = fopen(filename, 'r'); +if (fid < 0) + error('readFlowFile: could not open %s', filename); +end; + +tag = fread(fid, 1, 'float32'); +width = fread(fid, 1, 'int32'); +height = fread(fid, 1, 'int32'); + +% sanity check + +if (tag ~= TAG_FLOAT) + error('readFlowFile(%s): wrong tag (possibly due to big-endian machine?)', filename); +end; + +if (width < 1 || width > 99999) + error('readFlowFile(%s): illegal width %d', filename, width); +end; + +if (height < 1 || height > 99999) + error('readFlowFile(%s): illegal height %d', filename, height); +end; + +nBands = 2; + +% arrange into matrix form +tmp = fread(fid, inf, 'float32'); +tmp = reshape(tmp, [width*nBands, height]); +tmp = tmp'; +img(:,:,1) = tmp(:, (1:width)*nBands-1); +img(:,:,2) = tmp(:, (1:width)*nBands); + +fclose(fid); + diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/untitled.fig b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/untitled.fig new file mode 100644 index 0000000000000000000000000000000000000000..f6e57e0f77e6d673d414bae0130b80681f1caf9b Binary files /dev/null and b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/untitled.fig differ diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/visualize.m b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/visualize.m new file mode 100644 index 0000000000000000000000000000000000000000..03cb0187dee005fd2f0fa6c8ac6146b8e6546e27 --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/visualize.m @@ -0,0 +1,6 @@ +flow_file = './1.mat'; +load(flow_file); +img = flowToColor(flow); +figure; +imshow(img) +saveas(gcf,'test','png') \ No newline at end of file diff --git a/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/writeFlowFile.m b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/writeFlowFile.m new file mode 100644 index 0000000000000000000000000000000000000000..5fba45de70b6787f63a4687b569a3f90df46da4f --- /dev/null +++ b/TensorFlow/contrib/cv/TVNet_ID0951_for_TensorFlow/visualize/writeFlowFile.m @@ -0,0 +1,76 @@ +function writeFlowFile(img, filename) + +% writeFlowFile writes a 2-band image IMG into flow file FILENAME + +% According to the c++ source code of Daniel Scharstein +% Contact: schar@middlebury.edu + +% Author: Deqing Sun, Department of Computer Science, Brown University +% Contact: dqsun@cs.brown.edu +% $Date: 2007-10-31 15:36:40 (Wed, 31 Oct 2006) $ + +% Copyright 2007, Deqing Sun. +% +% All Rights Reserved +% +% Permission to use, copy, modify, and distribute this software and its +% documentation for any purpose other than its incorporation into a +% commercial product is hereby granted without fee, provided that the +% above copyright notice appear in all copies and that both that +% copyright notice and this permission notice appear in supporting +% documentation, and that the name of the author and Brown University not be used in +% advertising or publicity pertaining to distribution of the software +% without specific, written prior permission. 
+% +% THE AUTHOR AND BROWN UNIVERSITY DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +% INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY +% PARTICULAR PURPOSE. IN NO EVENT SHALL THE AUTHOR OR BROWN UNIVERSITY BE LIABLE FOR +% ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +TAG_STRING = 'PIEH'; % use this when WRITING the file + +% sanity check +if isempty(filename) == 1 + error('writeFlowFile: empty filename'); +end; + +idx = findstr(filename, '.'); +idx = idx(end); % in case './xxx/xxx.flo' + +if length(filename(idx:end)) == 1 + error('writeFlowFile: extension required in filename %s', filename); +end; + +if strcmp(filename(idx:end), '.flo') ~= 1 + error('writeFlowFile: filename %s should have extension ''.flo''', filename); +end; + +[height width nBands] = size(img); + +if nBands ~= 2 + error('writeFlowFile: image must have two bands'); +end; + +fid = fopen(filename, 'w'); +if (fid < 0) + error('writeFlowFile: could not open %s', filename); +end; + +% write the header +fwrite(fid, TAG_STRING); +fwrite(fid, width, 'int32'); +fwrite(fid, height, 'int32'); + +% arrange into matrix form +tmp = zeros(height, width*nBands); + +tmp(:, (1:width)*nBands-1) = img(:,:,1); +tmp(:, (1:width)*nBands) = squeeze(img(:,:,2)); +tmp = tmp'; + +fwrite(fid, tmp, 'float32'); + +fclose(fid); diff --git a/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/README.md b/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/README.md index c9611df9b4e87f0dceef398098df3475a755cb5f..4d85bbb21718170990b4de1bf2ba25a885b5a4ac 100644 --- a/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/README.md +++ b/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/README.md @@ -106,7 +106,7 @@ cd {code_path} # 切换到模型代码所在路径,若仓库下只有 ## 模型训练 -在项目路径下执行如下 shell 命令进行训练: +在项目路径下执行如下 shell 命令进行训练与精度评估(顺序执行): ``` python3.7 modelarts_entry_acc.py ``` @@ -155,15 +155,6 @@ session = tf.Session(config=config) -## 模型评估 - -运行如下 shell 命令来评估预训练模型精度: -``` -sh bash/evaluate_voiced_void.sh -``` - -可以替换 shell 脚本中的 restore_path 和 output_path 路径来评估自己的 checkpoints 。 - ## 评估结果 | | MAE | RMSE | iMAE | iRMSE | diff --git a/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/bash/evaluate_voiced_void.sh b/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/bash/evaluate_voiced_void.sh deleted file mode 100644 index 94361b2ebe951c4d9d955e2cb62cd1cbb536fedc..0000000000000000000000000000000000000000 --- a/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/bash/evaluate_voiced_void.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash - -python src/evaluate_model.py \ ---image_path testing/void_test_image_1500.txt \ ---interp_depth_path testing/void_test_interp_depth_1500.txt \ ---validity_map_path testing/void_test_validity_map_1500.txt \ ---ground_truth_path testing/void_test_ground_truth_1500.txt \ ---start_idx 0 \ ---end_idx 800 \ ---n_batch 8 \ ---n_height 480 \ ---n_width 640 \ ---occ_threshold 1.5 \ ---occ_ksize 7 \ ---net_type vggnet11 \ ---im_filter_pct 0.75 \ ---sz_filter_pct 0.25 \ ---min_predict_z 0.1 \ ---max_predict_z 8.0 \ ---min_evaluate_z 0.2 \ ---max_evaluate_z 5.0 \ ---save_depth \ ---output_path trained_models/vggnet11_void_model/output \ ---restore_path trained_models/vggnet11_void_model/model.ckpt-100000 diff --git a/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/src/evaluate_model.py 
b/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/src/evaluate_model.py index 48cf8bbdbd966f1d8b652cabf1f3d807e5e8ca58..c834b6936f59ef04ea2a45d23398252634369bf7 100644 --- a/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/src/evaluate_model.py +++ b/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/src/evaluate_model.py @@ -177,7 +177,7 @@ with tf.Graph().as_default(): # Initialize Tensorflow session config = tf.ConfigProto(allow_soft_placement=True) config.gpu_options.allow_growth = True - session = tf.Session(config=npu_config_proto(config_proto=config)) + session = tf.Session(config=config) # Load from checkpoint train_saver = tf.train.Saver() session.run(tf.global_variables_initializer()) diff --git a/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/test/train_full_1p.sh index febcfdf2a4cbbe5efab3b411e74bb002d8f754c9..6c7127cc3f70ad653a15ba01d685c3dd74ba283d 100644 --- a/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/test/train_full_1p.sh +++ b/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/test/train_full_1p.sh @@ -1,5 +1,6 @@ #!/bin/bash - +export ASCEND_GLOBAL_LOG_LEVEL=0 +export ASCEND_SLOG_PRINT_TO_STDOUT=0 ########################################################## #########第3行 至 100行,请一定不要、不要、不要修改########## #########第3行 至 100行,请一定不要、不要、不要修改########## @@ -91,6 +92,33 @@ cd ${cur_path}/../ rm -rf ./test/output/${ASCEND_DEVICE_ID} mkdir -p ./test/output/${ASCEND_DEVICE_ID} +#move some data +cp -r ${data_path}/training ${cur_path}/../ +sed -i 's/^data//' training/void_train_image_1500.txt +sed -i "/void_voiced/s!^!${data_path}!" training/void_train_image_1500.txt + +sed -i 's/^data//' training/void_train_interp_depth_1500.txt +sed -i "/void_voiced/s!^!${data_path}!" training/void_train_interp_depth_1500.txt + +sed -i 's/^data//' training/void_train_validity_map_1500.txt +sed -i "/void_release/s!^!${data_path}!" training/void_train_validity_map_1500.txt + +sed -i 's/^data//' training/void_train_intrinsics_1500.txt +sed -i "/void_voiced/s!^!${data_path}!" training/void_train_intrinsics_1500.txt + +cp -r ${data_path}/testing ${cur_path}/../ +sed -i 's/^data//' testing/void_test_image_1500.txt +sed -i "/void_voiced/s!^!${data_path}!" testing/void_test_image_1500.txt + +sed -i 's/^data//' testing/void_test_interp_depth_1500.txt +sed -i "/void_voiced/s!^!${data_path}!" testing/void_test_interp_depth_1500.txt + +sed -i 's/^data//' testing/void_test_validity_map_1500.txt +sed -i "/void_release/s!^!${data_path}!" testing/void_test_validity_map_1500.txt + +sed -i 's/^data//' testing/void_test_ground_truth_1500.txt +sed -i "/void_release/s!^!${data_path}!" 
testing/void_test_ground_truth_1500.txt + # 训练开始时间记录,不需要修改 start_time=$(date +%s) ########################################################## @@ -112,11 +140,11 @@ batch_size=8 if [ x"${etp_flag}" != xtrue ]; then #python3.7 ./LeNet.py --data_path=${data_path} --output_path=${output_path} - python3.7 src/train_voiced.py \ - --train_image_path /home/ma-user/modelarts/inputs/data_url_0/training/void_train_image_1500.txt \ - --train_interp_depth_path /home/ma-user/modelarts/inputs/data_url_0/training/void_train_interp_depth_1500.txt \ - --train_validity_map_path /home/ma-user/modelarts/inputs/data_url_0/training/void_train_validity_map_1500.txt \ - --train_intrinsics_path /home/ma-user/modelarts/inputs/data_url_0/training/void_train_intrinsics_1500.txt \ + python3.7 ./src/train_voiced.py \ + --train_image_path training/void_train_image_1500.txt \ + --train_interp_depth_path training/void_train_interp_depth_1500.txt \ + --train_validity_map_path training/void_train_validity_map_1500.txt \ + --train_intrinsics_path training/void_train_intrinsics_1500.txt \ --n_batch 8 \ --n_height 480 \ --n_width 640 \ @@ -141,14 +169,38 @@ then --rot_param exponential \ --n_summary 1000 \ --n_checkpoint 5000 \ - --checkpoint_path /home/ma-user/modelarts/outputs/train_url_0/ + --checkpoint_path ${output_path} + + # 计算MAE,RMSE,iMAE,iRMSE + python3.7 ./src/evaluate_model.py \ + --image_path testing/void_test_image_1500.txt \ + --interp_depth_path testing/void_test_interp_depth_1500.txt \ + --validity_map_path testing/void_test_validity_map_1500.txt \ + --ground_truth_path testing/void_test_ground_truth_1500.txt \ + --start_idx 0 \ + --end_idx 800 \ + --n_batch 8 \ + --n_height 480 \ + --n_width 640 \ + --occ_threshold 1.5 \ + --occ_ksize 7 \ + --net_type vggnet11 \ + --im_filter_pct 0.75 \ + --sz_filter_pct 0.25 \ + --min_predict_z 0.1 \ + --max_predict_z 8.0 \ + --min_evaluate_z 0.2 \ + --max_evaluate_z 5.0 \ + --save_depth \ + --output_path ${output_path} \ + --restore_path ${output_path}/model.ckpt-103000 else #python3.7 ./LeNet.py --data_path=${data_path} --output_path=${output_path} > ${print_log} - python3.7 src/train_voiced.py \ - --train_image_path /home/ma-user/modelarts/inputs/data_url_0/training/void_train_image_1500.txt \ - --train_interp_depth_path /home/ma-user/modelarts/inputs/data_url_0/training/void_train_interp_depth_1500.txt \ - --train_validity_map_path /home/ma-user/modelarts/inputs/data_url_0/training/void_train_validity_map_1500.txt \ - --train_intrinsics_path /home/ma-user/modelarts/inputs/data_url_0/training/void_train_intrinsics_1500.txt \ + python3.7 ./src/train_voiced.py \ + --train_image_path training/void_train_image_1500.txt \ + --train_interp_depth_path training/void_train_interp_depth_1500.txt \ + --train_validity_map_path training/void_train_validity_map_1500.txt \ + --train_intrinsics_path training/void_train_intrinsics_1500.txt \ --n_batch 8 \ --n_height 480 \ --n_width 640 \ @@ -173,7 +225,31 @@ else --rot_param exponential \ --n_summary 1000 \ --n_checkpoint 5000 \ - --checkpoint_path /home/ma-user/modelarts/outputs/train_url_0/ + --checkpoint_path ${output_path} > ${print_log} 2>&1 + + # 计算MAE,RMSE,iMAE,iRMSE + python3.7 ./src/evaluate_model.py \ + --image_path testing/void_test_image_1500.txt \ + --interp_depth_path testing/void_test_interp_depth_1500.txt \ + --validity_map_path testing/void_test_validity_map_1500.txt \ + --ground_truth_path testing/void_test_ground_truth_1500.txt \ + --start_idx 0 \ + --end_idx 800 \ + --n_batch 8 \ + --n_height 480 \ + --n_width 640 \ + 
--occ_threshold 1.5 \ + --occ_ksize 7 \ + --net_type vggnet11 \ + --im_filter_pct 0.75 \ + --sz_filter_pct 0.25 \ + --min_predict_z 0.1 \ + --max_predict_z 8.0 \ + --min_evaluate_z 0.2 \ + --max_evaluate_z 5.0 \ + --save_depth \ + --output_path ${output_path} \ + --restore_path ${output_path}/model.ckpt-103000 >> ${print_log} 2>&1 fi # 性能相关数据计算 @@ -181,11 +257,10 @@ StepTime=`grep "StepTime: " ${print_log} | tail -n 10 | awk '{print $NF}' | awk FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` # 精度相关数据计算 -#train_accuracy=`grep "Final Accuracy accuracy" ${print_log} | awk '{print $NF}'` +train_accuracy=`cat ${print_log} | grep -Eo " [0-9]*\.[0-9]*" | awk '{print $1}' | tail -n 1` # 提取所有loss打印信息 grep "loss: " ${print_log} | awk -F ":" '{print $2}' | awk -F " " '{print $1}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt - ########################################################### #########后面的所有内容请不要修改########################### #########后面的所有内容请不要修改########################### @@ -239,4 +314,5 @@ echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}. echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/.DS_Store b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..c9b9b7889599850787aa37ed0e89aa4ce5087003 Binary files /dev/null and b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/.DS_Store differ diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/.keep b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/README.md b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5a72184198531df5e367f93ff8bf24f45a06e700 --- /dev/null +++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/README.md @@ -0,0 +1,247 @@ +## 基本信息 + +**发布者(Publisher):Huawei** + +**应用领域(Application Domain):** Classification + +**版本(Version):1.1** + +**修改时间(Modified) :2022.05.15** + +**大小(Size):** + +**框架(Framework):TensorFlow 1.15.0** + +**模型格式(Model Format):** + +**精度(Precision):** + +**处理器(Processor):昇腾910** + +**应用级别(Categories):Research** + +**描述(Description):基于TensorFlow框架的VisionTransformer图像分类网络训练代码** + +## 概述 + +当前Transformer模型被大量应用在NLP自然语言处理当中,而在计算机视觉领域,Transformer的注意力机制attention也被广泛应用,比如Se模块,CBAM模块等等注意力模块,这些注意力模块能够帮助提升网络性能。而VisionTransformer展示了不需要依赖CNN的结构,也可以在图像分类任务上达到很好的效果,并且也十分适合用于迁移学习。 + + +- 参考论文: + + Dosovitskiy, A., Beyer, L., Kolesnikov, A., Weissenborn, D., Zhai, X., Unterthiner, T., Dehghani, M., Minderer, M., Heigold, G., Gelly, S., Uszkoreit, J., & Houlsby, N. (2021). 
An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale. ArXiv, abs/2010.11929.
+
+  - arXiv:2010.11929 (http://xxx.itp.ac.cn/pdf/2010.11929.pdf)
+
+- Reference implementation:
+
+  https://github.com/faustomorales/vit-keras
+
+- Implementation adapted for the Ascend AI processor:
+
+  https://gitee.com/ascend/ModelZoo-TensorFlow/tree/master/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow
+
+- To fetch the code at a given commit_id via Git:
+
+  ```
+  git clone {repository_url}        # clone the repository
+  cd {repository_name}              # enter the model's repo directory
+  git checkout {branch}             # switch to the target branch
+  git reset --hard {commit_id}      # reset the code to the target commit_id
+  cd {code_path}                    # enter the model code path; skip if the repo holds only this model
+  ```
+
+- Accuracy
+
+|          | GPU    | NPU   |
+|----------|--------|-------|
+| Accuracy | 0.8709 | 0.871 |
+
+- Performance
+
+| batchsize | image_size | GPU (v100) | NPU |
+|-----------|------------|------------|-----|
+| 1         | 384*384    |            |     |
+
+
+## Default Configuration
+
+- Training-set preprocessing (CIFAR-100 training set, shown for reference only):
+
+  - Input image size: 384*384
+  - Input format: images read from the CIFAR pickle archive
+```python
+    def _preprocess(image, label):
+        image = tf.image.resize(image, (image_size, image_size))
+        image = (image - 127.5) / 127.5
+        return image, label
+```
+
+- Test-set preprocessing (CIFAR-100 test set, shown for reference only): the same resize and normalization as the training set.
+
+- Training hyperparameters
+
+  - Batch size: 4
+  - Learning rate (LR): 0.001
+  - Optimizer: Momentum (momentum=0.9, as used by the training script)
+  - Train epochs: 1
+
+## Supported Features
+
+| Feature | Supported |
+|---------|-----------|
+| Distributed training | No |
+| Mixed precision | Yes (see the sketch below) |
+| Data parallelism | No |
+
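+
+The mixed-precision switch above is enabled through the NpuOptimizer session options, as done in vit_allpipeline_fusion_accelerate.py later in this change; a minimal sketch:
+
+```python
+import tensorflow as tf
+from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig
+
+# NPU session config as used by this model's training script.
+config = tf.ConfigProto()
+custom_op = config.graph_options.rewrite_options.custom_optimizers.add()
+custom_op.name = "NpuOptimizer"
+custom_op.parameter_map["use_off_line"].b = True  # run the graph on the Ascend AI processor
+custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")  # mixed precision
+config.graph_options.rewrite_options.remapping = RewriterConfig.OFF  # disable the remap pass
+
+sess = tf.Session(config=config)
+```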

+## Training Environment Setup
+
+1. For hardware environment setup, see the hardware product documentation "[Driver and Firmware Installation and Upgrade Guide](https://support.huawei.com/enterprise/zh/category/ai-computing-platform-pid-1557196528909)". The firmware and driver matching your CANN version must be installed on the device.
+2. Install Docker on the host and log in to the [Ascend Hub](https://ascendhub.huawei.com/#/detail?name=ascend-tensorflow-arm) to obtain the container image.
+
+   The images supported by this model are listed in Table 1 below.
+
+   **Table 1** Image list
+
+   | Image name | Image version | Compatible CANN version |
+   |------------|---------------|-------------------------|
+   | [ascend-tensorflow-arm](https://ascendhub.huawei.com/#/detail?name=ascend-tensorflow-arm) | 20.2.0 | 20.2 |
+
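+   As a hedged example (the exact repository path on Ascend Hub may differ), pulling the Table 1 image could look like:
+
+   ```
+   docker pull ascendhub.huawei.com/public-ascendhub/ascend-tensorflow-arm:20.2.0
+   ```
+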
+
+## Notes
+
+1. This project was completed on the bare-metal "104" machine; runs on the online ModelArts service kept failing with operator errors.
+2. Vision Transformer needs pretrained weights to reach good accuracy; here the weights of a GPU-trained model are loaded (a restore sketch follows).
+
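+A minimal sketch of that restore, mirroring what vit_allpipeline_fusion_accelerate.py does (the `./dataset/model/vit-base-5` checkpoint prefix is this repo's layout; adjust to your own paths):
+
+```python
+import os
+import tensorflow as tf
+
+# Assumes the ViT graph has already been built in the default graph,
+# e.g. via vit.vit_b16_load_pretrain(...) as in the training script.
+pretrained_prefix = os.path.join("./dataset", "model", "vit-base-5")
+
+saver = tf.train.Saver(max_to_keep=10)
+with tf.Session() as sess:
+    sess.run(tf.global_variables_initializer())
+    # Overwrite the freshly initialized variables with the GPU-trained weights.
+    saver.restore(sess, pretrained_prefix)
+```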

+## Quick Start
+
+- Dataset preparation
+
+  1. On the "104" machine the dataset is at `/home/HwHiAiUser/wubo/vit/vit-ckpt/datasets`; a copy is also available on OBS.
+
+  2. After obtaining the dataset, place it under the model directory and point the training script at its path.
+
+## Model Training
+
+- Click "Download Now" and choose a suitable way to download the source package.
+
+- Before launching training, configure the runtime environment variables. See:
+
+  [Ascend 910 training platform environment variable setup](https://gitee.com/ascend/modelzoo/wikis/Ascend%20910%E8%AE%AD%E7%BB%83%E5%B9%B3%E5%8F%B0%E7%8E%AF%E5%A2%83%E5%8F%98%E9%87%8F%E8%AE%BE%E7%BD%AE?sort_id=3148819)
+
+- Single-card training
+
+  1. Configure the training parameters.
+
+     In the script test/train_full_1p.sh, set the training dataset path according to your actual path:
+
+     ```
+     --data_path ./dataset
+     ```
+
+  2. Launch training (entry script: modelarts_entry_acc.py):
+
+     ```
+     python3 modelarts_entry_acc.py
+     ```
+
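+   modelarts_entry_acc.py also accepts explicit input/output paths through its `--data_url` and `--train_url` flags (the defaults are the ModelArts mount points); for example, with hypothetical local paths:
+
+   ```
+   python3 modelarts_entry_acc.py --data_url=/path/to/datasets --train_url=./output
+   ```
+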

+## Transfer Learning Guide
+
+- Dataset preparation.
+
+  Dataset requirements:
+
+  1. Obtain the data (also available at obs://cann-id1217/) and place it in the directory given by the script parameter data_path. The reference code expects:
+
+     - Training set: ./dataset/train
+     - Test set: ./dataset/val
+
+     The dataset may also live elsewhere; in that case simply change the data_path script argument. A sketch of the expected on-disk layout follows this list.
+
+- Model training.
+
+  Follow the training steps in "Model Training".
+
+- Model evaluation.
+
+  Follow the validation steps in "Model Training".
+
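+  Note that `read_data()` in vit_allpipeline_fusion_accelerate.py actually opens CIFAR-100 python-format pickles named `train` and `test` directly under `data_path`, and the pretrained checkpoint is loaded from a `model/` subdirectory; the layout below is therefore an assumption based on that code:
+
+  ```
+  dataset/
+  ├─train      # CIFAR-100 training pickle (50000 images, b'fine_labels')
+  ├─test       # CIFAR-100 test pickle (10000 images)
+  └─model/     # GPU-trained checkpoint files (vit-base-5.index, vit-base-5.data-*, ...)
+  ```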

+## 高级参考

+ +## 脚本和示例代码 + +``` +VisionTransformer +└─ + ├─README.md + ├─dataset用于存放训练数据集和预训练文件 + |-model/ + ├─train + └─val + ├─precision_tool 用来去掉某一些算子融合策略 + └─... + ├─test 用于测试 + ├─output 用于存放测试结果和日志 + └─test_1p.sh +``` + +## 脚本参数 + +``` +--learning_rate 学习率,默认是0.001 +--batch_size 训练的batch大小,默认是4 +--data_path 训练集文件路径 +--output_path 日志,模型文件等存放的路径 +``` + + +## 训练过程 + +1. 通过“模型训练”中的训练指令启动单卡训练。 + +2. 参考脚本的模型存储路径为./output。 + + + +## 推理/验证过程 + + + diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/fusion_switch.cfg b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/fusion_switch.cfg new file mode 100644 index 0000000000000000000000000000000000000000..9260c45253f20249ce9bae172ab885c8ad583098 --- /dev/null +++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/fusion_switch.cfg @@ -0,0 +1,10 @@ +{ + "Switch":{ + "GraphFusion":{ + "ALL":"off" + }, + "UBFusion":{ + "ALL":"off" + } + } +} \ No newline at end of file diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/modelarts_entry_acc.py b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/modelarts_entry_acc.py new file mode 100644 index 0000000000000000000000000000000000000000..13077b10e660de32d6f7861257a50e1a01ede9ba --- /dev/null +++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/modelarts_entry_acc.py @@ -0,0 +1,63 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import argparse +import sys + +# 解析输入参数data_url +parser = argparse.ArgumentParser() +parser.add_argument("--data_url", type=str, default="/home/ma-user/modelarts/inputs/data_url_0") +parser.add_argument("--train_url", type=str, default="/home/ma-user/modelarts/outputs/train_url_0/") +config = parser.parse_args() + +print("[CANN-Modelzoo] code_dir path is [%s]" % (sys.path[0])) +code_dir = sys.path[0] +os.chdir(code_dir) +print("[CANN-Modelzoo] work_dir path is [%s]" % (os.getcwd())) + +print("[CANN-Modelzoo] before train - list my run files:") +os.system("ls -al /usr/local/Ascend/ascend-toolkit/") + +print("[CANN-Modelzoo] before train - list my dataset files:") +os.system("ls -al %s" % config.data_url) + +print("[CANN-Modelzoo] start run train shell") +# 设置sh文件格式为linux可执行 +os.system("dos2unix ./test/*") + +# 执行train_full_1p.sh或者train_performance_1p.sh,需要用户自己指定 +# full和performance的差异,performance只需要执行很少的step,控制在15分钟以内,主要关注性能FPS +os.system("bash ./test/train_full_1p.sh --data_path=%s --output_path=%s " % (config.data_url, config.train_url)) + +print("[CANN-Modelzoo] finish run train shell") + +# 将当前执行目录所有文件拷贝到obs的output进行备份 +print("[CANN-Modelzoo] after train - list my output files:") +os.system("cp -r %s %s " % (code_dir, config.train_url)) +os.system("ls -al %s" % config.train_url) diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/modelarts_entry_perf.py b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/modelarts_entry_perf.py new file mode 100644 index 0000000000000000000000000000000000000000..14384e227a0fa90a514254590aef5078c62ff700 --- /dev/null +++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/modelarts_entry_perf.py @@ -0,0 +1,63 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import argparse +import sys + +# 解析输入参数data_url +parser = argparse.ArgumentParser() +parser.add_argument("--data_url", type=str, default="/home/ma-user/modelarts/inputs/data_url_0") +parser.add_argument("--train_url", type=str, default="/home/ma-user/modelarts/outputs/train_url_0/") +config = parser.parse_args() + +print("[CANN-Modelzoo] code_dir path is [%s]" % (sys.path[0])) +code_dir = sys.path[0] +os.chdir(code_dir) +print("[CANN-Modelzoo] work_dir path is [%s]" % (os.getcwd())) + +print("[CANN-Modelzoo] before train - list my run files:") +os.system("ls -al /usr/local/Ascend/ascend-toolkit/") + +print("[CANN-Modelzoo] before train - list my dataset files:") +os.system("ls -al %s" % config.data_url) + +print("[CANN-Modelzoo] start run train shell") +# 设置sh文件格式为linux可执行 +os.system("dos2unix ./test/*") + +# 执行train_full_1p.sh或者train_performance_1p.sh,需要用户自己指定 +# full和performance的差异,performance只需要执行很少的step,控制在15分钟以内,主要关注性能FPS +os.system("bash ./test/train_performance_1p.sh --data_path=%s --output_path=%s " % (config.data_url, config.train_url)) + +print("[CANN-Modelzoo] finish run train shell") + +# 将当前执行目录所有文件拷贝到obs的output进行备份 +print("[CANN-Modelzoo] after train - list my output files:") +os.system("cp -r %s %s " % (code_dir, config.train_url)) +os.system("ls -al %s" % config.train_url) diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/modelzoo_level.txt b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/modelzoo_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..3b2d5db5cc4600a7fb8513cac899f1f93d590bad --- /dev/null +++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/modelzoo_level.txt @@ -0,0 +1,6 @@ +GPUStatus:OK +NPUMigrationStatus:POK +FuncStatus:OK +PrecisionStatus:OK +AutoTune:NOK +PerfStatus:NOK \ No newline at end of file diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/requirements.txt b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..47c136602a9dbe367e552758a766a3d2dbd1dd00 --- /dev/null +++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/requirements.txt @@ -0,0 +1,236 @@ +absl-py==0.11.0 +amct-onnx @ file:///root/arx/amct/amct_onnx/amct_onnx-0.4.0-py3-none-linux_aarch64.whl +anykeystore==0.2 +apex @ file:///home/HwHiAiUser/wubo/pytorch_docker/apex-0.1%2Bascend.20220413-cp37-cp37m-linux_aarch64.whl +appdirs==1.4.4 +astor==0.8.1 +attrs==20.3.0 +auto-tune @ file:///root/selfgz34715830652/compiler/lib64/auto_tune-0.1.0-py3-none-any.whl +av==8.0.4.dev0 +backcall==0.2.0 +backports.lzma==0.0.14 +beautifulsoup4==4.9.3 +black==19.10b0 +blis==0.7.4 +boto3==1.17.45 +botocore==1.20.45 +Bottleneck==1.3.2 +cached-property==1.5.2 +catalogue==2.0.4 +category-encoders==2.4.0 +certifi==2020.12.5 +cffi==1.12.3 +chardet==4.0.0 +click==8.0.0 +cloudpickle==1.6.0 +cmakelang==0.6.13 +colorama==0.4.4 +commonmark==0.9.1 +configparser==5.0.2 +cryptacular==1.5.5 +cycler==0.10.0 +cymem==2.0.5 +Cython==0.29.22 +decorator==4.4.2 +defusedxml==0.7.1 +# Editable install with no version control (detectron2==0.2) +-e /root/yaoyu/detectron2-0.2.1 +dill==0.3.4 +Django==2.2 +django-cors-headers==3.5.0 +djangorestframework==3.9.4 +dm-tree==0.1.6 +docker-pycreds==0.4.0 +et-xmlfile==1.1.0 +execnet==1.9.0 +fastai==1.0.51 +fastprogress==1.0.0 +ffmpeg==1.4 +ffmpeg-python==0.1.17 +filelock==3.0.12 +fire==0.4.0 +fonttools==4.29.1 +future==0.18.2 +fvcore==0.1.2.post20201210 +gast==0.2.2 
+gitdb==4.0.7 +GitPython==3.1.17 +gnureadline==8.0.0 +google-pasta==0.2.0 +googleapis-common-protos==1.55.0 +graphviz==0.8.4 +greenlet==1.0.0 +grpcio==1.32.0 +grpcio-tools==1.32.0 +h5py==3.5.0 +hccl @ file:///root/selfgz34715830652/compiler/lib64/hccl-0.1.0-py3-none-any.whl +huawei-obs==0.0.1 +hupper==1.10.2 +idna==2.10 +imageio==2.9.0 +imageio-ffmpeg==0.4.4 +importlib-metadata==3.7.0 +importlib-resources==5.4.0 +iniconfig==1.1.1 +ipython==7.23.1 +ipython-genutils==0.2.0 +jedi==0.18.0 +jieba==0.42.1 +Jinja2==3.0.1 +jmespath==0.10.0 +joblib==1.0.1 +Keras==2.2.4 +Keras-Applications==1.0.8 +Keras-Preprocessing==1.1.2 +kiwisolver==1.3.1 +Markdown==3.3.4 +MarkupSafe==2.0.1 +matplotlib==3.5.1 +matplotlib-inline==0.1.2 +mock==4.0.3 +moviepy==1.0.3 +mpmath==1.2.1 +murmurhash==1.0.5 +mxnet==1.9.0 +nltk==3.6.2 +npu-bridge @ file:///root/selfgz3053178543/fwkplugin/bin/npu_bridge-1.15.0-py3-none-any.whl +npu-device @ file:///root/selfgz3053178543/fwkplugin/bin/npu_device-0.1-py3-none-any.whl +numexpr==2.7.3 +numpy==1.17.5 +nvidia-ml-py3==7.352.0 +oauthlib==3.1.0 +onnx==1.10.1 +onnxconverter-common==1.8.1 +onnxruntime==1.6.0 +opc-tool @ file:///root/selfgz34715830652/compiler/lib64/opc_tool-0.1.0-py3-none-any.whl +opencv-python==4.5.1.48 +openpyxl==3.0.7 +opt-einsum==3.3.0 +packaging==21.3 +pandas==1.0.0 +parso==0.8.2 +PasteDeploy==2.1.1 +pathlib==1.0.1 +pathlib2==2.3.5 +pathspec==0.9.0 +pathtools==0.1.2 +pathy==0.5.2 +patsy==0.5.2 +pbkdf2==1.3 +pexpect==4.8.0 +pickleshare==0.7.5 +Pillow==8.1.1 +plaster==1.0 +plaster-pastedeploy==0.7 +pluggy==1.0.0 +plyfile==0.7.4 +portalocker==2.3.2 +preshed==3.0.5 +proglog==0.1.9 +promise==2.3 +prompt-toolkit==3.0.18 +protobuf==3.15.3 +psutil==5.8.0 +ptyprocess==0.7.0 +py==1.11.0 +pyarrow==5.0.0 +pycocotools==2.0.4 +pycparser==2.20 +pydantic==1.7.4 +pydot==1.4.2 +Pygments==2.9.0 +pyparsing==2.4.7 +pyramid==2.0 +pyramid-mailer==0.15.1 +pytest==7.1.1 +pytest-forked==1.4.0 +pytest-xdist==2.5.0 +python-dateutil==2.8.1 +python-mnist==0.7 +python3-openid==3.2.0 +pytz==2021.1 +PyYAML==5.4.1 +qobs==0.0.3 +regex==2021.3.17 +repoze.sendmail==4.4.1 +requests==2.25.1 +requests-oauthlib==1.3.0 +rich==10.1.0 +s3transfer==0.3.6 +sacremoses==0.0.43 +schedule-search @ file:///root/selfgz34715830652/compiler/lib64/schedule_search-0.1.0-py3-none-any.whl +scikit-learn==1.0.2 +scipy==1.7.3 +seaborn==0.11.1 +sentencepiece==0.1.95 +sentry-sdk==1.1.0 +setproctitle==1.2.2 +shortuuid==1.0.1 +six==1.15.0 +sk-video==1.1.10 +skl2onnx==1.9.0 +sklearn2 @ file:///root/ieg/sklearn2-0.0.13-py2.py3-none-any.whl +smart-open==3.0.0 +smmap==4.0.0 +soupsieve==2.2.1 +spacy==3.0.6 +spacy-legacy==3.0.6 +SQLAlchemy==1.4.3 +sqlparse==0.4.1 +srsly==2.4.1 +statsmodels==0.13.2 +subprocess32==3.5.4 +svgpathtools==1.4.4 +svgwrite==1.4.1 +sympy==1.4 +tables==3.6.1 +tabulate==0.8.9 +te @ file:///root/selfgz34715830652/compiler/lib64/te-0.4.0-py3-none-any.whl +tensorboard==1.15.0 +tensorboardX==1.6 +tensorflow @ file:///root/tensorflow-1.15.0-cp37-cp37m-linux_aarch64.whl +tensorflow-datasets==3.0.0 +tensorflow-estimator==1.15.1 +tensorflow-gan==2.0.0 +tensorflow-hub==0.12.0 +tensorflow-metadata==1.7.0 +tensorflow-probability==0.7.0 +termcolor==1.1.0 +thinc==8.0.4 +threadpoolctl==2.1.0 +tkintertable==1.3.3 +tokenizers==0.10.1 +toml==0.10.2 +tomli==2.0.1 +topi @ file:///root/selfgz34715830652/compiler/lib64/topi-0.4.0-py3-none-any.whl +torch @ file:///home/HwHiAiUser/wubo/pytorch_docker/torch-1.5.0%2Bascend.post5.20220413-cp37-cp37m-linux_aarch64.whl +torchvision==0.2.2.post3 +tqdm==4.62.0 +traitlets==5.0.5 
+transaction==3.0.1 +transformers==2.1.1 +transforms3d==0.3.1 +translationstring==1.4 +typed-ast==1.5.3 +typeguard==2.2.2 +typer==0.3.2 +typing==3.7.4.3 +typing-extensions==3.7.4.3 +urllib3==1.26.3 +velruse==1.1.1 +venusian==3.0.0 +wandb==0.10.32 +wasabi==0.8.2 +wcwidth==0.2.5 +WebOb==1.8.7 +Werkzeug==1.0.1 +wrapt==1.12.1 +WTForms==2.3.3 +wtforms-recaptcha==0.3.2 +xlrd==1.2.0 +yacs==0.1.8 +youtube-dl==2021.6.6 +zipp==3.4.0 +zope.deprecation==4.4.0 +zope.interface==5.3.0 +zope.sqlalchemy==1.3 diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/test/train_full_1p.sh b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/test/train_full_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..d2674252685044593571ea8afb61698f49f3982a --- /dev/null +++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/test/train_full_1p.sh @@ -0,0 +1,205 @@ +#!/bin/bash +export RANK_INDEX=2 +export RANK_SIZE=1 +export RANK_ID=2 +export DEVICE_ID=2 +export DEVICE_INDEX=2 +export ASCEND_DEVICE_ID=2 + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +modelarts_flag=${MODELARTS_MODEL_PATH} +if [ x"${modelarts_flag}" != x ]; +then + echo "running without etp..." 
+    print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank`
+    print_log="/home/ma-user/modelarts/log/${print_log_name}"
+fi
+echo "### get your log here : ${print_log}"
+
+CaseName=""
+function get_casename()
+{
+    if [ x"${perf_flag}" = x1 ];
+    then
+        CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf'
+    else
+        CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc'
+    fi
+}
+
+# 跳转到code目录
+cd ${cur_path}/../
+rm -rf ./test/output/${ASCEND_DEVICE_ID}
+mkdir -p ./test/output/${ASCEND_DEVICE_ID}
+
+# 训练开始时间记录,不需要修改
+start_time=$(date +%s)
+##########################################################
+#########第3行 至 100行,请一定不要、不要、不要修改##########
+#########第3行 至 100行,请一定不要、不要、不要修改##########
+#########第3行 至 100行,请一定不要、不要、不要修改##########
+##########################################################
+
+#=========================================================
+#=========================================================
+#========训练执行命令,需要根据您的网络进行修改==============
+#=========================================================
+#=========================================================
+# 基础参数,需要模型审视修改
+# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取
+# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取
+# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值
+batch_size=4
+
+
+#export LD_LIBRARY_PATH=/usr/local/python3.7.5/lib:$LD_LIBRARY_PATH
+#如果用户环境存在多个python3版本,则指定使用python3.7.5版本
+
+#export PATH=/usr/local/python3.7.5/bin:$PATH
+#export install_path=/usr/local/Ascend/ascend-toolkit/latest #软件包安装路径,请根据实际情况修改
+#export LD_LIBRARY_PATH=${install_path}/fwkacllib/lib64:/usr/lib:$LD_LIBRARY_PATH
+#export PATH=${install_path}/fwkacllib/ccec_compiler/bin:${install_path}/fwkacllib/bin:$PATH
+#export PYTHONPATH=${install_path}/fwkacllib/python/site-packages:${install_path}/toolkit/python/site-packages:$PYTHONPATH
+#export PYTHONPATH=/usr/local/Ascend/tfplugin/latest/tfplugin/python/site-packages:$PYTHONPATH
+#export ASCEND_OPP_PATH=${install_path}/opp
+#export ASCEND_AICPU_PATH=${install_path}/{arch}-linux #其中{arch}请根据实际情况替换(arm64或x86_64)
+#export TOOLCHAIN_HOME=${install_path}/toolkit
+
+if [ x"${modelarts_flag}" != x ];
+then
+    python3.7 ./vit_allpipeline_fusion_accelerate.py --data_path=${data_path} --output_path=${output_path} > ${print_log} 2>&1
+else
+    python3.7 ./vit_allpipeline_fusion_accelerate.py --data_path=${data_path}/dataset --output_path=${output_path} > ${print_log} 2>&1
+fi
+
+# 性能相关数据计算
+StepTime=`grep "time" ${print_log} | awk '{print $9}' | tail -n +3 | awk '{sum+=$1} END {print sum/NR}'`
+FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'`
+
+# 精度相关数据计算
+train_accuracy=`grep "Train ACC" ${print_log} | awk '{print $NF}'`
+
+# 提取所有loss打印信息
+grep "loss:" ${print_log} | awk '{print $6}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt
+
+###########################################################
+#########后面的所有内容请不要修改###########################
+#########后面的所有内容请不要修改###########################
+#########后面的所有内容请不要修改###########################
+###########################################################
+
+# 判断本次执行是否正确使用Ascend NPU
+use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l`
+
+if [ x"${use_npu_flag}" == x0 ];
+then
+    echo "------------------ ERROR NOTICE START ------------------"
+    echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration."
+ echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." + echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/test/train_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..1ea003e81f79cbfe2958437d2ee3c227e3a4e98d --- /dev/null +++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/test/train_performance_1p.sh @@ -0,0 +1,186 @@ +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* 
]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +modelarts_flag=${MODELARTS_MODEL_PATH} +if [ x"${modelarts_flag}" != x ]; +then + echo "running with modelarts..." + print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` + print_log="/home/ma-user/modelarts/log/${print_log_name}" +fi +echo "### get your log here : ${print_log}" + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 基础参数,需要模型审视修改 +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +batch_size=1 +epoch=1 + +if [ x"${modelarts_flag}" != x ]; +then + python3.7 ./vit_allpipeline_fusion_accelerate.py --data_path=${data_path} --output_path=${output_path} +else + python3.7 ./vit_allpipeline_fusion_accelerate.py --data_path=${data_path} --output_path=${output_path} > ${print_log} +fi + +# 性能相关数据计算 +StepTime=`grep "time" ${print_log} | awk '{print $9}' | tail -n +3 | awk '{sum+=$1} END {print sum/NR}'` +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` + +# 精度相关数据计算 +train_accuracy=`grep "Train ACC" ${print_log} | awk '{print $NF}'` + +# 提取所有loss打印信息 +grep "loss:" ${print_log} | awk '{print $6}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + +########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` + +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." 
+ echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_allpipeline_fusion_accelerate.py b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_allpipeline_fusion_accelerate.py new file mode 100644 index 0000000000000000000000000000000000000000..28c306a6f8611995cf44df95e81dbb41f003647f --- /dev/null +++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_allpipeline_fusion_accelerate.py @@ -0,0 +1,224 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import tensorflow as tf +from vit_keras import vit, utils +import os +import time +import tensorflow.keras.backend as K +from PIL import Image +import pickle +import numpy as np +import argparse + +from npu_bridge.estimator import npu_ops +from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig +# import moxing as mox +# import precision_tool.tf_config as npu_tf_config +import os +os.environ['TF_CPP_MIN_VLOG_LEVEL'] = '3' +from npu_bridge.npu_init import * +###################### +from npu_bridge.estimator.npu.npu_config import NPURunConfig +from npu_bridge.estimator.npu.npu_config import ProfilingConfig + +def parse_args(): + parser = argparse.ArgumentParser() + # parser.add_argument('--model_path', type=str, default='', help='Pretrained model location') + parser.add_argument('--data_path', type=str, default='./datasets', help='Datasets location') + parser.add_argument('--output_path', type=str, default='./output', help='Output location,saving trained models') + parser.add_argument('--learning_rate', type=float, default=0.001, help='learning rate') + parser.add_argument('--batch_size', type=int, default=4, help='learning rate') + return parser.parse_args() + +args = parse_args() + + + +MODEL_CACHE_PATH=args.output_path +PRETRAINED_MODEL_PATH= os.path.join(args.data_path,"model") +HPARAMS = { + "batch_size": 4, + "image_size": 384, + 'learning_rate': 0.001, +} +DATA_CACHE_PATH= args.data_path +image_size = HPARAMS['image_size'] +batch_size = HPARAMS['batch_size'] + +def read_data(filename, training): + with open(filename, 'rb') as fo: + dict = pickle.load(fo, encoding='bytes') + if training: + images = dict[b'data'].reshape([50000, 3, 32, 32]) + else: + images = dict[b'data'].reshape([10000, 3, 32, 32]) + images = np.transpose(images, [0, 2, 3, 1]) + labels = np.array(dict[b'fine_labels']) + def _augment(image, label): + if np.random.rand() < 0.3: + image = tf.image.flip_left_right(image) + if np.random.rand() < 0.3: + image = tf.image.flip_up_down(image) + if np.random.rand() < 0.3: + image = tf.image.random_contrast(image, lower=0.5, upper=2) + return image, label + + def _preprocess(image, label): + image = tf.image.resize(image, (image_size, image_size)) + image = (image - 127.5) / 127.5 + return image, label + + ds = tf.data.Dataset.from_tensor_slices((images, labels)) + if training: + # ds = ds.map(_augment) + ds = ds.map(_preprocess) + ds = ds.shuffle(HPARAMS['batch_size'] * 10) + ds = ds.repeat() + else: + ds = ds.map(_preprocess) + ds = ds.repeat() + + ds = ds.batch(batch_size=HPARAMS['batch_size'], drop_remainder=True) + iterator = tf.compat.v1.data.make_one_shot_iterator(ds) + image_batch, label_batch = iterator.get_next() + print("load dataset =================================") + return image_batch, label_batch + + +model = vit.vit_b16_load_pretrain( + image_size=HPARAMS['image_size'], + activation='linear', + pretrained=True, + classes=100, + include_top=True, + pretrained_top=True, + pretrained_path="./" +) + +images_batch, labels_batch = read_data(filename=os.path.join(DATA_CACHE_PATH,"train"), training=True) +val_image_batch, val_labels_batch = read_data(filename=os.path.join(DATA_CACHE_PATH,"test"), training=False) + +loss_fun = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True) + +inputx = tf.compat.v1.placeholder( + tf.float32, shape=[HPARAMS['batch_size'], HPARAMS['image_size'], HPARAMS['image_size'], 3], name="inputx") + +inputy = tf.compat.v1.placeholder( + tf.int64, shape=[HPARAMS['batch_size'], ], name="inputy") + +inputTrain = 
tf.compat.v1.placeholder( + tf.bool, name='training') + + +def eval(pred, label): + prediction = np.argmax(pred, 1).tolist() + return calc(prediction, label) + + +def calc(prediction, label): + a = [prediction[i] == label[i] for i in range(len(prediction))] + return sum(a) / len(a) + + +out = model(inputx, training=inputTrain) +loss = loss_fun(inputy, out) + +optimizer = tf.train.MomentumOptimizer( + learning_rate=HPARAMS['learning_rate'], + momentum=0.9, + use_locking=False, + use_nesterov=False, + name='Momentum' +) + + + +loss_scale_manager = FixedLossScaleManager(loss_scale=2**32) + +opt = NPULossScaleOptimizer(optimizer, loss_scale_manager) + + +train_op = opt.minimize(loss) + + +config = tf.ConfigProto() +custom_op = config.graph_options.rewrite_options.custom_optimizers.add() +custom_op.name = "NpuOptimizer" +custom_op.parameter_map["use_off_line"].b = True # 在昇腾AI处理器执行训练 +custom_op.parameter_map["precision_mode"].s=tf.compat.as_bytes("allow_mix_precision") + +config.graph_options.rewrite_options.remapping = RewriterConfig.OFF # 关闭remap开关 +config.gpu_options.allow_growth = True +# 混合训练 +# session_config = npu_tf_config.session_dump_config(config, action='overflow|fusion_off') # 新增行 +custom_op.parameter_map["fusion_switch_file"].s = tf.compat.as_bytes("./fusion_switch.cfg") + +sess = tf.Session(config=config) +sess.run(tf.global_variables_initializer()) + + +saver = tf.train.Saver(max_to_keep=10) +s = time.time() +print("============ start load pretrained model =======================================") +saver.restore(sess, "{}/vit-base-5".format(PRETRAINED_MODEL_PATH)) # 0.8709 for 5 # 2 0.8719 # 3 0.8767 +print("============ load success {:.4f} =====================".format(time.time() - s)) + +for epoch in range(1, 2): + # train + label_col = [] + pred_col = [] + for step in range(HPARAMS['batch_size']*10 // HPARAMS['batch_size']): + s = time.time() + x_in, y_in = sess.run([images_batch, labels_batch], + feed_dict={inputTrain: True}) + out_, loss_, _ = sess.run([out, loss, train_op], feed_dict={ + inputx: x_in, inputy: y_in, inputTrain: True}) + label_col += y_in.tolist() + pred_col += np.argmax(out_, 1).tolist() + print("epoch:{} step: {} , loss: {:.4f} , time: {:.4f} acc: {:.4f}".format( + epoch, step, loss_.item(), time.time() - s, eval(out_, y_in))) # print 到文件里面 + print("Train ACC: {:.4f}".format(calc(pred_col, label_col))) + saver.save(sess, "{}/vit-base-5fusion-mix2".format(MODEL_CACHE_PATH), global_step=epoch) + # break + prediction = [] + labels = [] + for _ in range(10000 // HPARAMS['batch_size']): + x_in, y_in = sess.run([val_image_batch, val_labels_batch]) + pred = sess.run(out, feed_dict={inputx: x_in, inputy: y_in, inputTrain: False}) + # pred = model.predict(x_in) + # print(calc(np.argmax(pred, 1).tolist(), y_in.tolist()), pred.shape, ) + prediction += np.argmax(pred, 1).tolist() + labels += y_in.tolist() + print( + "Final Average Distances :", + sum(prediction[i] == labels[i] for i in range(len(labels))) + / len(labels), + ) + diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__init__.py b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8ae2d297aa55758c0558838315304650db7cebc9 --- /dev/null +++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__init__.py @@ -0,0 +1,27 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. \ No newline at end of file diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/__init__.cpython-36.pyc b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7955d764f164a49213292f0b4af1fc813e212685 Binary files /dev/null and b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/__init__.cpython-36.pyc differ diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/__init__.cpython-37.pyc b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/__init__.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3acb61bede29dd810e5ef9f56d1b08c8d70f738e Binary files /dev/null and b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/__init__.cpython-37.pyc differ diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/layers.cpython-36.pyc b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/layers.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..92b737741401927e836d8ae6c8dd4d80c9430dd3 Binary files /dev/null and b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/layers.cpython-36.pyc differ diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/layers.cpython-37.pyc b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/layers.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..67b06472c6c461119eccbf66d3d287bcee9f2b00 Binary files /dev/null and b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/layers.cpython-37.pyc differ diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/utils.cpython-36.pyc b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/utils.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3a52975d913da3c9ba63eea8c7cb571b1e1901b4 Binary files /dev/null and 
b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/utils.cpython-36.pyc differ diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/utils.cpython-37.pyc b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/utils.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c77867ec2d11cd2bb0c0600f9e2c88648e71657c Binary files /dev/null and b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/utils.cpython-37.pyc differ diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/vit.cpython-36.pyc b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/vit.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..30e6c5cd670cf3b81921fe463854eeee1a0c4a93 Binary files /dev/null and b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/vit.cpython-36.pyc differ diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/vit.cpython-37.pyc b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/vit.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a16095d88b4d2e7d563fbcd88caa60e428298f02 Binary files /dev/null and b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/__pycache__/vit.cpython-37.pyc differ diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/layers.py b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..fe33b96d39891f1606fdae5955846e43cfc33fc3 --- /dev/null +++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/layers.py @@ -0,0 +1,286 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# pylint: disable=arguments-differ,missing-function-docstring,missing-class-docstring,unexpected-keyword-arg,no-value-for-parameter +import tensorflow as tf +import threading +# modify record : +# remove super version : tfa . 
use tf.keras.activations.gelu() replace tfa.activations.gelu() + +import tensorflow.keras.backend as K + + +# 更改地方 +def swish(x, beta): + return x * K.sigmoid(beta * x) + + +def gelu(x, approximate="xxx"): + return swish(x, 1.702) + + +class CustomObjectsContext(threading.local): + + def __init__(self): + super(CustomObjectsContext, self).__init__() + self.objects = {} + self.names = {} + + +GLOBAL_CUSTOM_OBJECTS = CustomObjectsContext() + + +# @keras_export('keras.utils.register_keras_serializable') +def register_keras_serializable(package='Custom', name=None): + """Registers an object with the Keras serialization framework. + This decorator injects the decorated class or function into the Keras custom + object dictionary, so that it can be serialized and deserialized without + needing an entry in the user-provided custom object dict. It also injects a + function that Keras will call to get the object's serializable string key. + Note that to be serialized and deserialized, classes must implement the + `get_config()` method. Functions do not have this requirement. + The object will be registered under the key 'package>name' where `name`, + defaults to the object name if not passed. + Args: + package: The package that this class belongs to. + name: The name to serialize this class under in this package. If None, the + class' name will be used. + Returns: + A decorator that registers the decorated class with the passed names. + """ + + def decorator(arg): + """Registers a class with the Keras serialization framework.""" + class_name = name if name is not None else arg.__name__ + registered_name = package + '>' + class_name + + # if tf_inspect.isclass(arg) and not hasattr(arg, 'get_config'): + # raise ValueError( + # 'Cannot register a class that does not have a get_config() method.') + + if registered_name in GLOBAL_CUSTOM_OBJECTS.objects: + raise ValueError(f'{registered_name} has already been registered to ' + f'{GLOBAL_CUSTOM_OBJECTS.objects[registered_name]}') + + if arg in GLOBAL_CUSTOM_OBJECTS.names: + raise ValueError( + f'{arg} has already been registered to {GLOBAL_CUSTOM_OBJECTS.names[arg]}' + ) + GLOBAL_CUSTOM_OBJECTS.objects[registered_name] = arg + GLOBAL_CUSTOM_OBJECTS.names[arg] = registered_name + + return arg + + return decorator + + +@register_keras_serializable() +class ClassToken(tf.keras.layers.Layer): + """Append a class token to an input layer.""" + + def build(self, input_shape): + cls_init = tf.zeros_initializer() + self.hidden_size = input_shape[-1] + self.cls = tf.Variable( + name="cls", + initial_value=cls_init(shape=(1, 1, self.hidden_size), dtype="float32"), + trainable=True, + ) + + def call(self, inputs): + batch_size = tf.shape(inputs)[0] + cls_broadcasted = tf.cast( + tf.broadcast_to(self.cls, [batch_size, 1, self.hidden_size]), + dtype=inputs.dtype, + ) + return tf.concat([cls_broadcasted, inputs], 1) + + def get_config(self): + config = super().get_config() + return config + + @classmethod + def from_config(cls, config): + return cls(**config) + + +@register_keras_serializable() +class AddPositionEmbs(tf.keras.layers.Layer): + """Adds (optionally learned) positional embeddings to the inputs.""" + + def build(self, input_shape): + assert ( + len(input_shape) == 3 + ), f"Number of dimensions should be 3, got {len(input_shape)}" + self.pe = tf.Variable( + name="pos_embedding", + initial_value=tf.random_normal_initializer(stddev=0.06)( + shape=(1, input_shape[1], input_shape[2]) + ), + dtype="float32", + trainable=True, + ) + + def call(self, inputs): + 
+        return inputs + tf.cast(self.pe, dtype=inputs.dtype)
+
+    def get_config(self):
+        config = super().get_config()
+        return config
+
+    @classmethod
+    def from_config(cls, config):
+        return cls(**config)
+
+
+@register_keras_serializable()
+class MultiHeadSelfAttention(tf.keras.layers.Layer):
+    def __init__(self, *args, num_heads, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.num_heads = num_heads
+
+    def build(self, input_shape):
+        hidden_size = input_shape[-1]
+        num_heads = self.num_heads
+        if hidden_size % num_heads != 0:
+            raise ValueError(
+                f"embedding dimension = {hidden_size} should be divisible by number of heads = {num_heads}"
+            )
+        self.hidden_size = hidden_size
+        self.projection_dim = hidden_size // num_heads
+        self.query_dense = tf.keras.layers.Dense(hidden_size, name="query")
+        self.key_dense = tf.keras.layers.Dense(hidden_size, name="key")
+        self.value_dense = tf.keras.layers.Dense(hidden_size, name="value")
+        self.combine_heads = tf.keras.layers.Dense(hidden_size, name="out")
+
+    # pylint: disable=no-self-use
+    def attention(self, query, key, value):
+        score = tf.matmul(query, key, transpose_b=True)
+        dim_key = tf.cast(tf.shape(key)[-1], score.dtype)
+        scaled_score = score / tf.math.sqrt(dim_key)
+        weights = tf.nn.softmax(scaled_score, axis=-1)
+        output = tf.matmul(weights, value)
+        return output, weights
+
+    def separate_heads(self, x, batch_size):
+        x = tf.reshape(x, (batch_size, -1, self.num_heads, self.projection_dim))
+        return tf.transpose(x, perm=[0, 2, 1, 3])
+
+    def call(self, inputs):
+        batch_size = tf.shape(inputs)[0]
+        query = self.query_dense(inputs)
+        key = self.key_dense(inputs)
+        value = self.value_dense(inputs)
+        query = self.separate_heads(query, batch_size)
+        key = self.separate_heads(key, batch_size)
+        value = self.separate_heads(value, batch_size)
+
+        attention, weights = self.attention(query, key, value)
+        attention = tf.transpose(attention, perm=[0, 2, 1, 3])
+        concat_attention = tf.reshape(attention, (batch_size, -1, self.hidden_size))
+        output = self.combine_heads(concat_attention)
+        return output, weights
+
+    def get_config(self):
+        config = super().get_config()
+        config.update({"num_heads": self.num_heads})
+        return config
+
+    @classmethod
+    def from_config(cls, config):
+        return cls(**config)
+
+
+# pylint: disable=too-many-instance-attributes
+@register_keras_serializable()
+class TransformerBlock(tf.keras.layers.Layer):
+    """Implements a Transformer block."""
+
+    def __init__(self, *args, num_heads, mlp_dim, dropout, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.num_heads = num_heads
+        self.mlp_dim = mlp_dim
+        self.dropout = dropout
+
+    def build(self, input_shape):
+        self.att = MultiHeadSelfAttention(
+            num_heads=self.num_heads,
+            name="MultiHeadDotProductAttention_1",
+        )
+        self.mlpblock = tf.keras.Sequential(
+            [
+                tf.keras.layers.Dense(
+                    self.mlp_dim,
+                    activation="linear",
+                    name=f"{self.name}/Dense_0",
+                ),
+                # Both branches of the hasattr() conditional were identical,
+                # so the local gelu() Lambda is used unconditionally.
+                tf.keras.layers.Lambda(lambda x: gelu(x, approximate=False)),
+                tf.keras.layers.Dropout(self.dropout),
+                tf.keras.layers.Dense(input_shape[-1], name=f"{self.name}/Dense_1"),
+                tf.keras.layers.Dropout(self.dropout),
+            ],
+            name="MlpBlock_3",
+        )
+        self.layernorm1 = tf.keras.layers.LayerNormalization(
+            epsilon=1e-6, name="LayerNorm_0"
+        )
+        self.layernorm2 = tf.keras.layers.LayerNormalization(
+            epsilon=1e-6, name="LayerNorm_2"
+        )
+        self.dropout_layer = tf.keras.layers.Dropout(self.dropout)
+
+    def call(self, inputs):
+        # Modified: pre-norm ordering (LayerNorm before attention / MLP).
+        x = self.layernorm1(inputs)
+        x, weights = self.att(x)
+        x = self.dropout_layer(x)
+        x = x + inputs
+        y = self.layernorm2(x)
+        y = self.mlpblock(y)
+        return x + y, weights
+
+    def get_config(self):
+        config = super().get_config()
+        config.update(
+            {
+                "num_heads": self.num_heads,
+                "mlp_dim": self.mlp_dim,
+                "dropout": self.dropout,
+            }
+        )
+        return config
+
+    @classmethod
+    def from_config(cls, config):
+        return cls(**config)
diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/utils.py b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..0eebe5647b88115e721ebd234d82e2f7e3fb2c1d
--- /dev/null
+++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/utils.py
@@ -0,0 +1,366 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import typing
+import warnings
+from urllib import request
+from http import client
+import io
+import pkg_resources
+# import validators
+import numpy as np
+import scipy as sp
+import cv2
+from tqdm import tqdm
+try:
+    import PIL
+    import PIL.Image
+except ImportError:  # pragma: no cover
+    PIL = None
+
+ImageInputType = typing.Union[str, np.ndarray, "PIL.Image.Image", io.BytesIO]
+
+
+def get_imagenet_classes() -> typing.List[str]:
+    """Get the list of ImageNet 2012 classes."""
+    filepath = pkg_resources.resource_filename("vit_keras", "imagenet2012.txt")
+    with open(filepath) as f:
+        classes = [l.strip() for l in f.readlines()]
+    return classes
+
+
+def read(filepath_or_buffer: ImageInputType, size, timeout=None):
+    """Read a file into an image object
+    Args:
+        filepath_or_buffer: The path to the file or any object
+            with a `read` method (such as `io.BytesIO`)
+        size: The size to resize the image to.
+        timeout: If filepath_or_buffer is a URL, the timeout to
+            use for making the HTTP request.
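+    Returns:
+        The image as an RGB numpy array. File, URL, and buffer inputs are
+        resized to (size, size); PIL image inputs are returned unresized.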
+ """ + if PIL is not None and isinstance(filepath_or_buffer, PIL.Image.Image): + return np.array(filepath_or_buffer.convert("RGB")) + if isinstance(filepath_or_buffer, (io.BytesIO, client.HTTPResponse)): + image = np.asarray(bytearray(filepath_or_buffer.read()), dtype=np.uint8) + image = cv2.imdecode(image, cv2.IMREAD_UNCHANGED) + elif isinstance(filepath_or_buffer, str) :#and validators.url(filepath_or_buffer) + with request.urlopen(filepath_or_buffer, timeout=timeout) as r: + return read(r, size=size) + else: + if not os.path.isfile(filepath_or_buffer): + raise FileNotFoundError( + "Could not find image at path: " + filepath_or_buffer + ) + image = cv2.imread(filepath_or_buffer) + if image is None: + raise ValueError(f"An error occurred reading {filepath_or_buffer}.") + # We use cvtColor here instead of just ret[..., ::-1] + # in order to ensure that we provide a contiguous + # array for later processing. Some hashers use ctypes + # to pass the array and non-contiguous arrays can lead + # to erroneous results. + image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + return cv2.resize(image, (size, size)) + + +def apply_embedding_weights(target_layer, source_weights, num_x_patches, num_y_patches): + """Apply embedding weights to a target layer. + + Args: + target_layer: The target layer to which weights will + be applied. + source_weights: The source weights, which will be + resized as necessary. + num_x_patches: Number of patches in width of image. + num_y_patches: Number of patches in height of image. + """ + expected_shape = target_layer.weights[0].shape + if expected_shape != source_weights.shape: + warnings.warn( + "expected_shape and source_weight.shape not matched. expected_shape: {} souce_weights: {}".format(expected_shape,source_weights.shape), + UserWarning, + ) + target_layer.set_weights([source_weights]) + +def apply_embedding_weights_customized(target_layer, source_weights, num_x_patches, num_y_patches): + """Apply embedding weights to a target layer. + + Args: + target_layer: The target layer to which weights will + be applied. + source_weights: The source weights, which will be + resized as necessary. + num_x_patches: Number of patches in width of image. + num_y_patches: Number of patches in height of image. + """ + expected_shape = target_layer.weights[0].shape + if expected_shape != source_weights.shape: + ValueError("error") + target_layer.set_weights([source_weights]) + + +def load_weights_numpy_customized( + model, params_path, pretrained_top, num_x_patches, num_y_patches +): + """Load weights saved using Flax as a numpy array. + + Args: + model: A Keras model to load the weights into. + params_path: Filepath to a numpy archive. + pretrained_top: Whether to load the top layer weights. + num_x_patches: Number of patches in width of image. + num_y_patches: Number of patches in height of image. 
+ """ + params_dict = np.load( + params_path, allow_pickle=False + ) # pylint: disable=unexpected-keyword-arg + pre_logits = any(l.name == "pre_logits" for l in model.layers) + source_keys = list(params_dict.keys()) + n_transformers = len( + set( + "/".join(k.split("/")[:2]) + for k in source_keys + if k.startswith("Transformer/encoderblock_") + ) + ) # equal with 12 + n_transformers_out = sum( + l.name.startswith("Transformer/encoderblock_") for l in model.layers + ) + matches = [] + for tidx in range(n_transformers): # each transformer layer content + encoder = model.get_layer(f"Transformer/encoderblock_{tidx}") + source_prefix = f"Transformer/encoderblock_{tidx}" + matches.extend( + [ + { + "layer": layer, + "keys": [ + f"{source_prefix}/{norm}/{name}" for name in ["scale", "bias"] + ], + } + for norm, layer in [ + ("LayerNorm_0", encoder.layernorm1), + ("LayerNorm_2", encoder.layernorm2), + ] + ] + + [ + { + "layer": encoder.mlpblock.get_layer( + f"{source_prefix}/Dense_{mlpdense}" + ), + "keys": [ + f"{source_prefix}/MlpBlock_3/Dense_{mlpdense}/{name}" + for name in ["kernel", "bias"] + ], + } + for mlpdense in [0, 1] + ] + + [ + { + "layer": layer, + "keys": [ + f"{source_prefix}/MultiHeadDotProductAttention_1/{attvar}/{name}" + for name in ["kernel", "bias"] + ], + "reshape": True, + } + for attvar, layer in [ + ("query", encoder.att.query_dense), + ("key", encoder.att.key_dense), + ("value", encoder.att.value_dense), + ("out", encoder.att.combine_heads), + ] + ] + ) + + for layer_name in ["embedding", "head", "pre_logits"]: + if layer_name == "head" and not pretrained_top: + matches.extend(["head/kernel", "head/bias"]) + continue + if layer_name == "pre_logits" and not pre_logits: + continue + matches.append( + { + "layer": model.get_layer(layer_name), + "keys": [f"{layer_name}/{name}" for name in ["kernel", "bias"]], + } + ) + matches.append({"layer": model.get_layer("class_token"), "keys": ["cls"]}) + matches.append( + { + "layer": model.get_layer("Transformer/encoder_norm"), + "keys": [f"Transformer/encoder_norm/{name}" for name in ["scale", "bias"]], + } + ) + apply_embedding_weights_customized( + target_layer=model.get_layer("Transformer/posembed_input"), + source_weights=params_dict["Transformer/posembed_input/pos_embedding"], + num_x_patches=num_x_patches, + num_y_patches=num_y_patches, + ) + get = ["Transformer/posembed_input/pos_embedding"] + for match in tqdm(matches): + source_weights = [params_dict[k] for k in match["keys"]] + get.extend(match["keys"]) + if match.get("reshape", False): + source_weights = [ + source.reshape(expected.shape) + for source, expected in zip( + source_weights, match["layer"].get_weights() + ) + ] + match["layer"].set_weights(source_weights) + print("================ load done =========================") + + +def load_weights_numpy( + model, params_path, pretrained_top, num_x_patches, num_y_patches +): + """Load weights saved using Flax as a numpy array. + + Args: + model: A Keras model to load the weights into. + params_path: Filepath to a numpy archive. + pretrained_top: Whether to load the top layer weights. + num_x_patches: Number of patches in width of image. + num_y_patches: Number of patches in height of image. 
+ """ + params_dict = np.load( + params_path, allow_pickle=False + ) # pylint: disable=unexpected-keyword-arg + source_keys = list(params_dict.keys()) + pre_logits = any(l.name == "pre_logits" for l in model.layers) + source_keys_used = [] + n_transformers = len( + set( + "/".join(k.split("/")[:2]) + for k in source_keys + if k.startswith("Transformer/encoderblock_") + ) + ) # equal with 12 + n_transformers_out = sum( + l.name.startswith("Transformer/encoderblock_") for l in model.layers + ) + assert n_transformers == n_transformers_out, ( + f"Wrong number of transformers (" + f"{n_transformers_out} in model vs. {n_transformers} in weights)." + ) + + matches = [] + for tidx in range(n_transformers): # each transformer layer content + encoder = model.get_layer(f"Transformer/encoderblock_{tidx}") + source_prefix = f"Transformer/encoderblock_{tidx}" + matches.extend( + [ + { + "layer": layer, + "keys": [ + f"{source_prefix}/{norm}/{name}" for name in ["scale", "bias"] + ], + } + for norm, layer in [ + ("LayerNorm_0", encoder.layernorm1), + ("LayerNorm_2", encoder.layernorm2), + ] + ] + + [ + { + "layer": encoder.mlpblock.get_layer( + f"{source_prefix}/Dense_{mlpdense}" + ), + "keys": [ + f"{source_prefix}/MlpBlock_3/Dense_{mlpdense}/{name}" + for name in ["kernel", "bias"] + ], + } + for mlpdense in [0, 1] + ] + + [ + { + "layer": layer, + "keys": [ + f"{source_prefix}/MultiHeadDotProductAttention_1/{attvar}/{name}" + for name in ["kernel", "bias"] + ], + "reshape": True, + } + for attvar, layer in [ + ("query", encoder.att.query_dense), + ("key", encoder.att.key_dense), + ("value", encoder.att.value_dense), + ("out", encoder.att.combine_heads), + ] + ] + ) + for layer_name in ["embedding", "head", "pre_logits"]: + if layer_name == "head" and not pretrained_top: + source_keys_used.extend(["head/kernel", "head/bias"]) + continue + if layer_name == "pre_logits" and not pre_logits: + continue + matches.append( + { + "layer": model.get_layer(layer_name), + "keys": [f"{layer_name}/{name}" for name in ["kernel", "bias"]], + } + ) + matches.append({"layer": model.get_layer("class_token"), "keys": ["cls"]}) + matches.append( + { + "layer": model.get_layer("Transformer/encoder_norm"), + "keys": [f"Transformer/encoder_norm/{name}" for name in ["scale", "bias"]], + } + ) + apply_embedding_weights( + target_layer=model.get_layer("Transformer/posembed_input"), + source_weights=params_dict["Transformer/posembed_input/pos_embedding"], + num_x_patches=num_x_patches, + num_y_patches=num_y_patches, + ) + source_keys_used.append("Transformer/posembed_input/pos_embedding") + for match in matches: + source_keys_used.extend(match["keys"]) + source_weights = [params_dict[k] for k in match["keys"]] + if match.get("reshape", False): + source_weights = [ + source.reshape(expected.shape) + for source, expected in zip( + source_weights, match["layer"].get_weights() + ) + ] + match["layer"].set_weights(source_weights) + unused = set(source_keys).difference(source_keys_used) + if unused: + warnings.warn(f"Did not use the following weights: {unused}", UserWarning) + target_keys_set = len(source_keys_used) + target_keys_all = len(model.weights) + if target_keys_set < target_keys_all: + warnings.warn( + f"Only set {target_keys_set} of {target_keys_all} weights.", UserWarning + ) diff --git a/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/vit.py b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/vit.py new file mode 100644 index 
0000000000000000000000000000000000000000..45c7bb9e669825096c4506c2252eacbc0a675bdb --- /dev/null +++ b/TensorFlow/contrib/cv/VisionTransformer_ID1217_for_Tensorflow/vit_keras/vit.py @@ -0,0 +1,405 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import typing +import warnings +import tensorflow as tf +import typing_extensions as tx + +from . import layers, utils + +ConfigDict = tx.TypedDict( + "ConfigDict", + { + "dropout": float, + "mlp_dim": int, + "num_heads": int, + "num_layers": int, + "hidden_size": int, + }, +) + +CONFIG_B: ConfigDict = { + "dropout": 0.1, + "mlp_dim": 3072, + "num_heads": 12, + "num_layers": 12, + "hidden_size": 768, +} + +CONFIG_L: ConfigDict = { + "dropout": 0.1, + "mlp_dim": 4096, + "num_heads": 16, + "num_layers": 24, + "hidden_size": 1024, +} + +BASE_URL = "https://github.com/faustomorales/vit-keras/releases/download/dl" +WEIGHTS = {"imagenet21k": 21_843, "imagenet21k+imagenet2012": 1_000} +SIZES = {"B_16", "B_32", "L_16", "L_32"} + +ImageSizeArg = typing.Union[typing.Tuple[int, int], int] + + +def preprocess_inputs(X): + """Preprocess images""" + return tf.keras.applications.imagenet_utils.preprocess_input( + X, data_format=None, mode="tf" + ) + + +def interpret_image_size(image_size_arg: ImageSizeArg) -> typing.Tuple[int, int]: + """Process the image_size argument whether a tuple or int.""" + if isinstance(image_size_arg, int): + return (image_size_arg, image_size_arg) + if ( + isinstance(image_size_arg, tuple) + and len(image_size_arg) == 2 + and all(map(lambda v: isinstance(v, int), image_size_arg)) + ): + return image_size_arg + raise ValueError( + f"The image_size argument must be a tuple of 2 integers or a single integer. Received: {image_size_arg}" + ) + + +def build_model( + image_size: ImageSizeArg, + patch_size: int, + num_layers: int, + hidden_size: int, + num_heads: int, + name: str, + mlp_dim: int, + classes: int, + dropout=0.1, + activation="linear", + include_top=True, + representation_size=None, + training=False, +): + """Build a ViT model. + + Args: + image_size: The size of input images. + patch_size: The size of each patch (must fit evenly in image_size) + classes: optional number of classes to classify images + into, only to be specified if `include_top` is True, and + if no `weights` argument is specified. 
+        num_layers: The number of transformer layers to use.
+        hidden_size: The number of filters to use.
+        num_heads: The number of transformer heads.
+        mlp_dim: The number of dimensions for the MLP output in the transformers.
+        dropout: Fraction of the units to drop in the dense layers.
+        activation: The activation to use for the final layer.
+        include_top: Whether to include the final classification layer. If not,
+            the output will have dimensions (batch_size, hidden_size).
+        representation_size: The size of the representation prior to the
+            classification layer. If None, no Dense layer is inserted.
+    """
+    image_size_tuple = interpret_image_size(image_size)
+    assert (image_size_tuple[0] % patch_size == 0) and (
+        image_size_tuple[1] % patch_size == 0
+    ), "image_size must be a multiple of patch_size"
+    x = tf.keras.layers.Input(shape=(image_size_tuple[0], image_size_tuple[1], 3))
+    y = tf.keras.layers.Conv2D(
+        filters=hidden_size,
+        kernel_size=patch_size,
+        strides=patch_size,
+        padding="valid",
+        name="embedding",
+    )(x)
+    y = tf.keras.layers.Reshape((y.shape[1] * y.shape[2], hidden_size))(y)  # (None, n_patches, hidden_size), e.g. (None, 576, 768)
+    y = layers.ClassToken(name="class_token")(y)  # prepends one (1, 1, hidden_size) token -> (None, 577, 768)
+    y = layers.AddPositionEmbs(name="Transformer/posembed_input")(y)  # adds (1, 577, 768) positional embeddings
+    for n in range(num_layers):
+        y, _ = layers.TransformerBlock(
+            num_heads=num_heads,
+            mlp_dim=mlp_dim,
+            dropout=dropout,
+            name=f"Transformer/encoderblock_{n}",
+        )(y)  # the `training` argument was removed from layers.TransformerBlock in this port
+    y = tf.keras.layers.LayerNormalization(
+        epsilon=1e-6, name="Transformer/encoder_norm"
+    )(y)
+    y = tf.keras.layers.Lambda(lambda v: v[:, 0], name="ExtractToken")(y)
+    if representation_size is not None:
+        y = tf.keras.layers.Dense(
+            representation_size, name="pre_logits", activation="tanh"
+        )(y)
+    if include_top:
+        y = tf.keras.layers.Dense(classes, name="head", activation=activation)(y)
+    return tf.keras.models.Model(inputs=x, outputs=y, name=name)
+
+
+def validate_pretrained_top(
+    include_top: bool, pretrained: bool, classes: int, weights: str
+):
+    """Validate that the pretrained weight configuration makes sense."""
+    assert weights in WEIGHTS, f"Unexpected weights: {weights}."
+    expected_classes = WEIGHTS[weights]
+    if classes != expected_classes:
+        warnings.warn(
+            f"Can only use pretrained_top with {weights} if classes = {expected_classes}. Setting manually.",
+            UserWarning,
+        )
+    assert include_top, "Can only use pretrained_top with include_top."
+    assert pretrained, "Can only use pretrained_top with pretrained."
+    return expected_classes
+
+
+def load_pretrained(
+    size: str,
+    weights: str,
+    pretrained_top: bool,
+    model: tf.keras.models.Model,
+    image_size: ImageSizeArg,
+    patch_size: int,
+):
+    """Load model weights for a known configuration."""
+    image_size_tuple = interpret_image_size(image_size)
+    fname = f"ViT-{size}_{weights}.npz"
+    origin = f"{BASE_URL}/{fname}"
+    local_filepath = tf.keras.utils.get_file(fname, origin, cache_subdir="weights")
+    utils.load_weights_numpy(
+        model=model,
+        params_path=local_filepath,
+        pretrained_top=pretrained_top,
+        num_x_patches=image_size_tuple[1] // patch_size,
+        num_y_patches=image_size_tuple[0] // patch_size,
+    )
+
+
+def load_pretrained_customized(
+    size: str,
+    weights: str,
+    pretrained_top: bool,
+    model: tf.keras.models.Model,
+    image_size: ImageSizeArg,
+    patch_size: int,
+    pretrained_path: str,
+):
+    """Load model weights for a known configuration.
Customized model weight.""" + image_size_tuple = interpret_image_size(image_size) + local_filepath = pretrained_path + utils.load_weights_numpy_customized( + model=model, + params_path=local_filepath, + pretrained_top=pretrained_top, + num_x_patches=image_size_tuple[1] // patch_size, + num_y_patches=image_size_tuple[0] // patch_size, + ) + +def vit_b16_load_pretrain( + image_size: ImageSizeArg = (224, 224), + classes=1000, + activation="linear", + include_top=True, + pretrained=True, + pretrained_top=True, + pretrained_path="", + weights="imagenet21k+imagenet2012", +): + """Build ViT-B16. All arguments passed to build_model.""" + model = build_model( + **CONFIG_B, + name="vit-b16", + patch_size=16, + image_size=image_size, + classes=classes, + activation=activation, + include_top=include_top, + representation_size=768 if weights == "imagenet21k" else None, + ) + + # load_pretrained_customized( + # size="B_16", + # weights=weights, + # model=model, + # pretrained_top=pretrained_top, + # image_size=image_size, + # patch_size=16, + # pretrained_path=pretrained_path + # ) + return model + +def vit_b16( + image_size: ImageSizeArg = (224, 224), + classes=1000, + activation="linear", + include_top=True, + pretrained=True, + pretrained_top=True, + weights="imagenet21k+imagenet2012", +): + """Build ViT-B16. All arguments passed to build_model.""" + if pretrained_top: + classes = validate_pretrained_top( + include_top=include_top, + pretrained=pretrained, + classes=classes, + weights=weights, + ) + model = build_model( + **CONFIG_B, + name="vit-b16", + patch_size=16, + image_size=image_size, + classes=classes, + activation=activation, + include_top=include_top, + representation_size=768 if weights == "imagenet21k" else None, + ) + + if pretrained: + load_pretrained( + size="B_16", + weights=weights, + model=model, + pretrained_top=pretrained_top, + image_size=image_size, + patch_size=16, + ) + return model + + +def vit_b32( + image_size: ImageSizeArg = (224, 224), + classes=1000, + activation="linear", + include_top=True, + pretrained=True, + pretrained_top=True, + weights="imagenet21k+imagenet2012", +): + """Build ViT-B32. All arguments passed to build_model.""" + if pretrained_top: + classes = validate_pretrained_top( + include_top=include_top, + pretrained=pretrained, + classes=classes, + weights=weights, + ) + model = build_model( + **CONFIG_B, + name="vit-b32", + patch_size=32, + image_size=image_size, + classes=classes, + activation=activation, + include_top=include_top, + representation_size=768 if weights == "imagenet21k" else None, + ) + if pretrained: + load_pretrained( + size="B_32", + weights=weights, + model=model, + pretrained_top=pretrained_top, + patch_size=32, + image_size=image_size, + ) + return model + + +def vit_l16( + image_size: ImageSizeArg = (384, 384), + classes=1000, + activation="linear", + include_top=True, + pretrained=True, + pretrained_top=True, + weights="imagenet21k+imagenet2012", +): + """Build ViT-L16. 
All arguments passed to build_model.""" + if pretrained_top: + classes = validate_pretrained_top( + include_top=include_top, + pretrained=pretrained, + classes=classes, + weights=weights, + ) + model = build_model( + **CONFIG_L, + patch_size=16, + name="vit-l16", + image_size=image_size, + classes=classes, + activation=activation, + include_top=include_top, + representation_size=1024 if weights == "imagenet21k" else None, + ) + if pretrained: + load_pretrained( + size="L_16", + weights=weights, + model=model, + pretrained_top=pretrained_top, + patch_size=16, + image_size=image_size, + ) + return model + + +def vit_l32( + image_size: ImageSizeArg = (384, 384), + classes=1000, + activation="linear", + include_top=True, + pretrained=True, + pretrained_top=True, + weights="imagenet21k+imagenet2012", +): + """Build ViT-L32. All arguments passed to build_model.""" + if pretrained_top: + classes = validate_pretrained_top( + include_top=include_top, + pretrained=pretrained, + classes=classes, + weights=weights, + ) + model = build_model( + **CONFIG_L, + patch_size=32, + name="vit-l32", + image_size=image_size, + classes=classes, + activation=activation, + include_top=include_top, + representation_size=1024 if weights == "imagenet21k" else None, + ) + if pretrained: + load_pretrained( + size="L_32", + weights=weights, + model=model, + pretrained_top=pretrained_top, + patch_size=32, + image_size=image_size, + ) + return model diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/LICENSE b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..dbbb273fd80e55f1722fc7a6d4a44e834812ea99 --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/LICENSE @@ -0,0 +1,284 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +------------------ +Files: third_party/compute_library/... + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +------------------ +Files: ACKNOWLEDGEMENTS +LICENSE + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR + ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------------------ +Files: third_party/hexagon + +Copyright (c) 2016-2019, The Linux Foundation. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the +disclaimer below) provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of The Linux Foundation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE +GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT +HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/README.md b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..b35f3cc865018eac983189fc304530071c7c6461
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/README.md
@@ -0,0 +1,277 @@
+- [Basic Information](#基本信息.md)
+
+- [Overview](#概述.md)
+
+- [Training Environment Setup](#训练环境准备.md)
+
+- [Quick Start](#快速上手.md)
+
+- [Training Results](#训练结果.md)
+
+- [Advanced Reference](#高级参考.md)
+

+<h2 id="基本信息.md">Basic Information</h2>

+
+**Publisher: Huawei**
+
+**Application Domain: Computer Vision**
+
+**Version: 1.2**
+
+**Modified: 2022.4.24**
+
+**Size: 509.3MB**
+
+**Framework: TensorFlow 1.12.0**
+
+**Model Format: ckpt**
+
+**Precision: Mixed**
+
+**Processor: Ascend 910**
+
+**Categories: Official**
+
+**Description: Image cartoonization (photo-to-anime stylization) based on the TensorFlow framework**
+

+<h2 id="概述.md">Overview</h2>

+
+- White-box Cartoonization is an image cartoonization algorithm proposed in 2020 by Xinrui Wang and Jinze Yu, published at the IEEE Conference on Computer Vision and Pattern Recognition (CVPR).
+  It identifies three white-box representations in images: a surface representation containing the smooth surfaces of cartoon images, a structure representation referring to the sparse color blocks and flattened global content of the celluloid workflow, and a texture representation reflecting the high-frequency textures, contours, and details of cartoon images. A generative adversarial network (GAN) framework is used to learn the extracted representations and cartoonize the images.
+
+  - Reference paper:
+
+    https://github.com/SystemErrorWang/White-box-Cartoonization/tree/master/paper
+
+  - Reference implementation:
+
+    https://github.com/SystemErrorWang/White-box-Cartoonization
+
+  - Implementation adapted for the Ascend AI processor:
+
+    https://gitee.com/ascend/modelzoo/tree/master/built-in/TensorFlow/Research/nlp/LeNet_for_TensorFlow
+
+  - To fetch the code at a specific commit_id via Git:
+
+    ```
+    git clone {repository_url}        # clone the repository
+    cd {repository_name}              # enter the model's repository directory
+    git checkout {branch}             # switch to the corresponding branch
+    git reset --hard {commit_id}      # reset the code to the given commit_id
+    cd {code_path}                    # enter the model code path; skip if the repo contains only this model
+    ```
+
+  ## Default Configuration
+
+  - Training dataset:
+
+    Scenery and portrait photos together with the corresponding scenery and portrait cartoon images
+
+    Image input format: jpg
+
+  - Test dataset preprocessing:
+
+    Test photos
+
+    Image input format: jpg
+
+  - Training hyperparameters
+
+    - Total iterations: 100000
+
+  ## Supported Features
+
+  | Feature | Supported |
+  |-------|------|
+  | Distributed training | No |
+  | Mixed precision | Yes |
+  | Data parallelism | Yes |
+
+  ## Forced-Precision Training
+
+  The Ascend 910 AI processor supports forcing the computation precision. Operator overflow detection showed that the Conv2d operator overflows; since this operator only supports fp16 on the NPU, forced precision has to be enabled.
+
+  ## Enabling Forced Precision
+
+  The scripts enable forced precision by default. The reference code for setting the precision_mode parameter is as follows.
+
+  ```
+  custom_op = config.graph_options.rewrite_options.custom_optimizers.add()
+  custom_op.name = "NpuOptimizer"
+  config.graph_options.rewrite_options.remapping = RewriterConfig.OFF  # must be disabled explicitly
+  config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF  # must be disabled explicitly
+  custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("force_fp32")
+  ```
+
+  ## Loss_Scale
+
+  Because of operator precision overflow, Loss_Scale is enabled to improve accuracy. The code uses two optimizers, so two loss_scale_managers are created.
+
+  ```
+  loss_scale_manager_g = ExponentialUpdateLossScaleManager(init_loss_scale=2**32, incr_every_n_steps=1000,
+                                                           decr_every_n_nan_or_inf=2, decr_ratio=0.5)
+  loss_scale_manager_d = ExponentialUpdateLossScaleManager(init_loss_scale=2**32, incr_every_n_steps=1000,
+                                                           decr_every_n_nan_or_inf=2, decr_ratio=0.5)
+  ```
+
+  Wrap each optimizer with a loss scale:
+
+  ```
+  g_optim = tf.train.AdamOptimizer(args.adv_train_lr, beta1=0.5, beta2=0.99)
+  g_optim = NPULossScaleOptimizer(g_optim, loss_scale_manager_g).minimize(g_loss_total, var_list=gene_vars)
+
+  d_optim = tf.train.AdamOptimizer(args.adv_train_lr, beta1=0.5, beta2=0.99)
+  d_optim = NPULossScaleOptimizer(d_optim, loss_scale_manager_d).minimize(d_loss_total, var_list=disc_vars)
+  ```
+
+  Fetch and print the loss scale:
+
+  ```
+  lossScale = tf.get_default_graph().get_tensor_by_name("loss_scale:0")
+  l_s_g, _, g_loss, r_loss = sess.run([lossScale, g_optim, g_loss_total, recon_loss],
+                                      feed_dict={input_photo: photo_batch, input_superpixel: superpixel_batch,
+                                                 input_cartoon: cartoon_batch})
+
+  l_s_d, _, d_loss, train_info = sess.run([lossScale, d_optim, d_loss_total, summary_op],
+                                          feed_dict={input_photo: photo_batch,
+                                                     input_superpixel: superpixel_batch,
+                                                     input_cartoon: cartoon_batch})
+  print('Iter: {}, loss_scale g: {}, loss_scale d: {}'.format(total_iter, l_s_g, l_s_d))
+  ```
+
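+  For reference, the following is a minimal, self-contained sketch (not this repository's training script) of how the forced-precision session config and a loss-scaled NPU optimizer fit together. It assumes the npu_bridge TF1 adapter that ships with CANN, using its documented import paths; the toy variable and loss below are hypothetical stand-ins for the real generator/discriminator losses.
+
+  ```
+  import tensorflow as tf
+  from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig
+  from npu_bridge.estimator.npu.npu_loss_scale_manager import ExponentialUpdateLossScaleManager
+  from npu_bridge.estimator.npu.npu_loss_scale_optimizer import NPULossScaleOptimizer
+
+  # NPU session config with forced fp32 precision, as in the snippet above.
+  config = tf.ConfigProto()
+  custom_op = config.graph_options.rewrite_options.custom_optimizers.add()
+  custom_op.name = "NpuOptimizer"
+  config.graph_options.rewrite_options.remapping = RewriterConfig.OFF
+  config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF
+  custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("force_fp32")
+
+  # Toy loss standing in for g_loss_total / d_loss_total.
+  x = tf.Variable(3.0, name="x")
+  toy_loss = tf.square(x - 1.0)
+
+  loss_scale_manager = ExponentialUpdateLossScaleManager(
+      init_loss_scale=2**32, incr_every_n_steps=1000,
+      decr_every_n_nan_or_inf=2, decr_ratio=0.5)
+  optimizer = tf.train.AdamOptimizer(2e-4, beta1=0.5, beta2=0.99)
+  train_op = NPULossScaleOptimizer(optimizer, loss_scale_manager).minimize(toy_loss)
+
+  with tf.Session(config=config) as sess:
+      sess.run(tf.global_variables_initializer())
+      _, cur_loss = sess.run([train_op, toy_loss])
+      print("loss:", cur_loss)
+  ```
+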

+<h2 id="训练环境准备.md">Training Environment Setup</h2>

+
+1. For hardware environment setup, see the hardware product documentation "[Driver and Firmware Installation and Upgrade Guide](https://support.huawei.com/enterprise/zh/category/ai-computing-platform-pid-1557196528909)". Firmware and drivers matching the CANN version must be installed on the hardware.
+2. Install Docker on the host and log in to the [Ascend Hub](https://ascendhub.huawei.com/#/detail?name=ascend-tensorflow-arm) to obtain the image.
+
+   The images supported by this model are listed in Table 1.
+
+   **Table 1** Image list
+
+   | Image | Image Version | Compatible CANN Version |
+   | --- | --- | --- |
+   | ascend-tensorflow-arm | 20.2.0 | 20.2 |
+

+<h2 id="快速上手.md">Quick Start</h2>

+
+- Dataset preparation
+  1. Training uses the scenery/portrait photo and cartoon dataset described under "Default Configuration"; please obtain the dataset yourself.
+
+  ## Model Training
+
+  - Click "Download Now" and choose a suitable way to download the source package.
+
+  - Before starting training, configure the environment variables required by the program.
+
+    For environment variable configuration, see:
+
+    [Ascend 910 training platform environment variable setup](https://gitee.com/ascend/modelzoo/wikis/Ascend%20910%E8%AE%AD%E7%BB%83%E5%B9%B3%E5%8F%B0%E7%8E%AF%E5%A2%83%E5%8F%98%E9%87%8F%E8%AE%BE%E7%BD%AE?sort_id=3148819)
+
+  - Single-card training
+
+    1. Configure the training parameters.
+
+       In the script npu_train.sh, first configure the code_dir, work_dir, dataset_path, and output_path parameters. Set data_path according to the actual path, or pass it as an argument on the training command line.
+
+       ```
+       total_iter=100000
+       data_path="../dataset"
+       ```
+
+    2. Start training.
+
+       Launch single-card training (script: test/train_full_1p.sh)
+
+       ```
+       bash train_full_1p.sh --data_path=../dataset
+       ```
+

+<h2 id="训练结果.md">Training Results</h2>

+
+- Accuracy comparison
+
+  | Accuracy metric | NPU (measured) | GPU (measured) |
+  |---|---|---|
+  | FID to photo | 29.13 | 28.79 |
+  | FID to cartoon | 107.41 | 101.31 |
+
+- Performance comparison
+
+  | Performance metric | NPU (measured) | GPU (measured) |
+  |---|---|---|
+  | Throughput | 1.46 it/s | 1.06 it/s |
+

+<h2 id="高级参考.md">Advanced Reference</h2>

+
+## Scripts and Sample Code
+
+```
+├── README.md                          // documentation
+├── modelarts_entry.py                 // ModelArts training entry point
+├── modelarts_entry_acc.py             // accuracy test entry point
+├── modelarts_entry_perf.py            // performance test entry point
+├── modelzoo_level.txt
+├── npu_train.sh
+├── pip-requirements.txt               // training dependencies
+├── test                               // test scripts
+│   ├── train_full_1p.sh
+│   └── train_performance_1p.sh
+├── test_code                          // test code
+│   ├── cartoonize.py                  // inference script (generates cartoonized images)
+│   ├── guided_filter.py
+│   ├── network.py
+│   ├── saved_models
+│   └── test_images
+├── train_code                         // training code
+│   ├── guided_filter.py
+│   ├── layers.py
+│   ├── loss.py
+│   ├── network.py
+│   ├── ops_info.json
+│   ├── pretrain.py
+│   ├── selective_search
+│   ├── train.py                       // training entry point
+│   └── utils.py
+└── vgg19_no_fc.npy                    // pretrained weights
+```
+
+## Script Parameters
+
+```
+--data_path      dataset path, default: cache/dataset
+--output_path    output path, default: cache/output
+--batch_size     batch size per NPU, default: 16
+--total_iter     number of iterations, default: 100000
+```
+
+## Training Process
+
+1. Start single-card training with the training commands described in "Model Training".
+
+2. The reference checkpoint path is ./output/train_cartoon/saved_models/
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/modelarts_entry.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/modelarts_entry.py
new file mode 100644
index 0000000000000000000000000000000000000000..c1be9d1c0ca69d92fc19e6b05f9f6dd0c55aa23d
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/modelarts_entry.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+import os
+import argparse
+import sys
+
+# Parse the data_url input argument
+parser = argparse.ArgumentParser()
+parser.add_argument("--data_url", type=str, default="/cache/dataset")
+parser.add_argument("--train_url", type=str, default="/cache/output")
+config = parser.parse_args()
+
+print("[CANN-ZhongZhi] code_dir path is [%s]" % (sys.path[0]))
+code_dir = sys.path[0]
+
+print("[CANN-ZhongZhi] work_dir path is [%s]" % (os.getcwd()))
+work_dir = os.getcwd()
+
+print("[CANN-ZhongZhi] start run train shell")
+# Launch the training shell script
+shell_cmd = ("bash %s/npu_train.sh %s %s %s %s " % (code_dir, code_dir, work_dir, config.data_url, config.train_url))
+os.system(shell_cmd)
+print("[CANN-ZhongZhi] finish run train shell")
+
+
+
+
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/modelarts_entry_acc.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/modelarts_entry_acc.py
new file mode 100644
index 0000000000000000000000000000000000000000..17bdc85682082d38bf2d5c322e551a46540b7496
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/modelarts_entry_acc.py
@@ -0,0 +1,125 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
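+# modelarts_entry_acc.py: ModelArts wrapper that launches the full-accuracy
+# training script test/train_full_1p.sh with the platform-provided paths.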
+
+import os
+import argparse
+import sys
+
+# Parse the data_url input argument
+parser = argparse.ArgumentParser()
+parser.add_argument("--data_url", type=str, default="/home/wbc/dataset")
+parser.add_argument("--train_url", type=str, default="/home/wbc/output")
+config = parser.parse_args()
+
+print("[CANN-Modelzoo] code_dir path is [%s]" % (sys.path[0]))
+code_dir = sys.path[0]
+os.chdir(code_dir)
+print("[CANN-Modelzoo] work_dir path is [%s]" % (os.getcwd()))
+
+print("[CANN-Modelzoo] before train - list my run files:")
+os.system("ls -al /usr/local/Ascend/ascend-toolkit/")
+
+print("[CANN-Modelzoo] before train - list my dataset files:")
+os.system("ls -al %s" % config.data_url)
+
+print("[CANN-Modelzoo] start run train shell")
+# Make the shell scripts Linux-executable (convert line endings)
+os.system("dos2unix ./test/*")
+
+# Run train_full_1p.sh or train_performance_1p.sh as appropriate.
+# The difference: the performance script runs only a few steps (within
+# 15 minutes) and focuses on FPS; the full script trains to final accuracy.
+os.system("bash ./test/train_full_1p.sh --data_path=%s --output_path=%s" % (config.data_url, config.train_url))
+
+print("[CANN-Modelzoo] finish run train shell")
+
+# Back up everything in the current working directory to the OBS output
+print("[CANN-Modelzoo] after train - list my output files:")
+os.system("cp -r %s %s " % (code_dir, config.train_url))
+os.system("ls -al %s" % config.train_url)
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/modelarts_entry_perf.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/modelarts_entry_perf.py
new file mode 100644
index 0000000000000000000000000000000000000000..eed7cefbd829843ccd96763498b735a231d89e75
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/modelarts_entry_perf.py
@@ -0,0 +1,63 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
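+# modelarts_entry_perf.py: ModelArts wrapper that launches the short
+# performance benchmark script test/train_performance_1p.sh.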
+
+import os
+import argparse
+import sys
+
+# Parse the data_url input argument
+parser = argparse.ArgumentParser()
+parser.add_argument("--data_url", type=str, default="/home/ma-user/modelarts/inputs/data_url_0")
+parser.add_argument("--train_url", type=str, default="/home/ma-user/modelarts/outputs/train_url_0/")
+config = parser.parse_args()
+
+print("[CANN-Modelzoo] code_dir path is [%s]" % (sys.path[0]))
+code_dir = sys.path[0]
+os.chdir(code_dir)
+print("[CANN-Modelzoo] work_dir path is [%s]" % (os.getcwd()))
+
+print("[CANN-Modelzoo] before train - list my run files:")
+os.system("ls -al /usr/local/Ascend/ascend-toolkit/")
+
+print("[CANN-Modelzoo] before train - list my dataset files:")
+os.system("ls -al %s" % config.data_url)
+
+print("[CANN-Modelzoo] start run train shell")
+# Make the shell scripts Linux-executable (convert line endings)
+os.system("dos2unix ./test/*")
+
+# Run train_full_1p.sh or train_performance_1p.sh as appropriate.
+# The performance script runs only a few steps (within 15 minutes) and
+# focuses on throughput (FPS); the full script trains to final accuracy.
+os.system("bash ./test/train_performance_1p.sh --data_path=%s --output_path=%s " % (config.data_url, config.train_url))
+
+print("[CANN-Modelzoo] finish run train shell")
+
+# Back up everything in the current working directory to the OBS output
+print("[CANN-Modelzoo] after train - list my output files:")
+os.system("cp -r %s %s " % (code_dir, config.train_url))
+os.system("ls -al %s" % config.train_url)
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/modelzoo_level.txt b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/modelzoo_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2a39f2221b8103c0ae90337cb4b6bd67c69f2d11
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/modelzoo_level.txt
@@ -0,0 +1,2 @@
+FuncStatus:OK
+PrecisionStatus:OK
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/npu_train.sh b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/npu_train.sh
new file mode 100644
index 0000000000000000000000000000000000000000..6601bff4a8226364d3ab5dde121d5653af5876bf
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/npu_train.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+code_dir=$1
+work_dir=$2
+dataset_path=$3
+output_path=$4
+
+############# Pre-training check of input directories #############
+echo "[CANN-ZhongZhi] before train - list my run files[/usr/local/Ascend/ascend-toolkit]:"
+ls -al /usr/local/Ascend/ascend-toolkit
+echo ""
+
+echo "[CANN-ZhongZhi] before train - list my code files[${code_dir}]:"
+ls -al ${code_dir}
+echo ""
+
+echo "[CANN-ZhongZhi] before train - list my work files[${work_dir}]:"
+ls -al ${work_dir}
+echo ""
+
+echo "[CANN-ZhongZhi] before train - list my dataset files[${dataset_path}]:"
+ls -al ${dataset_path}
+echo ""
+
+echo "[CANN-ZhongZhi] before train - list my output files[${output_path}]:"
+ls -al ${output_path}
+echo ""
+
+###### Environment variable overrides ######
+### Modify environment variables here if needed
+# Set the log level to info
+#export ASCEND_GLOBAL_LOG_LEVEL=1
+# Also print logs to stdout
+#export ASCEND_SLOG_PRINT_TO_STDOUT=1
+#export TF_CPP_MIN_LOG_LEVEL=0
+env > ${output_path}/my_env.log
+
+###### Training execution ######
+### The launch command differs per network; modify as needed
+cd ${code_dir}
+python3.7 ${code_dir}/train_code/train.py --data_path=${dataset_path} --output_path=${output_path} --total_iter=100000
-eq 0 ]; +then + echo "[CANN-ZhongZhi] train return success" +else + echo "[CANN-ZhongZhi] train return failed" +fi + +######训练后把需要备份的内容保存到output_path###### +###此处每个网络不同,视情况添加cp +cp -r ${work_dir} ${output_path} + +######训练后输出目录文件确认###### +echo "[CANN-ZhongZhi] after train - list my work files[${work_dir}]:" +ls -al ${work_dir} +echo "" + +echo "[CANN-ZhongZhi] after train - list my output files[${output_path}]:" +ls -al ${output_path} +echo "" diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/pip-requirements.txt b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/pip-requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..ccbe6b19272470531d45577f6c92346fff2c90de --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/pip-requirements.txt @@ -0,0 +1,6 @@ +tensorflow-gpu==1.12.0 +numpy==1.19.2 +opencv-python +tqdm +joblib +scikit-image==0.14.5 diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test/train_full_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..57edb15c09346fd9c020c25e60611205f6453915 --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test/train_full_1p.sh @@ -0,0 +1,197 @@ +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" + +pip3 install scikit-image==0.14.5 +pip3 install numpy==1.19.2 +pip3 install pytorch-fid + + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +modelarts_flag=${MODELARTS_MODEL_PATH} +if [ x"${modelarts_flag}" != x ]; +then + echo "running without etp..." 
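+    # NOTE (added commentary): on ModelArts the job's console output is
+    # captured under /home/ma-user/modelarts/log/, one file per process;
+    # the two lines below locate the per-rank file (its name contains
+    # "proc-rank") and point all later metric parsing at it.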
+    print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank`
+    print_log="/home/ma-user/modelarts/log/${print_log_name}"
+fi
+echo "### get your log here : ${print_log}"
+
+CaseName=""
+function get_casename()
+{
+    if [ x"${perf_flag}" = x1 ];
+    then
+        CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf'
+    else
+        CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc'
+    fi
+}
+
+# Change to the code directory
+cd ${cur_path}/../
+rm -rf ./test/output/${ASCEND_DEVICE_ID}
+mkdir -p ./test/output/${ASCEND_DEVICE_ID}
+
+# Record the training start time; no need to modify
+start_time=$(date +%s)
+##########################################################
+######### Do NOT modify lines 3 through 100 ##############
+######### Do NOT modify lines 3 through 100 ##############
+######### Do NOT modify lines 3 through 100 ##############
+##########################################################
+
+#=========================================================
+#=========================================================
+#======== Training command; modify for your network ======
+#=========================================================
+#=========================================================
+# Basic parameters; review and adjust for your model
+# The training dataset is under ${data_path}; use this variable directly
+# The training output directory is ${output_path}; use this variable directly
+# Other basic parameters may be added, but keep batch_size and set it correctly
+batch_size=16
+
+if [ x"${modelarts_flag}" != x ];
+then
+    #python3 ./train_code/pretrain.py --data_path=${data_path}/dataset/
+    python3 ./train_code/train.py --data_path=${data_path}/dataset/ --REAL_PATH=${data_path}/dataset/ --output_path=${output_path} 1>>${print_log} 2>&1
+    python3 ./test_code/cartoonize.py --data_path=${data_path}/dataset/ --output_path=${output_path} 1>>${print_log} 2>&1
+    python3 -m pytorch_fid ${output_path}/cartoonized_scenery/ ${data_path}/dataset/scenery_photo 1>>${print_log} 2>&1
+    python3 -m pytorch_fid ${output_path}/cartoonized_scenery/ ${data_path}/dataset/scenery_cartoon 1>>${print_log} 2>&1
+
+else
+    #python3 ./train_code/pretrain.py --data_path=${data_path}/dataset/
+    python3 ./train_code/train.py --data_path=${data_path}/dataset/ --REAL_PATH=${data_path}/dataset/ --output_path=${output_path} 1>>${print_log} 2>&1
+    python3 ./test_code/cartoonize.py --data_path=${data_path}/dataset/ --output_path=${output_path} 1>>${print_log} 2>&1
+    python3 -m pytorch_fid ${output_path}/cartoonized_scenery/ ${data_path}/dataset/scenery_photo 1>>${print_log} 2>&1
+    python3 -m pytorch_fid ${output_path}/cartoonized_scenery/ ${data_path}/dataset/scenery_cartoon 1>>${print_log} 2>&1
+fi
+
+# Performance metrics
+StepTime=`grep "time_per_step" ${print_log} | tail -n 10 | awk '{print $5}' | awk '{sum+=$1} END {print sum/NR}'`
+FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'`
+
+# Accuracy metrics
+train_accuracy=`grep "FID:" ${print_log} | awk '{print $NF}'`
+# Extract all loss lines
+grep "d_loss:" ${print_log} | awk '{print $4}' | awk -F"," '{print $1}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt
+
+###########################################################
+######### Do not modify anything below this line ##########
+######### Do not modify anything below this line ##########
+######### Do not modify anything below this line ##########
+###########################################################
+
+# Check whether this run actually used the Ascend NPU
+use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l`
+if [ x"${use_npu_flag}" == x0 ];
+then
+    echo "------------------ ERROR NOTICE START ------------------"
+    echo "ERROR, your task hasn't used the Ascend NPU, please check your NPU migration."
+ echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." + echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test/train_performance_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..3e399a5716455c9e2f4c0f2741b7a1c3c78c5dd7 --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test/train_performance_1p.sh @@ -0,0 +1,199 @@ +#!/bin/bash + +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## +# shell脚本所在路径 +cur_path=`echo $(cd $(dirname $0);pwd)` + +# 判断当前shell是否是performance +perf_flag=`echo $0 | grep performance | wc -l` + +# 当前执行网络的名称 +Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'` + +export RANK_SIZE=1 +export RANK_ID=0 +export JOB_ID=10087 + +# 路径参数初始化 +data_path="" +output_path="" + +pip3 install scikit-image==0.14.5 +pip3 install numpy==1.19.2 +pip3 install pytorch-fid + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_performance_1P.sh " + echo " " + echo "parameter explain: + --data_path # dataset of training + --output_path # output of training + --train_steps # max_step for training + --train_epochs # max_epoch for training + --batch_size # batch size + -h/--help show help message + " + exit 1 +fi + +# 参数校验,不需要修改 +for para in $* +do + if [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == 
--output_path* ]];then + output_path=`echo ${para#*=}` + elif [[ $para == --train_steps* ]];then + train_steps=`echo ${para#*=}` + elif [[ $para == --train_epochs* ]];then + train_epochs=`echo ${para#*=}` + elif [[ $para == --batch_size* ]];then + batch_size=`echo ${para#*=}` + fi +done + +# 校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +modelarts_flag=${MODELARTS_MODEL_PATH} +if [ x"${modelarts_flag}" != x ]; +then + echo "running with modelarts..." + print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` + print_log="/home/ma-user/modelarts/log/${print_log_name}" +fi +echo "### get your log here : ${print_log}" + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} + + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 基础参数,需要模型审视修改 +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +train_steps=1000 +batch_size=16 + +if [ x"${modelarts_flag}" != x ]; +then + #python3 ./train_code/pretrain.py --data_path=${data_path}/dataset/ + echo "*******************************************" + python3 ./train_code/train.py --data_path=${data_path}/dataset/ --REAL_PATH=${data_path}/dataset/ --output_path=${output_path} --total_iter=${train_steps} 1>>${print_log} 2>&1 + echo "********************************************************" + python3 ./test_code/cartoonize.py --data_path=${data_path}/dataset/ --output_path=${output_path} 1>>${print_log} 2>&1 + +else + #python3 ./train_code/pretrain.py --data_path=${data_path}/dataset/ + echo "*****************************************************" + python3 ./train_code/train.py --data_path=${data_path}/dataset/ --REAL_PATH=${data_path}/dataset/ --output_path=${output_path} --total_iter=${train_steps} 1>>${print_log} 2>&1 + echo "***************************************" + python3 ./test_code/cartoonize.py --data_path=${data_path}/dataset/ --output_path=${output_path} 1>>${print_log} 2>&1 +fi + +# 性能相关数据计算 +StepTime=`grep "time_per_step" ${print_log} | tail -n 10 | awk '{print $5}' | awk '{sum+=$1} END {print sum/NR}'` +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` + +# 精度相关数据计算 +train_accuracy==`grep "FID:" ${print_log} | awk '{print $NF}'` +# 提取所有loss打印信息 +grep "d_loss:" ${print_log} | awk '{print $4}' | awk -F"," '{print $1}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + + + +########################################################### 
+######### Do not modify anything below this line ##########
+######### Do not modify anything below this line ##########
+######### Do not modify anything below this line ##########
+###########################################################
+
+# Check whether this run actually used the Ascend NPU
+use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l`
+if [ x"${use_npu_flag}" == x0 ];
+then
+    echo "------------------ ERROR NOTICE START ------------------"
+    echo "ERROR, your task hasn't used the Ascend NPU, please check your NPU migration."
+    echo "------------------ ERROR NOTICE END------------------"
+else
+    echo "------------------ INFO NOTICE START------------------"
+    echo "INFO, your task has used the Ascend NPU, please check your result."
+    echo "------------------ INFO NOTICE END------------------"
+fi
+
+# Get the final casename; keep this, the case file name is ${CaseName}
+get_casename
+
+# Rename the loss file
+if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ];
+then
+    mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt
+fi
+
+# End-to-end training time
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+echo "------------------ Final result ------------------"
+# Report FPS / sec per step / end-to-end time
+echo "Final Performance images/sec : $FPS"
+echo "Final Performance sec/step : $StepTime"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Report training accuracy
+#echo "Final Train Accuracy : ${train_accuracy}"
+
+# Loss value of the last iteration; no need to modify
+ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`)
+
+# Write key information to ${CaseName}.log; no need to modify
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test_code/cartoonize.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test_code/cartoonize.py
new file mode 100644
index 0000000000000000000000000000000000000000..d17e04c9c8fe316e9f8e9c58bfff611e2c2d7a64
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test_code/cartoonize.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
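+# ============================================================================
+# NOTE (added commentary, not from the original repo): this inference entry
+# restores only the generator variables from the training checkpoint, runs
+# the U-Net generator on every image under <data_path>/scenery_photo, and
+# post-processes each output with a guided filter before writing it to
+# <output_path>/cartoonized_scenery. Inputs are resized so the short side
+# is at most 720 px and cropped to multiples of 8 so the downsample and
+# upsample stages line up.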
+# ============================================================================ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +import os +import cv2 +import argparse +import numpy as np +import tensorflow as tf +import network +import guided_filter +from tqdm import tqdm + + +def arg_parser(): + parser = argparse.ArgumentParser() + parser.add_argument("--output_path", default='/home/test_user03/wbc/output', type=str) + parser.add_argument("--data_path", default='../../dataset', type=str) + parser.add_argument("--save_folder", default='.cartoonized_scenery', type=str) + + args = parser.parse_args() + + return args + + +def resize_crop(image): + h, w, c = np.shape(image) + if min(h, w) > 720: + if h > w: + h, w = int(720 * h / w), 720 + else: + h, w = 720, int(720 * w / h) + image = cv2.resize(image, (w, h), + interpolation=cv2.INTER_AREA) + h, w = (h // 8) * 8, (w // 8) * 8 + image = image[:h, :w, :] + return image + + +def cartoonize(load_folder, save_folder, model_path): + input_photo = tf.placeholder(tf.float32, [1, None, None, 3]) + network_out = network.unet_generator(input_photo) + final_out = guided_filter.guided_filter(input_photo, network_out, r=1, eps=5e-3) + + all_vars = tf.trainable_variables() + gene_vars = [var for var in all_vars if 'generator' in var.name] + saver = tf.train.Saver(var_list=gene_vars) + + config = tf.ConfigProto() + config.gpu_options.allow_growth = True + sess = tf.Session(config=config) + + sess.run(tf.global_variables_initializer()) + saver.restore(sess, tf.train.latest_checkpoint(model_path)) + name_list = os.listdir(load_folder) + for name in tqdm(name_list): + try: + load_path = os.path.join(load_folder, name) + save_path = os.path.join(save_folder, name) + image = cv2.imread(load_path) + image = resize_crop(image) + batch_image = image.astype(np.float32) / 127.5 - 1 + batch_image = np.expand_dims(batch_image, axis=0) + output = sess.run(final_out, feed_dict={input_photo: batch_image}) + output = (np.squeeze(output) + 1) * 127.5 + output = np.clip(output, 0, 255).astype(np.uint8) + cv2.imwrite(save_path, output) + except: + print('cartoonize {} failed'.format(load_path)) + + +if __name__ == '__main__': + args = arg_parser() + load_path = os.path.join(args.data_path, "scenery_photo") + model_path = os.path.join(args.output_path, "train_cartoon/saved_models") + save_folder = os.path.join(args.output_path, "cartoonized_scenery") + print("output path",args.output_path) + print("model_path", model_path) + if not os.path.exists(save_folder): + os.mkdir(save_folder) + cartoonize(load_path, save_folder, model_path) diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test_code/guided_filter.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test_code/guided_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..75e156d5b0c047d6fb222faf70eb4e67e32b49c1 --- 
/dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test_code/guided_filter.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python +# coding=utf-8 + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +import tensorflow as tf +import numpy as np + +def tf_box_filter(x, r): + k_size = int(2*r+1) + ch = x.get_shape().as_list()[-1] + weight = 1/(k_size**2) + box_kernel = weight*np.ones((k_size, k_size, ch, 1)) + box_kernel = np.array(box_kernel).astype(np.float32) + output = tf.nn.depthwise_conv2d(x, box_kernel, [1, 1, 1, 1], 'SAME') + return output + + + +def guided_filter(x, y, r, eps=1e-2): + + x_shape = tf.shape(x) + #y_shape = tf.shape(y) + + N = tf_box_filter(tf.ones((1, x_shape[1], x_shape[2], 1), dtype=x.dtype), r) + + mean_x = tf_box_filter(x, r) / N + mean_y = tf_box_filter(y, r) / N + cov_xy = tf_box_filter(x * y, r) / N - mean_x * mean_y + var_x = tf_box_filter(x * x, r) / N - mean_x * mean_x + + A = cov_xy / (var_x + eps) + b = mean_y - A * mean_x + + mean_A = tf_box_filter(A, r) / N + mean_b = tf_box_filter(b, r) / N + + output = mean_A * x + mean_b + + return output + + + +def fast_guided_filter(lr_x, lr_y, hr_x, r=1, eps=1e-8): + + #assert lr_x.shape.ndims == 4 and lr_y.shape.ndims == 4 and hr_x.shape.ndims == 4 + + lr_x_shape = tf.shape(lr_x) + #lr_y_shape = tf.shape(lr_y) + hr_x_shape = tf.shape(hr_x) + + N = tf_box_filter(tf.ones((1, lr_x_shape[1], lr_x_shape[2], 1), dtype=lr_x.dtype), r) + + mean_x = tf_box_filter(lr_x, r) / N + mean_y = tf_box_filter(lr_y, r) / N + cov_xy = tf_box_filter(lr_x * lr_y, r) / N - mean_x * mean_y + var_x = tf_box_filter(lr_x * lr_x, r) / N - mean_x * mean_x + + A = cov_xy / (var_x + eps) + b = mean_y - A * mean_x + + mean_A = tf.image.resize_images(A, hr_x_shape[1: 3]) + mean_b = tf.image.resize_images(b, hr_x_shape[1: 3]) + + output = mean_A * hr_x + mean_b + + return output + + +if __name__ == '__main__': + import cv2 + from tqdm import tqdm + + input_photo = tf.placeholder(tf.float32, [1, None, None, 3]) + #input_superpixel = tf.placeholder(tf.float32, [16, 256, 256, 3]) + output = guided_filter(input_photo, input_photo, 5, eps=1) + image = cv2.imread('output_figure1/cartoon2.jpg') + image = image/127.5 - 1 + image = 
np.expand_dims(image, axis=0) + + config = tf.ConfigProto() + config.gpu_options.allow_growth = True + sess = tf.Session(config=config) + sess.run(tf.global_variables_initializer()) + + out = sess.run(output, feed_dict={input_photo: image}) + out = (np.squeeze(out)+1)*127.5 + out = np.clip(out, 0, 255).astype(np.uint8) + cv2.imwrite('output_figure1/cartoon2_filter.jpg', out) diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test_code/network.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test_code/network.py new file mode 100644 index 0000000000000000000000000000000000000000..3d7205c5c26e7703c58651fe2758d3279ea3a7e4 --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/test_code/network.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python +# coding=utf-8 + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
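+# ============================================================================
+# NOTE (added commentary, not from the original repo): unet_generator below
+# downsamples twice with stride-2 convolutions, applies num_blocks residual
+# blocks at 1/4 resolution, then upsamples with tf.image.resize_bilinear and
+# additive skip connections (x3+x1, x4+x0). A 256x256x3 input flows roughly:
+#     256 (x0, 32ch) -> 128 (x1, 64ch) -> 64 (x2, 128ch, 4 resblocks)
+#     -> 128 (x3, 32ch) -> 256 (x4, 3ch output)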
+# ============================================================================ + +import tensorflow as tf +import numpy as np +import tensorflow.contrib.slim as slim + +def resblock(inputs, out_channel=32, name='resblock'): + + with tf.variable_scope(name): + + x = slim.convolution2d(inputs, out_channel, [3, 3], + activation_fn=None, scope='conv1') + x = tf.nn.leaky_relu(x) + x = slim.convolution2d(x, out_channel, [3, 3], + activation_fn=None, scope='conv2') + + return x + inputs + + + + +def unet_generator(inputs, channel=32, num_blocks=4, name='generator', reuse=False): + with tf.variable_scope(name, reuse=reuse): + + x0 = slim.convolution2d(inputs, channel, [7, 7], activation_fn=None) + x0 = tf.nn.leaky_relu(x0) + + x1 = slim.convolution2d(x0, channel, [3, 3], stride=2, activation_fn=None) + x1 = tf.nn.leaky_relu(x1) + x1 = slim.convolution2d(x1, channel*2, [3, 3], activation_fn=None) + x1 = tf.nn.leaky_relu(x1) + + x2 = slim.convolution2d(x1, channel*2, [3, 3], stride=2, activation_fn=None) + x2 = tf.nn.leaky_relu(x2) + x2 = slim.convolution2d(x2, channel*4, [3, 3], activation_fn=None) + x2 = tf.nn.leaky_relu(x2) + + for idx in range(num_blocks): + x2 = resblock(x2, out_channel=channel*4, name='block_{}'.format(idx)) + + x2 = slim.convolution2d(x2, channel*2, [3, 3], activation_fn=None) + x2 = tf.nn.leaky_relu(x2) + + h1, w1 = tf.shape(x2)[1], tf.shape(x2)[2] + x3 = tf.image.resize_bilinear(x2, (h1*2, w1*2)) + x3 = slim.convolution2d(x3+x1, channel*2, [3, 3], activation_fn=None) + x3 = tf.nn.leaky_relu(x3) + x3 = slim.convolution2d(x3, channel, [3, 3], activation_fn=None) + x3 = tf.nn.leaky_relu(x3) + + h2, w2 = tf.shape(x3)[1], tf.shape(x3)[2] + x4 = tf.image.resize_bilinear(x3, (h2*2, w2*2)) + x4 = slim.convolution2d(x4+x0, channel, [3, 3], activation_fn=None) + x4 = tf.nn.leaky_relu(x4) + x4 = slim.convolution2d(x4, 3, [7, 7], activation_fn=None) + + return x4 + +if __name__ == '__main__': + + + pass \ No newline at end of file diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/guided_filter.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/guided_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..fb21bb903a5cabd628f0ff0dddad4802115f4456 --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/guided_filter.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python +# coding=utf-8 + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +from npu_bridge.npu_init import * + + +import tensorflow as tf +import numpy as np + + +def tf_box_filter(x, r): + ch = x.get_shape().as_list()[-1] + weight = 1/((2*r+1)**2) + box_kernel = weight*np.ones((2*r+1, 2*r+1, ch, 1)) + box_kernel = np.array(box_kernel).astype(np.float32) + output = tf.nn.depthwise_conv2d(x, box_kernel, [1, 1, 1, 1], 'SAME') + return output + + + +def guided_filter(x, y, r, eps=1e-2): + + x_shape = tf.shape(x) + #y_shape = tf.shape(y) + + N = tf_box_filter(tf.ones((1, x_shape[1], x_shape[2], 1), dtype=x.dtype), r) + + mean_x = tf_box_filter(x, r) / N + mean_y = tf_box_filter(y, r) / N + cov_xy = tf_box_filter(x * y, r) / N - mean_x * mean_y + var_x = tf_box_filter(x * x, r) / N - mean_x * mean_x + + A = cov_xy / (var_x + eps) + b = mean_y - A * mean_x + + mean_A = tf_box_filter(A, r) / N + mean_b = tf_box_filter(b, r) / N + + output = mean_A * x + mean_b + + return output + + +if __name__ == '__main__': + pass + diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/layers.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..31e6b69381ef1eb8967f1b51441c1d6ef39c7417 --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/layers.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python +# coding=utf-8 + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
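+# ============================================================================
+# NOTE (added commentary, not from the original repo): spectral_norm below
+# estimates the largest singular value sigma of the reshaped kernel W with
+# one power-iteration step per run, reusing the persistent vector u:
+#     v = l2_normalize(u @ W^T),  u' = l2_normalize(v @ W),
+#     sigma = v @ W @ u'^T
+# and returns W / sigma, keeping each layer roughly 1-Lipschitz;
+# conv_spectral_norm wraps this around a conv2d kernel.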
+# ============================================================================ + +from npu_bridge.npu_init import * + + +import tensorflow as tf +import numpy as np +import tensorflow.contrib.slim as slim + + + +def adaptive_instance_norm(content, style, epsilon=1e-5): + + c_mean, c_var = tf.nn.moments(content, axes=[1, 2], keep_dims=True) + s_mean, s_var = tf.nn.moments(style, axes=[1, 2], keep_dims=True) + c_std, s_std = tf.sqrt(c_var + epsilon), tf.sqrt(s_var + epsilon) + + return s_std * (content - c_mean) / c_std + s_mean + + + +def spectral_norm(w, iteration=1): + w_shape = w.shape.as_list() + w = tf.reshape(w, [-1, w_shape[-1]]) + + u = tf.get_variable("u", [1, w_shape[-1]], + initializer=tf.random_normal_initializer(), trainable=False) + + u_hat = u + v_hat = None + for i in range(iteration): + """ + power iteration + Usually iteration = 1 will be enough + """ + v_ = tf.matmul(u_hat, tf.transpose(w)) + v_hat = tf.nn.l2_normalize(v_) + + u_ = tf.matmul(v_hat, w) + u_hat = tf.nn.l2_normalize(u_) + + u_hat = tf.stop_gradient(u_hat) + v_hat = tf.stop_gradient(v_hat) + + sigma = tf.matmul(tf.matmul(v_hat, w), tf.transpose(u_hat)) + + with tf.control_dependencies([u.assign(u_hat)]): + w_norm = w / sigma + w_norm = tf.reshape(w_norm, w_shape) + + return w_norm + + +def conv_spectral_norm(x, channel, k_size, stride=1, name='conv_snorm'): + with tf.variable_scope(name): + w = tf.get_variable("kernel", shape=[k_size[0], k_size[1], x.get_shape()[-1], channel]) + b = tf.get_variable("bias", [channel], initializer=tf.constant_initializer(0.0)) + + x = tf.nn.conv2d(input=x, filter=spectral_norm(w), strides=[1, stride, stride, 1], padding='SAME') + b + + return x + + + +def self_attention(inputs, name='attention', reuse=False): + with tf.variable_scope(name, reuse=reuse): + h, w = tf.shape(inputs)[1], tf.shape(inputs)[2] + bs, _, _, ch = inputs.get_shape().as_list() + f = slim.convolution2d(inputs, ch//8, [1, 1], activation_fn=None) + g = slim.convolution2d(inputs, ch//8, [1, 1], activation_fn=None) + s = slim.convolution2d(inputs, 1, [1, 1], activation_fn=None) + f_flatten = tf.reshape(f, shape=[f.shape[0], -1, f.shape[-1]]) + g_flatten = tf.reshape(g, shape=[g.shape[0], -1, g.shape[-1]]) + beta = tf.matmul(f_flatten, g_flatten, transpose_b=True) + beta = tf.nn.softmax(beta) + + s_flatten = tf.reshape(s, shape=[s.shape[0], -1, s.shape[-1]]) + att_map = tf.matmul(beta, s_flatten) + att_map = tf.reshape(att_map, shape=[bs, h, w, 1]) + gamma = tf.get_variable("gamma", [1], initializer=tf.constant_initializer(0.0)) + output = att_map * gamma + inputs + + return att_map, output + + + +if __name__ == '__main__': + pass + + + + diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/loss.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/loss.py new file mode 100644 index 0000000000000000000000000000000000000000..73e4923bc0577322f61364730f5ff2aeb4e91a5a --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/loss.py @@ -0,0 +1,218 @@ +#!/usr/bin/env python +# coding=utf-8 + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + + +import numpy as np +import scipy.stats as st +import tensorflow as tf + + + +VGG_MEAN = [103.939, 116.779, 123.68] + + +class Vgg19: + + def __init__(self, vgg19_npy_path=None): + + self.data_dict = np.load(vgg19_npy_path, encoding='latin1', allow_pickle=True).item() + print('Finished loading vgg19.npy') + + + def build_conv4_4(self, rgb, include_fc=False): + + rgb_scaled = (rgb+1) * 127.5 + + blue, green, red = tf.split(axis=3, num_or_size_splits=3, value=rgb_scaled) + bgr = tf.concat(axis=3, values=[blue - VGG_MEAN[0], + green - VGG_MEAN[1], red - VGG_MEAN[2]]) + + self.conv1_1 = self.conv_layer(bgr, "conv1_1") + self.relu1_1 = tf.nn.relu(self.conv1_1) + self.conv1_2 = self.conv_layer(self.relu1_1, "conv1_2") + self.relu1_2 = tf.nn.relu(self.conv1_2) + self.pool1 = self.max_pool(self.relu1_2, 'pool1') + + self.conv2_1 = self.conv_layer(self.pool1, "conv2_1") + self.relu2_1 = tf.nn.relu(self.conv2_1) + self.conv2_2 = self.conv_layer(self.relu2_1, "conv2_2") + self.relu2_2 = tf.nn.relu(self.conv2_2) + self.pool2 = self.max_pool(self.relu2_2, 'pool2') + + self.conv3_1 = self.conv_layer(self.pool2, "conv3_1") + self.relu3_1 = tf.nn.relu(self.conv3_1) + self.conv3_2 = self.conv_layer(self.relu3_1, "conv3_2") + self.relu3_2 = tf.nn.relu(self.conv3_2) + self.conv3_3 = self.conv_layer(self.relu3_2, "conv3_3") + self.relu3_3 = tf.nn.relu(self.conv3_3) + self.conv3_4 = self.conv_layer(self.relu3_3, "conv3_4") + self.relu3_4 = tf.nn.relu(self.conv3_4) + self.pool3 = self.max_pool(self.relu3_4, 'pool3') + + self.conv4_1 = self.conv_layer(self.pool3, "conv4_1") + self.relu4_1 = tf.nn.relu(self.conv4_1) + self.conv4_2 = self.conv_layer(self.relu4_1, "conv4_2") + self.relu4_2 = tf.nn.relu(self.conv4_2) + self.conv4_3 = self.conv_layer(self.relu4_2, "conv4_3") + self.relu4_3 = tf.nn.relu(self.conv4_3) + self.conv4_4 = self.conv_layer(self.relu4_3, "conv4_4") + self.relu4_4 = tf.nn.relu(self.conv4_4) + self.pool4 = self.max_pool(self.relu4_4, 'pool4') + + return self.conv4_4 + + def max_pool(self, bottom, name): + return tf.nn.max_pool(bottom, ksize=[1, 2, 2, 1], + strides=[1, 2, 2, 1], padding='SAME', name=name) + + def conv_layer(self, bottom, name): + with tf.variable_scope(name): + filt = self.get_conv_filter(name) + + conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME') + + conv_biases = self.get_bias(name) + bias = 
tf.nn.bias_add(conv, conv_biases) + + #relu = tf.nn.relu(bias) + return bias + + + + def fc_layer(self, bottom, name): + with tf.variable_scope(name): + shape = bottom.get_shape().as_list() + dim = 1 + for d in shape[1:]: + dim *= d + x = tf.reshape(bottom, [-1, dim]) + + weights = self.get_fc_weight(name) + biases = self.get_bias(name) + + # Fully connected layer. Note that the '+' operation automatically + # broadcasts the biases. + fc = tf.nn.bias_add(tf.matmul(x, weights), biases) + + return fc + + def get_conv_filter(self, name): + return tf.constant(self.data_dict[name][0], name="filter") + + def get_bias(self, name): + return tf.constant(self.data_dict[name][1], name="biases") + + def get_fc_weight(self, name): + return tf.constant(self.data_dict[name][0], name="weights") + + + +def vggloss_4_4(image_a, image_b): + vgg_model = Vgg19('vgg19_no_fc.npy') + vgg_a = vgg_model.build_conv4_4(image_a) + vgg_b = vgg_model.build_conv4_4(image_b) + VGG_loss = tf.losses.absolute_difference(vgg_a, vgg_b) + #VGG_loss = tf.nn.l2_loss(vgg_a - vgg_b) + h, w, c= vgg_a.get_shape().as_list()[1:] + VGG_loss = tf.reduce_mean(VGG_loss)/(h*w*c) + return VGG_loss + + + +def wgan_loss(discriminator, real, fake, patch=True, + channel=32, name='discriminator', lambda_=2): + real_logits = discriminator(real, patch=patch, channel=channel, name=name, reuse=False) + fake_logits = discriminator(fake, patch=patch, channel=channel, name=name, reuse=True) + + d_loss_real = - tf.reduce_mean(real_logits) + d_loss_fake = tf.reduce_mean(fake_logits) + + d_loss = d_loss_real + d_loss_fake + g_loss = - d_loss_fake + + """ Gradient Penalty """ + # This is borrowed from https://github.com/kodalinaveen3/DRAGAN/blob/master/DRAGAN.ipynb + alpha = tf.random_uniform([tf.shape(real)[0], 1, 1, 1], minval=0.,maxval=1.) + differences = fake - real # This is different from MAGAN + interpolates = real + (alpha * differences) + inter_logit = discriminator(interpolates, channel=channel, name=name, reuse=True) + gradients = tf.gradients(inter_logit, [interpolates])[0] + slopes = tf.sqrt(tf.reduce_sum(tf.square(gradients), reduction_indices=[1])) + gradient_penalty = tf.reduce_mean((slopes - 1.) ** 2) + d_loss += lambda_ * gradient_penalty + + return d_loss, g_loss + + +def gan_loss(discriminator, real, fake, scale=1,channel=32, patch=False, name='discriminator'): + + real_logit = discriminator(real, scale, channel, name=name, patch=patch, reuse=False) + fake_logit = discriminator(fake, scale, channel, name=name, patch=patch, reuse=True) + + real_logit = tf.nn.sigmoid(real_logit) + fake_logit = tf.nn.sigmoid(fake_logit) + + g_loss_blur = -tf.reduce_mean(tf.log(fake_logit)) + d_loss_blur = -tf.reduce_mean(tf.log(real_logit) + tf.log(1. 
- fake_logit)) + + return d_loss_blur, g_loss_blur + + + +def lsgan_loss(discriminator, real, fake, scale=1, + channel=32, patch=False, name='discriminator'): + + real_logit = discriminator(real, scale, channel, name=name, patch=patch, reuse=False) + fake_logit = discriminator(fake, scale, channel, name=name, patch=patch, reuse=True) + + g_loss = tf.reduce_mean((fake_logit - 1)**2) + d_loss = 0.5*(tf.reduce_mean((real_logit - 1)**2) + tf.reduce_mean(fake_logit**2)) + + return d_loss, g_loss + + + +def total_variation_loss(image, k_size=1): + h, w = image.get_shape().as_list()[1:3] + tv_h = tf.reduce_mean((image[:, k_size:, :, :] - image[:, :h - k_size, :, :])**2) + tv_w = tf.reduce_mean((image[:, :, k_size:, :] - image[:, :, :w - k_size, :])**2) + tv_loss = (tv_h + tv_w)/(3*h*w) + return tv_loss + + + + +if __name__ == '__main__': + pass + + + diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/network.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/network.py new file mode 100644 index 0000000000000000000000000000000000000000..90750fb477717ce40bab6d3e8c460953e157c73b --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/network.py @@ -0,0 +1,204 @@ +#!/usr/bin/env python +# coding=utf-8 + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
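+# ============================================================================
+# NOTE (added commentary, not from the original repo): the discriminators
+# below (disc_bn, disc_sn, disc_ln) share one topology and differ only in
+# normalization: batch norm, spectral norm (layers.conv_spectral_norm), or
+# layer norm. With patch=True they output a patch-level logit map
+# (PatchGAN style) rather than a single scalar per image.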
+# ============================================================================ + +from npu_bridge.npu_init import * + + +import layers +import tensorflow as tf +import numpy as np +import tensorflow.contrib.slim as slim + +from tqdm import tqdm + + + +def resblock(inputs, out_channel=32, name='resblock'): + + with tf.variable_scope(name): + + x = slim.convolution2d(inputs, out_channel, [3, 3], + activation_fn=None, scope='conv1') + x = tf.nn.leaky_relu(x) + x = slim.convolution2d(x, out_channel, [3, 3], + activation_fn=None, scope='conv2') + + return x + inputs + + + +def generator(inputs, channel=32, num_blocks=4, name='generator', reuse=False): + with tf.variable_scope(name, reuse=reuse): + + x = slim.convolution2d(inputs, channel, [7, 7], activation_fn=None) + x = tf.nn.leaky_relu(x) + + x = slim.convolution2d(x, channel*2, [3, 3], stride=2, activation_fn=None) + x = slim.convolution2d(x, channel*2, [3, 3], activation_fn=None) + x = tf.nn.leaky_relu(x) + + x = slim.convolution2d(x, channel*4, [3, 3], stride=2, activation_fn=None) + x = slim.convolution2d(x, channel*4, [3, 3], activation_fn=None) + x = tf.nn.leaky_relu(x) + + for idx in range(num_blocks): + x = resblock(x, out_channel=channel*4, name='block_{}'.format(idx)) + + x = slim.conv2d_transpose(x, channel*2, [3, 3], stride=2, activation_fn=None) + x = slim.convolution2d(x, channel*2, [3, 3], activation_fn=None) + + x = tf.nn.leaky_relu(x) + + x = slim.conv2d_transpose(x, channel, [3, 3], stride=2, activation_fn=None) + x = slim.convolution2d(x, channel, [3, 3], activation_fn=None) + x = tf.nn.leaky_relu(x) + + x = slim.convolution2d(x, 3, [7, 7], activation_fn=None) + #x = tf.clip_by_value(x, -0.999999, 0.999999) + + return x + + +def unet_generator(inputs, channel=32, num_blocks=4, name='generator', reuse=False): + with tf.variable_scope(name, reuse=reuse): + + x0 = slim.convolution2d(inputs, channel, [7, 7], activation_fn=None) + x0 = tf.nn.leaky_relu(x0) + + x1 = slim.convolution2d(x0, channel, [3, 3], stride=2, activation_fn=None) + x1 = tf.nn.leaky_relu(x1) + x1 = slim.convolution2d(x1, channel*2, [3, 3], activation_fn=None) + x1 = tf.nn.leaky_relu(x1) + + x2 = slim.convolution2d(x1, channel*2, [3, 3], stride=2, activation_fn=None) + x2 = tf.nn.leaky_relu(x2) + x2 = slim.convolution2d(x2, channel*4, [3, 3], activation_fn=None) + x2 = tf.nn.leaky_relu(x2) + + for idx in range(num_blocks): + x2 = resblock(x2, out_channel=channel*4, name='block_{}'.format(idx)) + + x2 = slim.convolution2d(x2, channel*2, [3, 3], activation_fn=None) + x2 = tf.nn.leaky_relu(x2) + + h1, w1 = tf.shape(x2)[1], tf.shape(x2)[2] + x3 = tf.image.resize_bilinear(x2, (h1*2, w1*2)) + x3 = slim.convolution2d(x3+x1, channel*2, [3, 3], activation_fn=None) + x3 = tf.nn.leaky_relu(x3) + x3 = slim.convolution2d(x3, channel, [3, 3], activation_fn=None) + x3 = tf.nn.leaky_relu(x3) + + h2, w2 = tf.shape(x3)[1], tf.shape(x3)[2] + x4 = tf.image.resize_bilinear(x3, (h2*2, w2*2)) + x4 = slim.convolution2d(x4+x0, channel, [3, 3], activation_fn=None) + x4 = tf.nn.leaky_relu(x4) + x4 = slim.convolution2d(x4, 3, [7, 7], activation_fn=None) + #x4 = tf.clip_by_value(x4, -1, 1) + return x4 + + + +def disc_bn(x, scale=1, channel=32, is_training=True, + name='discriminator', patch=True, reuse=False): + + with tf.variable_scope(name, reuse=reuse): + + for idx in range(3): + x = slim.convolution2d(x, channel*2**idx, [3, 3], stride=2, activation_fn=None) + x = slim.batch_norm(x, is_training=is_training, center=True, scale=True) + x = tf.nn.leaky_relu(x) + + x = 
slim.convolution2d(x, channel*2**idx, [3, 3], activation_fn=None) + x = slim.batch_norm(x, is_training=is_training, center=True, scale=True) + x = tf.nn.leaky_relu(x) + + if patch == True: + x = slim.convolution2d(x, 1, [1, 1], activation_fn=None) + else: + x = tf.reduce_mean(x, axis=[1, 2]) + x = slim.fully_connected(x, 1, activation_fn=None) + + return x + + + + +def disc_sn(x, scale=1, channel=32, patch=True, name='discriminator', reuse=False): + with tf.variable_scope(name, reuse=reuse): + + for idx in range(3): + x = layers.conv_spectral_norm(x, channel*2**idx, [3, 3], + stride=2, name='conv{}_1'.format(idx)) + x = tf.nn.leaky_relu(x) + + x = layers.conv_spectral_norm(x, channel*2**idx, [3, 3], + name='conv{}_2'.format(idx)) + x = tf.nn.leaky_relu(x) + + + if patch == True: + x = layers.conv_spectral_norm(x, 1, [1, 1], name='conv_out'.format(idx)) + + else: + x = tf.reduce_mean(x, axis=[1, 2]) + x = slim.fully_connected(x, 1, activation_fn=None) + + return x + + +def disc_ln(x, channel=32, is_training=True, name='discriminator', patch=True, reuse=False): + with tf.variable_scope(name, reuse=reuse): + + for idx in range(3): + x = slim.convolution2d(x, channel*2**idx, [3, 3], stride=2, activation_fn=None) + x = tf.contrib.layers.layer_norm(x) + x = tf.nn.leaky_relu(x) + + x = slim.convolution2d(x, channel*2**idx, [3, 3], activation_fn=None) + x = tf.contrib.layers.layer_norm(x) + x = tf.nn.leaky_relu(x) + + if patch == True: + x = slim.convolution2d(x, 1, [1, 1], activation_fn=None) + else: + x = tf.reduce_mean(x, axis=[1, 2]) + x = slim.fully_connected(x, 1, activation_fn=None) + + return x + + + + +if __name__ == '__main__': + pass + + diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/pretrain.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..d1a0b09e8734411e1e724ea859340b6bdb005286 --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/pretrain.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python +# coding=utf-8 + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
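+# ============================================================================
+# NOTE (added commentary, not from the original repo): this stage warms up
+# the generator alone with an L1 reconstruction loss (no discriminator), so
+# adversarial training later starts from a generator that approximately
+# reproduces its input. Hypothetical standalone run:
+#     python3 pretrain.py --data_path=/cache/dataset --output_path=/cache/output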
+# ============================================================================ + +from npu_bridge.npu_init import * + +import tensorflow as tf +import tensorflow.contrib.slim as slim + +import utils +import os +import numpy as np +import argparse +import network +from tqdm import tqdm + +os.environ["CUDA_VISIBLE_DEVICES"] = "0" + + +def arg_parser(): + parser = argparse.ArgumentParser() + parser.add_argument("--patch_size", default=256, type=int) + parser.add_argument("--batch_size", default=16, type=int) + parser.add_argument("--total_iter", default=50000, type=int) + parser.add_argument("--adv_train_lr", default=2e-4, type=float) + parser.add_argument("--gpu_fraction", default=0.5, type=float) + parser.add_argument("--data_path", default='/cache/dataset', type=str) + parser.add_argument("--output_path", default='/cache/output', type=str) + + args = parser.parse_args() + + return args + + +def train(args): + SAVE_DIR = "./pretrain" + if not os.path.isdir(SAVE_DIR): + os.makedirs(SAVE_DIR) + input_photo = tf.placeholder(tf.float32, [args.batch_size, + args.patch_size, args.patch_size, 3]) + + output = network.unet_generator(input_photo) + + recon_loss = tf.reduce_mean(tf.losses.absolute_difference(input_photo, output)) + + all_vars = tf.trainable_variables() + gene_vars = [var for var in all_vars if 'gene' in var.name] + # loss_scale_manager = ExponentialUpdateLossScaleManager(init_loss_scale=2 ** 32, incr_every_n_steps=1000, + # decr_every_n_nan_or_inf=2, decr_ratio=0.5) + update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) + with tf.control_dependencies(update_ops): + + optim = tf.train.AdamOptimizer(args.adv_train_lr, beta1=0.5, beta2=0.99) \ + .minimize(recon_loss, var_list=gene_vars) + # optim = tf.train.AdamOptimizer(args.adv_train_lr, beta1=0.5, beta2=0.99) + # optim = NPULossScaleOptimizer(optim, loss_scale_manager).minimize(recon_loss, var_list=gene_vars) + + ''' + config = tf.ConfigProto() + config.gpu_options.allow_growth = True + sess = tf.Session(config=config) + ''' + gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=args.gpu_fraction) + config = tf.ConfigProto(gpu_options=gpu_options) + custom_op = config.graph_options.rewrite_options.custom_optimizers.add() + custom_op.name = "NpuOptimizer" + config.graph_options.rewrite_options.remapping = RewriterConfig.OFF # 必须显式关闭 + config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF # 必须显式关闭 + + # custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("force_fp32") + + sess = tf.Session(config=config) + saver = tf.train.Saver(var_list=gene_vars, max_to_keep=20) + + with tf.device('/cpu:0'): + + sess.run(tf.global_variables_initializer()) + face_photo_dir = os.path.join(args.data_path, 'face_photo') + face_photo_list = utils.load_image_list(face_photo_dir) + scenery_photo_dir = os.path.join(args.data_path, 'scenery_photo') + scenery_photo_list = utils.load_image_list(scenery_photo_dir) + + for total_iter in tqdm(range(args.total_iter)): + + if np.mod(total_iter, 5) == 0: + photo_batch = utils.next_batch(face_photo_list, args.batch_size) + else: + photo_batch = utils.next_batch(scenery_photo_list, args.batch_size) + # lossScale = tf.get_default_graph().get_tensor_by_name("loss_scale:0") + # _, _, r_loss = sess.run([lossScale, optim, recon_loss], feed_dict={input_photo: photo_batch}) + _, r_loss = sess.run([optim, recon_loss], feed_dict={input_photo: photo_batch}) + + if np.mod(total_iter + 1, 50) == 0: + + print('pretrain, iter: {}, recon_loss: {}'.format(total_iter, r_loss)) + if 
np.mod(total_iter + 1, 500) == 0: + saver.save(sess, SAVE_DIR + '/saved_models/model', + write_meta_graph=False, global_step=total_iter) + + photo_face = utils.next_batch(face_photo_list, args.batch_size) + photo_scenery = utils.next_batch(scenery_photo_list, args.batch_size) + + result_face = sess.run(output, feed_dict={input_photo: photo_face}) + + result_scenery = sess.run(output, feed_dict={input_photo: photo_scenery}) + + utils.write_batch_image(result_face, SAVE_DIR + '/images', + str(total_iter) + '_face_result.jpg', 4) + utils.write_batch_image(photo_face, SAVE_DIR + '/images', + str(total_iter) + '_face_photo.jpg', 4) + utils.write_batch_image(result_scenery, SAVE_DIR + '/images', + str(total_iter) + '_scenery_result.jpg', 4) + utils.write_batch_image(photo_scenery, SAVE_DIR + '/images', + str(total_iter) + '_scenery_photo.jpg', 4) + + +if __name__ == '__main__': + args = arg_parser() + train(args) diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/__init__.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e31ca358423ad87376ca759fe299aafb1bd3be57 --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/__init__.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# coding=utf-8 + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +from .core import selective_search, box_filter + diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/adaptive_color.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/adaptive_color.py new file mode 100644 index 0000000000000000000000000000000000000000..0b77455fc0bb0d7971307175fce1ab6c3e8a590e --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/adaptive_color.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python +# coding=utf-8 + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+from npu_bridge.npu_init import *
+import numpy as np
+
+
+def label2rgb(label_field, image, kind='avg', bg_label=-1, bg_color=(0, 0, 0)):
+
+    #std_list = list()
+    out = np.zeros_like(image)
+    labels = np.unique(label_field)
+    bg = (labels == bg_label)
+    if bg.any():
+        labels = labels[labels != bg_label]
+        mask = (label_field == bg_label).nonzero()
+        out[mask] = bg_color
+    for label in labels:
+        mask = (label_field == label).nonzero()
+        #std = np.std(image[mask])
+        #std_list.append(std)
+        if kind == 'avg':
+            color = image[mask].mean(axis=0)
+        elif kind == 'median':
+            color = np.median(image[mask], axis=0)
+        elif kind == 'mix':
+            std = np.std(image[mask])
+            if std < 20:
+                color = image[mask].mean(axis=0)
+            elif std < 40:
+                # blend mean and median in the mid-variance band
+                mean = image[mask].mean(axis=0)
+                median = np.median(image[mask], axis=0)
+                color = 0.5 * mean + 0.5 * median
+            else:
+                color = np.median(image[mask], axis=0)
+        out[mask] = color
+    return out
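For reference, `label2rgb` flattens every labelled region to one representative colour; in `'mix'` mode the choice between mean and median depends on the region's standard deviation. A minimal usage sketch, assuming the package (and its `npu_bridge` dependency) is importable from `train_code/`:

```python
# Illustration only; not part of the diff.
import numpy as np
from selective_search.adaptive_color import label2rgb

# toy 4x4 grey image with two labelled regions
image = np.array([[10, 10, 200, 200]] * 4, dtype=np.float32)
label_field = np.array([[0, 0, 1, 1]] * 4)

flat = label2rgb(label_field, image, kind='mix')
# each constant region has std 0 < 20, so its mean colour is used
print(np.unique(flat))   # -> [ 10. 200.]
```
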
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+from npu_bridge.npu_init import *
+
+
+import numpy as np
+from joblib import Parallel, delayed
+from skimage.segmentation import felzenszwalb
+
+# package-relative imports, matching core.py
+from .adaptive_color import label2rgb
+from .util import switch_color_space
+from .structure import HierarchicalGrouping
+
+
+def color_ss_map(image, color_space='Lab', k=10,
+                 sim_strategy='CTSF', seg_num=200, power=1):
+
+    img_seg = felzenszwalb(image, scale=k, sigma=0.8, min_size=100)
+    img_cvtcolor = label2rgb(img_seg, image, kind='mix')
+    img_cvtcolor = switch_color_space(img_cvtcolor, color_space)
+    S = HierarchicalGrouping(img_cvtcolor, img_seg, sim_strategy)
+    S.build_regions()
+    S.build_region_pairs()
+
+    # Start hierarchical grouping
+
+    while S.num_regions() > seg_num:
+
+        i, j = S.get_highest_similarity()
+        S.merge_region(i, j)
+        S.remove_similarities(i, j)
+        S.calculate_similarity_for_new_region()
+
+    image = label2rgb(S.img_seg, image, kind='mix')
+    image = (image + 1) / 2
+    image = image ** power
+    image = image / np.max(image)
+    image = image * 2 - 1
+
+    return image
+
+
+def selective_adacolor(batch_image, seg_num=200, power=1):
+    num_job = np.shape(batch_image)[0]
+    # pass seg_num and power by keyword: color_ss_map's positional order
+    # is (image, color_space, k, sim_strategy, seg_num, power)
+    batch_out = Parallel(n_jobs=num_job)(delayed(color_ss_map)
+                                         (image, seg_num=seg_num, power=power) for image in batch_image)
+    return np.array(batch_out)
+
+
+if __name__ == '__main__':
+    pass
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/core.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/core.py
new file mode 100644
index 0000000000000000000000000000000000000000..0c6be16948e2fdb1b305d11d2bfe6284d457438d
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/core.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+from npu_bridge.npu_init import *
+from random import random as random_uniform
+from joblib import Parallel, delayed
+from skimage.segmentation import felzenszwalb
+from .util import oversegmentation, switch_color_space, load_strategy
+from .structure import HierarchicalGrouping
+
+
+def selective_search_one(img, color_space, k, sim_strategy):
+    '''
+    Selective Search using a single diversification strategy
+    Parameters
+    ----------
+    img : ndarray
+        Original image
+    color_space : string
+        Colour space
+    k : int
+        Threshold parameter for starting regions
+    sim_strategy : string
+        Combination of similarity measures
+
+    Returns
+    -------
+    boxes : list
+        Bounding boxes of the regions
+    priority : list
+        A small priority number indicates a higher position in the hierarchy
+    '''
+
+    # convert RGB image to target color space
+    img = switch_color_space(img, color_space)
+
+    # Generate starting locations
+    img_seg = oversegmentation(img, k)
+
+    # Initialize hierarchical grouping
+    S = HierarchicalGrouping(img, img_seg, sim_strategy)
+
+    S.build_regions()
+    S.build_region_pairs()
+
+    # Start hierarchical grouping
+    while not S.is_empty():
+        i, j = S.get_highest_similarity()
+
+        S.merge_region(i, j)
+
+        S.remove_similarities(i, j)
+
+        S.calculate_similarity_for_new_region()
+
+    # convert the order by hierarchical priority
+    boxes = [x['box'] for x in S.regions.values()][::-1]
+
+    # drop duplicates while maintaining order
+    boxes = list(dict.fromkeys(boxes))
+
+    # generate priority for boxes
+    priorities = list(range(1, len(boxes) + 1))
+
+    return boxes, priorities
+
+
+def selective_search(img, mode='single', random=False):
+    """
+    Selective Search in Python
+    """
+
+    # load selective search strategy
+    strategy = load_strategy(mode)
+
+    # Execute selective search in parallel
+    vault = Parallel(n_jobs=1)(delayed(selective_search_one)(img, color, k, sim) for (color, k, sim) in strategy)
+
+    boxes = [x for x, _ in vault]
+    priorities = [y for _, y in vault]
+
+    boxes = [item for sublist in boxes for item in sublist]
+    priorities = [item for sublist in priorities for item in sublist]
+
+    if random:
+        # Do pseudo random sorting as in the paper; the aliased
+        # random_uniform avoids the boolean `random` argument
+        rand_list = [random_uniform() for i in range(len(priorities))]
+        priorities = [p * r for p, r in zip(priorities, rand_list)]
+        boxes = [b for _, b in sorted(zip(priorities, boxes))]
+
+    # drop duplicates while maintaining order
+    boxes = list(dict.fromkeys(boxes))
+
+    return boxes
+
+
+def box_filter(boxes, min_size=20, max_ratio=None, topN=None):
+    proposal = []
+
+    for box in boxes:
+        # Calculate width and height of the box
+        w, h = box[2] - box[0], box[3] - box[1]
+
+        # Filter for size
+        if w < min_size or h < min_size:
+            continue
+
+        # Filter for box ratio
+        if max_ratio:
+            if w / h > max_ratio or h / w > max_ratio:
+                continue
+
+        proposal.append(box)
+
+    # slicing also handles topN larger than the proposal list
+    if topN:
+        return proposal[:topN]
+    return proposal
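A short driver for the package's public API (`selective_search` and `box_filter` are re-exported from `__init__.py`). A sketch only, assuming scikit-image's sample data and an environment where the package's `npu_bridge` import resolves:

```python
# Illustration only; not part of the diff.
import skimage.data
from selective_search import selective_search, box_filter

img = skimage.data.astronaut()               # any RGB ndarray works
boxes = selective_search(img, mode='fast')   # 'fast' expands to 8 (color, k, sim) runs
proposals = box_filter(boxes, min_size=20, max_ratio=4, topN=50)
print(len(proposals), proposals[0])          # up to 50 (left, top, right, bottom) boxes
```
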
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/measure.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/measure.py
new file mode 100644
index 0000000000000000000000000000000000000000..67d06b5aa075c52025764df4400159df7bd31032
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/measure.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+from npu_bridge.npu_init import *
+import numpy as np
+from skimage.feature import local_binary_pattern
+
+
+def _calculate_color_sim(ri, rj):
+    """
+    Calculate color similarity using histogram intersection
+    """
+    return sum([min(a, b) for a, b in zip(ri["color_hist"], rj["color_hist"])])
+
+
+def _calculate_texture_sim(ri, rj):
+    """
+    Calculate texture similarity using histogram intersection
+    """
+    return sum([min(a, b) for a, b in zip(ri["texture_hist"], rj["texture_hist"])])
+
+
+def _calculate_size_sim(ri, rj, imsize):
+    """
+    Size similarity encourages small regions to merge early, which
+    prevents a single region from engulfing all other blobs one by one.
+
+    size(ri, rj) = 1 - [size(ri) + size(rj)] / size(image)
+    """
+    return 1.0 - (ri['size'] + rj['size']) / imsize
+
+
+def _calculate_fill_sim(ri, rj, imsize):
+    """
+    Fill similarity measures how well ri and rj fit into each other.
+    BBij is the bounding box around ri and rj.
+
+    fill(ri, rj) = 1 - [size(BBij) - size(ri) - size(rj)] / size(image)
+    """
+
+    bbsize = (max(ri['box'][2], rj['box'][2]) - min(ri['box'][0], rj['box'][0])) * (max(ri['box'][3], rj['box'][3]) - min(ri['box'][1], rj['box'][1]))
+
+    return 1.0 - (bbsize - ri['size'] - rj['size']) / imsize
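A worked numeric check of the size and fill measures above, using two adjacent, fully filled 10x10 regions in a 400-pixel image (illustration only; assumes the helpers above are in scope):

```python
# boxes are (left, top, right, bottom)
ri = {'size': 100, 'box': (0, 0, 10, 10)}
rj = {'size': 100, 'box': (10, 0, 20, 10)}
imsize = 400

print(_calculate_size_sim(ri, rj, imsize))   # 1 - 200/400 = 0.5
# the joint bounding box is 20 x 10 = 200 pixels and is filled exactly,
# so fill similarity is maximal:
print(_calculate_fill_sim(ri, rj, imsize))   # 1 - (200 - 100 - 100)/400 = 1.0
```
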
+ """ + + BINS = 25 + if len(img.shape) == 2: + img = img.reshape(img.shape[0], img.shape[1], 1) + + channel_nums = img.shape[2] + hist = np.array([]) + + for channel in range(channel_nums): + layer = img[:, :, channel][mask] + hist = np.concatenate([hist] + [np.histogram(layer, BINS)[0]]) + + # L1 normalize + hist = hist / np.sum(hist) + + return hist + + +def generate_lbp_image(img): + + if len(img.shape) == 2: + img = img.reshape(img.shape[0], img.shape[1], 1) + channel_nums = img.shape[2] + + lbp_img = np.zeros(img.shape) + for channel in range(channel_nums): + layer = img[:, :, channel] + lbp_img[:, :,channel] = local_binary_pattern(layer, 8, 1) + + return lbp_img + + +def calculate_texture_hist(mask, lbp_img): + """ + Use LBP for now, enlightened by AlpacaDB's implementation. + Plan to switch to Gaussian derivatives as the paper in future + version. + """ + + BINS = 10 + channel_nums = lbp_img.shape[2] + hist = np.array([]) + + for channel in range(channel_nums): + layer = lbp_img[:, :, channel][mask] + hist = np.concatenate([hist] + [np.histogram(layer, BINS)[0]]) + + # L1 normalize + hist = hist / np.sum(hist) + + return hist + + +def calculate_sim(ri, rj, imsize, sim_strategy): + """ + Calculate similarity between region ri and rj using diverse + combinations of similarity measures. + C: color, T: texture, S: size, F: fill. + """ + sim = 0 + + if 'C' in sim_strategy: + sim += _calculate_color_sim(ri, rj) + if 'T' in sim_strategy: + sim += _calculate_texture_sim(ri, rj) + if 'S' in sim_strategy: + sim += _calculate_size_sim(ri, rj, imsize) + if 'F' in sim_strategy: + sim += _calculate_fill_sim(ri, rj, imsize) + + return sim + diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/structure.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/structure.py new file mode 100644 index 0000000000000000000000000000000000000000..fa0d5f438d5b48efd12c8ea5ca45b1021a20029f --- /dev/null +++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/structure.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python +# coding=utf-8 + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2022 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/structure.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/structure.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa0d5f438d5b48efd12c8ea5ca45b1021a20029f
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/structure.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+from npu_bridge.npu_init import *
+import numpy as np
+from skimage.segmentation import find_boundaries
+from skimage.segmentation import felzenszwalb
+from scipy.ndimage import find_objects
+from . import measure
+
+
+class HierarchicalGrouping(object):
+    def __init__(self, img, img_seg, sim_strategy):
+        self.img = img
+        self.sim_strategy = sim_strategy
+        self.img_seg = img_seg.copy()
+        self.labels = np.unique(self.img_seg).tolist()
+
+    def build_regions(self):
+        self.regions = {}
+        lbp_img = measure.generate_lbp_image(self.img)
+        for label in self.labels:
+            # area of this region in pixels
+            size = (self.img_seg == label).sum()
+            region_slice = find_objects(self.img_seg == label)[0]
+            box = tuple([region_slice[i].start for i in (1, 0)] +
+                        [region_slice[i].stop for i in (1, 0)])
+
+            mask = self.img_seg == label
+            color_hist = measure.calculate_color_hist(mask, self.img)
+            texture_hist = measure.calculate_texture_hist(mask, lbp_img)
+
+            self.regions[label] = {
+                'size': size,
+                'box': box,
+                'color_hist': color_hist,
+                'texture_hist': texture_hist
+            }
+
+    def build_region_pairs(self):
+        self.s = {}
+        for i in self.labels:
+            neighbors = self._find_neighbors(i)
+            for j in neighbors:
+                if i < j:
+                    self.s[(i, j)] = measure.calculate_sim(self.regions[i],
+                                                           self.regions[j],
+                                                           self.img.size,
+                                                           self.sim_strategy)
+
+    def _find_neighbors(self, label):
+        """
+        Parameters
+        ----------
+        label : int
+            label of the region
+        Returns
+        -------
+        neighbors : list
+            list of labels of neighbors
+        """
+
+        boundary = find_boundaries(self.img_seg == label,
+                                   mode='outer')
+        neighbors = np.unique(self.img_seg[boundary]).tolist()
+
+        return neighbors
+
+    def get_highest_similarity(self):
+        return sorted(self.s.items(), key=lambda i: i[1])[-1][0]
+
+    def merge_region(self, i, j):
+
+        # generate a unique label and put it in the label list
+        new_label = max(self.labels) + 1
+        self.labels.append(new_label)
+
+        # merge blobs and update the blob set
+        ri, rj = self.regions[i], self.regions[j]
+
+        new_size = ri['size'] + rj['size']
+        new_box = (min(ri['box'][0], rj['box'][0]),
+                   min(ri['box'][1], rj['box'][1]),
+                   max(ri['box'][2], rj['box'][2]),
+                   max(ri['box'][3], rj['box'][3]))
+        value = {
+            'box': new_box,
+            'size': new_size,
+            'color_hist':
+                (ri['color_hist'] * ri['size']
+                 + rj['color_hist'] * rj['size']) / new_size,
+            'texture_hist':
+                (ri['texture_hist'] * ri['size']
+                 + rj['texture_hist'] * rj['size']) / new_size,
+        }
+
+        self.regions[new_label] = value
+
+        # update the segmentation mask
+        self.img_seg[self.img_seg == i] = new_label
+        self.img_seg[self.img_seg == j] = new_label
+
+    def remove_similarities(self, i, j):
+
+        # mark keys for region pairs to be removed
+        key_to_delete = []
+        for key in self.s.keys():
+            if (i in key) or (j in key):
+                key_to_delete.append(key)
+
+        for key in key_to_delete:
+            del self.s[key]
+
+        # remove old labels from the label list
+        self.labels.remove(i)
+        self.labels.remove(j)
+
+    def calculate_similarity_for_new_region(self):
+        i = max(self.labels)
+        neighbors = self._find_neighbors(i)
+
+        for j in neighbors:
+            # i is larger than j, so use (j, i) instead
+            self.s[(j, i)] = measure.calculate_sim(self.regions[i],
+                                                   self.regions[j],
+                                                   self.img.size,
+                                                   self.sim_strategy)
+
+    def is_empty(self):
+        return not self.s
+
+    def num_regions(self):
+        return len(self.s)
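`merge_region` combines the two regions' histograms as a size-weighted average, so the merged histogram stays L1-normalised. A tiny self-contained numeric check (illustration only):

```python
import numpy as np

hist_i, size_i = np.array([1.0, 0.0]), 30
hist_j, size_j = np.array([0.0, 1.0]), 10
merged = (hist_i * size_i + hist_j * size_j) / (size_i + size_j)
print(merged, merged.sum())   # [0.75 0.25] 1.0 -- weighted by region area
```
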
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/util.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..35582ba3935d1970074f1e386071928b3806d208
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/selective_search/util.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+from npu_bridge.npu_init import *
+import numpy as np
+from itertools import product
+
+from skimage.segmentation import felzenszwalb
+from skimage.color import rgb2hsv, rgb2lab, rgb2grey
+
+
+def oversegmentation(img, k):
+    """
+    Generate the starting regions using the method of Felzenszwalb.
+    k effectively sets a scale of observation: a larger k causes a
+    preference for larger components.
+    sigma = 0.8, as used in the original paper.
+    min_size = 100 follows Keon's Matlab implementation.
+    """
+    img_seg = felzenszwalb(img, scale=k, sigma=0.8, min_size=100)
+
+    return img_seg
+
+
+def switch_color_space(img, target):
+    """
+    RGB to target color space conversion.
+    I: the intensity (grey scale), Lab, rgI: the rg channels of
+    normalized RGB plus intensity, HSV, H: the Hue channel H from HSV
+    """
+
+    if target == 'HSV':
+        return rgb2hsv(img)
+
+    elif target == 'Lab':
+        return rgb2lab(img)
+
+    elif target == 'I':
+        return rgb2grey(img)
+
+    elif target == 'rgb':
+        # normalise each pixel by the sum of its channels
+        img = img / np.sum(img, axis=2, keepdims=True)
+        return img
+
+    elif target == 'rgI':
+        img = img / np.sum(img, axis=2, keepdims=True)
+        img[:, :, 2] = rgb2grey(img)
+        return img
+
+    elif target == 'H':
+        return rgb2hsv(img)[:, :, 0]
+
+    else:
+        raise ValueError("{} is not supported.".format(target))
+
+
+def load_strategy(mode):
+    # TODO: Add mode sanity check
+
+    cfg = {
+        "single": {
+            "ks": [100],
+            "colors": ["HSV"],
+            "sims": ["CTSF"]
+        },
+        "lab": {
+            "ks": [100],
+            "colors": ["Lab"],
+            "sims": ["CTSF"]
+        },
+        "fast": {
+            "ks": [50, 100],
+            "colors": ["HSV", "Lab"],
+            "sims": ["CTSF", "TSF"]
+        },
+        "quality": {
+            "ks": [50, 100, 150, 300],
+            "colors": ["HSV", "Lab", "I", "rgI", "H"],
+            "sims": ["CTSF", "TSF", "F", "S"]
+        }
+    }
+
+    if isinstance(mode, dict):
+        cfg['manual'] = mode
+        mode = 'manual'
+
+    colors, ks, sims = cfg[mode]['colors'], cfg[mode]['ks'], cfg[mode]['sims']
+
+    return product(colors, ks, sims)
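`load_strategy` returns the Cartesian product of the configured colour spaces, k values, and similarity sets, so each tuple drives one `selective_search_one` run. For example (illustration only; assumes `load_strategy` above is in scope):

```python
for color, k, sim in load_strategy('fast'):
    print(color, k, sim)
# 'fast' is 2 colour spaces x 2 k values x 2 similarity sets,
# i.e. 8 independent runs in total.
```
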
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/train.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/train.py
new file mode 100644
index 0000000000000000000000000000000000000000..586159d303dca531d6eef767112cf66f2c5da0e8
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/train.py
@@ -0,0 +1,267 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+import tensorflow as tf
+from npu_bridge.npu_init import *
+
+import tensorflow.contrib.slim as slim
+
+import utils
+import os
+#import moxing as mox
+import numpy as np
+import argparse
+import network
+import loss
+import time
+
+# from tqdm import tqdm
+from guided_filter import guided_filter
+
+os.environ["CUDA_VISIBLE_DEVICES"] = "0"
+
+
+# modelarts modification------------------------------
+
+
+def arg_parser():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--patch_size", default=256, type=int)
+    parser.add_argument("--batch_size", default=16, type=int)
+    parser.add_argument("--total_iter", default=100000, type=int)
+    parser.add_argument("--adv_train_lr", default=2e-4, type=float)
+    parser.add_argument("--gpu_fraction", default=0.5, type=float)
+    parser.add_argument("--save_dir", default='train_cartoon', type=str)
+    # note: any value passed on the command line is parsed as a non-empty
+    # string and is therefore truthy; leave the flag unset to keep False
+    parser.add_argument("--use_enhance", default=False)
+    parser.add_argument("--data_path", default='/cache/dataset', type=str)
+    parser.add_argument("--REAL_PATH", default='/cache/dataset', type=str)
+    parser.add_argument("--output_path", default='/cache/output', type=str)
+
+    args = parser.parse_args()
+
+    return args
+
+
+def train(args):
+    # modelarts modification------------------------------
+    SAVE_DIR = os.path.join(args.output_path, "train_cartoon")
+
+    if not os.path.isdir(SAVE_DIR):
+        os.makedirs(SAVE_DIR)
+    REAL_PATH = args.REAL_PATH
+    print(REAL_PATH)
+    if not os.path.exists(REAL_PATH):
+        os.makedirs(REAL_PATH, 0o755)
+    #mox.file.copy_parallel(args.data_path, REAL_PATH)
+    print("training data copied to %s." % REAL_PATH)
+
+    input_photo = tf.placeholder(tf.float32, [args.batch_size,
+                                              args.patch_size, args.patch_size, 3])
+    input_superpixel = tf.placeholder(tf.float32, [args.batch_size,
+                                                   args.patch_size, args.patch_size, 3])
+    input_cartoon = tf.placeholder(tf.float32, [args.batch_size,
+                                                args.patch_size, args.patch_size, 3])
+
+    output = network.unet_generator(input_photo)
+    output = guided_filter(input_photo, output, r=1)
+
+    blur_fake = guided_filter(output, output, r=5, eps=2e-1)
+    blur_cartoon = guided_filter(input_cartoon, input_cartoon, r=5, eps=2e-1)
+
+    gray_fake, gray_cartoon = utils.color_shift(output, input_cartoon)
+
+    d_loss_gray, g_loss_gray = loss.lsgan_loss(network.disc_sn, gray_cartoon, gray_fake,
+                                               scale=1, patch=True, name='disc_gray')
+    d_loss_blur, g_loss_blur = loss.lsgan_loss(network.disc_sn, blur_cartoon, blur_fake,
+                                               scale=1, patch=True, name='disc_blur')
+
+    vgg_path = os.path.join(REAL_PATH, 'vgg19_no_fc.npy')
+    print(vgg_path)
+    vgg_model = loss.Vgg19(vgg_path)
+    vgg_photo = vgg_model.build_conv4_4(input_photo)
+    vgg_output = vgg_model.build_conv4_4(output)
+    vgg_superpixel = vgg_model.build_conv4_4(input_superpixel)
+    h, w, c = vgg_photo.get_shape().as_list()[1:]
+
+    photo_loss = tf.reduce_mean(tf.losses.absolute_difference(vgg_photo, vgg_output)) / (h * w * c)
+    superpixel_loss = tf.reduce_mean(tf.losses.absolute_difference \
+                                     (vgg_superpixel, vgg_output)) / (h * w * c)
+    recon_loss = photo_loss + superpixel_loss
+    tv_loss = loss.total_variation_loss(output)
+
+    g_loss_total = 1e4 * tv_loss + 1e-1 * g_loss_blur + g_loss_gray + 2e2 * recon_loss
+    d_loss_total = d_loss_blur + d_loss_gray
+
+    all_vars = tf.trainable_variables()
+    gene_vars = [var for var in all_vars if 'gene' in var.name]
+    disc_vars = [var for var in all_vars if 'disc' in var.name]
+
+    tf.summary.scalar('tv_loss', tv_loss)
+    tf.summary.scalar('photo_loss', photo_loss)
+    tf.summary.scalar('superpixel_loss', superpixel_loss)
+    tf.summary.scalar('recon_loss', recon_loss)
+    tf.summary.scalar('d_loss_gray', d_loss_gray)
+    tf.summary.scalar('g_loss_gray', g_loss_gray)
+    tf.summary.scalar('d_loss_blur', d_loss_blur)
+    tf.summary.scalar('g_loss_blur', g_loss_blur)
+    tf.summary.scalar('d_loss_total', d_loss_total)
+    tf.summary.scalar('g_loss_total', g_loss_total)
+
+    loss_scale_manager_g = ExponentialUpdateLossScaleManager(init_loss_scale=2 ** 32, incr_every_n_steps=1000,
+                                                             decr_every_n_nan_or_inf=2, decr_ratio=0.5)
+    loss_scale_manager_d = ExponentialUpdateLossScaleManager(init_loss_scale=2 ** 32, incr_every_n_steps=1000,
+                                                             decr_every_n_nan_or_inf=2, decr_ratio=0.5)
+
+    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
+    with tf.control_dependencies(update_ops):
+        g_optim = tf.train.AdamOptimizer(args.adv_train_lr, beta1=0.5, beta2=0.99)
+        g_optim = NPULossScaleOptimizer(g_optim, loss_scale_manager_g).minimize(g_loss_total, var_list=gene_vars)
+
+        d_optim = tf.train.AdamOptimizer(args.adv_train_lr, beta1=0.5, beta2=0.99)
+        d_optim = NPULossScaleOptimizer(d_optim, loss_scale_manager_d).minimize(d_loss_total, var_list=disc_vars)
+    '''
+    config = tf.ConfigProto()
+    config.gpu_options.allow_growth = True
+    sess = tf.Session(config=config)
+    '''
+    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=args.gpu_fraction)
+    config = tf.ConfigProto(gpu_options=gpu_options)
+    custom_op = config.graph_options.rewrite_options.custom_optimizers.add()
+    custom_op.name = "NpuOptimizer"
+    config.graph_options.rewrite_options.remapping = RewriterConfig.OFF  # must be disabled explicitly
+    config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF  # must be disabled explicitly
+
+    custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("force_fp32")
+
+    sess = tf.Session(config=config)
+
+    train_writer = tf.summary.FileWriter(SAVE_DIR + '/train_log')
+    summary_op = tf.summary.merge_all()
+    saver = tf.train.Saver(var_list=gene_vars, max_to_keep=20)
+
+    with tf.device('/cpu:0'):
+
+        sess.run([tf.global_variables_initializer()])
+        pretrain_dir = os.path.join(args.data_path, 'pretrain/saved_models')
+        print(pretrain_dir)
+        saver.restore(sess, tf.train.latest_checkpoint(pretrain_dir))
+
+        face_photo_dir = os.path.join(REAL_PATH, 'face_photo')
+        print("face photo dir = ", face_photo_dir)
+        face_photo_list = utils.load_image_list(face_photo_dir)
+        scenery_photo_dir = os.path.join(REAL_PATH, 'scenery_photo')
+        scenery_photo_list = utils.load_image_list(scenery_photo_dir)
+
+        face_cartoon_dir = os.path.join(REAL_PATH, 'face_cartoon')
+        face_cartoon_list = utils.load_image_list(face_cartoon_dir)
+        scenery_cartoon_dir = os.path.join(REAL_PATH, 'scenery_cartoon')
+        scenery_cartoon_list = utils.load_image_list(scenery_cartoon_dir)
+
+        for total_iter in range(args.total_iter):
+            if np.mod(total_iter, 5) == 0:
+                photo_batch = utils.next_batch(face_photo_list, args.batch_size)
+                cartoon_batch = utils.next_batch(face_cartoon_list, args.batch_size)
+            else:
+                photo_batch = utils.next_batch(scenery_photo_list, args.batch_size)
+                cartoon_batch = utils.next_batch(scenery_cartoon_list, args.batch_size)
+
+            start_time = time.time()
+            inter_out = sess.run(output, feed_dict={input_photo: photo_batch,
+                                                    input_superpixel: photo_batch,
+                                                    input_cartoon: cartoon_batch})
+
+            '''
+            adaptive coloring has to be applied with the clip_by_value
+            in the last layer of the generator network, which is not very stable.
+            to stably reproduce our results, please use power=1.0
+            and comment out the clip_by_value function in network.py first.
+            If this works, then try to use adaptive color with clip_by_value.
+            '''
+            if args.use_enhance:
+                superpixel_batch = utils.selective_adacolor(inter_out, power=1.2)
+            else:
+                superpixel_batch = utils.simple_superpixel(inter_out, seg_num=200)
+
+            lossScale = tf.get_default_graph().get_tensor_by_name("loss_scale:0")
+            l_s_g, _, g_loss, r_loss = sess.run([lossScale, g_optim, g_loss_total, recon_loss],
+                                                feed_dict={input_photo: photo_batch, input_superpixel: superpixel_batch,
+                                                           input_cartoon: cartoon_batch})
+
+            l_s_d, _, d_loss, train_info = sess.run([lossScale, d_optim, d_loss_total, summary_op],
+                                                    feed_dict={input_photo: photo_batch,
+                                                               input_superpixel: superpixel_batch,
+                                                               input_cartoon: cartoon_batch})
+
+            duration = (time.time() - start_time)
+            sec_per_batch = float(duration)  # wall-clock seconds per step
+            print("Iter: %d/%d , time_per_step %.3f" % (total_iter, args.total_iter, sec_per_batch))
+
+            train_writer.add_summary(train_info, total_iter)
+            if np.mod(total_iter + 1, 50) == 0:
+
+                # print('Iter: {}, loss_scale g: {}, loss_scale d: {}'.format(total_iter, l_s_g, l_s_d))
+                print('Iter: {}, d_loss: {}, g_loss: {}, recon_loss: {}'. \
+                      format(total_iter, d_loss, g_loss, r_loss))
+                if np.mod(total_iter + 1, 500) == 0:
+                    saver.save(sess, SAVE_DIR + '/saved_models/model',
+                               write_meta_graph=False, global_step=total_iter)
+
+                    photo_face = utils.next_batch(face_photo_list, args.batch_size)
+                    cartoon_face = utils.next_batch(face_cartoon_list, args.batch_size)
+                    photo_scenery = utils.next_batch(scenery_photo_list, args.batch_size)
+                    cartoon_scenery = utils.next_batch(scenery_cartoon_list, args.batch_size)
+
+                    result_face = sess.run(output, feed_dict={input_photo: photo_face,
+                                                              input_superpixel: photo_face,
+                                                              input_cartoon: cartoon_face})
+
+                    result_scenery = sess.run(output, feed_dict={input_photo: photo_scenery,
+                                                                 input_superpixel: photo_scenery,
+                                                                 input_cartoon: cartoon_scenery})
+
+                    utils.write_batch_image(result_face, SAVE_DIR + '/images',
+                                            str(total_iter) + '_face_result.jpg', 4)
+                    utils.write_batch_image(photo_face, SAVE_DIR + '/images',
+                                            str(total_iter) + '_face_photo.jpg', 4)
+
+                    utils.write_batch_image(result_scenery, SAVE_DIR + '/images',
+                                            str(total_iter) + '_scenery_result.jpg', 4)
+                    utils.write_batch_image(photo_scenery, SAVE_DIR + '/images',
+                                            str(total_iter) + '_scenery_photo.jpg', 4)
+
+        #mox.file.copy_parallel(CACHE_TRAINING_URL, args.output_path)
+        sess.close()
+
+
+if __name__ == '__main__':
+    args = arg_parser()
+    train(args)
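Going by the code above, `--data_path` is expected to contain `pretrain/saved_models` (the pretrained generator checkpoint) and the `REAL_PATH` directory must hold `vgg19_no_fc.npy` plus the `face_photo`, `scenery_photo`, `face_cartoon`, and `scenery_cartoon` folders. A hypothetical invocation with the default cache paths:

```
python train.py --data_path /cache/dataset --output_path /cache/output --total_iter 100000
```
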
diff --git a/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/utils.py b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..8752884f3b0ce64d55083302a28e84aa6f244594
--- /dev/null
+++ b/TensorFlow/contrib/cv/White_Box_Cartoonization_ID2089_for_TensorFlow/train_code/utils.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+from npu_bridge.npu_init import *
+
+
+from scipy.ndimage import filters
+from skimage import segmentation, color
+from joblib import Parallel, delayed
+from selective_search.util import switch_color_space
+from selective_search.structure import HierarchicalGrouping
+
+import os
+import cv2
+import numpy as np
+import scipy.stats as st
+import tensorflow as tf
+
+
+def color_shift(image1, image2, mode='uniform'):
+    b1, g1, r1 = tf.split(image1, num_or_size_splits=3, axis=3)
+    b2, g2, r2 = tf.split(image2, num_or_size_splits=3, axis=3)
+    if mode == 'normal':
+        # all three weights are TF ops so the graph can resample them per run
+        b_weight = tf.random.normal(shape=[1], mean=0.114, stddev=0.1)
+        g_weight = tf.random.normal(shape=[1], mean=0.587, stddev=0.1)
+        r_weight = tf.random.normal(shape=[1], mean=0.299, stddev=0.1)
+    elif mode == 'uniform':
+        b_weight = tf.random.uniform(shape=[1], minval=0.014, maxval=0.214)
+        g_weight = tf.random.uniform(shape=[1], minval=0.487, maxval=0.687)
+        r_weight = tf.random.uniform(shape=[1], minval=0.199, maxval=0.399)
+    output1 = (b_weight*b1+g_weight*g1+r_weight*r1)/(b_weight+g_weight+r_weight)
+    output2 = (b_weight*b2+g_weight*g2+r_weight*r2)/(b_weight+g_weight+r_weight)
+    return output1, output2
+
+
+def label2rgb(label_field, image, kind='mix', bg_label=-1, bg_color=(0, 0, 0)):
+
+    #std_list = list()
+    out = np.zeros_like(image)
+    labels = np.unique(label_field)
+    bg = (labels == bg_label)
+    if bg.any():
+        labels = labels[labels != bg_label]
+        mask = (label_field == bg_label).nonzero()
+        out[mask] = bg_color
+    for label in labels:
+        mask = (label_field == label).nonzero()
+        #std = np.std(image[mask])
+        #std_list.append(std)
+        if kind == 'avg':
+            color = image[mask].mean(axis=0)
+        elif kind == 'median':
+            color = np.median(image[mask], axis=0)
+        elif kind == 'mix':
+            std = np.std(image[mask])
+            if std < 20:
+                color = image[mask].mean(axis=0)
+            elif std < 40:
+                mean = image[mask].mean(axis=0)
+                median = np.median(image[mask], axis=0)
+                color = 0.5*mean + 0.5*median
+            else:
+                color = np.median(image[mask], axis=0)
+        out[mask] = color
+    return out
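Both outputs of `color_shift` are built from the same random channel weights, so within one `sess.run` the two images are greyed identically. A minimal TF1 sketch (illustration only; assumes `color_shift` above is in scope and a plain TensorFlow 1.x runtime):

```python
import numpy as np
import tensorflow as tf

x = tf.placeholder(tf.float32, [1, 4, 4, 3])
g1, g2 = color_shift(x, x, mode='uniform')

with tf.Session() as sess:
    batch = np.random.rand(1, 4, 4, 3).astype(np.float32)
    a, b = sess.run([g1, g2], feed_dict={x: batch})
    print(np.allclose(a, b))   # True: same weights, same input
```
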
+def color_ss_map(image, seg_num=200, power=1,
+                 color_space='Lab', k=10, sim_strategy='CTSF'):
+
+    img_seg = segmentation.felzenszwalb(image, scale=k, sigma=0.8, min_size=100)
+    img_cvtcolor = label2rgb(img_seg, image, kind='mix')
+    img_cvtcolor = switch_color_space(img_cvtcolor, color_space)
+    S = HierarchicalGrouping(img_cvtcolor, img_seg, sim_strategy)
+    S.build_regions()
+    S.build_region_pairs()
+
+    # Start hierarchical grouping
+
+    while S.num_regions() > seg_num:
+
+        i, j = S.get_highest_similarity()
+        S.merge_region(i, j)
+        S.remove_similarities(i, j)
+        S.calculate_similarity_for_new_region()
+
+    image = label2rgb(S.img_seg, image, kind='mix')
+    image = (image+1)/2
+    image = image**power
+    image = image/np.max(image)
+    image = image*2 - 1
+
+    return image
+
+
+def selective_adacolor(batch_image, seg_num=200, power=1):
+    num_job = np.shape(batch_image)[0]
+    batch_out = Parallel(n_jobs=num_job)(delayed(color_ss_map)\
+                                         (image, seg_num, power) for image in batch_image)
+    return np.array(batch_out)
+
+
+def simple_superpixel(batch_image, seg_num=200):
+
+    def process_slic(image):
+        seg_label = segmentation.slic(image, n_segments=seg_num, sigma=1,
+                                      compactness=10, convert2lab=True)
+        # use the local label2rgb defined above; skimage's color.label2rgb
+        # only supports the 'overlay' and 'avg' kinds, not 'mix'
+        image = label2rgb(seg_label, image, kind='mix')
+        return image
+
+    num_job = np.shape(batch_image)[0]
+    batch_out = Parallel(n_jobs=num_job)(delayed(process_slic)\
+                                         (image) for image in batch_image)
+    return np.array(batch_out)
+
+
+def load_image_list(data_dir):
+    name_list = list()
+    for name in os.listdir(data_dir):
+        name_list.append(os.path.join(data_dir, name))
+    name_list.sort()
+    return name_list
+
+
+def next_batch(filename_list, batch_size):
+    idx = np.arange(0, len(filename_list))
+    np.random.shuffle(idx)
+    idx = idx[:batch_size]
+    batch_data = []
+    for i in range(batch_size):
+        image = cv2.imread(filename_list[idx[i]])
+        image = image.astype(np.float32)/127.5 - 1
+        #image = image.astype(np.float32)/255.0
+        batch_data.append(image)
+
+    return np.asarray(batch_data)
+
+
+def write_batch_image(image, save_dir, name, n):
+
+    if not os.path.exists(save_dir):
+        os.makedirs(save_dir)
+
+    fused_dir = os.path.join(save_dir, name)
+    fused_image = [0] * n
+    for i in range(n):
+        fused_image[i] = []
+        for j in range(n):
+            k = i * n + j
+            image[k] = (image[k]+1) * 127.5
+            #image[k] = image[k] - np.min(image[k])
+            #image[k] = image[k]/np.max(image[k])
+            #image[k] = image[k] * 255.0
+            fused_image[i].append(image[k])
+        fused_image[i] = np.hstack(fused_image[i])
+    fused_image = np.vstack(fused_image)
+    cv2.imwrite(fused_dir, fused_image.astype(np.uint8))
+
+
+if __name__ == '__main__':
+    pass
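A hypothetical smoke test for the batching helpers: `next_batch` yields images scaled to [-1, 1], and `write_batch_image` tiles an n x n grid and maps back to [0, 255] before saving (the output path is illustrative only):

```python
import numpy as np

batch = np.random.rand(16, 256, 256, 3) * 2 - 1            # stand-in for next_batch output
write_batch_image(batch, '/tmp/wbc_debug', 'grid.jpg', 4)  # 4 x 4 grid -> /tmp/wbc_debug/grid.jpg
```
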
diff --git a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/README.md b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/README.md
index 125090097c7ff7b94097ef2618aaa277522de077..c7e754b4c2f4ffae4c5410f1632697e76eceee5b 100644
--- a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/README.md
+++ b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/README.md
@@ -162,7 +162,7 @@
   1. Configure the training parameters.
 
-     First, configure the parameters in train.py.
+     Configure the parameters in train.py.
 
      ```
      classes_path points to voc_classes.txt under model_data
@@ -171,7 +171,18 @@
      val_annotation_path points to 2007_val.txt'
      ```
 
-  2. Start training.
+  2. Configure the test parameters.
+
+     Configure the parameters in yolo.py.
+     ```
+     model_path points to the trained model
+     ```
+     Configure the parameters in get_map.py.
+     ```
+     VOCdevkit_path points to the VOC dataset location
+     ```
+
+  3. Start training and testing.
 
      ```
      bash train_full_1p.sh
      ```
diff --git a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/fusion_result.json b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/fusion_result.json
deleted file mode 100644
index bb2fbca3d4ee5dc86205264ea67a7d338e02a8af..0000000000000000000000000000000000000000
--- a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/fusion_result.json
+++ /dev/null
@@ -1,1821 +0,0 @@
-[1,821 lines of auto-generated graph-fusion statistics, one near-identical block per session_and_graph_id, deleted with the file]
"FusedBatchNormGradFusionPass": { - "effect_times": "118", - "match_times": "236" - }, - "MulAddFusionPass": { - "effect_times": "0", - "match_times": "222" - }, - "MulAddNL2LossFusionPass": { - "effect_times": "0", - "match_times": "266" - }, - "MulAddNPass": { - "effect_times": "0", - "match_times": "266" - }, - "MulGradFusionPass": { - "effect_times": "0", - "match_times": "12" - }, - "MulSquareFusionPass": { - "effect_times": "0", - "match_times": "1196" - }, - "PadConv2dFusionPass": { - "effect_times": "12", - "match_times": "12" - }, - "Pow2SquareFusionPass": { - "effect_times": "6", - "match_times": "12" - }, - "RealDiv2MulsFusionPass": { - "effect_times": "0", - "match_times": "114" - }, - "RefreshInt64ToInt32FusionPass": { - "effect_times": "2", - "match_times": "2" - }, - "SingleBatchNormFusion": { - "effect_times": "118", - "match_times": "236" - }, - "SplitConvConcatFusionPass": { - "effect_times": "0", - "match_times": "34" - }, - "SquareSumV1": { - "effect_times": "124", - "match_times": "124" - }, - "SquareSumV2": { - "effect_times": "0", - "match_times": "136" - }, - "StridedSliceGradFusionPass": { - "effect_times": "0", - "match_times": "72" - }, - "StridedSliceRemovePass": { - "effect_times": "0", - "match_times": "110" - }, - "SubFusionPass": { - "effect_times": "0", - "match_times": "556" - }, - "TileConstToAttrFusion": { - "effect_times": "24", - "match_times": "24" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "5373" - }, - "ZConcatExt2FusionPass": { - "effect_times": "0", - "match_times": "34" - } - }, - "session_and_graph_id": "0_61", - "ub_fusion": { - "AutomaticUbFusion": { - "effect_times": "536", - "match_times": "540" - }, - "TbeMultiOutputFusionPass": { - "effect_times": "160", - "match_times": "162" - } - } -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_611" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_621" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_631" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_641" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_651" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_661" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_671" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": 
"0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_681" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_691" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_701" -}{ - "graph_fusion": { - "AABiasaddConvFusion": { - "effect_times": "3", - "match_times": "3" - }, - "AReduceMeanFusionPass": { - "effect_times": "0", - "match_times": "3" - }, - "AReduceSumFusionPass": { - "effect_times": "0", - "match_times": "77" - }, - "AddNFusionPass": { - "effect_times": "0", - "match_times": "1" - }, - "BatchNorm3DFusionPass": { - "effect_times": "0", - "match_times": "59" - }, - "BatchNormBnInferFusionPass": { - "effect_times": "59", - "match_times": "59" - }, - "BatchNormPreprocessFusionPass": { - "effect_times": "59", - "match_times": "59" - }, - "ConcatCToNOptimizeFusionPass": { - "effect_times": "0", - "match_times": "17" - }, - "ConstToAttrPass": { - "effect_times": "5", - "match_times": "5" - }, - "ConstToAttrReduceSumFusion": { - "effect_times": "77", - "match_times": "77" - }, - "ConstToAttrStridedSliceFusion": { - "effect_times": "55", - "match_times": "55" - }, - "ConvBatchnormFusionPass": { - "effect_times": "0", - "match_times": "59" - }, - "ConvConcatFusionPass": { - "effect_times": "0", - "match_times": "17" - }, - "ConvToFullyConnectionFusionPass": { - "effect_times": "0", - "match_times": "62" - }, - "ConvWeightCompressFusionPass": { - "effect_times": "0", - "match_times": "62" - }, - "FIXPIPEAPREQUANTFUSIONPASS": { - "effect_times": "0", - "match_times": "62" - }, - "FIXPIPEFUSIONPASS": { - "effect_times": "0", - "match_times": "62" - }, - "FusedBatchNormBertFusionPass": { - "effect_times": "0", - "match_times": "59" - }, - "MulAddFusionPass": { - "effect_times": "0", - "match_times": "111" - }, - "MulGradFusionPass": { - "effect_times": "0", - "match_times": "6" - }, - "MulSquareFusionPass": { - "effect_times": "0", - "match_times": "180" - }, - "PadConv2dFusionPass": { - "effect_times": "6", - "match_times": "6" - }, - "Pow2SquareFusionPass": { - "effect_times": "3", - "match_times": "3" - }, - "RealDiv2MulsFusionPass": { - "effect_times": "0", - "match_times": "18" - }, - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "SingleBatchNormFusion": { - "effect_times": "0", - "match_times": "59" - }, - "SplitConvConcatFusionPass": { - "effect_times": "0", - "match_times": "17" - }, - "SquareSumV1": { - "effect_times": "65", - "match_times": "65" - }, - "SquareSumV2": { - "effect_times": "0", - "match_times": "65" - }, - "StridedSliceRemovePass": { - "effect_times": "0", - "match_times": "55" - }, - "SubFusionPass": { - "effect_times": "0", - "match_times": "36" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "596" - }, - "ZConcatExt2FusionPass": { - "effect_times": "0", - "match_times": "17" - } - }, - "session_and_graph_id": "0_71", - "ub_fusion": { - "AutomaticUbFusion": { - "effect_times": "92", - "match_times": "93" - }, - "TbeEltwiseFusionPass": { - "effect_times": "3", - "match_times": "3" - }, - "TbeMultiOutputFusionPass": { - "effect_times": "77", - "match_times": "77" - } - } -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - 
"match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_711" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_721" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_731" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_741" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_751" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_761" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_771" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_781" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_791" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_801" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - } - }, - "session_and_graph_id": "0_81" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_811" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_821" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_831" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_841" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_851" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - 
"effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_861" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_871" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_881" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_891" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_901" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_91" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_911" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_921" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_931" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_941" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_951" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_961" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "1" - } - }, - "session_and_graph_id": "0_971" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_981" -}{ - "graph_fusion": { - "RefreshInt64ToInt32FusionPass": { - "effect_times": "1", - "match_times": "1" - }, - "TransdataCastFusionPass": { - "effect_times": "0", - "match_times": "4" - } - }, - "session_and_graph_id": "0_991" -} \ No newline at end of file diff --git a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/get_map.py b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/get_map.py index 
index 332fe1662f87c1dd054e78b41fadd02b2ac8a086..7fc37fe55a8a48df32ccddd7be4ec0280f6ab77e 100644 --- a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/get_map.py +++ b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/get_map.py
@@ -26,6 +26,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import os
+import argparse
+
 import xml.etree.ElementTree as ET
 from PIL import Image
@@ -36,6 +38,11 @@
 from utils.utils import get_classes
 from utils.utils_map import get_coco_map, get_map
 if __name__ == "__main__":
+    # Parse the data_url input argument
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--data_url", type=str, default="/home/ma-user/modelarts/inputs/data_url_0")
+    config = parser.parse_args()
+
 '''
 Unlike AP, Recall and Precision are not area-under-curve metrics: their values change as the confidence threshold changes.
 The Recall and Precision reported by the mAP computation are the values obtained at a prediction confidence threshold of 0.5.
@@ -70,7 +77,7 @@ if __name__ == "__main__":
 # Points to the folder containing the VOC dataset
 # Defaults to the VOC dataset under the root directory
 #-------------------------------------------------------#
-    VOCdevkit_path = 'VOCdevkit'
+    VOCdevkit_path = config.data_url + '/VOCdevkit/'
 #-------------------------------------------------------#
 # Folder for the output results; defaults to map_out
 #-------------------------------------------------------#
diff --git a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/model_data/simhei.ttf b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/model_data/simhei.ttf new file mode 100644 index 0000000000000000000000000000000000000000..5bd4687e7212775e23bea569f08fdd1cd7395dc3 Binary files /dev/null and b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/model_data/simhei.ttf differ
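As a usage sketch of the flag added above (illustrative only, not part of the repository; os.path.join is shown as a more robust alternative to the string concatenation used in the hunk):

import argparse
import os

# Mirror of the added flag: on ModelArts the dataset is mounted under
# /home/ma-user/modelarts/inputs/data_url_0 by default.
parser = argparse.ArgumentParser()
parser.add_argument("--data_url", type=str,
                    default="/home/ma-user/modelarts/inputs/data_url_0")
config = parser.parse_args()

# Resolve the VOC devkit relative to the mounted dataset rather than the CWD.
VOCdevkit_path = os.path.join(config.data_url, "VOCdevkit")
print(VOCdevkit_path)

Invoked as, e.g., python3 get_map.py --data_url=/data/voc.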
diff --git a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/test/train_full_1p.sh new file mode 100644 index 0000000000000000000000000000000000000000..187030313808963456562bf6009350049b6a3504 --- /dev/null +++ b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/test/train_full_1p.sh @@ -0,0 +1,189 @@
+#!/bin/bash
+
+##########################################################
+######### Do NOT modify lines 3 through 100 #########
+######### Do NOT modify lines 3 through 100 #########
+######### Do NOT modify lines 3 through 100 #########
+##########################################################
+# Path where this shell script resides
+cur_path=`echo $(cd $(dirname $0);pwd)`
+
+# Check whether this script is the performance variant
+perf_flag=`echo $0 | grep performance | wc -l`
+
+# Name of the network currently being executed
+Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'`
+
+export RANK_SIZE=1
+export RANK_ID=0
+export JOB_ID=10087
+
+# Initialize the path parameters
+data_path=""
+output_path=""
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage:./train_full_1p.sh "
+    echo " "
+    echo "parameter explain:
+    --data_path              # dataset of training
+    --output_path            # output of training
+    --train_steps            # max_step for training
+    --train_epochs           # max_epoch for training
+    --batch_size             # batch size
+    -h/--help                show help message
+    "
+    exit 1
+fi
+
+# Argument parsing; no modification needed
+for para in $*
+do
+    if [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --output_path* ]];then
+        output_path=`echo ${para#*=}`
+    elif [[ $para == --train_steps* ]];then
+        train_steps=`echo ${para#*=}`
+    elif [[ $para == --train_epochs* ]];then
+        train_epochs=`echo ${para#*=}`
+    elif [[ $para == --batch_size* ]];then
+        batch_size=`echo ${para#*=}`
+    fi
+done
+
+# Verify that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be configured"
+    exit 1
+fi
+
+# Fall back if output_path was not passed in; no modification needed
+if [[ $output_path == "" ]];then
+    output_path="./test/output/${ASCEND_DEVICE_ID}"
+fi
+
+# Set the console log file name; keep this — the file name is ${print_log}
+print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log"
+modelarts_flag=${MODELARTS_MODEL_PATH}
+if [ x"${modelarts_flag}" != x ];
+then
+    echo "running without etp..."
+    print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank`
+    print_log="/home/ma-user/modelarts/log/${print_log_name}"
+fi
+echo "### get your log here : ${print_log}"
+
+CaseName=""
+function get_casename()
+{
+    if [ x"${perf_flag}" = x1 ];
+    then
+        CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf'
+    else
+        CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc'
+    fi
+}
+
+# Change into the code directory
+cd ${cur_path}/../
+rm -rf ./test/output/${ASCEND_DEVICE_ID}
+mkdir -p ./test/output/${ASCEND_DEVICE_ID}
+
+# Record the training start time; no modification needed
+start_time=$(date +%s)
+##########################################################
+######### Do NOT modify lines 3 through 100 #########
+######### Do NOT modify lines 3 through 100 #########
+######### Do NOT modify lines 3 through 100 #########
+##########################################################
+
+#=========================================================
+#=========================================================
+#======== Training command: adapt this to your network ========
+#=========================================================
+#=========================================================
+# Basic parameters; review and adjust for your model
+# Your training dataset is under ${data_path}; use this variable directly
+# Your training output directory is ${output_path}; use this variable directly
+# Other basic parameters may be added as needed, but keep batch_size and set it to the correct value
+batch_size=8
+
+sed -i s#"/home/dingwei/yolov5"#"${data_path}"#g ./2007_train.txt
+sed -i s#"/home/dingwei/yolov5"#"${data_path}"#g ./2007_val.txt
+
+if [ x"${modelarts_flag}" != x ];
+then
+    python3.7 ./train.py --freeze_flag=0
+    python3.7 ./get_map.py --data_url=${data_path}
+else
+    python3.7 ./train.py --freeze_flag=0 1>${print_log} 2>&1
+    python3.7 ./get_map.py --data_url=${data_path} 1>>${print_log} 2>&1
+fi
+
+# Performance metrics
+#StepTime=`grep "each step time" ${print_log} | tail -n 10 | awk '{print $NF}' | awk '{sum+=$1} END {print sum/NR}'`
+#FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'`
+
+# Accuracy metrics
+train_accuracy=`grep "mAP =" ${print_log} | awk '{print $NF}'`
+# Extract all printed loss values
+grep "loss:" ${print_log} | awk '{print $NF}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt
+
+
+###########################################################
+######### Do not modify anything below this point #########
+######### Do not modify anything below this point #########
+######### Do not modify anything below this point #########
+###########################################################
+
+# Check whether this run actually used the Ascend NPU
+use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l`
+if [ x"${use_npu_flag}" == x0 ];
+then
+    echo "------------------ ERROR NOTICE START ------------------"
+    echo "ERROR, your task hasn't used the Ascend NPU, please check your NPU migration."
+    echo "------------------ ERROR NOTICE END------------------"
+else
+    echo "------------------ INFO NOTICE START------------------"
+    echo "INFO, your task has used the Ascend NPU, please check your result."
+    echo "------------------ INFO NOTICE END------------------"
+fi
+
+# Get the final casename; keep this — the case file name is ${CaseName}
+get_casename
+
+# Rename the loss file
+if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ];
+then
+    mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt
+fi
+
+# End-to-end training time
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+echo "------------------ Final result ------------------"
+# Print performance: FPS / time per step / end-to-end time
+#echo "Final Performance images/sec : $FPS"
+#echo "Final Performance sec/step : $StepTime"
+echo "E2E Training Duration sec : $e2e_time"
+
+# Print the training accuracy
+echo "Final Train Accuracy : ${train_accuracy}"
+
+# Loss value of the last iteration; no modification needed
+ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`)
+
+# Print key information into ${CaseName}.log; no modification needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
"[Error] para \"data_path\" must be config" + exit 1 +fi + +# 校验是否传入output_path,不需要修改 +if [[ $output_path == "" ]];then + output_path="./test/output/${ASCEND_DEVICE_ID}" +fi + +# 设置打屏日志文件名,请保留,文件名为${print_log} +print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log" +modelarts_flag=${MODELARTS_MODEL_PATH} +if [ x"${modelarts_flag}" != x ]; +then + echo "running without etp..." + print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank` + print_log="/home/ma-user/modelarts/log/${print_log_name}" +fi +echo "### get your log here : ${print_log}" + +CaseName="" +function get_casename() +{ + if [ x"${perf_flag}" = x1 ]; + then + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf' + else + CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc' + fi +} + +# 跳转到code目录 +cd ${cur_path}/../ +rm -rf ./test/output/${ASCEND_DEVICE_ID} +mkdir -p ./test/output/${ASCEND_DEVICE_ID} + +# 训练开始时间记录,不需要修改 +start_time=$(date +%s) +########################################################## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +#########第3行 至 100行,请一定不要、不要、不要修改########## +########################################################## + +#========================================================= +#========================================================= +#========训练执行命令,需要根据您的网络进行修改============== +#========================================================= +#========================================================= +# 基础参数,需要模型审视修改 +# 您的训练数据集在${data_path}路径下,请直接使用这个变量获取 +# 您的训练输出目录在${output_path}路径下,请直接使用这个变量获取 +# 您的其他基础参数,可以自定义增加,但是batch_size请保留,并且设置正确的值 +batch_size=8 + +sed -i s#"/home/dingwei/yolov5"#"${data_path}"#g ./2007_train.txt +sed -i s#"/home/dingwei/yolov5"#"${data_path}"#g ./2007_val.txt + +if [ x"${modelarts_flag}" != x ]; +then + python3.7 ./train.py --epochs=8 --steps=48 --freeze_flag=0 +else + python3.7 ./train.py --epochs=8 --steps=48 --freeze_flag=0 1>${print_log} 2>&1 +fi + +# 性能相关数据计算 +StepTime=`grep "48/48" ${print_log} | grep -v "val_loss" | tail -n 5 | awk -F"48/48" '{print $2}' | awk '{print $4}' | awk -F"ms" '{print $1/1000}' | awk '{sum+=$1} END {print sum/NR}'` +FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'` + +# 提取所有loss打印信息 +grep "loss:" ${print_log} | awk '{print $NF}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt + + +########################################################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +#########后面的所有内容请不要修改########################### +########################################################### + +# 判断本次执行是否正确使用Ascend NPU +use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l` +if [ x"${use_npu_flag}" == x0 ]; +then + echo "------------------ ERROR NOTICE START ------------------" + echo "ERROR, your task haven't used Ascend NPU, please check your npu Migration." + echo "------------------ ERROR NOTICE END------------------" +else + echo "------------------ INFO NOTICE START------------------" + echo "INFO, your task have used Ascend NPU, please check your result." 
+ echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log diff --git a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/train.py b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/train.py index 52f68d694e3136267f1c13b981cba9c8c2851aab..4ffa36124e39f98dc6e1925db27808c95703a7ce 100644 --- a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/train.py +++ b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/train.py @@ -1,35 +1,7 @@ -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================ -# Copyright 2021 Huawei Technologies Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/train.py b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/train.py index 52f68d694e3136267f1c13b981cba9c8c2851aab..4ffa36124e39f98dc6e1925db27808c95703a7ce 100644 --- a/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/train.py +++ b/TensorFlow/contrib/cv/YOLOV5_ID0378_for_TensorFlow/train.py @@ -1,35 +1,7 @@
-# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============================================================================
-# Copyright 2021 Huawei Technologies Co., Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from npu_bridge.npu_init import *
 import datetime
 import os
-import tensorflow as tf
-from tensorflow.python.keras import backend as K
+
+import tensorflow.keras.backend as K
 from tensorflow.keras.callbacks import (EarlyStopping, LearningRateScheduler,
 ModelCheckpoint, TensorBoard)
 from tensorflow.keras.layers import Conv2D, Dense, DepthwiseConv2D
@@ -42,29 +14,19 @@
 from utils.callbacks import LossHistory
 from utils.dataloader import YoloDatasets
 from utils.utils import get_anchors, get_classes
 from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig
+from npu_bridge.npu_init import *
 
-'''
-When training your own object detection model, be sure to note the following:
-1. Before training, check carefully that your data meet the required format. This repo expects the VOC format: input images plus labels.
-   Input images are .jpg files; no fixed size is required — they are resized automatically before training.
-   Grayscale images are converted to RGB automatically for training; no manual change is needed.
-   If the input images are not .jpg, batch-convert them to jpg before starting training.
-
-   Labels are in .xml format and contain the objects to detect; each label file corresponds to an input image.
+import argparse
 
-2. Trained weight files are saved in the logs folder, once per epoch; nothing is saved after only a few steps — keep the concepts of epoch and step straight.
-   The code does not keep only the lowest-loss weights, so a full default run leaves 100 weight files; delete some if disk space is short.
-   Saving fewer is not inherently better, nor is saving more; keeping everything offers the most choice, which satisfies most needs.
+if __name__ == "__main__":
 
-3. The loss value is used to judge convergence; what matters is the trend, i.e. the validation loss keeps falling. If the validation loss barely changes, the model has essentially converged.
-   The absolute loss value carries no particular meaning; large or small only reflects how the loss is computed, and it need not be close to 0. To make the loss look nicer, you can divide by 10000 inside the corresponding loss function.
-   Loss values during training are saved under the logs folder in the loss_%Y_%m_%d_%H_%M_%S folder
+    # Parse the input arguments
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--epochs", type=int, default=120)
+    parser.add_argument("--steps", type=int, default=-1)
+    parser.add_argument("--freeze_flag", type=int, default=1)
+    config = parser.parse_args()
 
-4. Hyperparameter tuning is a real skill; no parameters are guaranteed good. The current ones are parameters I have verified to train normally, so I recommend keeping them.
-   Parameters are not absolute, though: for example, the learning rate can grow as the batch size grows, with somewhat better results; very deep networks should not use too large a learning rate, and so on.
-   All of this is empirical — you will have to consult references and experiment yourselves.
-'''
-def main():
 #---------------------------------------------------------------------#
 # classes_path points to a txt under model_data, tied to your own training dataset
 # Be sure to modify classes_path before training so that it matches your dataset
 #---------------------------------------------------------------------#
@@ -95,7 +57,6 @@ def main():
 # You can set mosaic=True and train from randomly initialized weights, but the result is still worse than with pretraining. (Large datasets like COCO can be trained this way.)
 # 2. Get familiar with the ImageNet dataset: first train a classification model to obtain backbone weights; the classifier's backbone is shared with this model, and training builds on it.
 #----------------------------------------------------------------------------------------------------------------------------#
-    # model_path = 'model_data/yolov5_s.h5'
 model_path = ''
 #------------------------------------------------------#
 # input_shape  the input shape; must be a multiple of 32
 #------------------------------------------------------#
@@ -147,7 +108,7 @@
 # (ignored when Freeze_Train=False)
 #------------------------------------------------------------------#
 Init_Epoch = 0
-    Freeze_Epoch = 1
+    Freeze_Epoch = 50
 Freeze_batch_size = 16
 #------------------------------------------------------------------#
 # Training parameters for the unfreezing stage
 #
 # UnFreeze_Epoch        total number of epochs the model trains for
 # Unfreeze_batch_size   the model's batch_size after unfreezing
 #------------------------------------------------------------------#
-    UnFreeze_Epoch = 120
+    UnFreeze_Epoch = config.epochs
 Unfreeze_batch_size = 8
 #------------------------------------------------------------------#
 # Freeze_Train  whether to use freeze training
 # By default the backbone is trained frozen first and then unfrozen.
 # If you set Freeze_Train=False, SGD is the recommended optimizer
 #------------------------------------------------------------------#
-    Freeze_Train = False
+    if config.freeze_flag != 0:
+        Freeze_Train = True
+    else:
+        Freeze_Train = False
+
 #------------------------------------------------------------------#
 # Other training parameters: learning rate, optimizer, and LR decay
 #------------------------------------------------------------------#
@@ -182,6 +146,7 @@
 # With the SGD optimizer, Init_lr=1e-2 is recommended
 # momentum      the momentum parameter used inside the optimizer
 # weight_decay  weight decay, helps prevent overfitting
+    # Adam handles weight_decay incorrectly; set it to 0 when using Adam.
 #------------------------------------------------------------------#
 optimizer_type = "sgd"
 momentum = 0.937
@@ -193,7 +158,11 @@ def main():
 #------------------------------------------------------------------#
 # save_period   save weights every save_period epochs; by default every epoch is saved
 #------------------------------------------------------------------#
-    save_period = 1
+    save_period = 20
+    #------------------------------------------------------------------#
+    # save_dir    folder where weights and log files are saved
+    #------------------------------------------------------------------#
+    save_dir = 'logs'
 #------------------------------------------------------------------#
 # num_workers   whether to use multithreaded data loading; 1 disables multithreading
 # Enabling it speeds up data loading but uses more memory
@@ -230,6 +199,8 @@
 sess = tf.Session(config=sess_config)
 K.set_session(sess)
+
+
 #------------------------------------------------------#
 # Build the yolo model
 #------------------------------------------------------#
@@ -239,8 +210,7 @@
 # Load the pretrained weights
 #------------------------------------------------------#
 print('Load weights {}.'.format(model_path))
-    model_body.load_weights(model_path, by_name=True)
-    # model_body.load_weights(model_path, by_name=True, skip_mismatch=True)
+    model_body.load_weights(model_path, by_name=False)
 model = get_train_model(model_body, input_shape, num_classes, anchors, anchors_mask, label_smoothing)
@@ -281,12 +251,13 @@
 start_epoch = Init_Epoch
 end_epoch = Freeze_Epoch if Freeze_Train else UnFreeze_Epoch
+
 #-------------------------------------------------------------------#
 # Compare the current batch_size with 64 and scale the learning rate adaptively
 #-------------------------------------------------------------------#
 nbs = 64
-    Init_lr_fit = max(batch_size / nbs * Init_lr, 1e-4)
-    Min_lr_fit = max(batch_size / nbs * Min_lr, 1e-6)
+    Init_lr_fit = max(batch_size / nbs * Init_lr, 3e-4)
+    Min_lr_fit = max(batch_size / nbs * Min_lr, 3e-6)
 optimizer = { 'adam' : Adam(lr = Init_lr_fit, beta_1 = momentum),
@@ -302,6 +273,10 @@
 epoch_step = num_train // batch_size
 epoch_step_val = num_val // batch_size
+    if config.steps != -1:
+        epoch_step = config.steps
+        epoch_step_val = config.steps
+
 if epoch_step == 0 or epoch_step_val == 0:
 raise ValueError('The dataset is too small to train on; please enlarge the dataset.')
@@ -316,14 +291,14 @@
 # early_stopping sets up early stopping: training ends automatically when val_loss stops falling, i.e. the model has essentially converged
 #-------------------------------------------------------------------------------#
 time_str = datetime.datetime.strftime(datetime.datetime.now(),'%Y_%m_%d_%H_%M_%S')
-    log_dir = os.path.join('logs', "loss_" + str(time_str))
+    log_dir = os.path.join(save_dir, "loss_" + str(time_str))
 logging = TensorBoard(log_dir)
 loss_history = LossHistory(log_dir)
-    checkpoint = ModelCheckpoint('ckpt/ep{epoch:03d}-loss{loss:.3f}-val_loss{val_loss:.3f}.h5',
+    checkpoint = ModelCheckpoint(os.path.join(save_dir, "ep{epoch:03d}.h5"),
 monitor = 'val_loss', save_weights_only = True, save_best_only = False, period = save_period)
 early_stopping = EarlyStopping(monitor='val_loss', min_delta = 0, patience = 10, verbose = 1)
 lr_scheduler = LearningRateScheduler(lr_scheduler_func, verbose = 1)
-    callbacks = [logging, loss_history, checkpoint, lr_scheduler, early_stopping]
+    callbacks = [logging, loss_history, checkpoint, lr_scheduler]
 if start_epoch < end_epoch:
 print('Train on {} samples, val on {} samples, with batch size {}.'.format(num_train, num_val, batch_size))
@@ -351,14 +326,14 @@
 # Compare the current batch_size with 64 and scale the learning rate adaptively
 #-------------------------------------------------------------------#
 nbs = 64
-    Init_lr_fit = max(batch_size / nbs * Init_lr, 1e-4)
-    Min_lr_fit = max(batch_size / nbs * Min_lr, 1e-6)
+    Init_lr_fit = max(batch_size / nbs * Init_lr, 3e-4)
+    Min_lr_fit = max(batch_size / nbs * Min_lr, 3e-6)
 #---------------------------------------#
 # Get the learning-rate decay function
 #---------------------------------------#
 lr_scheduler_func = get_lr_scheduler(lr_decay_type, Init_lr_fit, Min_lr_fit, UnFreeze_Epoch)
 lr_scheduler = LearningRateScheduler(lr_scheduler_func, verbose = 1)
-    callbacks = [logging, loss_history, checkpoint, lr_scheduler, early_stopping]
+    callbacks = [logging, loss_history, checkpoint, lr_scheduler]
 for i in range(len(model.layers)):
 model.layers[i].trainable = True
@@ -386,9 +361,3 @@
 callbacks = callbacks )
 sess.close()
-
-if __name__ == "__main__":
-
-    print(1)
-    main()
-
\ No newline at end of file
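Both LR hunks above apply the same linear batch-size scaling with a floor. With the defaults in this file (nbs = 64, Unfreeze_batch_size = 8) and the SGD recommendation Init_lr = 1e-2 from the comments (Min_lr = 1e-4 is an assumption for illustration), the arithmetic works out as follows:

# Linear LR scaling with a floor, as in the hunks above.
nbs = 64                       # nominal batch size the base LR was tuned for
batch_size = 8
Init_lr, Min_lr = 1e-2, 1e-4   # assumed SGD defaults per the comments

Init_lr_fit = max(batch_size / nbs * Init_lr, 3e-4)  # max(1.25e-3, 3e-4) = 1.25e-3
Min_lr_fit = max(batch_size / nbs * Min_lr, 3e-6)    # max(1.25e-5, 3e-6) = 1.25e-5
print(Init_lr_fit, Min_lr_fit)

Raising the floors from 1e-4/1e-6 to 3e-4/3e-6 only matters for batch sizes small enough that the scaled value would otherwise dip below them.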
diff --git a/TensorFlow/contrib/cv/fusiongan/FusionGAN_ID2124_for_TensorFlow/cfg.py b/TensorFlow/contrib/cv/fusiongan/FusionGAN_ID2124_for_TensorFlow/cfg.py index a4a94ca5c3e2a17cdb907aeb4eb6d22a48e193e7..862f8a95766c33d41a709069255d4e55e401f06f 100644 --- a/TensorFlow/contrib/cv/fusiongan/FusionGAN_ID2124_for_TensorFlow/cfg.py +++ b/TensorFlow/contrib/cv/fusiongan/FusionGAN_ID2124_for_TensorFlow/cfg.py @@ -39,6 +39,7 @@ def make_config(FLAGS):
 custom_op = config.graph_options.rewrite_options.custom_optimizers.add()
 custom_op.name = "NpuOptimizer"
 custom_op.parameter_map["use_off_line"].b = True
+    custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
 config.graph_options.rewrite_options.remapping = RewriterConfig.OFF
 ## Auto Tune
diff --git a/TensorFlow/contrib/cv/fusiongan/FusionGAN_ID2124_for_TensorFlow/main.py b/TensorFlow/contrib/cv/fusiongan/FusionGAN_ID2124_for_TensorFlow/main.py index 1af7f77f65cba4998449eb120231a2ac707b2ed1..f03b19841600fa7b736c1ea202bc620be5aead25 100644 --- a/TensorFlow/contrib/cv/fusiongan/FusionGAN_ID2124_for_TensorFlow/main.py +++ b/TensorFlow/contrib/cv/fusiongan/FusionGAN_ID2124_for_TensorFlow/main.py @@ -75,7 +75,10 @@ def main(_):
 os.makedirs(FLAGS.checkpoint_dir)
 if not os.path.exists(FLAGS.sample_dir):
 os.makedirs(FLAGS.sample_dir)
-
+    config_proto = tf.ConfigProto()
+    custom_op = config_proto.graph_options.rewrite_options.custom_optimizers.add()
+    custom_op.name = 'NpuOptimizer'
+    custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
 with tf.Session(config=config) as sess:
 srcnn = CGAN(sess, image_size=FLAGS.image_size,
diff --git a/TensorFlow/contrib/cv/stgan/STGAN_ID1473_for_TensorFlow/tflib/utils.py b/TensorFlow/contrib/cv/stgan/STGAN_ID1473_for_TensorFlow/tflib/utils.py index 13ce1f2978e162a1264f8b49891353152f987772..ad39cb85e3d773f7acf16984d57a7384879aed37 100644 --- a/TensorFlow/contrib/cv/stgan/STGAN_ID1473_for_TensorFlow/tflib/utils.py +++ b/TensorFlow/contrib/cv/stgan/STGAN_ID1473_for_TensorFlow/tflib/utils.py @@ -51,7 +51,7 @@ def session(graph=None, allow_soft_placement=True,
 #custom_op.parameter_map["auto_tune_mode"].s = tf.compat.as_bytes("RL,GA")
 
-    #custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
+    custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
 npu_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF  # explicitly disable remap
 npu_config.graph_options.rewrite_options.memory_optimization = RewriterConfig.OFF
 #npu_config = npu_tf_config.session_dump_config(npu_config, action='overflow')
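The three diffs above make the same NPU session change: register the NpuOptimizer custom optimizer with allow_mix_precision and switch off the remapping rewrite. A consolidated sketch of that configuration, using only the calls that appear in these files (TF 1.x ConfigProto API as used with npu_bridge):

import tensorflow as tf
from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig

def make_npu_config():
    # Session config for the Ascend NPU: enable offline execution and
    # mixed precision, and disable the remapping rewrite pass.
    config = tf.ConfigProto()
    custom_op = config.graph_options.rewrite_options.custom_optimizers.add()
    custom_op.name = "NpuOptimizer"
    custom_op.parameter_map["use_off_line"].b = True
    custom_op.parameter_map["precision_mode"].s = tf.compat.as_bytes("allow_mix_precision")
    config.graph_options.rewrite_options.remapping = RewriterConfig.OFF
    return config

# Usage: sess = tf.Session(config=make_npu_config())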
diff --git a/TensorFlow/contrib/graph/SESEMI_ID1270_for_TensorFlow/README.md b/TensorFlow/contrib/graph/SESEMI_ID1270_for_TensorFlow/README.md index 609f01ef98ee1cf46fb017648280f8aacbc11612..026fb39dcce831efcc031a84e9a1e16530bf2959 100644 --- a/TensorFlow/contrib/graph/SESEMI_ID1270_for_TensorFlow/README.md +++ b/TensorFlow/contrib/graph/SESEMI_ID1270_for_TensorFlow/README.md @@ -33,11 +33,11 @@ SESEMI belongs to the semi-supervised learning (SSL) framework, in the context of image classification
 Reference implementation:
- 
+ 
 Implementation adapted for the Ascend AI processor:
- 
+ 
 https://gitee.com/zhou-xinyu-HIT/modelzoo/tree/master/contrib/TensorFlow/Research/graph/SESEMI_ID1270_for_TensorFlow
- 
+ 
 To fetch the code at the corresponding commit_id via Git:
 git clone {repository_url}        # clone the repository
 cd {repository_name}              # change into the model's repository directory
@@ -45,12 +45,13 @@ SESEMI belongs to the semi-supervised learning (SSL) framework, in the context of image classification
 git reset --hard {commit_id}      # reset the code to the corresponding commit_id
 cd {code_path}                    # change to the model code path; not needed if the repo contains only this model
- 
+ 
+​
 ### Default configuration
 - Training dataset:
 - The CIFAR-10 dataset is used
- 
+ 
 - Test dataset:
 - Same as the training dataset: CIFAR-10
@@ -97,7 +98,7 @@ SESEMI belongs to the semi-supervised learning (SSL) framework, in the context of image classification
 │ |--horse
 │ |--ship
 │ |--truck
- ```
+ ```
 - Model training.
 1. Configure the training parameters
 First, in the script run_1p.sh, configure the training dataset --data, the network selection --network, and the amount of labeled training data --labels.
@@ -134,17 +135,25 @@ SESEMI belongs to the semi-supervised learning (SSL) framework, in the context of image classification
 ### Training accuracy
-Below is a comparison chart of the accuracies
+Below are the accuracy comparison figures.
+
+| Samples | Paper accuracy | GPU accuracy | NPU accuracy |
+| -------- | ---------- | ------- | ------- |
+| 1000 | 29.44±0.24 | 0.2876 | 0.2983 |
+| 2000 | 21.53±0.18 | 0.2186 | 0.2179 |
-![image description](image3.png)
 With 1000 and 2000 training samples, our reproduced GPU and NPU accuracies both reach the paper's figures
-
 ### Training performance
-![image description](image6.png)
+| Samples | GPU performance | NPU performance |
+| -------- | --------- | --------- |
+| 1000 | 75ms/step | 53ms/step |
+
+Note: reproduced locally on an x86 machine.
+
+Summary: training performance is 75 ms/step on GPU and 53 ms/step on NPU. Overall, NPU training performance is better than GPU.
-Summary: on GPU most epochs run at 4.5 s/epoch (75 ms/step); on NPU most epochs run at 7 s/epoch (115 ms/step). Overall, the GPU run took 23 min 3 s and the NPU run 37 min 7 s; overall NPU training performance is slightly below GPU.
 #### Dataset notes:
 We provide the CIFAR-10 dataset; other datasets require changing the relative paths inside the code to run. The data we provide is at:
 URL:
diff --git a/TensorFlow/contrib/graph/SESEMI_ID1270_for_TensorFlow/modelzoo_level.txt b/TensorFlow/contrib/graph/SESEMI_ID1270_for_TensorFlow/modelzoo_level.txt index c50186876ca01bb8f3de393ffb6666d54f8be56f..71a0cd4c6bb5ba372a81bad08d92d2964c9acb9d 100644 --- a/TensorFlow/contrib/graph/SESEMI_ID1270_for_TensorFlow/modelzoo_level.txt +++ b/TensorFlow/contrib/graph/SESEMI_ID1270_for_TensorFlow/modelzoo_level.txt @@ -1,16 +1,8 @@
------Training only-----
-
 GPUStatus:OK
 NPUMigrationStatus:OK
-
------Inference only-----
-
 ModelConvert:OK
 QuantStatus:OK
-
------Common-----
-
 FuncStatus:OK
 PrecisionStatus:OK
 AutoTune:OK
-PerfStatus:OK
\ No newline at end of file
+PerfStatus:PERFECT
\ No newline at end of file
diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/.gitignore b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..d37df596a8ca128e7e2aa9ab28aeef588edbedd3 --- /dev/null +++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/.gitignore @@ -0,0 +1,4 @@
+*.pb
+
+.idea/
+MA_LOG/
\ No newline at end of file
diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/Figure_1.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/Figure_1.png new file mode 100644 index 0000000000000000000000000000000000000000..be4f93287683b50573e88ded7596b308ce1957d9 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/Figure_1.png differ
diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/LICENSE b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..12d255f8e0f049d3c3127e71788e219b86cdf55b --- /dev/null +++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/LICENSE @@ -0,0 +1,251 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. 
Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +## Some of TensorFlow's code is derived from Caffe, which is subject to the following copyright notice: + +COPYRIGHT + +All contributions by the University of California: + +Copyright (c) 2014, The Regents of the University of California (Regents) +All rights reserved. + +All other contributions: + +Copyright (c) 2014, the respective contributors +All rights reserved. + +Caffe uses a shared copyright model: each contributor holds copyright over +their contributions to Caffe. The project versioning records all such +contribution and copyright details. If a contributor wants to further mark +their specific copyright on a particular contribution, they should indicate +their copyright solely in the commit message of the change when it is +committed. + +LICENSE + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR + ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +CONTRIBUTION AGREEMENT + +By contributing to the BVLC/caffe repository through pull-request, comment, +or otherwise, the contributor releases their content to the +license and copyright terms herein. \ No newline at end of file diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/cfg.py b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/cfg.py new file mode 100644 index 0000000000000000000000000000000000000000..dcefaa16ad6b1aa648449712d87ad514a8d0b661 --- /dev/null +++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/cfg.py @@ -0,0 +1,80 @@ +""" +SRNet - Editing Text in the Wild +Some configurations. +Copyright (c) 2019 Netease Youdao Information Technology Co.,Ltd. +Licensed under the GPL License (see LICENSE for details) +Written by Yu Qian +""" + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# model parameters
+lt = 1.
+lt_alpha = 1.
+lb = 1.
+lb_beta = 10.
+lf = 1.
+lf_theta_1 = 10.
+lf_theta_2 = 1.
+lf_theta_3 = 500.
+epsilon = 1e-8
+
+# train
+learning_rate = 1e-4 # default 1e-3
+decay_rate = 0.9
+decay_steps = 10000
+staircase = False
+beta1 = 0.9 # default 0.9
+beta2 = 0.999 # default 0.999
+test_max_iter = 500
+max_iter = 20000
+show_loss_interval = 50
+write_log_interval = 50
+save_ckpt_interval = 10000
+gen_example_interval = 1000
+pretrained_ckpt_path = None
+train_name = None # used to name generated examples and TensorBoard log dirs; set to None to use the current time
+
+# data
+batch_size = 8
+data_shape = [64, None]
+i_t_dir = 'i_t'
+i_s_dir = 'i_s'
+t_sk_dir = 't_sk'
+t_t_dir = 't_t'
+t_b_dir = 't_b'
+t_f_dir = 't_f'
+mask_t_dir = 'mask_t'
+example_data_dir = r'examples/labels'
+example_result_dir = '/cache/out/genLogs'
+
+# predict
+predict_ckpt_path = None
+predict_data_dir = None
+predict_result_dir = 'examples/result'
diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/datagen.py b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/datagen.py
new file mode 100644
index 0000000000000000000000000000000000000000..ee03c0a27a18da7a39312b66e6960222f5765d3e
--- /dev/null
+++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/datagen.py
@@ -0,0 +1,141 @@
+"""
+SRNet - Editing Text in the Wild
+Data generator.
+Copyright (c) 2019 Netease Youdao Information Technology Co.,Ltd.
+Licensed under the GPL License (see LICENSE for details)
+Written by Yu Qian
+"""
+
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+
+import os
+import cv2
+import numpy as np
+import random
+import cfg
+
+def srnet_datagen(data_dir, batchSize):
+
+    # generate batches of SRNet data for training
+    name_list = os.listdir(os.path.join(data_dir, cfg.t_b_dir))
+    random.shuffle(name_list)
+    name_num = len(name_list)
+    idx = 0
+
+    while True:
+        i_t_batch, i_s_batch = [], []
+        t_sk_batch, t_t_batch, t_b_batch, t_f_batch = [], [], [], []
+        mask_t_batch = []
+
+        for _ in range(batchSize):
+            name = name_list[idx]
+
+            i_t = cv2.imread(os.path.join(data_dir, cfg.i_t_dir, name))
+            i_s = cv2.imread(os.path.join(data_dir, cfg.i_s_dir, name))
+            t_sk = cv2.imread(os.path.join(data_dir, cfg.t_sk_dir, name), cv2.IMREAD_GRAYSCALE)
+            t_t = cv2.imread(os.path.join(data_dir, cfg.t_t_dir, name))
+            t_b = cv2.imread(os.path.join(data_dir, cfg.t_b_dir, name))
+            t_f = cv2.imread(os.path.join(data_dir, cfg.t_f_dir, name))
+            mask_t = cv2.imread(os.path.join(data_dir, cfg.mask_t_dir, name), cv2.IMREAD_GRAYSCALE)
+
+            i_t_batch.append(i_t)
+            i_s_batch.append(i_s)
+            t_sk_batch.append(t_sk)
+            t_t_batch.append(t_t)
+            t_b_batch.append(t_b)
+            t_f_batch.append(t_f)
+            mask_t_batch.append(mask_t)
+            idx = (idx + 1) % name_num
+
+        '''
+        w_sum = 0
+        for t_b in t_b_batch:
+            h, w = t_b.shape[:2]
+            scale_ratio = cfg.data_shape[0] / h
+            w_sum += int(w * scale_ratio)
+        '''
+
+        to_h = cfg.data_shape[0]
+        to_w = 128
+        to_scale = (to_w, to_h) # w first for cv2
+        # resize every sample collected above (use batchSize, not cfg.batch_size,
+        # so the loop matches the batch that was actually built)
+        for i in range(batchSize):
+            i_t_batch[i] = cv2.resize(i_t_batch[i], to_scale)
+            i_s_batch[i] = cv2.resize(i_s_batch[i], to_scale)
+            t_sk_batch[i] = cv2.resize(t_sk_batch[i], to_scale, interpolation=cv2.INTER_NEAREST)
+            t_t_batch[i] = cv2.resize(t_t_batch[i], to_scale)
+            t_b_batch[i] = cv2.resize(t_b_batch[i], to_scale)
+            t_f_batch[i] = cv2.resize(t_f_batch[i], to_scale)
+            mask_t_batch[i] = cv2.resize(mask_t_batch[i], to_scale, interpolation=cv2.INTER_NEAREST)
+
+        i_t_batch = np.stack(i_t_batch)
+        i_s_batch = np.stack(i_s_batch)
+        t_sk_batch = np.expand_dims(np.stack(t_sk_batch), axis = -1)
+        t_t_batch = np.stack(t_t_batch)
+        t_b_batch = np.stack(t_b_batch)
+        t_f_batch = np.stack(t_f_batch)
+        mask_t_batch = np.expand_dims(np.stack(mask_t_batch), axis = -1)
+
+        # normalize: RGB images to [-1, 1], binary masks to [0, 1]
+        i_t_batch = i_t_batch.astype(np.float32) / 127.5 - 1.
+        i_s_batch = i_s_batch.astype(np.float32) / 127.5 - 1.
+        t_sk_batch = t_sk_batch.astype(np.float32) / 255.
+        t_t_batch = t_t_batch.astype(np.float32) / 127.5 - 1.
+        t_b_batch = t_b_batch.astype(np.float32) / 127.5 - 1.
+        t_f_batch = t_f_batch.astype(np.float32) / 127.5 - 1.
+        mask_t_batch = mask_t_batch.astype(np.float32) / 255.
+
+        yield [i_t_batch, i_s_batch, t_sk_batch, t_t_batch, t_b_batch, t_f_batch, mask_t_batch]
+
+# Every so often during training we run prediction with the current weights;
+# this function loads the input data used for those predictions.
+def get_input_data(data_dir):
+    # get input data from dir
+    data_list = os.listdir(data_dir)
+    data_list = [data_name.split('_')[0] + '_' for data_name in data_list]
+    data_list = list(set(data_list))
+    res_list = []
+    for data_name in data_list:
+        i_t = cv2.imread(os.path.join(data_dir, data_name + 'i_t.png'))
+        i_s = cv2.imread(os.path.join(data_dir, data_name + 'i_s.png'))
+
+        # scale_ratio = cfg.data_shape[0] / h
+        # to_w = int(round(int(w * scale_ratio) / 8)) * 8
+
+        h, w = i_t.shape[:2]
+        to_h = cfg.data_shape[0]
+        to_w = 128
+
+        to_scale = (to_w, to_h) # w first for cv2
+        i_t = cv2.resize(i_t, to_scale).astype(np.float32) / 127.5 - 1.
+        i_s = cv2.resize(i_s, to_scale).astype(np.float32) / 127.5 - 1.
+ i_t = np.expand_dims(i_t, axis = 0) + i_s = np.expand_dims(i_s, axis = 0) + res_list.append([i_t, i_s, (w, h), data_name]) # w first for cv2 + return res_list + diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/001_i_s.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/001_i_s.png new file mode 100644 index 0000000000000000000000000000000000000000..60d7fc57cf0e40bc73ddc5260760b3ae20e820ca Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/001_i_s.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/001_i_t.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/001_i_t.png new file mode 100644 index 0000000000000000000000000000000000000000..5cfa6404fb9d6a489edc5b854b20d50c20fa9165 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/001_i_t.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/002_i_s.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/002_i_s.png new file mode 100644 index 0000000000000000000000000000000000000000..e3c02ef629fdfac88e2d60a9ab5deef80930a891 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/002_i_s.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/002_i_t.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/002_i_t.png new file mode 100644 index 0000000000000000000000000000000000000000..36a7472076f0888aaeb25ef6397d11dc5f9ae1d1 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/002_i_t.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/003_i_s.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/003_i_s.png new file mode 100644 index 0000000000000000000000000000000000000000..4ac500ed2466ef12c3315c88eba823f710d8b9e1 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/003_i_s.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/003_i_t.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/003_i_t.png new file mode 100644 index 0000000000000000000000000000000000000000..9e35c38e8d01f8cbaa0a8c1e33e1ded2ec68e006 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/003_i_t.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/004_i_s.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/004_i_s.png new file mode 100644 index 0000000000000000000000000000000000000000..c8368d5b258d94af32fdab42ca2fc10d8cea1676 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/004_i_s.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/004_i_t.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/004_i_t.png new file mode 100644 index 0000000000000000000000000000000000000000..14f3adb4dea83d4e7eb2f90221411cc95480c65d Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/004_i_t.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/005_i_s.png 
b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/005_i_s.png new file mode 100644 index 0000000000000000000000000000000000000000..db976a333872c66e04860521aa08c427af690ddc Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/005_i_s.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/005_i_t.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/005_i_t.png new file mode 100644 index 0000000000000000000000000000000000000000..0bf4afe2d6850c744e32860833f2462ca7c6e030 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/005_i_t.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/006_i_s.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/006_i_s.png new file mode 100644 index 0000000000000000000000000000000000000000..f9cbf20ab52a474f18b7c033098e9c7a783f60bd Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/006_i_s.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/006_i_t.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/006_i_t.png new file mode 100644 index 0000000000000000000000000000000000000000..5da774881d6b11b69cc596231e33bfc7e82634d0 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/006_i_t.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/007_i_s.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/007_i_s.png new file mode 100644 index 0000000000000000000000000000000000000000..e1cc3ccc6aad5cab3c2a977d3f4bd8e2aa4f3f71 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/007_i_s.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/007_i_t.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/007_i_t.png new file mode 100644 index 0000000000000000000000000000000000000000..8142bff0a54f78091479a12c11565ac4acedbf34 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/007_i_t.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/008_i_s.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/008_i_s.png new file mode 100644 index 0000000000000000000000000000000000000000..d412256afb079c196b58f8ad559b024530401376 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/008_i_s.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/008_i_t.png b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/008_i_t.png new file mode 100644 index 0000000000000000000000000000000000000000..ac34d93a02f7ab722c22b88d744f8ff512cceb43 Binary files /dev/null and b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/labels/008_i_t.png differ diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/results/.gitignore b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/examples/results/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/loss.py 
b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/loss.py new file mode 100644 index 0000000000000000000000000000000000000000..fc9fd1fa32c78f7a1cac3a3ce27bfac647985a83 --- /dev/null +++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/loss.py @@ -0,0 +1,133 @@ +""" +SRNet - Editing Text in the Wild +Definition of loss functions. +Copyright (c) 2019 Netease Youdao Information Technology Co.,Ltd. +Licensed under the GPL License (see LICENSE for details) +Written by Yu Qian +""" + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from npu_bridge.npu_init import * + +import tensorflow as tf +import cfg + +def build_discriminator_loss(x, name = 'd_loss'): + + x_true, x_pred = tf.split(x, 2, name = name + '_split') + d_loss = -tf.reduce_mean(tf.log(tf.clip_by_value(x_true, cfg.epsilon, 1.0)) \ + + tf.log(tf.clip_by_value(1.0 - x_pred, cfg.epsilon, 1.0))) + return d_loss + +def build_dice_loss(x_t, x_o, name = 'dice_loss'): + + intersection = tf.reduce_sum(x_t * x_o, axis = [1,2,3]) + union = tf.reduce_sum(x_t, axis = [1,2,3]) + tf.reduce_sum(x_o, axis = [1,2,3]) + return 1. - tf.reduce_mean((2. * intersection + cfg.epsilon)/(union + cfg.epsilon), axis = 0) + +def build_l1_loss(x_t, x_o, name = 'l1_loss'): + + return tf.reduce_mean(tf.abs(x_t - x_o)) + +def build_l1_loss_with_mask(x_t, x_o, mask, name = 'l1_loss'): + + mask_ratio = 1. - tf.reduce_sum(mask) / tf.cast(tf.size(mask), tf.float32) + l1 = tf.abs(x_t - x_o) + return mask_ratio * tf.reduce_mean(l1 * mask) + (1. - mask_ratio) * tf.reduce_mean(l1 * (1. - mask)) + +def build_perceptual_loss(x, name = 'per_loss'): + + l = [] + for i, f in enumerate(x): + l.append(build_l1_loss(f[0], f[1], name = name + '_l1_' + str(i + 1))) + l = tf.stack(l, axis = 0, name = name + '_stack') + l = tf.reduce_sum(l, name = name + '_sum') + return l + +def build_gram_matrix(x, name = 'gram_matrix'): + + x_shape = tf.shape(x) + h, w, c = x_shape[1], x_shape[2], x_shape[3] + matrix = tf.reshape(x, shape = [-1, h * w, c]) + gram = tf.matmul(matrix, matrix, transpose_a = True) / tf.cast(h * w * c, tf.float32) + return gram + +def build_style_loss(x, name = 'style_loss'): + + l = [] + for i, f in enumerate(x): + f_shape = tf.size(f[0]) + f_norm = 1. 
/ tf.cast(f_shape, tf.float32) + gram_true = build_gram_matrix(f[0], name = name + '_gram_true_' + str(i + 1)) + gram_pred = build_gram_matrix(f[1], name = name + '_gram_pred_' + str(i + 1)) + l.append(f_norm * (build_l1_loss(gram_true, gram_pred, name = name + '_l1_' + str(i + 1)))) + l = tf.stack(l, axis = 0, name = name + '_stack') + l = tf.reduce_sum(l, name = name + '_sum') + return l + +def build_vgg_loss(x, name = 'vgg_loss'): + + splited = [] + for i, f in enumerate(x): + splited.append(tf.split(f, 2, name = name + '_split_' + str(i + 1))) + l_per = build_perceptual_loss(splited, name = name + '_per') + l_style = build_style_loss(splited, name = name + '_style') + return l_per, l_style + +def build_gan_loss(x, name = 'gan_loss'): + + x_true, x_pred = tf.split(x, 2, name = name + '_split') + gan_loss = -tf.reduce_mean(tf.log(tf.clip_by_value(x_pred, cfg.epsilon, 1.0))) + return gan_loss + +def build_generator_loss(out_g, out_d, out_vgg, labels, name = 'g_loss'): + + o_sk, o_t, o_b, o_f, mask_t = out_g + o_db, o_df = out_d + o_vgg = out_vgg + t_sk, t_t, t_b, t_f = labels + + l_t_sk = cfg.lt_alpha * build_dice_loss(t_sk, o_sk, name = name + '_dice_loss') + l_t_l1 = build_l1_loss_with_mask(t_t, o_t, mask_t, name = name + '_lt_l1_loss') + l_t = l_t_l1 + l_t_sk + + l_b_gan = build_gan_loss(o_db, name = name + '_lb_gan_loss') + l_b_l1 = cfg.lb_beta * build_l1_loss(t_b, o_b, name = name + '_lb_l1_loss') + l_b = l_b_gan + l_b_l1 + + l_f_gan = build_gan_loss(o_df, name = name + '_lf_gan_loss') + l_f_l1 = cfg.lf_theta_1 * build_l1_loss(t_f, o_f, name = name + '_lf_l1_loss') + l_f_vgg_per, l_f_vgg_style = build_vgg_loss(o_vgg, name = name + '_lf_vgg_loss') + l_f_vgg_per = cfg.lf_theta_2 * l_f_vgg_per + l_f_vgg_style = cfg.lf_theta_3 * l_f_vgg_style + l_f = l_f_gan + l_f_l1 + l_f_vgg_per + l_f_vgg_style + + l = cfg.lt * l_t + cfg.lb * l_b + cfg.lf * l_f + return l, [l_t_sk, l_t_l1, l_b_gan, l_b_l1, l_f_gan, l_f_l1, l_f_vgg_per, l_f_vgg_style] + diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/model.py b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/model.py new file mode 100644 index 0000000000000000000000000000000000000000..c8ded00019027eefded689c503b2fba84d29dc75 --- /dev/null +++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/model.py @@ -0,0 +1,348 @@ +""" +SRNet - Editing Text in the Wild +The main SRNet model implementation. +Copyright (c) 2019 Netease Youdao Information Technology Co.,Ltd. +Licensed under the GPL License (see LICENSE for details) +Written by Yu Qian +""" + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from npu_bridge.npu_init import * + +import os +import cv2 +import numpy as np +import tensorflow as tf +from loss import build_discriminator_loss, build_generator_loss +import cfg + +class SRNet(): + def __init__(self, vgg19Path, tensorboardDir, shape = [224, 224], name = ''): + self.name = name + self.cnum = 32 + self.graph = tf.Graph() + self.vgg19Path = vgg19Path + self.tensorboardDir = tensorboardDir + with self.graph.as_default(): + self.i_t = tf.placeholder(dtype = tf.float32, shape = [None] + shape + [3]) + self.i_s = tf.placeholder(dtype = tf.float32, shape = [None] + shape + [3]) + self.t_sk = tf.placeholder(dtype = tf.float32, shape = [None] + shape + [1]) + self.t_t = tf.placeholder(dtype = tf.float32, shape = [None] + shape + [3]) + self.t_b = tf.placeholder(dtype = tf.float32, shape = [None] + shape + [3]) + self.t_f = tf.placeholder(dtype = tf.float32, shape = [None] + shape + [3]) + self.mask_t = tf.placeholder(dtype = tf.float32, shape = [None] + shape + [1]) + self.global_step = tf.Variable(tf.constant(0)) + self.build_whole_net_with_loss() + self.build_optimizer() + self.build_summary_op() + + def _res_block(self, x, activation = tf.nn.leaky_relu, padding = 'SAME', name = 'res_block'): + + cnum = x.get_shape().as_list()[-1] + xin = x + x = tf.layers.conv2d(x, cnum // 4, kernel_size = 1, strides = 1, activation = activation, padding = padding, name = name + '_conv1') + x = tf.layers.conv2d(x, cnum // 4, kernel_size = 3, strides = 1, activation = activation, padding = padding, name = name + '_conv2') + x = tf.layers.conv2d(x, cnum, kernel_size = 1, strides = 1, activation = None, padding = padding, name = name + '_conv3') + x = tf.add(xin, x, name = name + '_add') + x = tf.layers.batch_normalization(x, name = name + '_bn') + x = activation(x, name = name + '_out') + return x + + def _conv_bn_relu(self, x, cnum = None, activation = tf.nn.leaky_relu, padding = 'SAME', name = 'conv_bn_relu'): + + cnum = x.get_shape().as_list()[-1] if cnum is None else cnum + x = tf.layers.conv2d(x, cnum, kernel_size = 3, strides = 1, activation = None, padding = padding, name = name + '_conv') + x = tf.layers.batch_normalization(x, name = name + '_bn') + x = activation(x, name = name + '_out') + return x + + def build_res_net(self, x, activation = tf.nn.leaky_relu, padding = 'SAME', name = 'res_net'): + + x = self._res_block(x, activation = activation, padding = padding, name = name + '_block1') + x = self._res_block(x, activation = activation, padding = padding, name = name + '_block2') + x = self._res_block(x, activation = activation, padding = padding, name = name + '_block3') + x = self._res_block(x, activation = activation, padding = padding, name = name + '_block4') + return x + + def build_encoder_net(self, x, activation = tf.nn.leaky_relu, padding = 'SAME', name = 'encoder_net', get_feature_map = False): + + x = self._conv_bn_relu(x, self.cnum, name = name + '_conv1_1') + x = self._conv_bn_relu(x, self.cnum, name = name + '_conv1_2') + + x = tf.layers.conv2d(x, 2 * self.cnum, kernel_size = 3, strides = 2, activation = activation, padding = padding, name = name + '_pool1') + x 
= self._conv_bn_relu(x, 2 * self.cnum, name = name + '_conv2_1')
+        x = self._conv_bn_relu(x, 2 * self.cnum, name = name + '_conv2_2')
+        f1 = x
+
+        x = tf.layers.conv2d(x, 4 * self.cnum, kernel_size = 3, strides = 2, activation = activation, padding = padding, name = name + '_pool2')
+        x = self._conv_bn_relu(x, 4 * self.cnum, name = name + '_conv3_1')
+        x = self._conv_bn_relu(x, 4 * self.cnum, name = name + '_conv3_2')
+        f2 = x
+
+        x = tf.layers.conv2d(x, 8 * self.cnum, kernel_size = 3, strides = 2, activation = activation, padding = padding, name = name + '_pool3')
+        x = self._conv_bn_relu(x, 8 * self.cnum, name = name + '_conv4_1')
+        x = self._conv_bn_relu(x, 8 * self.cnum, name = name + '_conv4_2')
+        if get_feature_map:
+            return x, [f2, f1]
+        else:
+            return x
+
+    def build_decoder_net(self, x, fuse = None, activation = tf.nn.leaky_relu, padding = 'SAME', name = 'decoder_net', get_feature_map = False):
+
+        if fuse and fuse[0] is not None:
+            x = tf.concat([x, fuse[0]], axis = -1, name = name + '_fuse1')
+        x = self._conv_bn_relu(x, 8 * self.cnum, name = name + '_conv1_1')
+        x = self._conv_bn_relu(x, 8 * self.cnum, name = name + '_conv1_2')
+        f1 = x
+
+        x = tf.layers.Conv2DTranspose(4 * self.cnum, kernel_size = 3, strides = 2, activation = activation, padding = padding, name = name + '_deconv1')(x)
+        if fuse and fuse[1] is not None:
+            x = tf.concat([x, fuse[1]], axis = -1, name = name + '_fuse2')
+        x = self._conv_bn_relu(x, 4 * self.cnum, name = name + '_conv2_1')
+        x = self._conv_bn_relu(x, 4 * self.cnum, name = name + '_conv2_2')
+        f2 = x
+
+        x = tf.layers.Conv2DTranspose(2 * self.cnum, kernel_size = 3, strides = 2, activation = activation, padding = padding, name = name + '_deconv2')(x)
+        if fuse and fuse[2] is not None:
+            x = tf.concat([x, fuse[2]], axis = -1, name = name + '_fuse3')
+        x = self._conv_bn_relu(x, 2 * self.cnum, name = name + '_conv3_1')
+        x = self._conv_bn_relu(x, 2 * self.cnum, name = name + '_conv3_2')
+        f3 = x
+
+        x = tf.layers.Conv2DTranspose(self.cnum, kernel_size = 3, strides = 2, activation = activation, padding = padding, name = name + '_deconv3')(x)
+        x = self._conv_bn_relu(x, self.cnum, name = name + '_conv4_1')
+        x = self._conv_bn_relu(x, self.cnum, name = name + '_conv4_2')
+        if get_feature_map:
+            return x, [f1, f2, f3]
+        else:
+            return x
+
+    def build_text_conversion_net(self, x_t, x_s, padding = 'SAME', name = 'tcn'):
+
+        x_t = self.build_encoder_net(x_t, name = name + '_t_encoder')
+        x_t = self.build_res_net(x_t, name = name + '_t_res')
+
+        x_s = self.build_encoder_net(x_s, name = name + '_s_encoder')
+        x_s = self.build_res_net(x_s, name = name + '_s_res')
+
+        x = tf.concat([x_t, x_s], axis = -1, name = name + '_concat1')
+
+        y_sk = self.build_decoder_net(x, name = name + '_sk_decoder')
+        y_sk_out = tf.layers.conv2d(y_sk, 1, kernel_size = 3, strides = 1, activation = 'sigmoid', padding = padding, name = name + '_sk_out')
+
+        y_t = self.build_decoder_net(x, name = name + '_t_decoder')
+        y_t = tf.concat([y_sk, y_t], axis = -1, name = name + '_concat2')
+        y_t = self._conv_bn_relu(y_t, name = name + '_t_cbr')
+        y_t_out = tf.layers.conv2d(y_t, 3, kernel_size = 3, strides = 1, activation = 'tanh', padding = padding, name = name + '_t_out')
+        return y_sk_out, y_t_out
+
+    def build_background_inpainting_net(self, x, padding = 'SAME', name = 'bin'):
+
+        x, f_encoder = self.build_encoder_net(x, name = name + '_encoder', get_feature_map = True)
+        x = self.build_res_net(x, name = name + '_res')
+        x, fuse = self.build_decoder_net(x, fuse = [None] + f_encoder,
name = name + '_decoder', get_feature_map = True)
+        x = tf.layers.conv2d(x, 3, kernel_size = 3, strides = 1, activation = 'tanh', padding = padding, name = name + '_out')
+        return x, fuse
+
+    def build_fusion_net(self, x, fuse, padding = 'SAME', name = 'fn'):
+
+        x = self.build_encoder_net(x, name = name + '_encoder')
+        x = self.build_res_net(x, name = name + '_res')
+        x = self.build_decoder_net(x, fuse, name = name + '_decoder')
+        x = tf.layers.conv2d(x, 3, kernel_size = 3, strides = 1, activation = 'tanh', padding = padding, name = name + '_out')
+        return x
+
+    def build_discriminator(self, x, activation = tf.nn.leaky_relu, padding = 'SAME', name = 'discriminator'):
+
+        with tf.variable_scope('D'):
+            x = tf.layers.conv2d(x, 64, kernel_size = 3, strides = 2, activation = activation, padding = padding, name = name + '_conv1')
+            x = tf.layers.conv2d(x, 128, kernel_size = 3, strides = 2, activation = None, padding = padding, name = name + '_conv2')
+            x = tf.layers.batch_normalization(x, name = name + '_conv2_bn')
+            x = activation(x, name = name + '_conv2_activation')
+            x = tf.layers.conv2d(x, 256, kernel_size = 3, strides = 2, activation = None, padding = padding, name = name + '_conv3')
+            x = tf.layers.batch_normalization(x, name = name + '_conv3_bn')
+            x = activation(x, name = name + '_conv3_activation')
+            x = tf.layers.conv2d(x, 512, kernel_size = 3, strides = 2, activation = None, padding = padding, name = name + '_conv4')
+            x = tf.layers.batch_normalization(x, name = name + '_conv4_bn')
+            x = activation(x, name = name + '_conv4_activation')
+            x = tf.layers.conv2d(x, 1, kernel_size = 3, strides = 1, activation = None, padding = padding, name = name + '_conv5')
+            x = tf.layers.batch_normalization(x, name = name + '_conv5_bn')
+            x = tf.nn.sigmoid(x, name = '_out')
+            return x
+
+    def build_generator(self, inputs, name = 'generator'):
+
+        i_t, i_s = inputs
+        with tf.variable_scope('G'):
+            o_sk, o_t = self.build_text_conversion_net(i_t, i_s, name = name + '_tcn')
+            o_b, fuse = self.build_background_inpainting_net(i_s, name = name + '_bin')
+            o_f = self.build_fusion_net(o_t, fuse, name = name + '_fn')
+        return o_sk, o_t, o_b, o_f
+
+    def build_whole_net_with_loss(self):
+
+        i_t, i_s = self.i_t, self.i_s
+        t_sk, t_t, t_b, t_f, mask_t = self.t_sk, self.t_t, self.t_b, self.t_f, self.mask_t
+        inputs = [i_t, i_s]
+        labels = [t_sk, t_t, t_b, t_f]
+
+        o_sk, o_t, o_b, o_f = self.build_generator(inputs)
+        self.o_sk = tf.identity(o_sk, name = 'o_sk')
+        self.o_t = tf.identity(o_t, name = 'o_t')
+        self.o_b = tf.identity(o_b, name = 'o_b')
+        self.o_f = tf.identity(o_f, name = 'o_f')
+
+        i_db_true = tf.concat([t_b, i_s], axis = -1, name = 'db_true_concat')
+        i_db_pred = tf.concat([o_b, i_s], axis = -1, name = 'db_pred_concat')
+        i_db = tf.concat([i_db_true, i_db_pred], axis = 0, name = 'db_concat')
+
+        i_df_true = tf.concat([t_f, i_t], axis = -1, name = 'df_true_concat')
+        i_df_pred = tf.concat([o_f, i_t], axis = -1, name = 'df_pred_concat')
+        i_df = tf.concat([i_df_true, i_df_pred], axis = 0, name = 'df_concat')
+
+        o_db = self.build_discriminator(i_db, name = 'db')
+        o_df = self.build_discriminator(i_df, name = 'df')
+
+        i_vgg = tf.concat([t_f, o_f], axis = 0, name = 'vgg_concat')
+
+        vgg_graph_def = tf.GraphDef()
+        # modified: load the frozen VGG19 graph from the configurable path
+        vgg_graph_path = self.vgg19Path
+        with open(vgg_graph_path, 'rb') as f:
+            vgg_graph_def.ParseFromString(f.read())
+        _ = tf.import_graph_def(vgg_graph_def, input_map = {"inputs:0": i_vgg})
+        with tf.Session(config=npu_config_proto()) as sess:
+            o_vgg_1 =
sess.graph.get_tensor_by_name("import/block1_conv1/Relu:0")
+            o_vgg_2 = sess.graph.get_tensor_by_name("import/block2_conv1/Relu:0")
+            o_vgg_3 = sess.graph.get_tensor_by_name("import/block3_conv1/Relu:0")
+            o_vgg_4 = sess.graph.get_tensor_by_name("import/block4_conv1/Relu:0")
+            o_vgg_5 = sess.graph.get_tensor_by_name("import/block5_conv1/Relu:0")
+
+        out_g = [o_sk, o_t, o_b, o_f, mask_t]
+        out_d = [o_db, o_df]
+        out_vgg = [o_vgg_1, o_vgg_2, o_vgg_3, o_vgg_4, o_vgg_5]
+
+        db_loss = build_discriminator_loss(o_db, name = 'db_loss')
+        df_loss = build_discriminator_loss(o_df, name = 'df_loss')
+        self.d_loss_detail = [db_loss, df_loss]
+        self.d_loss = tf.add(db_loss, df_loss, name = 'd_loss')
+        self.g_loss, self.g_loss_detail = build_generator_loss(out_g, out_d, out_vgg, labels, name = 'g_loss')
+
+    def build_optimizer(self):
+
+        self.learning_rate = tf.train.exponential_decay(learning_rate = cfg.learning_rate, global_step = self.global_step,
+                                decay_steps = cfg.decay_steps, decay_rate = cfg.decay_rate, staircase = cfg.staircase)
+        d_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS, scope='D')
+        with tf.control_dependencies(d_update_ops):
+            self.d_train_step = tf.train.AdamOptimizer(self.learning_rate, cfg.beta1, cfg.beta2).minimize(self.d_loss,
+                                    var_list = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope = 'D'))
+        g_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS, scope='G')
+        with tf.control_dependencies(g_update_ops):
+            self.g_train_step = tf.train.AdamOptimizer(self.learning_rate, cfg.beta1, cfg.beta2).minimize(self.g_loss,
+                                    var_list = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope = 'G'))
+
+    def build_summary_op(self):
+
+        d_summary_loss = tf.summary.scalar("loss", self.d_loss)
+        d_summary_loss_db = tf.summary.scalar("l_db", self.d_loss_detail[0])
+        d_summary_loss_df = tf.summary.scalar("l_df", self.d_loss_detail[1])
+
+        g_summary_loss = tf.summary.scalar("loss", self.g_loss)
+        g_summary_loss_t_sk = tf.summary.scalar("l_t_sk", self.g_loss_detail[0])
+        g_summary_loss_t_l1 = tf.summary.scalar("l_t_l1", self.g_loss_detail[1])
+        g_summary_loss_b_gan = tf.summary.scalar("l_b_gan", self.g_loss_detail[2])
+        g_summary_loss_b_l1 = tf.summary.scalar("l_b_l1", self.g_loss_detail[3])
+        g_summary_loss_f_gan = tf.summary.scalar("l_f_gan", self.g_loss_detail[4])
+        g_summary_loss_f_l1 = tf.summary.scalar("l_f_l1", self.g_loss_detail[5])
+        g_summary_loss_f_vgg_per = tf.summary.scalar("l_f_vgg_per", self.g_loss_detail[6])
+        g_summary_loss_f_vgg_style = tf.summary.scalar("l_f_vgg_style", self.g_loss_detail[7])
+
+        self.d_summary_op = tf.summary.merge([d_summary_loss, d_summary_loss_db, d_summary_loss_df])
+        self.g_summary_op = tf.summary.merge([g_summary_loss, g_summary_loss_t_sk, g_summary_loss_t_l1,
+                                              g_summary_loss_b_gan, g_summary_loss_b_l1, g_summary_loss_f_gan,
+                                              g_summary_loss_f_l1, g_summary_loss_f_vgg_per, g_summary_loss_f_vgg_style])
+
+        # modified: TensorBoard log directory is configurable via tensorboardDir
+        self.d_writer = tf.summary.FileWriter(os.path.join(self.tensorboardDir, self.name, 'discriminator'), self.graph)
+        self.g_writer = tf.summary.FileWriter(os.path.join(self.tensorboardDir, self.name, 'generator'), self.graph)
+
+    def train_step(self, sess, global_step, i_t, i_s, t_sk, t_t, t_b, t_f, mask_t):
+
+        feed_dict = {
+            self.i_t: i_t,
+            self.i_s: i_s,
+            self.t_sk: t_sk,
+            self.t_t: t_t,
+            self.t_b: t_b,
+            self.t_f: t_f,
+            self.mask_t: mask_t,
+            self.global_step: global_step
+        }
+
+        with self.graph.as_default():
+            _, d_loss, d_log = sess.run([self.d_train_step, self.d_loss, self.d_summary_op], feed_dict = feed_dict)
+            _,
g_loss, g_log = sess.run([self.g_train_step, self.g_loss, self.g_summary_op], feed_dict = feed_dict)
+        return d_loss, g_loss, d_log, g_log
+
+    def predict(self, sess, i_t, i_s, to_shape = None):
+
+        assert i_t.shape == i_s.shape and i_t.dtype == i_s.dtype
+        assert len(i_t.shape) == 3 or (len(i_t.shape) == 4 and to_shape is not None \
+                and i_t.shape[1] == cfg.data_shape[0] \
+                and i_t.shape[2] % 8 == 0 \
+                and i_t.dtype == np.float32)
+        assert i_t.dtype == np.uint8 \
+                or (i_t.dtype == np.float32 and np.min(i_t) >= -1 and np.max(i_t) <= 1)
+
+        # process raw image, len(i_t.shape) == 3
+        if len(i_t.shape) == 3:
+            # compute h, w before any branching so the resize ratio below is
+            # always defined, even when to_shape is supplied by the caller
+            h, w = i_t.shape[:2]
+            if not to_shape:
+                to_shape = (w, h) # w first for cv2
+            if i_t.shape[0] != cfg.data_shape[0]:
+                ratio = cfg.data_shape[0] / h
+                predict_h = cfg.data_shape[0]
+                predict_w = round(int(w * ratio) / 8) * 8
+                predict_scale = (predict_w, predict_h) # w first for cv2
+                i_t = cv2.resize(i_t, predict_scale)
+                i_s = cv2.resize(i_s, predict_scale)
+            if i_t.dtype == np.uint8:
+                i_t = i_t.astype(np.float32) / 127.5 - 1.
+                i_s = i_s.astype(np.float32) / 127.5 - 1.
+            i_t = np.expand_dims(i_t, axis = 0)
+            i_s = np.expand_dims(i_s, axis = 0)
+
+        result = sess.run([self.o_sk, self.o_t, self.o_b, self.o_f], feed_dict = {self.i_t: i_t, self.i_s: i_s})
+        o_sk, o_t, o_b, o_f = result
+        o_sk = cv2.resize((o_sk[0] * 255.).astype(np.uint8), to_shape, interpolation=cv2.INTER_NEAREST)
+        o_t = cv2.resize(((o_t[0] + 1.) * 127.5).astype(np.uint8), to_shape)
+        o_b = cv2.resize(((o_b[0] + 1.) * 127.5).astype(np.uint8), to_shape)
+        o_f = cv2.resize(((o_f[0] + 1.) * 127.5).astype(np.uint8), to_shape)
+        return [o_sk, o_t, o_b, o_f]
+
diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/modelarts_entry_perf.py b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/modelarts_entry_perf.py
new file mode 100644
index 0000000000000000000000000000000000000000..e2d23455d4cdec2d46fc273177a247905c751b73
--- /dev/null
+++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/modelarts_entry_perf.py
@@ -0,0 +1,63 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
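+# This entry script is launched by ModelArts: it parses the dataset and output
+# OBS paths, switches into the code directory, runs the performance test shell
+# script (train_performance_1p.sh), and finally backs the outputs up to OBS.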
+
+import os
+import argparse
+import sys
+
+# Parse the input arguments data_url and train_url
+parser = argparse.ArgumentParser()
+parser.add_argument("--data_url", type=str, default="/home/ma-user/modelarts/inputs/data_url_0")
+parser.add_argument("--train_url", type=str, default="/home/ma-user/modelarts/outputs/train_url_0/")
+config = parser.parse_args()
+
+print("[CANN-Modelzoo] code_dir path is [%s]" % (sys.path[0]))
+code_dir = sys.path[0]
+os.chdir(code_dir)
+print("[CANN-Modelzoo] work_dir path is [%s]" % (os.getcwd()))
+
+print("[CANN-Modelzoo] before train - list my run files:")
+os.system("ls -al /usr/local/Ascend/ascend-toolkit/")
+
+print("[CANN-Modelzoo] before train - list my dataset files:")
+os.system("ls -al %s" % config.data_url)
+
+print("[CANN-Modelzoo] start run train shell")
+# Convert the shell scripts to Unix line endings so they are executable on Linux
+os.system("dos2unix ./test/*")
+
+# Run train_full_1p.sh or train_performance_1p.sh; the user must choose which one.
+# The difference between full and performance: performance runs only a few steps,
+# finishes within 15 minutes, and mainly measures throughput (FPS).
+os.system("bash ./test/train_performance_1p.sh --data_path=%s --output_path=%s " % (config.data_url, config.train_url))
+
+print("[CANN-Modelzoo] finish run train shell")
+
+# Copy all files in the current working directory to the OBS output for backup
+print("[CANN-Modelzoo] after train - list my output files:")
+os.system("cp -r %s %s " % (code_dir, config.train_url))
+os.system("ls -al %s" % config.train_url)
diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/modelzoo_level.txt b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/modelzoo_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2a8d1227b23f1e1a9664ab221f934ff27258b908
--- /dev/null
+++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/modelzoo_level.txt
@@ -0,0 +1,6 @@
+FuncStatus:OK
+GPUStatus:OK
+NPUMigrationStatus:OK
+PrecisionStatus:OK
+AutoTune:OK
+PerfStatus:POK
diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/predict.py b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/predict.py
new file mode 100644
index 0000000000000000000000000000000000000000..995119cf791f68057432df8d8fa7c9253e594665
--- /dev/null
+++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/predict.py
@@ -0,0 +1,101 @@
+"""
+SRNet - Editing Text in the Wild
+Data prediction.
+Copyright (c) 2019 Netease Youdao Information Technology Co.,Ltd.
+Licensed under the GPL License (see LICENSE for details)
+Written by Yu Qian
+"""
+
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+
+import tensorflow as tf
+from model import SRNet
+import numpy as np
+import cv2
+import os
+import cfg
+from utils import *
+from datagen import srnet_datagen, get_input_data
+import argparse
+
+def main():
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--gpu', help = 'gpu id', default = 0)
+    parser.add_argument('--i_s', help = 'input original text patch')
+    parser.add_argument('--i_t', help = 'input standard text patch')
+    parser.add_argument('--input_dir', help = 'Directory containing xxx_i_s and xxx_i_t with same prefix',
+                        default = cfg.predict_data_dir)
+    parser.add_argument('--save_dir', help = 'Directory to save result', default = cfg.predict_result_dir)
+    parser.add_argument('--save_mode', help = '1 to save all and 0 to save only o_f', type = int, default = 0)
+    parser.add_argument('--checkpoint', help = 'tensorflow ckpt', default = cfg.predict_ckpt_path)
+    args = parser.parse_args()
+
+    assert (args.input_dir is not None and args.i_s is None and args.i_t is None) \
+        or (args.input_dir is None and args.i_s is not None and args.i_t is not None)
+    assert args.save_dir is not None
+    assert args.save_mode == 0 or args.save_mode == 1
+    assert args.checkpoint is not None
+
+    # gpu
+    os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu)
+
+    # define model
+    print_log('model compiling start.', content_color = PrintColor['yellow'])
+    # note: SRNet.__init__ in model.py also takes vgg19Path and tensorboardDir;
+    # supply them here with the paths used during training
+    model = SRNet(shape = cfg.data_shape, name = 'predict')
+    print_log('model compiled.', content_color = PrintColor['yellow'])
+
+    with model.graph.as_default():
+        with tf.Session(config=npu_config_proto()) as sess:
+            saver = tf.train.Saver(tf.global_variables())
+
+            # load pretrained weights
+            print_log('weight loading start.', content_color = PrintColor['yellow'])
+            saver.restore(sess, args.checkpoint)
+            print_log('weight loaded.', content_color = PrintColor['yellow'])
+
+            # predict
+            print_log('predicting start.', content_color = PrintColor['yellow'])
+            if args.input_dir is None:
+                i_s = cv2.imread(args.i_s)
+                i_t = cv2.imread(args.i_t)
+                o_sk, o_t, o_b, o_f = model.predict(sess, i_t, i_s)
+
+                cv2.imwrite(os.path.join(args.save_dir, 'result.png'), o_f)
+                if args.save_mode == 1:
+                    cv2.imwrite(os.path.join(args.save_dir, 'result_sk.png'), o_sk)
+                    cv2.imwrite(os.path.join(args.save_dir, 'result_t.png'), o_t)
+                    cv2.imwrite(os.path.join(args.save_dir, 'result_b.png'), o_b)
+            else:
+                predict_data_list(model, sess, args.save_dir, get_input_data(args.input_dir), mode = args.save_mode)
+            print_log('predicting finished.', content_color = PrintColor['yellow'])
+
+if __name__ == '__main__':
+    main()
+
diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/readme.md b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/readme.md
new file mode 100644
index 0000000000000000000000000000000000000000..b0ef0b2870a8f3af8e0acef9fede8f238dbc517f
--- /dev/null
+++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/readme.md
@@ -0,0 +1,94 @@
+# SRNet - an implementation of the paper "Editing Text in the Wild"
+***
+
+## Basic Information
+#### Publisher: Huawei
+#### Application Domain: Image Generation
+#### Version: 1.0
+#### Modified: 2022.3.4
+#### Size: about 78.5M
+#### Framework: TensorFlow 1.15.0
+#### Model Format: pb
+#### Processor: Ascend 910
+#### Description: Training code for a text-image style-transfer network based on the TensorFlow framework
+***
+
+## Overview
+This model comes from "Editing Text in the Wild", a paper published at ACM Multimedia 2019.
+The paper designs a modular DNN that replaces the text content in a given image. During the replacement, details of the source text such as character size, font, color, and orientation are preserved. Each module of the DNN completes one part of the overall task, and a final fusion module merges the outputs of the other modules to generate the target image.
+
+This implementation is the result of adapting an [open-source implementation](https://github.com/youdao-ai/SRNet) of the original paper by the [Youdao AI team](https://github.com/youdao-ai) to the NPU.
+
+Reference paper:
+* [Editing Text in the Wild](https://dl.acm.org/doi/pdf/10.1145/3343031.3350929)
+
+The main highlights of the paper are:
+* Previous text-image transfer works operate on single letters or characters, whereas the method implemented in this paper operates on whole words
+* The network has a modular design, but the modules are not trained one by one; instead the whole network is trained end to end
+***
+
+## Training
+### Dataset acquisition
+Following the original paper, training this model requires a dataset with two parts: the input images i\_s and i\_t, and the label images t\_sk, t\_t, t\_b and t\_f.
+This version of the implementation adds a label item mask\_t, the binary mask of the text region in the source image. It is added because it makes training converge faster.
+Since the labels required for training cannot be obtained for real-world images, the training dataset is synthesized.
+Synthesizing the dataset requires clean, text-free background images and some font files (.ttf files); once these are prepared, [SRNet-Datagen](https://github.com/youdao-ai/SRNet-Datagen) can be used to synthesize the dataset.
+
+### Model training
+Since this implementation is adapted to Huawei's ModelArts platform, training is split into NPU training (on the ModelArts platform) and GPU training.
+The environment used for testing is:
+* numpy 1.21.2
+* opencv-python 4.5.3
+* opencv-contrib-python 4.5.4.58
+* python 3.7
+* tensorflow 1.15
+
+Because of the Gitee single-file size limit, the pretrained model used during training cannot be uploaded directly; please [download](https://cann-nju-srnet.obs.cn-north-4.myhuaweicloud.com:443/vgg19_weights_tf_dim_ordering_tf_kernels_notop.pb?AccessKeyId=DNOFMBDXF3DTPNGYLYN7&Expires=1679819348&Signature=xZTguiVqpKyGuGzko/AI8fu0ilM%3D) it from the OBS server.
+For training, save the pretrained model in the model_logs/vgg19 directory.
+
+Training relies on Huawei's ModelArts platform.
+First, download and install PyCharm and the [Huawei ModelArts plugin](https://modelarts-pycharm-plugin.obs.cn-north-1.myhuaweicloud.com/Pycharm-ToolKit-3.0.zip), then configure the access key.
+Then upload the prepared dataset to the Huawei OBS server and configure the ModelArts plugin as follows:
+* Frequently-used
+  * AI Engine: Ascend-Powered-Engine, tensorflow_1.15-xxx
+  * Boot File Path: the local file path of npu_version/cfg.py in this repository
+  * Code Directory: the local folder path of npu-version in this repository
+  * OBS Path: the code is automatically uploaded to the OBS server during training; OBS Path is the location it is uploaded to
+  * Data Path in OBS: the location where the dataset is stored on the OBS server
+Training can then be started. The final training results need to be copied back to the OBS server; the destination path is specified by changing the dst_url parameter on line 101 of train.py.
+
+## Testing
+### Test procedure
+After the model has been trained, the trained result can be used for prediction.
+Prediction is done as follows:
+```console
+$ python3 predict.py --i_s xxx --i_t xxx --save_dir xxx --checkpoint xxx
+```
+Or predict the data in a whole folder:
+```console
+$ python3 predict.py --input_dir xxx --save_dir xxx --checkpoint xxx
+```
+In this case, the i\_s and i\_t files in input\_dir must share the same prefix and carry the suffixes "\_i\_s" and "\_i\_t" respectively.
+Alternatively, edit the path information in cfg.py and then call predict.py directly:
+```console
+$ python3 predict.py
+```
+
+### Training result comparison
+The model was trained with the same dataset and parameters on both GPU and NPU platforms.
+GPU platform: NVIDIA T4, 1 * 16G
+NPU platform: Ascend 910
+#### Time (performance) comparison
+On the GPU platform, every 50 training iterations take 30s on average.
+On the NPU platform, every 50 training iterations take about 59s on average.
+#### Quality comparison
+The training results on GPU and NPU are compared in the figure below:
+![training results](Figure_1.png)
+The two training results are comparable, and both reach [the quality obtained by the reproducing team](https://github.com/youdao-ai/SRNet/issues/11).
+***
+
+## Dataset and OBS bucket
+The dataset used during training: [dataset](https://e-share.obs-website.cn-north-1.myhuaweicloud.com?token=F2eF/Z8D+mFBW+7kVK2hix+8Sp1M7bu7F8ghXFWNcp9uxF9jNxIB9bDWCokG6mGrFNRAGp+hhntj2rwzM21E37Ky0dZSgxFetwTBTUL+RhE+OB7BVR2HD5rZrDdeUaH6gejEiAArg7yvDQ087n9uWxH2URogbKlWQLmw7xBZYmTXEenFva32stWa2uyAxPrxmh9dLma7LSj5bqDQFgbvWsWsGVTWQi5fvMd/nqsi+oj3/Ex4KiCCNHYd0/rbOjb1Kk4mT7zm8C6sblxB24o9XnpTYMG+7j0HuClV2ecyM/YDN8FuE7ulj7rwbyMn48rUFg+KuTbpKHq9QDnGipDk9MViMuYvHip7Ngm+0oaapyQTFG1yZB5OjqziH5mnkkb7dihsYwY91uIZ0NOJz3b9AYZ/50f9gv/TPaiLmhTk90acdcp8VxXMdgqWM0oIPWHCGq8Y3C/gLthjsqABjGalW7YdHYl2RNVylcIbSWEa6hSzqf7gCb8/qAeC3urdxBSJM+Ww17WEIjAeuDosgbXll1ZMSwaj/jQuK81EAh38TJQAi3Aqyqn3+ljFIxjINB36)
+The Access Code of the dataset is: xx11yy
+The OBS bucket used for training: [OBS bucket](obs://cann-nju-srnet/)
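+
+For reference, the sketch below shows how prediction can also be driven from Python instead of the command line. It is a minimal sketch, not part of the shipped scripts: it follows the SRNet constructor in model.py, and the vgg19Path, tensorboardDir and checkpoint paths are placeholders that must be adapted to your setup.
+```python
+# Minimal sketch of programmatic prediction; all paths below are placeholders.
+import cv2
+import tensorflow as tf
+import cfg
+from model import SRNet
+
+model = SRNet(vgg19Path = 'model_logs/vgg19/vgg19.pb',  # placeholder path
+              tensorboardDir = 'model_logs/logs',       # placeholder path
+              shape = cfg.data_shape, name = 'predict')
+with model.graph.as_default():
+    # plain session shown here; on the NPU pass config=npu_config_proto()
+    with tf.Session() as sess:
+        # restore trained weights (placeholder checkpoint path)
+        tf.train.Saver(tf.global_variables()).restore(sess, 'model_logs/ckpt/final')
+        i_s = cv2.imread('examples/labels/001_i_s.png')  # source-style text patch
+        i_t = cv2.imread('examples/labels/001_i_t.png')  # target text in a standard font
+        o_sk, o_t, o_b, o_f = model.predict(sess, i_t, i_s)
+        cv2.imwrite('result.png', o_f)  # fused output image
+```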
diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/requirement.txt b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/requirement.txt
new file mode 100644
index 0000000000000000000000000000000000000000..13b16cd14d595169a2458ab9c029874526e5d60d
--- /dev/null
+++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/requirement.txt
@@ -0,0 +1,5 @@
+numpy==1.21.2
+opencv-python==4.5.3
+opencv-contrib-python==4.5.4.58
+tensorflow==1.15.0
+
diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/test/.keep b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/test/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/test/train_full_1p.sh b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/test/train_full_1p.sh
new file mode 100644
index 0000000000000000000000000000000000000000..3eb0a2f72a500c4d1bb7276b24e65cd26f90270f
--- /dev/null
+++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/test/train_full_1p.sh
@@ -0,0 +1,185 @@
+#!/bin/bash
+
+##########################################################
+######### Lines 3 to 100: please do NOT modify  ##########
+######### Lines 3 to 100: please do NOT modify  ##########
+######### Lines 3 to 100: please do NOT modify  ##########
+##########################################################
+# path of this shell script
+cur_path=`echo $(cd $(dirname $0);pwd)`
+
+# check whether the current shell script is the performance one
+perf_flag=`echo $0 | grep performance | wc -l`
+
+# name of the network currently being executed
+Network=`echo $(cd $(dirname $0);pwd) | awk -F"/" '{print $(NF-1)}'`
+
+export RANK_SIZE=1
+export RANK_ID=0
+export JOB_ID=10087
+
+# initialize path parameters
+data_path=""
+output_path=""
+
+# help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage:./train_performance_1P.sh "
+    echo " "
+    echo "parameter explain:
+    --data_path              # dataset of training
+    --output_path            # output of training
+    --train_steps            # max_step for training
+    --train_epochs           # max_epoch for training
+    --batch_size             # batch size
+    -h/--help                show help message
+    "
+    exit 1
+fi
+
+# argument validation; no modification needed
+for para in $*
+do
+    if [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --output_path* ]];then
+        output_path=`echo ${para#*=}`
+    elif [[ $para == --train_steps* ]];then
+        train_steps=`echo ${para#*=}`
+    elif [[ $para == --train_epochs* ]];then
+        train_epochs=`echo ${para#*=}`
+    elif [[ $para == --batch_size* ]];then
+        batch_size=`echo ${para#*=}`
+    fi
+done
+
+# verify that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+
+# verify that output_path was passed in; no modification needed
+if [[ $output_path == "" ]];then
+    output_path="./test/output/${ASCEND_DEVICE_ID}"
+fi
+
+# set the on-screen log file name; keep this, the file name is ${print_log}
+print_log="./test/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log"
+modelarts_flag=${MODELARTS_MODEL_PATH}
+if [ x"${modelarts_flag}" != x ];
+then
+    echo "running with modelarts..."
+    print_log_name=`ls /home/ma-user/modelarts/log/ | grep proc-rank`
+    print_log="/home/ma-user/modelarts/log/${print_log_name}"
+fi
+echo "### get your log here : ${print_log}"
+
+CaseName=""
+function get_casename()
+{
+    if [ x"${perf_flag}" = x1 ];
+    then
+        CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'perf'
+    else
+        CaseName=${Network}_bs${batch_size}_${RANK_SIZE}'p'_'acc'
+    fi
+}
+
+# change to the code directory
+cd ${cur_path}/../
+rm -rf ./test/output/${ASCEND_DEVICE_ID}
+mkdir -p ./test/output/${ASCEND_DEVICE_ID}
+
+# record the training start time; no modification needed
+start_time=$(date +%s)
+##########################################################
+######### Do NOT modify lines 3 through 100 ##############
+######### Do NOT modify lines 3 through 100 ##############
+######### Do NOT modify lines 3 through 100 ##############
+##########################################################
+
+#=========================================================
+#=========================================================
+#======== training command; adapt it to your network =====
+#=========================================================
+#=========================================================
+# basic parameters; review and adapt them to your model
+# the training dataset is under ${data_path}; please use this variable directly
+# the training output directory is ${output_path}; please use this variable directly
+# other basic parameters can be added as needed, but batch_size must be kept and set to the correct value
+#train_epochs=2
+#train_steps=100
+batch_size=8
+
+if [ x"${modelarts_flag}" != x ];
+then
+    python3.7 ./train.py --data_dir=${data_path}/v1 --output_dir=${output_path}
+else
+    python3.7 ./train.py --data_dir=${data_path}/v1 --output_dir=${output_path} > ${print_log} 2>&1
+fi
+
+# performance metrics
+StepTime=`grep "time:" ${print_log} | awk '{print $8}' | tail -n 10 | awk '{sum+=$1} END {print sum/NR}'`
+FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'`
+
+# accuracy metrics
+train_accuracy=`grep "d_loss" ${print_log} | awk '{print $4}' | awk '{sum+=$1} END {print sum/NR}'`
+# extract all loss lines from the log
+grep "d_loss" ${print_log} | awk '{print $4}' > ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt
+
+###########################################################
+######### Do NOT modify anything below this point #########
+######### Do NOT modify anything below this point #########
+######### Do NOT modify anything below this point #########
+###########################################################
+
+# check whether this run actually used the Ascend NPU
+use_npu_flag=`grep "The model has been compiled on the Ascend AI processor" ${print_log} | wc -l`
+if [ x"${use_npu_flag}" == x0 ];
+then
+    echo "------------------ ERROR NOTICE START ------------------"
+    echo "ERROR, your task hasn't used the Ascend NPU, please check your NPU migration."
+    echo "------------------ ERROR NOTICE END ------------------"
+else
+    echo "------------------ INFO NOTICE START ------------------"
+    echo "INFO, your task has used the Ascend NPU, please check your result."
+ echo "------------------ INFO NOTICE END------------------" +fi + +# 获取最终的casename,请保留,case文件名为${CaseName} +get_casename + +# 重命名loss文件 +if [ -f ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ]; +then + mv ./test/output/${ASCEND_DEVICE_ID}/my_output_loss.txt ./test/output/${ASCEND_DEVICE_ID}/${CaseName}_loss.txt +fi + +# 训练端到端耗时 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +echo "------------------ Final result ------------------" +# 输出性能FPS/单step耗时/端到端耗时 +echo "Final Performance images/sec : $FPS" +echo "Final Performance sec/step : $StepTime" +echo "E2E Training Duration sec : $e2e_time" + +# 输出训练精度 +echo "Final Train Accuracy : ${train_accuracy}" + +# 最后一个迭代loss值,不需要修改 +ActualLoss=(`awk 'END {print $NF}' $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}_loss.txt`) + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${batch_size}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = `uname -m`" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >>$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/train.py b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/train.py new file mode 100644 index 0000000000000000000000000000000000000000..44230adaa2be5691d5fb05d9cf31d2e4325b17f8 --- /dev/null +++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/train.py @@ -0,0 +1,162 @@ +""" +SRNet - Editing Text in the Wild +Model training. +Copyright (c) 2019 Netease Youdao Information Technology Co.,Ltd. +Licensed under the GPL License (see LICENSE for details) +Written by Yu Qian +""" + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import argparse
+
+from npu_bridge.npu_init import *
+
+import tensorflow as tf
+from model import SRNet
+import numpy as np
+import os
+import cfg
+from utils import *
+from datagen import srnet_datagen, get_input_data
+import time
+
+from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig
+
+#import moxing as mox
+
+def getParas(code_dir):
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-t", dest="test", action="store_true")
+    #parser.add_argument("--train_url", type=str)
+    #parser.add_argument("--data_url", type=str)
+    parser.add_argument("--data_dir", dest="dataDir", type=str, default=os.path.join(code_dir, "trainData"))
+    parser.add_argument("--output_dir", dest="resultDir", type=str, default="/cache/out")
+    return parser.parse_args()
+
+
+def main():
+    code_dir = os.path.dirname(__file__)
+    args = getParas(code_dir)
+
+    example_data_dir = os.path.join(code_dir, "examples", "labels")
+    vgg19Path = os.path.join(args.dataDir, "vgg19_weights_tf_dim_ordering_tf_kernels_notop.pb")
+    example_result_dir = os.path.join(args.resultDir, "predictResult")
+    tensorboardDir = os.path.join(args.resultDir, "trainResult", "train_logs")
+    checkpoint_savedir = os.path.join(args.resultDir, "trainResult", "checkpoints")
+
+    #if not args.test:
+    #    os.makedirs(args.dataDir)
+    #    mox.file.copy_parallel(src_url=args.data_url, dst_url=args.dataDir)
+
+    if args.test:
+        max_iter = cfg.test_max_iter
+    else:
+        max_iter = cfg.max_iter
+
+    # define train_name
+    if not cfg.train_name:
+        train_name = get_train_name()
+    else:
+        train_name = cfg.train_name
+
+    # define model
+    print_log('model compiling start.', content_color=PrintColor['yellow'])
+    model = SRNet(vgg19Path=vgg19Path,
+                  tensorboardDir=tensorboardDir,
+                  shape=cfg.data_shape,
+                  name=train_name)
+    print_log('model compiled.', content_color=PrintColor['yellow'])
+
+    # define data generator
+    # srnet_datagen() contains yield, so calling it does not run the body
+    # immediately; it returns a generator (iterator) instead
+    gen = srnet_datagen(args.dataDir, cfg.batch_size)
+
+    with model.graph.as_default():
+        init = tf.global_variables_initializer()
+        # NPU rewriter options (note: the session below is created with npu_config_proto())
+        trainCfg = tf.ConfigProto()
+        custom_op = trainCfg.graph_options.rewrite_options.custom_optimizers.add()
+        custom_op.name = "NpuOptimizer"
+        trainCfg.graph_options.rewrite_options.remapping = RewriterConfig.OFF
+
+        with tf.Session(config=npu_config_proto()) as sess:
+            saver = tf.train.Saver(tf.global_variables(), max_to_keep=100)
+
+            # load pretrained weights or initialize variables
+            if cfg.pretrained_ckpt_path:
+                print_log('weight loading start.', content_color=PrintColor['yellow'])
+                saver.restore(sess, cfg.pretrained_ckpt_path)
+                print_log('weight loaded.', content_color=PrintColor['yellow'])
+            else:
+                print_log('weight initialize start.', content_color=PrintColor['yellow'])
+                sess.run(init)
+                print_log('weight initialized.', content_color=PrintColor['yellow'])
+
+            # train
+            print_log('training start.', content_color=PrintColor['yellow'])
+            for step in range(max_iter):
+                global_step = step + 1
+
+                start_time = time.time()
+                # train and get loss
+                d_loss, g_loss, d_log, g_log = model.train_step(sess, global_step, *next(gen))
+
+                # show loss
+                #if global_step % cfg.show_loss_interval == 0 or step == 0:
+                #    print_log("step: {:>6d} d_loss: {:>3.5f} g_loss: {:>3.5f}".format(global_step, d_loss, g_loss))
+                print("step: {:>6d} d_loss: {:>3.5f} g_loss: {:>3.5f} time: {:.4f}".format(global_step, d_loss, g_loss, (time.time() - start_time)))
+
+                # write
tensorboard + if global_step % cfg.write_log_interval == 0: + write_summary(model.d_writer, model.g_writer, d_log, g_log, global_step) + + # gen example + if global_step % cfg.gen_example_interval == 0: + savedir = os.path.join(example_result_dir, train_name, + 'iter-' + str(global_step).zfill(len(str(max_iter)))) + predict_data_list(model, sess, savedir, get_input_data(example_data_dir)) + print_log("example generated in dir {}".format(savedir), content_color=PrintColor['green']) + + # save checkpoint + if global_step % cfg.save_ckpt_interval == 0: + savedir = os.path.join(checkpoint_savedir, train_name, 'iter') + if not os.path.exists(savedir): + os.makedirs(savedir) + save_checkpoint(sess, saver, savedir, global_step) + print_log("checkpoint saved in dir {}".format(savedir), content_color=PrintColor['green']) + + print_log('training finished.', content_color=PrintColor['yellow']) + pb_savepath = os.path.join(checkpoint_savedir, train_name, 'final.pb') + save_pb(sess, pb_savepath, ['o_sk', 'o_t', 'o_b', 'o_f']) + print_log('pb model saved in dir {}'.format(pb_savepath), content_color=PrintColor['green']) + #mox.file.copy_parallel(src_url=args.resultDir, dst_url=r'obs:\\cann-nju-srnet\tt') + + +if __name__ == '__main__': + main() + diff --git a/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/utils.py b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..54e6fa7c4e44cf807d2169f76c75d46ffabfaad0 --- /dev/null +++ b/TensorFlow/contrib/graph/SRNET_ID1089_for_TensorFlow/utils.py @@ -0,0 +1,127 @@ +""" +SRNet - Editing Text in the Wild +Common utility functions and classes. +Copyright (c) 2019 Netease Youdao Information Technology Co.,Ltd. +Licensed under the GPL License (see LICENSE for details) +Written by Yu Qian +""" + +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from npu_bridge.npu_init import *
+
+import tensorflow as tf
+from tensorflow.python.framework import graph_util
+import os
+import cv2
+from datetime import datetime
+
+PrintColor = {
+    'black': 30,
+    'red': 31,
+    'green': 32,
+    'yellow': 33,
+    'blue': 34,
+    'amaranth': 35,
+    'ultramarine': 36,
+    'white': 37
+}
+
+PrintStyle = {
+    'default': 0,
+    'highlight': 1,
+    'underline': 4,
+    'flicker': 5,
+    'inverse': 7,
+    'invisible': 8
+}
+
+def get_train_name():
+
+    # use the current time as the train name
+    return datetime.now().strftime('%Y%m%d%H%M%S')
+
+def print_log(s, time_style = PrintStyle['default'], time_color = PrintColor['blue'],
+              content_style = PrintStyle['default'], content_color = PrintColor['white']):
+
+    # colorful print of s together with a time stamp
+    cur_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
+    log = '\033[{};{}m[{}]\033[0m \033[{};{}m{}\033[0m'.format \
+        (time_style, time_color, cur_time, content_style, content_color, s)
+    print(log)
+
+def print_nodes(graph):
+
+    # print all nodes of the graph
+    nodes = [n.name for n in graph.as_graph_def().node]
+    for node in nodes:
+        print(node)
+
+def write_summary(d_writer, g_writer, d_log, g_log, global_step):
+
+    # write summaries for tensorboard
+    d_writer.add_summary(d_log, global_step)
+    g_writer.add_summary(g_log, global_step)
+
+def save_result(save_dir, result, name, mode):
+
+    # save output images
+    o_sk, o_t, o_b, o_f = result
+    if not os.path.exists(save_dir):
+        os.makedirs(save_dir)
+    cv2.imwrite(os.path.join(save_dir, name + 'o_f.png'), o_f, [int(cv2.IMWRITE_PNG_COMPRESSION), 0])
+    if mode == 1:
+        cv2.imwrite(os.path.join(save_dir, name + 'o_sk.png'), o_sk, [int(cv2.IMWRITE_PNG_COMPRESSION), 0])
+        cv2.imwrite(os.path.join(save_dir, name + 'o_t.png'), o_t, [int(cv2.IMWRITE_PNG_COMPRESSION), 0])
+        cv2.imwrite(os.path.join(save_dir, name + 'o_b.png'), o_b, [int(cv2.IMWRITE_PNG_COMPRESSION), 0])
+
+def predict_data_list(model, sess, save_dir, input_data_list, mode = 1):
+
+    # predict output images and save them
+    for data in input_data_list:
+        i_t, i_s, ori_shape, data_name = data
+        result = model.predict(sess, i_t, i_s, ori_shape)
+        save_result(save_dir, result, data_name, mode = mode)
+
+def save_checkpoint(sess, saver, save_dir, global_step):
+
+    # save tensorflow ckpt files
+    saver.save(sess, save_dir, global_step = global_step)
+
+def save_pb(sess, save_path, outputs = ['o_sk', 'o_t', 'o_b', 'o_f']):
+
+    # save a tensorflow pb model
+    save_dir = os.path.split(save_path)[0]
+    if not os.path.exists(save_dir):
+        os.makedirs(save_dir)
+    constant_graph = graph_util.convert_variables_to_constants(sess, sess.graph_def, outputs)
+    with tf.gfile.FastGFile(save_path, mode='wb') as f:
+        f.write(constant_graph.SerializeToString())
+
+
diff --git a/TensorFlow2/built-in/cv/detection/YOLOv5_ID1719_for_TensorFlow2.X/test/train_performance_1p.sh b/TensorFlow2/built-in/cv/detection/YOLOv5_ID1719_for_TensorFlow2.X/test/train_performance_1p.sh
index c55b4030a46f8b78d10e2b27e1d9f8fb3324b609..018962a4daff018cb59bd00d534c62684da17837 100644
--- a/TensorFlow2/built-in/cv/detection/YOLOv5_ID1719_for_TensorFlow2.X/test/train_performance_1p.sh
+++ b/TensorFlow2/built-in/cv/detection/YOLOv5_ID1719_for_TensorFlow2.X/test/train_performance_1p.sh
@@ -158,7 +158,7 @@ e2e_time=$(( $end_time - $start_time ))
 echo "------------------ Final result ------------------"
 # print the FPS; review and adapt to your model
 epoch_duration=`grep epoch_duration $cur_path/output/0/train_0.log | awk '{print $2}'`
-first_step=`grep duration: $cur_path/output/0/train_0.log |head -1| awk 'END{print
$17}'`
+first_step=`grep duration: $cur_path/output/0/train_0.log |head -1| awk -F "duration:" '{print $2}' |sed s/[[:space:]]//g`
 FPS=`awk 'BEGIN{printf "%.2f\n",('$perf'+'$train_worker_num'-2)/('$epoch_duration'-'$first_step')*'$batch_size'*1}'`
 
 echo "Final Performance imgs/sec : $FPS"
diff --git a/TensorFlow2/built-in/cv/detection/YOLOv5_ID1719_for_TensorFlow2.X/test/train_performance_8p.sh b/TensorFlow2/built-in/cv/detection/YOLOv5_ID1719_for_TensorFlow2.X/test/train_performance_8p.sh
index 6f24266d5bf266bfd778043d527358be8ab81edf..0ed17b5f9cd869a286117ea6dcd9482ec2c72a12 100644
--- a/TensorFlow2/built-in/cv/detection/YOLOv5_ID1719_for_TensorFlow2.X/test/train_performance_8p.sh
+++ b/TensorFlow2/built-in/cv/detection/YOLOv5_ID1719_for_TensorFlow2.X/test/train_performance_8p.sh
@@ -157,7 +157,7 @@ e2e_time=$(( $end_time - $start_time ))
 echo "------------------ Final result ------------------"
 # print the FPS; review and adapt to your model
 epoch_duration=`grep epoch_duration $cur_path/output/0/train_0.log | awk '{print $2}'`
-first_step=`grep duration: $cur_path/output/0/train_0.log |head -1| awk 'END{print $17}'`
+first_step=`grep duration: $cur_path/output/0/train_0.log |head -1| awk -F "duration:" '{print $2}' |sed s/[[:space:]]//g`
 FPS=`awk 'BEGIN{printf "%.2f\n",('$perf'+'$train_worker_num'-2)/('$epoch_duration'-'$first_step')*'$batch_size'*8}'`
 echo "Final Performance imgs/sec : $FPS"
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/ReadMe.md b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/ReadMe.md
index 762b1b60f76d498b1eaeff8047263a6b4f56098b..204af8a2401509a80bb3f856559cde03813ed00e 100644
--- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/ReadMe.md
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/ReadMe.md
@@ -162,7 +162,18 @@ npu_device.global_options().precision_mode=FLAGS.precision_mode
 ├──eval_10k.tfrecord
 ```
 
+4. Dataset packing (only needed when training with the pack strategy)
+If the pack strategy is used for training (see "Model training" - start training - 4. pack strategy), the dataset must first be converted into a packed dataset, and training is then run on that packed dataset. The conversion scripts are in the bert/data_pack/ directory. After packing, a series of files whose names start with "strategy_record" is generated in the specified directory.
+The training (train) and evaluation (eval) datasets must be packed separately, each into a new directory. The unpacked dataset is in tfrecord format; first convert it in batches to bin files with "bert_data/record_to_binary.py" in the bert/data_pack directory:
+
+python3 bert_data/record_to_binary.py --tf-record-glob="path/to/your/unpacked/data/part*.tfrecord" --output-path="path/to/store/binary/files"
+
+Then use "pack_pretraining_data.py" in the bert/data_pack directory to turn the bin files into the packed dataset.
+
+python3 pack_pretraining_data.py --input-glob="path/to/store/binary/files" --output-dir="packed/data/folder"
+
+The target folders must be created manually.
 ## Model training
 
 - Download the training script.
@@ -245,7 +256,10 @@ npu_device.global_options().precision_mode=FLAGS.precision_mode
 
     bash test/train_performance_8p_192bs.sh --data_path=/home/tfrecord --precision_mode=allow_mix_precision
 
-
+    4. Pack strategy
+
+    4.1 Training scripts with the pack strategy (the scripts under ./test/ whose names contain "_packed")
+    When training with the pack strategy, use the packed datasets (train and eval) together with a matching pretrained model. If no packed pretrained model is available for TensorFlow v2, one can be converted from the TensorFlow v1 version. The conversion script is bert/tf2_encoder_checkpoint_converter.py; see: Scripts and sample code - model conversion script
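+
+    As a minimal illustration of the packed layout described above (toy sizes, not the real 512-token configuration; the field semantics follow pack_pretraining_data.py): several short sequences share one row, input_mask stores the index of the owning sequence for every token, and the position ids restart at zero for each packed sequence.
+
+```python
+import numpy as np
+
+# Two sequences of lengths 4 and 5 packed into one 12-token row.
+seq_lens = [4, 5]
+sequence_length = 12
+input_mask = np.zeros(sequence_length, dtype=np.int32)
+positions = np.zeros(sequence_length, dtype=np.int32)
+offset = 0
+for idx, n in enumerate(seq_lens, start=1):
+    input_mask[offset:offset + n] = idx          # token -> owning sequence (0 = padding)
+    positions[offset:offset + n] = np.arange(n)  # positions restart per sequence
+    offset += n
+print(input_mask)  # [1 1 1 1 2 2 2 2 2 0 0 0]
+print(positions)   # [0 1 2 3 0 1 2 3 4 0 0 0]
+```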

Advanced reference

@@ -255,6 +269,7 @@ npu_device.global_options().precision_mode=FLAGS.precision_mode
 |--bert                    # network code directory
 |    |--tf2_common
 |    |--modeling
+|    |--data_pack          # directory with the pack scripts and their documentation
 |    |--......
 |--configs                 # configuration directory
 |    |--bert_config.json
@@ -265,6 +280,13 @@ npu_device.global_options().precision_mode=FLAGS.precision_mode
 |    |--......
 ```
 
+Addition:
+
+Model conversion script (used only to convert a TensorFlow v1 checkpoint to TensorFlow v2)
+
+TensorFlow v1 and TensorFlow v2 checkpoints differ considerably in both structure and usage. To migrate a checkpoint produced by TensorFlow v1 so that it can be used in a TensorFlow v2 environment, use the conversion script at "./bert/tf2_encoder_checkpoint_converter.py".
+Example usage: python3 tf2_encoder_checkpoint_converter.py --bert_config_file=/path/to/your/tensorflow_v1/bert_config.json --checkpoint_to_convert=/path/to/your/tensorflow_v1/model.ckpt-28252 --converted_checkpoint_path=/path/to/save/output_ckpt/output_ckpt
+
 ## Script parameters
 
 ```
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/bert_models.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/bert_models.py
index 567c0c12c83f5a22504fd642a9d3596002341ae2..6709f33df29b3de93bb58a4306dd531a36510ff6 100644
--- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/bert_models.py
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/bert_models.py
@@ -29,7 +29,8 @@ from modeling.networks import bert_pretrainer
 from modeling.networks import bert_span_labeler
 from modeling.layers import bert_dropout
 from metrics_sparse_int32 import sparse_categorical_accuracy_int32
-
+from absl import flags
+FLAGS = flags.FLAGS
 class BertPretrainLossAndMetricLayer(tf.keras.layers.Layer):
   """Returns layer that computes custom loss and metrics for pretraining."""
@@ -43,7 +44,7 @@ class BertPretrainLossAndMetricLayer(tf.keras.layers.Layer):
 
   def _add_metrics(self, lm_output, lm_labels, lm_label_weights,
                    lm_example_loss, sentence_output, sentence_labels,
-                   next_sentence_loss):
+                   next_sentence_loss, next_sentence_weights=None):
     """Adds metrics."""
     #masked_lm_accuracy = tf.keras.metrics.sparse_categorical_accuracy(
     #    lm_labels, lm_output)
@@ -59,6 +60,11 @@ class BertPretrainLossAndMetricLayer(tf.keras.layers.Layer):
     #next_sentence_accuracy = tf.keras.metrics.sparse_categorical_accuracy(
     #    sentence_labels, sentence_output)
     next_sentence_accuracy = sparse_categorical_accuracy_int32(sentence_labels, sentence_output)
+    if FLAGS.use_packed_model:
+      # With packing, average the NSP accuracy only over the real sequence slots
+      # (next_sentence_weights is 0 for unused slots in a pack).
+      next_sentence_numerator = tf.reduce_sum(next_sentence_accuracy * next_sentence_weights)
+      next_sentence_denominator = tf.reduce_sum(next_sentence_weights)
+      next_sentence_accuracy = next_sentence_numerator / next_sentence_denominator
     next_sentence_num = tf.reduce_sum(next_sentence_accuracy)
     next_sentence_denom = tf.size(next_sentence_accuracy)
     # self.add_metric(
@@ -81,15 +87,24 @@ class BertPretrainLossAndMetricLayer(tf.keras.layers.Layer):
 
   def call(self, lm_output, sentence_output, lm_label_ids, lm_label_weights,
-           sentence_labels):
+           sentence_labels, next_sentence_weights=None):
     """Implements call() for the layer."""
     lm_label_weights = tf.cast(lm_label_weights, tf.float32)
+    if FLAGS.use_packed_model:
+      lm_label_weights = tf.minimum(lm_label_weights, 1.0)
     lm_output = tf.cast(lm_output, tf.float32)
     sentence_output = tf.cast(sentence_output, tf.float32)
     mask_label_loss = losses.weighted_sparse_categorical_crossentropy_loss(
         labels=lm_label_ids, predictions=lm_output, weights=lm_label_weights)
-    sentence_loss = 
losses.weighted_sparse_categorical_crossentropy_loss( + if FLAGS.use_packed_model: + # change shape [B, 3] to [B*3, ], keep batch normal + sentence_labels = tf.reshape(sentence_labels, [-1,]) + next_sentence_weights = tf.reshape(next_sentence_weights, [-1,]) + sentence_loss = losses.weighted_sparse_categorical_crossentropy_loss( + labels=sentence_labels, predictions=sentence_output, weights=next_sentence_weights) + else: + sentence_loss = losses.weighted_sparse_categorical_crossentropy_loss( labels=sentence_labels, predictions=sentence_output) loss = mask_label_loss + sentence_loss batch_shape = tf.slice(tf.shape(sentence_labels), [0], [1]) @@ -100,9 +115,15 @@ class BertPretrainLossAndMetricLayer(tf.keras.layers.Layer): # TODO(b/122840926): metrics use distribution strategy merge_call() and do # not work with tf.function(compile=True). Either fix this issue or move # metric aggregation outside the model. - metric_outputs = self._add_metrics(lm_output, lm_label_ids, lm_label_weights, - mask_label_loss, sentence_output, sentence_labels, - sentence_loss) + if FLAGS.use_packed_model: + next_sentence_weights = tf.cast(next_sentence_weights,dtype=tf.float32) + metric_outputs = self._add_metrics(lm_output, lm_label_ids, lm_label_weights, + mask_label_loss, sentence_output, sentence_labels, + sentence_loss,next_sentence_weights) + else: + metric_outputs = self._add_metrics(lm_output, lm_label_ids, lm_label_weights, + mask_label_loss, sentence_output, sentence_labels, + sentence_loss) return final_loss, bs, metric_outputs @@ -194,8 +215,17 @@ def pretrain_model(bert_config, shape=(max_predictions_per_seq,), name='masked_lm_weights', dtype=tf.int32) - next_sentence_labels = tf.keras.layers.Input( - shape=(1,), name='next_sentence_labels', dtype=tf.int32) + + if FLAGS.use_packed_model: + next_sentence_weights = tf.keras.layers.Input( + shape=(FLAGS.max_sequences_per_pack,), name='next_sentence_weights', dtype=tf.int32) + next_sentence_positions = tf.keras.layers.Input( + shape=(FLAGS.max_sequences_per_pack,), dtype=tf.int32, name='next_sentence_positions') + next_sentence_labels = tf.keras.layers.Input( + shape=(FLAGS.max_sequences_per_pack,), name='next_sentence_labels', dtype=tf.int32) + else: + next_sentence_labels = tf.keras.layers.Input( + shape=(1,), name='next_sentence_labels', dtype=tf.int32) transformer_encoder = get_transformer_encoder(bert_config, seq_length) if initializer is None: @@ -208,15 +238,24 @@ def pretrain_model(bert_config, activation=tf_utils.get_activation(bert_config.hidden_act), initializer=initializer, output='predictions') - - lm_output, sentence_output = pretrainer_model( - [input_word_ids, input_mask, input_type_ids, masked_lm_positions]) + + if FLAGS.use_packed_model: + lm_output, sentence_output = pretrainer_model( + [input_word_ids, input_mask, input_type_ids, next_sentence_positions, masked_lm_positions]) + else: + lm_output, sentence_output = pretrainer_model( + [input_word_ids, input_mask, input_type_ids, masked_lm_positions]) pretrain_loss_layer = BertPretrainLossAndMetricLayer( vocab_size=bert_config.vocab_size) - output_loss = pretrain_loss_layer(lm_output, sentence_output, masked_lm_ids, - masked_lm_weights, next_sentence_labels) - keras_model = tf.keras.Model( + if FLAGS.use_packed_model: + output_loss = pretrain_loss_layer(lm_output, sentence_output, masked_lm_ids, + masked_lm_weights, next_sentence_labels, next_sentence_weights) + else: + output_loss = pretrain_loss_layer(lm_output, sentence_output, masked_lm_ids, + masked_lm_weights, next_sentence_labels) 
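+  # The packed model carries extra per-pack inputs (next_sentence_weights and
+  # next_sentence_positions), so its Keras input dict differs from the unpacked one below.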
+  if FLAGS.use_packed_model:
+    keras_model = tf.keras.Model(
       inputs={
           'input_word_ids': input_word_ids,
           'input_mask': input_mask,
@@ -225,6 +264,20 @@ def pretrain_model(bert_config,
           'masked_lm_ids': masked_lm_ids,
           'masked_lm_weights': masked_lm_weights,
           'next_sentence_labels': next_sentence_labels,
+          'next_sentence_weights': next_sentence_weights,
+          'next_sentence_positions': next_sentence_positions,
+      },
+      outputs=output_loss)
+  else:
+    keras_model = tf.keras.Model(
+        inputs={
+            'input_word_ids': input_word_ids,
+            'input_mask': input_mask,
+            'input_type_ids': input_type_ids,
+            'masked_lm_positions': masked_lm_positions,
+            'masked_lm_ids': masked_lm_ids,
+            'masked_lm_weights': masked_lm_weights,
+            'next_sentence_labels': next_sentence_labels,
+        },
       outputs=output_loss)
   return keras_model, transformer_encoder, pretrainer_model
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/README.md b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..530b8713608ecb7db2be9948e1fbcd907e3146b0
--- /dev/null
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/README.md
@@ -0,0 +1,16 @@
+## Wikipedia pre-training data
+
+Follow the MLCommons reference implementation instructions to construct the training and eval datasets.
+
+## Pack sequences to reduce padding:
+
+First convert the tfrecords to a binary format using `bert_data/record_to_binary.py`:
+```
+python3 bert_data/record_to_binary.py --tf-record-glob="path/to/your/unpacked/data/part*.tfrecord" --output-path="path/to/store/binary/files"
+```
+Then pack the sequence data using `pack_pretraining_data.py`:
+```
+python3 pack_pretraining_data.py --input-glob="path/to/store/binary/files" --output-dir="packed/data/folder"
+```
+The same steps should also be repeated for the eval dataset.
+The Wikipedia dataset is now ready to be used in the Graphcore BERT model.
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/.keep b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/data_sampler.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/data_sampler.py
new file mode 100644
index 0000000000000000000000000000000000000000..ec52e6ac5ca18fa40bf2c698e69efed1ed1b8837
--- /dev/null
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/data_sampler.py
@@ -0,0 +1,152 @@
+# Copyright (c) 2020 Graphcore Ltd. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import numpy as np
+import math
+
+
+class Sampler(object):
+    '''
+    A Sampler can be used to provide indices that specify the order in which
+    to iterate over the dataset.
They are similar to PyTorch's Sampler. + ''' + pass + + +class SequentialSampler(Sampler): + ''' + Iterate over the data in the original order. + ''' + + def __init__(self, data_source, padding=0): + self.indices = list(range(len(data_source))) + self.padding = padding + if self.padding > 0: + self.indices += self.indices[:self.padding] + + def __iter__(self): + return iter(self.indices) + + def __len__(self): + return len(self.indices) + + +class ShuffledSampler(Sampler): + ''' + Iterate over the data in random order. + ''' + + def __init__(self, data_source, seed=0, padding=0): + self.num_samples = len(data_source) + self.padding = padding + self.data_source = data_source + self._rng = np.random.default_rng(seed) + self.num_samples += self.padding + + def __iter__(self): + indices = list(range(len(self.data_source))) + self._rng.shuffle(indices) + if self.padding > 0: + indices += indices[:self.padding] + return iter(indices) + + def __len__(self): + return self.num_samples + + +class DistributedDataSampler(Sampler): + ''' + Shard the dataset according to popdist_rank and popdist_size. Setting shuffle=True + randomizes the indices. The data can be padded to be evenly divisible by the popdist size. + ''' + + def __init__(self, + data_source, + seed=0, + shuffle=False, + popdist_size=1, + popdist_rank=0, + padding=False, + padding_sub=0, + div_factor=1): + + self.shuffle = shuffle + self.popdist_size = popdist_size + self.popdist_rank = popdist_rank + self.data_source = data_source + self.padding_sub = padding_sub + if padding: + self.num_samples = int(math.ceil(len(data_source) * 1.0 / popdist_size)) + else: + self.num_samples = len(data_source) // popdist_size + if padding_sub > 0: + # Update padding size for no-drop-remainder given the new number of samples + self.padding_sub = int(np.ceil(self.num_samples / div_factor)) * div_factor - self.num_samples + self.total_samples = self.num_samples * self.popdist_size + self.padding = padding + self._rng = np.random.default_rng(seed) + self.returned_num_samples = self.num_samples + self.padding_sub + + def __iter__(self): + indices = list(range(len(self.data_source))) + if self.shuffle: + self._rng.shuffle(indices) + + if self.padding: + # Pad indices to have evenly divisible number of samples for each replica + indices += indices[:(self.total_samples - len(indices))] + assert(len(indices) == self.total_samples) + + # Subsample data + indices = indices[self.popdist_rank:self.total_samples:self.popdist_size] + assert(len(indices) == self.num_samples) + + if self.padding_sub > 0: + # Pad at the end AFTER the shuffling and the subsample, for no-drop-remainder + indices += indices[:self.padding_sub] + assert(len(indices) == self.returned_num_samples) + + return iter(indices) + + def __len__(self): + return self.returned_num_samples + + def get_subpadding_size(self): + return self.padding_sub + + +class SampleGenerator: + def __init__(self, data_source, sampler=None): + if sampler is None: + sampler = SequentialSampler(data_source) + self.data_source = data_source + self.sampler = sampler + + def __iter__(self): + self.index = 0 + self.data_iter = iter(self.data_source) + self.index_iter = iter(self.sampler) + return self + + def __next__(self): + next_index = next(self.index_iter) + data = next(self.data_iter) + while self.index != next_index: + data = next(self.data_iter) + self.index += 1 + self.index += 1 + return data + + def __len__(self): + return len(self.sampler) \ No newline at end of file diff --git 
a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/dataset.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..a07bf5caaa7d265ddf3d668a58a0919014240a4a --- /dev/null +++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/dataset.py @@ -0,0 +1,58 @@ +# Copyright (c) 2019 Graphcore Ltd. All rights reserved. + +from itertools import chain +import numpy as np + + +class DataSet(object): + ''' + Converts an iterator that returns a list of np.ndarrays into an iterator that returns + the same list with the ndarrays reshaped to match PopART's dataflow requirements. + ''' + + def __init__(self, + loader, + tensor_shapes, + batches_per_step=1, + replication_factor=1, + accumulation_factor=1): + self.tensor_shapes = tensor_shapes + self.loader = loader + self.batches_per_step = batches_per_step + self.replication_factor = replication_factor + self.accumulation_factor = accumulation_factor + self.steps_per_epoch = len(loader) + + # Determine the shape of the batch based on samples_per_step, accumulation_factor and replication_factor + self.outer_shapes = [] + + # PopART expects inputs to be of the shape [batches_per_step, accl_factor, repl_factor, micro_batch, *data_shape] + if self.batches_per_step > 1: + self.outer_shapes += [self.batches_per_step] + + if self.accumulation_factor > 1: + self.outer_shapes += [self.accumulation_factor] + + if self.replication_factor > 1: + self.outer_shapes += [self.replication_factor] + + def __iter__(self): + self.loader_iterator = iter(self.loader) + return self + + def __len__(self): + return len(self.loader) + + def __next__(self): + # Get the next sample/label + items = next(self.loader_iterator) + tensor_names = [] + + # Reshape the input + feed_dict = {} + for i, (id, shape) in enumerate(self.tensor_shapes): + if shape is not None: + items[i] = items[i].reshape(tuple(chain(self.outer_shapes, shape))) + feed_dict[id] = items[i] + + return feed_dict \ No newline at end of file diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/pretraining_dataset.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/pretraining_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..ce34ddaa3bb6b2ec81a1f91e25d6ca4f2d4b4b3f --- /dev/null +++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/pretraining_dataset.py @@ -0,0 +1,352 @@ +# Copyright (c) 2019 Graphcore Ltd. All rights reserved. 
+
+import numpy as np
+import random
+import glob
+import os
+from tqdm import tqdm
+from logging import getLogger
+from functools import reduce
+
+from .dataset import DataSet
+from .data_sampler import DistributedDataSampler, SampleGenerator
+
+logger = getLogger(__name__)
+
+
+def data_file_format(sequence_length, mask_tokens):
+    return [sequence_length,
+            sequence_length,
+            sequence_length,
+            mask_tokens,
+            mask_tokens,
+            mask_tokens,
+            1]
+
+
+def packed_data_file_format(sequence_length, mask_tokens, max_sequences_per_pack):
+    return [sequence_length,
+            sequence_length,
+            sequence_length,
+            sequence_length,
+            mask_tokens + max_sequences_per_pack,
+            mask_tokens + max_sequences_per_pack,
+            mask_tokens + max_sequences_per_pack,
+            max_sequences_per_pack,
+            max_sequences_per_pack,
+            max_sequences_per_pack]
+
+
+def data_ranges(sequence_length, mask_tokens, vocab_length):
+    return [vocab_length,
+            1,
+            2,
+            sequence_length,
+            vocab_length,
+            1,
+            2]
+
+
+# This could be replaced by a pytorch dataloader
+class BinaryDataLoader(object):
+    '''
+    Iterates binary input files into a list of N np.ndarrays with shapes (batch_size, sample_sizes[i]) for i in N
+
+    :param input_files: Iterable of paths to binary files generated by create_pretraining_data.py
+    :param sample_sizes: Iterable of the sizes of each element in the binary file. See data_file_format for the default
+    :param batch_size: Number of samples to return each iteration
+    :param dtype: Numpy type of the binary files
+    :param shuffle: If True, shuffle the input_files and the data contained.
+    :param seed: Seed for the shuffling RNG
+    :param duplication_factor:
+        The number of times each file contains the same sample. This will then only take 1/duplication_factor
+        from each file before moving to the next.
+    :param start_data_at_epoch: Epoch offset into the duplicated data
+        (for synthetic random data, see GeneratedDataLoader instead)
+    '''
+
+    def __init__(self,
+                 input_files,
+                 sample_sizes,
+                 batch_size=1,
+                 dtype=np.int32,
+                 shuffle=True,
+                 seed=1984,
+                 duplication_factor=1,
+                 start_data_at_epoch=0):
+        self.files = []
+        for pattern in input_files:
+            self.files.extend(glob.glob(pattern))
+        # print(f"Loading {len(self.files)} files: {self.files}")
+        self.sample_size = reduce(lambda a, s: a + s, sample_sizes, 0)
+        self.sample_sizes = sample_sizes
+        self.batch_size = batch_size
+        self.dtype = dtype
+        self.file_index = 0
+        self.data_index = 0
+        self.file_duplication_index = [start_data_at_epoch % duplication_factor] * len(self.files)
+        self.duplication_factor = duplication_factor
+        self.shuffle = shuffle
+        self._rng = np.random.default_rng(seed)
+        self.len = None
+
+    def samples_in_file(self, filename):
+        bytes_per_sample = self.sample_size * self.dtype().itemsize
+        num_bytes = os.path.getsize(filename)
+        if (num_bytes % bytes_per_sample) != 0:
+            raise RuntimeError(f"Input file: {filename} does not align to the size of a sample. Check the dataset was generated correctly")
+        duplicated_samples = num_bytes // bytes_per_sample
+        return duplicated_samples // self.duplication_factor
+
+    def __len__(self):
+        if self.len is None:
+            total_bytes = reduce(lambda a, f: a + self.samples_in_file(f), self.files, 0)
+            self.len = total_bytes // (self.batch_size)
+        return self.len
+
+    def __iter__(self):
+        self.file_index = 0
+        self.data_index = 0
+        if self.shuffle:
+            self._rng.shuffle(self.files)
+        self.load_data()
+        return self
+
+    def __next__(self):
+        data = self.get_data(self.batch_size)
+        # Split the batch into separate np.ndarrays
+        items = []
+        total = 0
+        for size in self.sample_sizes:
+            items.append(np.array(data[:, total:total + size]))
+            total += size
+        return items
+
+    def get_data(self, batch_size):
+        """
+        Slice batch_size samples from self.data or from the next file if there is not enough left
+        """
+        if self.data_index + batch_size > self.data.shape[0]:
+            prev_data = self.data[self.data_index:, :]
+            still_required = batch_size - prev_data.shape[0]
+            self.load_data()
+            next_data = self.get_data(still_required)
+            data = np.concatenate((prev_data, next_data), axis=0)
+        else:
+            data = self.data[self.data_index:self.data_index + batch_size, :]
+            self.data_index += batch_size
+        return data
+
+    def load_data(self):
+        # This drops the remainder
+        if self.file_index >= len(self.files):
+            raise StopIteration
+        self.data = self.load_file()
+        if self.shuffle:
+            self._rng.shuffle(self.data)
+
+    def load_file(self):
+        filename = self.files[self.file_index]
+        # Input files are assumed to be duplicated by create_pretraining_data only within a single file.
+        # So for preprocessed files: A, B, C. The output files are created: AAA.., BBB.., CCC..
+        # This makes sure in a single epoch A, B & C are all used once.
+        count = self.samples_in_file(filename) * self.sample_size
+        offset_bytes = count * self.file_duplication_index[self.file_index] * self.dtype().itemsize
+
+        new_data = np.fromfile(filename, self.dtype, count=count, offset=offset_bytes)
+        new_data = new_data.reshape(new_data.size // self.sample_size,
+                                    self.sample_size)
+
+        self.file_duplication_index[self.file_index] = \
+            (self.file_duplication_index[self.file_index] + 1) % self.duplication_factor
+
+        self.file_index += 1
+        self.data_index = 0
+
+        return new_data
+
+
+class CachedDataLoader(BinaryDataLoader):
+    """
+    Same as the BinaryDataLoader but preloads the specified number of epochs into memory ahead of time.
+    :param epochs_to_cache:
+        Specify the number of epochs to keep loaded in memory. This can reduce the number of times the inputs
+        are read. It is recommended to make this as large as possible, as the dataset files can be very large due to the duplication factor.
+        Must be greater than 0.
+ """ + + def __init__(self, + *args, + epochs_to_cache=1, + **kwargs): + super().__init__(*args, **kwargs) + self.epochs_to_cache = epochs_to_cache + self.data_cache = [] + self.cache_index = 0 + + if self.epochs_to_cache < 1: + raise RuntimeError("epochs_to_cache must be greater than 0") + + self.load_cache() + self.len = self.data_cache[0].shape[0] // self.batch_size + + def get_data(self, batch_size): + if self.data_index + batch_size > self.data.shape[0]: + raise StopIteration + + data = self.data[self.data_index:self.data_index + batch_size, :] + self.data_index += batch_size + return data + + def load_data(self): + if self.cache_index >= len(self.data_cache): + self.load_cache() + self.data = self.data_cache[self.cache_index] + self.cache_index += 1 + + def load_cache(self): + self.cache_index = 0 + self.data_cache = [] + logger.info("Filling Dataset Cache") + for __ in range(self.epochs_to_cache): + data = [] + for __ in tqdm(self.files): + data.append(self.load_file()) + data = np.concatenate(data, axis=0) + if self.shuffle: + self._rng.shuffle(data) + self.data_cache.append(data) + self.file_index = 0 + + +class GeneratedDataLoader(BinaryDataLoader): + """ + Same as the BinaryDataLoader but generates random data instead of reading from input_files + :param generated_ranges: Iterable of the max value each element of a sample can be. See data_ranges for the default + """ + + def __init__(self, + *args, + length=1, + generated_ranges=None, + **kwargs): + super().__init__(*args, **kwargs) + self.generated_ranges = generated_ranges + self.len = length + + if self.generated_ranges is None: + raise RuntimeError("keyword argument 'generated_ranges' must not be None") + + def __iter__(self): + self.data_index = 0 + return self + + def __next__(self): + if self.data_index >= self.len: + raise StopIteration + items = [] + for size, max_value in zip(self.sample_sizes, self.generated_ranges): + items.append(np.random.randint(0, max_value, [self.batch_size, size])) + self.data_index += 1 + return items + + +class BertDataTransform(object): + ''' + Masks the indices that are larger than the vocab_length + ''' + + def __init__(self, dataloader, vocab_length, mask_tokens): + self.dataloader = dataloader + self.vocab_length = vocab_length + self.mask_tokens = mask_tokens + + def __len__(self): + return len(self.dataloader) + + def __iter__(self): + self.dataloader_iterator = iter(self.dataloader) + return self + + def __next__(self): + items = next(self.dataloader_iterator) + # Specific BERT Post Processing. TODO: Find a better place for this processing + # The vocab_length may be smaller than the original vocab so + # Mask values that are not within the vocab_length + # 100 is unknown token [UNK] + # 0 in the label is padding + OOB = items[0] >= self.vocab_length + items[0][OOB] = 100 + + # TODO: If Ind == [MASK] and label > vocab_length, should [MASK] be changed to [UNK] + OOB = items[5] >= self.vocab_length + items[5][OOB] = 0 + + # Force use of uint32 for all inputs. 
+        for i in range(len(items)):
+            items[i] = items[i].astype(np.uint32)
+        return items
+
+
+def get_bert_dataset(tensor_shapes,
+                     input_files,
+                     seed,
+                     sequence_length,
+                     mask_tokens,
+                     vocab_length,
+                     batch_size,
+                     batches_per_step,
+                     replication_factor=1,
+                     accumulation_factor=1,
+                     duplication_factor=1,
+                     shuffle=True,
+                     generated_data=False,
+                     epochs_to_cache=0,
+                     continue_training_from_epoch=0,
+                     use_popdist=False,
+                     popdist_size=1,
+                     popdist_rank=0):
+    samples_per_step = batch_size * batches_per_step * duplication_factor * accumulation_factor
+    if len(input_files) == 0 and not generated_data:
+        raise ValueError("No input files were provided for the BERT dataset.")
+    data_loader_args = dict(
+        input_files=input_files,
+        sample_sizes=data_file_format(sequence_length, mask_tokens),
+        #sample_sizes=packed_data_file_format(sequence_length, mask_tokens, 3),
+        batch_size=samples_per_step,
+        duplication_factor=duplication_factor,
+        start_data_at_epoch=continue_training_from_epoch,
+        shuffle=shuffle,
+        seed=seed
+    )
+    if generated_data:
+        length = 1
+        if use_popdist:
+            length = popdist_size
+        dl = GeneratedDataLoader(**data_loader_args,
+                                 length=length,
+                                 generated_ranges=data_ranges(sequence_length, mask_tokens, vocab_length))
+    elif epochs_to_cache > 0:
+        dl = CachedDataLoader(**data_loader_args,
+                              epochs_to_cache=epochs_to_cache)
+    else:
+        dl = BinaryDataLoader(**data_loader_args)
+
+    if use_popdist:
+        sampler = DistributedDataSampler(
+            dl,
+            popdist_size=popdist_size,
+            popdist_rank=popdist_rank)
+        dl = SampleGenerator(dl, sampler)
+
+    if len(dl) == 0:
+        raise ValueError("Insufficient data for training parameters.")
+
+    bert_ds = BertDataTransform(dl, vocab_length, mask_tokens)
+    ds = DataSet(bert_ds,
+                 tensor_shapes,
+                 batches_per_step=batches_per_step,
+                 replication_factor=replication_factor,
+                 accumulation_factor=accumulation_factor)
+
+    return ds
\ No newline at end of file
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/record_to_binary.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/record_to_binary.py
new file mode 100644
index 0000000000000000000000000000000000000000..96845b8a4af9387d2c836c77c03f471c20e61394
--- /dev/null
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/data_pack/bert_data/record_to_binary.py
@@ -0,0 +1,68 @@
+# Copyright (c) 2019 Graphcore Ltd. All rights reserved.
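+# (Converts the unpacked pretraining tfrecords into flat int32 binary files;
+# pack_pretraining_data.py then consumes these binaries, as described in
+# data_pack/README.md.)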
+import os
+import tqdm
+import time
+import argparse
+import glob
+import struct
+import numpy as np
+from tensorflow.compat import v1 as tf
+from functools import reduce
+from itertools import chain
+from concurrent.futures import ProcessPoolExecutor
+tf.enable_eager_execution()
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--tf-record-glob", type=str, required=True)
+parser.add_argument("--output-path", type=str, required=True)
+parser.add_argument("--max-sequence-length", help="The maximum number of tokens in an example", default=512, type=int)
+parser.add_argument("--max-predictions-per-sequence", help="The maximum number of masked tokens in an un-packed example", default=76, type=int)
+args = parser.parse_args()
+
+input_files = glob.glob(args.tf_record_glob)
+
+print("input_files===", input_files)
+assert len(input_files) > 0
+
+# Original feature names
+name_to_features = {
+    "input_ids": tf.FixedLenFeature([args.max_sequence_length], tf.int64),
+    "input_mask": tf.FixedLenFeature([args.max_sequence_length], tf.int64),
+    "segment_ids": tf.FixedLenFeature([args.max_sequence_length], tf.int64),
+    "masked_lm_positions": tf.FixedLenFeature([args.max_predictions_per_sequence], tf.int64),
+    "masked_lm_ids": tf.FixedLenFeature([args.max_predictions_per_sequence], tf.int64),
+    "masked_lm_weights": tf.FixedLenFeature([args.max_predictions_per_sequence], tf.float32),
+    "next_sentence_labels": tf.FixedLenFeature([1], tf.int64)
+}
+
+# Convert the input files
+if not os.path.exists(args.output_path):
+    os.mkdir(args.output_path)
+
+
+def convert_file(file):
+    d = tf.data.TFRecordDataset(file)
+    d = d.map(lambda record: tf.parse_single_example(record, name_to_features))
+
+    output_file = os.path.join(args.output_path, os.path.basename(file))
+    print("output_file===", output_file)
+
+    with open(output_file, "wb") as writer:
+        for example in d:
+            # Pack into binary format
+            line = reduce(lambda accl, i: accl + struct.pack('…

[The remainder of record_to_binary.py and the beginning of bert/data_pack/pack_pretraining_data.py (its diff header, imports, and argument parsing) are missing from the source; the text resumes inside packing_strategies, whose opening lines are restored below from its visible recursive call.]

+def packing_strategies(start, previous, target, depth):
+    gap = target - start
+    strategies = []
+
+    # Complete the packing with exactly 1 number
+    if depth == 1:
+        if gap >= previous:
+            strategies.append([gap])
+
+    # Complete the sample in "depth" steps, recursively
+    else:
+        for new in range(previous, gap + 1):
+
+            new_gap = target - start - new
+            if new_gap == 0:
+                strategies.append([new])
+            else:
+                options = packing_strategies(start + new, new, target, depth - 1)
+
+                for option in options:
+                    if len(option) > 0:
+                        strategies.append([new] + option)
+    return strategies
+
+
+def get_packing_recipe(sequence_lengths, max_sequence_length, max_sequences_per_pack=3):
+    # Histogram of sequence lengths
+    histogram, bins = np.histogram(sequence_lengths, bins=np.arange(1, max_sequence_length + 2))
+    print("Begin packing pass".center(80, "_"))
+    print(f"Unpacked mean sequence length: {sequence_lengths.mean():3.2f}")
+
+    # Make sure all strategies are recipes to pack to the correct sequence length
+    strategy_set = packing_strategies(0, 1, max_sequence_length, max_sequences_per_pack)
+    # strategy_set holds every possible combination, e.g.
+    # strategy_set = [[1, 1, 510], [1, 2, 509], [1, 3, 508], ..., [255, 257], [256, 256], [512]]
+    for strategy in strategy_set:
+        assert(sum(strategy) == max_sequence_length)
+    num_strategies = len(strategy_set)
+    print(f"Found {num_strategies} unique packing strategies.")
+
+    # Solve the packing equation A@mixture = histogram
+    A = np.zeros((max_sequence_length, num_strategies), dtype=np.int32)
+    for i in range(num_strategies):
+        strategy = strategy_set[i]
+        for seq_len in strategy:
+            A[seq_len - 1, i] += 1
+
+    # short sequences are inexpensive to add, so they should have low residual weights;
+    # to exactly minimize padding use w0 = np.arange(1, max_sequence_length + 1)
max_sequence_length + 1) + # in practice the difference is negligible, but this converges faster + padding_cutoff = 8 + w0 = np.ones([max_sequence_length]) + # w0 = np.linspace(1, max_sequence_length+1, max_sequence_length)/max_sequence_length # padding minimization weight + w0[:padding_cutoff] = padding_cutoff / (2 * max_sequence_length) + w0 = np.sqrt(w0) + + # Starting values for the padding and the mixture + + # padding 应该是每种长度样本,所需要padding的个数 + padding = np.zeros([max_sequence_length], dtype=np.int32) + + # mixture 应该是每种打包策略组合的 样本个数, + # 例如num_strategies[0] = 0 只有0个符合要求的。 num_strategies[-1] = 100,最后一种打包策略有100个 + mixture = np.zeros([num_strategies], dtype=np.int32) + b = histogram + padding + + # Pack sequences as best as possible, then increase padding accordingly and repeat + for i in range(0, 20): + print(f"\nIteration: {i}: sequences still to pack: ", b.sum()) + start = time.time() + partial_mixture, rnorm = optimize.nnls(np.expand_dims(w0, -1) * A, w0 * b) + print(f"Solving nnls took {time.time() - start:3.2f} seconds.") + print(f"Residual norm: {rnorm:3.5e}") + + # Update mixture (round the floating point solution to integers) + partial_mixture = np.where(partial_mixture < 2, np.rint(partial_mixture), np.floor(partial_mixture)) + + # If partial mixture is empty (due to rounding) we follow the gradient + # this usually happens when the number of examples is small i.e. ~100 + if partial_mixture.max() == 0: + grad = A.T @ (b * np.arange(1, max_sequence_length + 1)) + k = int(b.sum() // 2) + 1 + topk = np.argsort(-grad)[:k] + partial_mixture[topk] += 1 + + # Update mixture + mixture = mixture + partial_mixture + + # Compute the residuals + residual = b - A @ partial_mixture + print(f"Max residual: {abs(residual).max()}") + print(f"Residual on first 8 categories: {np.around(residual[:8], 4)}") + print(f"Residual on last 8 categories: {np.around(residual[-8:], 4)}") + + # Add padding based on deficit (negative residual) + partial_padding = np.where(residual < 0, -residual, 0) + print(f"Added {(partial_padding*np.arange(1,max_sequence_length+1)).sum():3.2e} tokens of padding.") + padding = padding + partial_padding + + # Update the rhs vector (remaining surplus sequences) + b = histogram + padding - A @ mixture + assert np.all(b >= 0), b + + # Done iterating + if b.sum() < 100: + break + + # Make sure there is no remainder + unpacked_seqlen = np.arange(1, args.max_sequence_length + 1)[b > 0] + # Update the mixture to also covered the unpacked sequences + for l in unpacked_seqlen: + # Get the depth 1 strategy + strategy = sorted([l, args.max_sequence_length - l]) + strategy_index = strategy_set.index(strategy) + mixture[strategy_index] += b[l-1] + b = histogram - A @ mixture + padding = np.where(b < 0, -b, 0) + b = histogram + padding - A @ mixture + assert b.sum() == 0 + + # Analyze result + print("Done solving for packing order".center(80, "_")) + num_padding_tokens = (np.arange(1, max_sequence_length + 1) * padding).sum() + num_padding_tokens_original = (max_sequence_length - sequence_lengths).sum() + print(f"Number of sequences dropped: {b.sum()}") + print(f"Number of strategies utilized: {np.count_nonzero(mixture)}") + new_number_of_samples = int(mixture.sum()) + compression = 1 - new_number_of_samples / len(sequence_lengths) + print(f"New number of samples: {new_number_of_samples:3.2f}, original {len(sequence_lengths)}. 
A compression ratio of {compression:3.3f}")
+    print(f"The expected speed-up from packing: {1/(1-compression):3.3f}")
+    upper_bound = 1.0 / (1 - ((1 - sequence_lengths / max_sequence_length).mean()))
+    print(f"Theoretical upper bound on speed-up: {upper_bound:3.3f}")
+    avg_sequences_per_sample = ((A.sum(0) * mixture).sum() - padding.sum()) / new_number_of_samples
+    print(f"Average sequences/sample {avg_sequences_per_sample:3.5f}")
+    print(f"Added {num_padding_tokens:3.2e} padding tokens. Original dataset used {num_padding_tokens_original:3.2e} padding tokens")
+    efficiency = (new_number_of_samples*max_sequence_length - num_padding_tokens)/(new_number_of_samples*max_sequence_length)
+    print(f"Packing efficiency (fraction of real tokens): {efficiency:3.4f}")
+
+    print("Top 8 strategies")
+    topK = np.argsort(-mixture)[:8]
+    for i in topK:
+        print(f"Strategy {strategy_set[i]} which is used {int(mixture[i])} times")
+    print("".center(80, "_"))
+
+    # Figure out the slicing that each strategy should use
+    slicing = np.zeros_like(A)
+    slicing[:, 1:] = np.cumsum(A * mixture, axis=1)[:, :-1]
+    slicing = slicing.T
+
+    mixture = mixture.astype(np.int64)
+    return strategy_set, mixture, padding, slicing
+
+
+def slice_examples(examples_by_length, slicing, strategy_set, repeat_counts):
+    # Divide the work, first between the strategies and then into chunks of 50k
+    slices = []
+    strategies = []
+    part_idx = []
+    for strategy, slice_offsets, repeat_count in zip(strategy_set, slicing, repeat_counts):
+        # Skip a strategy from strategy_set when the mixture assigns it zero packs
+        if repeat_count == 0:
+            continue
+        # Slice out the sequences allocated to this strategy in increments of 50k
+        num_parts = repeat_count // 50000
+        num_parts = num_parts + int(repeat_count != num_parts * 50000)
+        subcounts = (min(50000, repeat_count - 50000 * (i - 1)) for i in range(1, num_parts + 1))
+        for part_id, part_count in enumerate(subcounts):
+            examples = []
+            for k, seq_len in enumerate(strategy):
+                slice_start = int(slice_offsets[seq_len - 1])
+                slice_end = slice_start + int(part_count)
+                slice_offsets[seq_len - 1] = slice_end
+                examples.append(examples_by_length[seq_len][slice_start:slice_end])
+
+            slices.append(examples)
+            strategies.append(strategy)
+            part_idx.append(part_id)
+
+    return slices, strategies, part_idx
+
+
+def parallel_pack_according_to_strategy(args, part_idx, strategy, examples):
+    # Pack the sequences according to the strategy and write them to disk
+    base_filename = os.path.join(args.output_dir, "strategy_" + "_".join(map(str, strategy)))
+    filename = base_filename + f"_part_{part_idx}"
+    lines = []
+    for i, multi_sequence in enumerate(zip(*examples)):
+        lines.append(create_multi_sequence_example(multi_sequence, args.max_predictions_per_sequence,
+                                                   args.max_sequence_length, args.max_sequences_per_pack))
+    # Write to file
+    with open(filename, "wb") as f:
+        f.writelines(lines)
+
+
+def create_int_feature(values):
+    feature = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values)))
+    return feature
+
+
+def parallel_pack_according_to_strategy_record(args, part_idx, strategy, examples):
+    # Pack the sequences according to the strategy and write them to disk as TFRecords
+    base_filename = os.path.join(args.output_dir, "strategy_record_" + "_".join(map(str, strategy)))
+    output_file = base_filename + f"_part_{part_idx}"
+    writers = tf.compat.v1.python_io.TFRecordWriter(output_file)
+    for i, multi_sequence in enumerate(zip(*examples)):
+        tf_example = create_multi_sequence_example_record(multi_sequence, args.max_predictions_per_sequence,
+                                                          args.max_sequence_length, args.max_sequences_per_pack)
+        writers.write(tf_example.SerializeToString())
+
+    writers.close()
+
+
+def create_multi_sequence_example_record(multi_sequence, max_predictions_per_sequence, max_sequence_length, max_sequences_per_pack):
+    # SEQ
+    packed_input_ids = np.zeros(max_sequence_length, dtype=np.int32)
+    packed_input_mask = np.zeros(max_sequence_length, dtype=np.int32)
+    packed_segment_ids = np.zeros(max_sequence_length, dtype=np.int32)
+    packed_positions = np.zeros(max_sequence_length, dtype=np.int32)
+
+    # MLM
+    # We are packing up to max_sequences_per_pack sequences, each with a certain percentage of masked tokens.
+    # In case that percentage is rounded up for all sequences in the pack, we need one extra token for
+    # each sequence in the pack.
+    packed_masked_lm_positions = np.zeros(max_predictions_per_sequence + max_sequences_per_pack, dtype=np.int32)
+    packed_masked_lm_ids = np.zeros(max_predictions_per_sequence + max_sequences_per_pack, dtype=np.int32)
+    packed_masked_lm_weights = np.zeros(max_predictions_per_sequence + max_sequences_per_pack, dtype=np.int32)
+
+    # NSP
+    packed_next_sentence_positions = np.zeros(max_sequences_per_pack, dtype=np.int32)
+    packed_next_sentence_labels = np.zeros(max_sequences_per_pack, dtype=np.int32)
+    packed_next_sentence_weights = np.zeros(max_sequences_per_pack, dtype=np.int32)
+
+    offset = 0
+    mlm_offset = 0
+    # sequence_index is written into packed_input_mask; it records which sequence of the pack each token belongs to.
+    sequence_index = 1  # used in the input mask
+    for sequence in multi_sequence:
+        # Padding sequences are denoted by None
+        if sequence is not None:
+            input_ids, input_mask, segment_ids, masked_lm_positions, masked_lm_ids, masked_lm_weights, next_sentence_labels = sequence
+            seq_len = input_mask.sum()
+
+            # SEQ
+            packed_input_ids[offset:offset + seq_len] = input_ids[:seq_len]
+            packed_input_mask[offset:offset + seq_len] = sequence_index
+            packed_segment_ids[offset:offset + seq_len] = segment_ids[:seq_len]
+            # packed_positions stores each sequence's position indices; every sequence restarts from zero.
+            packed_positions[offset:offset + seq_len] = np.arange(0, seq_len)
+
+            # MLM
+            mlm_len = int(masked_lm_weights.sum())
+            assert mlm_offset + mlm_len < max_predictions_per_sequence + max_sequences_per_pack, "Too many LM predictions per sequence"
+            max_mlm = mlm_offset + mlm_len
+            packed_masked_lm_positions[mlm_offset:max_mlm] = offset + masked_lm_positions[:mlm_len]
+            packed_masked_lm_ids[mlm_offset:max_mlm] = masked_lm_ids[:mlm_len]
+            packed_masked_lm_weights[mlm_offset:max_mlm] = sequence_index
+
+            # NSP
+            packed_next_sentence_positions[sequence_index - 1] = offset
+            packed_next_sentence_labels[sequence_index - 1] = next_sentence_labels
+            packed_next_sentence_weights[sequence_index - 1] = 1
+
+            # Update offsets
+            sequence_index += 1
+            offset += seq_len
+            mlm_offset = max_mlm
+
+    # Pack into binary format and write it
+    # Each example here is one new packed (combined) training sample.
+    features = collections.OrderedDict()
+    features["input_ids"] = create_int_feature(packed_input_ids)
+    features["input_mask"] = create_int_feature(packed_input_mask)
+    features["segment_ids"] = create_int_feature(packed_segment_ids)
+    features["positions_ids"] = create_int_feature(packed_positions)
+
+    features["masked_lm_positions"] = create_int_feature(packed_masked_lm_positions)
+    features["masked_lm_ids"] = create_int_feature(packed_masked_lm_ids)
+    features["masked_lm_weights"] = create_int_feature(packed_masked_lm_weights)
+
+    features["next_sentence_positions"] = create_int_feature(packed_next_sentence_positions)
+    features["next_sentence_labels"] = create_int_feature(packed_next_sentence_labels)
+    features["next_sentence_weights"] = create_int_feature(packed_next_sentence_weights)
+    tf_example = tf.train.Example(features=tf.train.Features(feature=features))
+
+    return tf_example
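# ---------------------------------------------------------------------------
# Editor's illustration, not part of the patch: a minimal numpy sketch of the
# bookkeeping above. packed_input_mask stores the 1-based index of the owning
# sequence for every token (0 marks padding); this is what later allows a
# per-pack attention mask to be rebuilt from the flat token stream.
import numpy as np

def _toy_packed_input_mask(lengths, max_sequence_length=8):
    mask = np.zeros(max_sequence_length, dtype=np.int32)
    offset = 0
    for idx, seq_len in enumerate(lengths, start=1):  # 1-based sequence index
        mask[offset:offset + seq_len] = idx
        offset += seq_len
    return mask

# A pack holding two sequences of lengths 3 and 2 -> [1 1 1 2 2 0 0 0]
assert _toy_packed_input_mask([3, 2]).tolist() == [1, 1, 1, 2, 2, 0, 0, 0]
# ---------------------------------------------------------------------------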
+
+
+
+
+def create_multi_sequence_example(multi_sequence, max_predictions_per_sequence, max_sequence_length, max_sequences_per_pack):
+    # SEQ
+    packed_input_ids = np.zeros(max_sequence_length, dtype=np.int32)
+    packed_input_mask = np.zeros(max_sequence_length, dtype=np.int32)
+    packed_segment_ids = np.zeros(max_sequence_length, dtype=np.int32)
+    packed_positions = np.zeros(max_sequence_length, dtype=np.int32)
+
+    # MLM
+    # We are packing up to max_sequences_per_pack sequences, each with a certain percentage of masked tokens.
+    # In case that percentage is rounded up for all sequences in the pack, we need one extra token for
+    # each sequence in the pack.
+    packed_masked_lm_positions = np.zeros(max_predictions_per_sequence + max_sequences_per_pack, dtype=np.int32)
+    packed_masked_lm_ids = np.zeros(max_predictions_per_sequence + max_sequences_per_pack, dtype=np.int32)
+    packed_masked_lm_weights = np.zeros(max_predictions_per_sequence + max_sequences_per_pack, dtype=np.int32)
+
+    # NSP
+    packed_next_sentence_positions = np.zeros(max_sequences_per_pack, dtype=np.int32)
+    packed_next_sentence_labels = np.zeros(max_sequences_per_pack, dtype=np.int32)
+    packed_next_sentence_weights = np.zeros(max_sequences_per_pack, dtype=np.int32)
+
+    offset = 0
+    mlm_offset = 0
+    # sequence_index is written into packed_input_mask; it records which sequence of the pack each token belongs to.
+    sequence_index = 1  # used in the input mask
+    for sequence in multi_sequence:
+        # Padding sequences are denoted by None
+        if sequence is not None:
+            input_ids, input_mask, segment_ids, masked_lm_positions, masked_lm_ids, masked_lm_weights, next_sentence_labels = sequence
+            seq_len = input_mask.sum()
+
+            # SEQ
+            packed_input_ids[offset:offset + seq_len] = input_ids[:seq_len]
+            packed_input_mask[offset:offset + seq_len] = sequence_index
+            packed_segment_ids[offset:offset + seq_len] = segment_ids[:seq_len]
+            # packed_positions stores each sequence's position indices; every sequence restarts from zero.
+            packed_positions[offset:offset + seq_len] = np.arange(0, seq_len)
+
+            # MLM
+            mlm_len = int(masked_lm_weights.sum())
+            assert mlm_offset + mlm_len < max_predictions_per_sequence + max_sequences_per_pack, "Too many LM predictions per sequence"
+            max_mlm = mlm_offset + mlm_len
+            packed_masked_lm_positions[mlm_offset:max_mlm] = offset + masked_lm_positions[:mlm_len]
+            packed_masked_lm_ids[mlm_offset:max_mlm] = masked_lm_ids[:mlm_len]
+            packed_masked_lm_weights[mlm_offset:max_mlm] = sequence_index
+
+            # NSP
+            packed_next_sentence_positions[sequence_index - 1] = offset
+            packed_next_sentence_labels[sequence_index - 1] = next_sentence_labels
+            packed_next_sentence_weights[sequence_index - 1] = 1
+
+            # Update offsets
+            sequence_index += 1
+            offset += seq_len
+            mlm_offset = max_mlm
+
+    # Pack into binary format and write it
+    # Each line represents one new packed (combined) training sample.
+    line = reduce(lambda accl, i: accl + struct.pack(' args.max_files:
+        input_files = np.random.choice(input_files, size=args.max_files, replace=False)
+    assert len(input_files) > 0
+
+    # Load un-packed dataset
+    sample_sizes = data_file_format(args.max_sequence_length, args.max_predictions_per_sequence)
+
+    load_size = 1 if len(input_files) == 1 else 1024
+    dataset = CachedDataLoader(input_files, sample_sizes, duplication_factor=args.duplication_factor, batch_size=load_size)
+
+    # Put examples into bins depending on their sequence lengths and extract the sequence length
+    # as an array
+    sequence_lengths = []
+    examples_by_length = defaultdict(list)
+    print("Looping through dataset to collect sequence length information...")
+    for data in dataset:
+        # Each `data` batch holds 1024 samples (load batch size 1024); bin the samples
+        # by sequence length into the examples_by_length dict.
+        input_mask = data[1]
+        batch_of_lengths = input_mask.sum(1).tolist()
+        for i, length in enumerate(batch_of_lengths):
+            examples_by_length[length].append([data[k][i] for k in range(len(data))])
+        sequence_lengths.extend(batch_of_lengths)
+    # sequence_lengths now records the length of every sequence
+    sequence_lengths = np.array(sequence_lengths)
+
+    # Pass the array of sequence lengths to the packing algorithm
+    strategy_set, mixture, padding, slicing = get_packing_recipe(sequence_lengths, args.max_sequence_length, args.max_sequences_per_pack)
+
+    # Add the calculated padding
+    # Pad each sequence-length bin with the computed number of None entries
+    for i in range(1, args.max_sequence_length + 1):
+        examples_by_length[i].extend([None] * int(padding[i - 1]))
+
+    # Shuffle the data
+    for key in examples_by_length:
+        random.shuffle(examples_by_length[key])
+
+    # Pack and store the data
+    print(f"\nPacking and writing packed dataset to {args.output_dir}.")
+
+    # Slice the data into chunks of max 50k packed examples
+    # Each file holds at most 50k packed samples; one line is one packed sample of length 512.
+    # There is no notion of the training batch size here yet; at training time,
+    # samples should be drawn at random from each file and combined into new batches.
+    example_slices, strategies, part_idx = slice_examples(examples_by_length, slicing, strategy_set, mixture)
+    print(f"Splitting work into {len(part_idx)} parts.")
+
+    start = time.time()
+    with ProcessPoolExecutor() as executor:
+        work = repeat(args), part_idx, strategies, example_slices
+        #for partial_result in executor.map(parallel_pack_according_to_strategy, *work):
+        for partial_result in executor.map(parallel_pack_according_to_strategy_record, *work):
+            pass
+    print(f"\nDone. Took: {time.time() - start:3.2f} seconds to pack and write dataset.")
\ No newline at end of file
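For intuition on the statistics the packing script prints, a short worked example (editor's sketch, not part of the patch; the compression ratio is hypothetical):

# Editor's sketch: how the printed statistics relate. With a hypothetical
# compression ratio of 0.5, packing halves the number of samples.
compression = 0.5                             # hypothetical value for illustration
expected_speedup = 1.0 / (1.0 - compression)
assert expected_speedup == 2.0                # half the samples -> twice the throughput
# Packing efficiency is the fraction of real (non-padding) tokens:
# (num_samples * max_seq_len - num_padding_tokens) / (num_samples * max_seq_len)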
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/input_pipeline.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/input_pipeline.py
index 724d2887f2fe528b719eb37957a445d94c45eccb..37f34748d411ec1c5af8410d8ef5f986f417aace 100644
--- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/input_pipeline.py
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/input_pipeline.py
@@ -40,7 +40,8 @@ import npu_device as npu
 from npu_device.npu_device import global_npu_ctx
 import numpy as np
 from tf2_common.utils.dataset_unpad import _batch_examples
-
+from absl import flags
+FLAGS=flags.FLAGS
 
 def decode_record(record, name_to_features):
   """Decodes a record to a TensorFlow example."""
@@ -121,22 +122,44 @@ def create_pretrain_dataset(input_patterns,
                             input_pipeline_context=None,
                             num_eval_samples=None):
   """Creates input dataset from (tf)records files for pretraining."""
-  name_to_features = {
+  if FLAGS.use_packed_model:
+    name_to_features = {
       'input_ids': tf.io.FixedLenFeature([seq_length], tf.int64),
       'input_mask': tf.io.FixedLenFeature([seq_length], tf.int64),
       'segment_ids': tf.io.FixedLenFeature([seq_length], tf.int64),
+      'positions_ids':
+          tf.io.FixedLenFeature([seq_length], tf.int64),
       'masked_lm_positions':
          tf.io.FixedLenFeature([max_predictions_per_seq], tf.int64),
       'masked_lm_ids':
          tf.io.FixedLenFeature([max_predictions_per_seq], tf.int64),
       'masked_lm_weights':
-         tf.io.FixedLenFeature([max_predictions_per_seq], tf.float32),
-      'next_sentence_labels':
-         tf.io.FixedLenFeature([1], tf.int64),
-  }
+         tf.io.FixedLenFeature([max_predictions_per_seq], tf.int64),
+
+      'next_sentence_positions': tf.io.FixedLenFeature([FLAGS.max_sequences_per_pack], tf.int64),
+      'next_sentence_labels': tf.io.FixedLenFeature([FLAGS.max_sequences_per_pack], tf.int64),
+      'next_sentence_weights': tf.io.FixedLenFeature([FLAGS.max_sequences_per_pack], tf.int64),
+    }
+  else:
+    name_to_features = {
+      'input_ids':
+          tf.io.FixedLenFeature([seq_length], tf.int64),
+      'input_mask':
+          tf.io.FixedLenFeature([seq_length], tf.int64),
+      'segment_ids':
+          tf.io.FixedLenFeature([seq_length], tf.int64),
+      'masked_lm_positions':
+          tf.io.FixedLenFeature([max_predictions_per_seq], tf.int64),
+      'masked_lm_ids':
+          tf.io.FixedLenFeature([max_predictions_per_seq], tf.int64),
+      'masked_lm_weights':
+          tf.io.FixedLenFeature([max_predictions_per_seq], tf.float32),
+      'next_sentence_labels':
+          tf.io.FixedLenFeature([1], tf.int64),
+    }
 
   if use_synthetic:
     dataset = create_synthetic_pretrain_dataset(
@@ -188,15 +211,28 @@ def create_pretrain_dataset(input_patterns,
 
   def _select_data_from_record(record):
     """Filter out features to use for pretraining."""
-    x = {
-        'input_word_ids': record['input_ids'],
-        'input_mask': record['input_mask'],
-        'input_type_ids': record['segment_ids'],
-        'masked_lm_positions': record['masked_lm_positions'],
-        'masked_lm_ids': record['masked_lm_ids'],
-        'masked_lm_weights': record['masked_lm_weights'],
-        'next_sentence_labels': record['next_sentence_labels'],
-    }
+    if FLAGS.use_packed_model:
+      x = {
+          'input_word_ids': record['input_ids'],
+          'input_mask': record['input_mask'],
+          'input_type_ids': record['segment_ids'],
+          'masked_lm_positions': record['masked_lm_positions'],
+          'masked_lm_ids': record['masked_lm_ids'],
+          'masked_lm_weights': record['masked_lm_weights'],
+          'next_sentence_labels': record['next_sentence_labels'],
+          'next_sentence_positions': record['next_sentence_positions'],
+          'next_sentence_weights': record['next_sentence_weights'],
+      }
+    else:
+      x = {
+          'input_word_ids': record['input_ids'],
+          'input_mask': record['input_mask'],
+          'input_type_ids': record['segment_ids'],
+          'masked_lm_positions': record['masked_lm_positions'],
+          'masked_lm_ids': record['masked_lm_ids'],
+          'masked_lm_weights': record['masked_lm_weights'],
+          'next_sentence_labels': record['next_sentence_labels'],
+      }
 
     y = record['masked_lm_weights']
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/modeling/layers/self_attention_mask.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/modeling/layers/self_attention_mask.py
index 7569e3c25b2407a44e97a3f87e96139cb7d1e722..edda39d164328bd403c096627a1c00097bdf85d9 100644
--- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/modeling/layers/self_attention_mask.py
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/modeling/layers/self_attention_mask.py
@@ -21,6 +21,8 @@ from __future__ import print_function
 
 import tensorflow as tf
 from tf2_common.modeling import tf_utils
+from absl import flags
+FLAGS=flags.FLAGS
 
 @tf.keras.utils.register_keras_serializable(package='Text')
 class SelfAttentionMask(tf.keras.layers.Layer):
@@ -44,19 +46,30 @@ class SelfAttentionMask(tf.keras.layers.Layer):
     to_shape = tf_utils.get_shape_list(to_mask, expected_rank=2)
     to_seq_length = to_shape[1]
 
-    to_mask = tf.cast(
-        tf.reshape(to_mask, [batch_size, 1, to_seq_length]),
-        dtype=from_tensor.dtype)
+    if FLAGS.use_packed_model:
+      mask_tile = tf.tile(to_mask, [1, to_seq_length])
+      mask_tile = tf.reshape(mask_tile, (batch_size * to_seq_length, to_seq_length))
+      reshape_mask = tf.reshape(to_mask, (1, -1))
+      broadcast_mask = tf.broadcast_to(reshape_mask, (to_seq_length, batch_size * to_seq_length))
+      transpose_mask = tf.transpose(broadcast_mask, (1, 0))
+      equal_mask = tf.equal(mask_tile, transpose_mask)
+      equal_mask = tf.cast(equal_mask, dtype=tf.float32)
+      mask = tf.reshape(equal_mask, [batch_size, to_seq_length, to_seq_length])
 
-    # We don't assume that `from_tensor` is a mask (although it could be). We
-    # don't actually care if we attend *from* padding tokens (only *to* padding)
-    # tokens so we create a tensor of all ones.
-    #
-    # `broadcast_ones` = [batch_size, from_seq_length, 1]
-    broadcast_ones = tf.ones(
-        shape=[batch_size, from_seq_length, 1], dtype=from_tensor.dtype)
+    else:
+      to_mask = tf.cast(
+          tf.reshape(to_mask, [batch_size, 1, to_seq_length]),
+          dtype=from_tensor.dtype)
 
-    # Here we broadcast along two dimensions to create the mask.
-    mask = broadcast_ones * to_mask
+      # We don't assume that `from_tensor` is a mask (although it could be). We
+      # don't actually care if we attend *from* padding tokens (only *to* padding)
+      # tokens so we create a tensor of all ones.
+      #
+      # `broadcast_ones` = [batch_size, from_seq_length, 1]
+      broadcast_ones = tf.ones(
+          shape=[batch_size, from_seq_length, 1], dtype=from_tensor.dtype)
+
+      # Here we broadcast along two dimensions to create the mask.
+      mask = broadcast_ones * to_mask
 
     return mask
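The packed branch above can be sanity-checked in a few lines: tiling the per-token sequence indices and comparing them against their transpose yields a block-diagonal attention mask in which tokens attend only within their own packed sequence. An editor's NumPy sketch (not part of the patch), for a single pack:

import numpy as np

seq_idx = np.array([[1, 1, 2, 2, 0]])           # one pack: seq 1, seq 2, padding
seq_len = seq_idx.shape[1]
tiled = np.tile(seq_idx, (1, seq_len)).reshape(seq_len, seq_len)
mask = (tiled == tiled.T).astype(np.float32)    # 1 where two tokens share a sequence
# Padding positions (index 0) still match each other, mirroring the TF code above.
assert (mask == (seq_idx[0][:, None] == seq_idx[0][None, :])).all()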
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/modeling/networks/transformer_encoder.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/modeling/networks/transformer_encoder.py
index ec6a120ce7c2d5047024c1c7407f7d56a187a4b6..b1528c473b31807f927c9cc2b4c5bd200a1e6f02 100644
--- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/modeling/networks/transformer_encoder.py
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/modeling/networks/transformer_encoder.py
@@ -26,6 +26,10 @@ from modeling.layers import util
 from modeling.layers import bert_dropout
 from modeling.layers import bert_layernorm
 
+from tf2_common.modeling import tf_utils
+from absl import flags
+FLAGS = flags.FLAGS
+
 @tf.keras.utils.register_keras_serializable(package='Text')
 class TransformerEncoder(tf.keras.Model):
   """Bi-directional Transformer-based encoder network.
@@ -106,6 +110,10 @@ class TransformerEncoder(tf.keras.Model):
         shape=(sequence_length,), dtype=tf.int32, name='input_mask')
     type_ids = tf.keras.layers.Input(
         shape=(sequence_length,), dtype=tf.int32, name='input_type_ids')
+
+    if FLAGS.use_packed_model:
+      next_sentence_starts = tf.keras.layers.Input(
+          shape=([FLAGS.max_sequences_per_pack,]), dtype=tf.int32, name='next_sentence_positions')
 
     self._embedding_layer = layers.OnDeviceEmbedding(
         vocab_size=vocab_size,
@@ -156,10 +164,20 @@ class TransformerEncoder(tf.keras.Model):
       self._transformer_layers.append(layer)
       data = layer([data, attention_mask])
       encoder_outputs.append(data)
-
-    first_token_tensor = (
-        tf.keras.layers.Lambda(lambda x: tf.squeeze(x[:, 0:1, :], axis=1))(
-            encoder_outputs[-1]))
+    if FLAGS.use_packed_model:
+      def first_token(args):
+        encoder_outputs, next_sentence_starts = args[0], args[1]
+        sequence_output = encoder_outputs[-1]
+        first_token_tensor = tf.gather(sequence_output, next_sentence_starts, axis=1, batch_dims=1, name=None)
+        first_token_tensor = tf.reshape(first_token_tensor, [-1, hidden_size])
+        return first_token_tensor
+
+      first_token_tensor = (
+          tf.keras.layers.Lambda(first_token)([encoder_outputs, next_sentence_starts]))
+    else:
+      first_token_tensor = (
+          tf.keras.layers.Lambda(lambda x: tf.squeeze(x[:, 0:1, :], axis=1))(
+              encoder_outputs[-1]))
     cls_output = tf.keras.layers.Dense(
         units=hidden_size,
         activation='tanh',
@@ -171,9 +189,12 @@ class TransformerEncoder(tf.keras.Model):
       outputs = [encoder_outputs, cls_output]
     else:
       outputs = [encoder_outputs[-1], cls_output]
-
-    super(TransformerEncoder, self).__init__(
-        inputs=[word_ids, mask, type_ids], outputs=outputs, **kwargs)
+    if FLAGS.use_packed_model:
+      super(TransformerEncoder, self).__init__(
+          inputs=[word_ids, mask, type_ids, next_sentence_starts], outputs=outputs, **kwargs)
+    else:
+      super(TransformerEncoder, self).__init__(
+          inputs=[word_ids, mask, type_ids], outputs=outputs, **kwargs)
 
   def get_embedding_table(self):
     return self._embedding_layer.embeddings
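The gather in the packed branch extracts one hidden vector per packed sequence (at each sequence's start offset) instead of a single [CLS] vector per sample. An editor's NumPy stand-in for tf.gather with axis=1 and batch_dims=1 (the shapes are hypothetical, chosen only for illustration):

import numpy as np

batch, seq_len, hidden = 2, 6, 4
sequence_output = np.arange(batch * seq_len * hidden, dtype=np.float32).reshape(batch, seq_len, hidden)
next_sentence_positions = np.array([[0, 3, 0], [0, 2, 4]])  # start offsets per pack
first_tokens = np.take_along_axis(sequence_output, next_sentence_positions[..., None], axis=1)
assert first_tokens.reshape(-1, hidden).shape == (batch * 3, hidden)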
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/run_pretraining.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/run_pretraining.py
index 1d5959077af10cfef9e12a6760941ed67791a89b..2f7a2efb79f2ed27391b41397eda44a6ad305b7e 100644
--- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/run_pretraining.py
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/run_pretraining.py
@@ -120,6 +120,21 @@ flags.DEFINE_integer('num_accumulation_steps', 1,
                      'number of steps to accumulate with large batch size.')
 flags.DEFINE_float('stop_threshold', 0.912, 'Stop threshold for MLPerf.')
 flags.DEFINE_float('poly_power', 1.0, 'The power of poly decay.')
+
+flags.DEFINE_boolean(name='use_packed_model', default=False, help='whether to enable the packed model; default is False.')
+flags.DEFINE_integer(
+    "max_sequences_per_pack", 3,
+    "Maximum number of sequences per pack. "
+    "Must match data generation.")
+flags.DEFINE_float(
+    "average_sequences_per_sample", 1.999,
+    "average number of sequences per sample. "
+    "Must match data generation.")
+flags.DEFINE_float(
+    "average_sequences_per_eval_sample", 1.73,
+    "average number of sequences per eval sample. "
+    "Must match data generation.")
+
 common_flags.define_common_bert_flags()
 
 FLAGS = flags.FLAGS
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/run_pretraining_bucket.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/run_pretraining_bucket.py
index f9b770b7c6dace47809f548895fd5ba96de1267b..e4b550a919c9d7c5372d482f8cec06bcd7fb322e 100644
--- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/run_pretraining_bucket.py
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/run_pretraining_bucket.py
@@ -121,20 +121,47 @@ flags.DEFINE_integer('num_accumulation_steps', 1,
 flags.DEFINE_float('stop_threshold', 0.912, 'Stop threshold for MLPerf.')
 flags.DEFINE_float('poly_power', 1.0, 'The power of poly decay.')
 
-flags.DEFINE_multi_integer("seq_len_buckets", [64,128,192,256,384,512],
+flags.DEFINE_multi_integer("seq_len_buckets", [64,128,192,256,320,384,448,512],
                            "sequence length bucketizations boundaries")
 flags.DEFINE_integer('max_tockens_num', 12288,
                      'max_tockens_num = bs * seq_len')
 
+flags.DEFINE_boolean(name='use_packed_model', default=False, help='whether to enable the packed model; default is False.')
+flags.DEFINE_integer(
+    "max_sequences_per_pack", 3,
+    "Maximum number of sequences per pack. "
+    "Must match data generation.")
+flags.DEFINE_float(
+    "average_sequences_per_sample", 1.999,
+    "average number of sequences per sample. "
+    "Must match data generation.")
+flags.DEFINE_float(
+    "average_sequences_per_eval_sample", 1.73,
+    "average number of sequences per eval sample. "
+    "Must match data generation.")
+
" + "Must match data generation.") + common_flags.define_common_bert_flags() FLAGS = flags.FLAGS def npu_config(): FLAGS = flags.FLAGS - npu_device.global_options().input_shape = "data_0:-1,-1;data_1:-1,-1;data_2:-1,-1;data_3:-1,-1;data_4:-1,-1;data_5:-1,-1;data_6:-1,-1" - npu_device.global_options().dynamic_node_type = "0" - npu_device.global_options().dynamic_dims = "192,64,192,64,192,64,192,76,192,76,192,76,192,1;96,128,96,128,96,128,96,76,96,76,96,76,96,1;64,192,64,192,64,192,64,76,64,76,64,76,64,1;48,256,48,256,48,256,48,76,48,76,48,76,48,1;32,384,32,384,32,384,32,76,32,76,32,76,32,1;24,512,24,512,24,512,24,76,24,76,24,76,24,1" + npu_device.global_options().experimental.multi_branches_config.input_shape = "data_0:-1,-1;" \ + "data_1:-1,-1;" \ + "data_2:-1,-1;" \ + "data_3:-1,-1;" \ + "data_4:-1,-1;" \ + "data_5:-1,-1;" \ + "data_6:-1,-1" + npu_device.global_options().experimental.multi_branches_config.dynamic_node_type = "0" + npu_device.global_options().experimental.multi_branches_config.dynamic_dims = "192,64,192,64,192,64,192,76,192,76,192,76,192,1;" \ + "96,128,96,128,96,128,96,76,96,76,96,76,96,1;" \ + "64,192,64,192,64,192,64,76,64,76,64,76,64,1;" \ + "48,256,48,256,48,256,48,76,48,76,48,76,48,1;" \ + "38,320,38,320,38,320,38,76,38,76,38,76,38,1;" \ + "32,384,32,384,32,384,32,76,32,76,32,76,32,1;" \ + "28,448,28,448,28,448,28,76,28,76,28,76,28,1;" \ + "24,512,24,512,24,512,24,76,24,76,24,76,24,1" if FLAGS.data_dump_flag: npu_device.global_options().dump_config.enable_dump = True diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf1_checkpoint_converter_lib.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf1_checkpoint_converter_lib.py index daec8295956a217fca3df541ca6dd2f03faab7b2..08855b56882487ec42a4571f4859b1340e64bd58 100644 --- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf1_checkpoint_converter_lib.py +++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf1_checkpoint_converter_lib.py @@ -71,6 +71,12 @@ def _bert_name_replacement(var_name, name_replacements): old_var_name = var_name var_name = var_name.replace(src_pattern, tgt_pattern) tf.logging.info("Converted: %s --> %s", old_var_name, var_name) + if "transformer" in var_name and "layer" in var_name: + node_list = var_name.split("/") + new_node_list = node_list[0:2] | node_list + connect_str= "/" + var_name = connect_str.join(new_node_list) + tf.logging.info("Converted new: %s ---> %s", old_var_name, var_name) return var_name @@ -188,7 +194,7 @@ def convert(checkpoint_from_path, with tf.Session() as sess: sess.run(tf.global_variables_initializer()) tf.logging.info("Writing checkpoint_to_path %s", checkpoint_to_path) - saver.save(sess, checkpoint_to_path) + saver.save(sess, checkpoint_to_path, write_meta_graph=False) tf.logging.info("Summary:") tf.logging.info(" Converted %d variable name(s).", len(new_variable_map)) diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf2_common/modeling/model_training_utils.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf2_common/modeling/model_training_utils.py index 1458b80d0a64c17e5db219c09a9d4dfc794baf1e..5dc20c6019a3ab9d03530489922a4d5db602cee8 100644 --- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf2_common/modeling/model_training_utils.py +++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf2_common/modeling/model_training_utils.py @@ -36,6 +36,7 @@ import numpy as np import json import os import 
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf1_checkpoint_converter_lib.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf1_checkpoint_converter_lib.py
index daec8295956a217fca3df541ca6dd2f03faab7b2..08855b56882487ec42a4571f4859b1340e64bd58 100644
--- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf1_checkpoint_converter_lib.py
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf1_checkpoint_converter_lib.py
@@ -71,6 +71,12 @@ def _bert_name_replacement(var_name, name_replacements):
       old_var_name = var_name
       var_name = var_name.replace(src_pattern, tgt_pattern)
       tf.logging.info("Converted: %s --> %s", old_var_name, var_name)
+  if "transformer" in var_name and "layer" in var_name:
+    node_list = var_name.split("/")
+    new_node_list = node_list[0:2] + node_list
+    connect_str = "/"
+    var_name = connect_str.join(new_node_list)
+    tf.logging.info("Converted new: %s ---> %s", old_var_name, var_name)
   return var_name
 
@@ -188,7 +194,7 @@ def convert(checkpoint_from_path,
   with tf.Session() as sess:
     sess.run(tf.global_variables_initializer())
     tf.logging.info("Writing checkpoint_to_path %s", checkpoint_to_path)
-    saver.save(sess, checkpoint_to_path)
+    saver.save(sess, checkpoint_to_path, write_meta_graph=False)
   tf.logging.info("Summary:")
   tf.logging.info("  Converted %d variable name(s).", len(new_variable_map))
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf2_common/modeling/model_training_utils.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf2_common/modeling/model_training_utils.py
index 1458b80d0a64c17e5db219c09a9d4dfc794baf1e..5dc20c6019a3ab9d03530489922a4d5db602cee8 100644
--- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf2_common/modeling/model_training_utils.py
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf2_common/modeling/model_training_utils.py
@@ -36,6 +36,7 @@ import numpy as np
 import json
 import os
 import time
+import math
 from absl import logging
 import tensorflow as tf
@@ -44,6 +45,9 @@ from tf2_common.utils.misc import distribution_utils
 from tf2_common.utils.mlp_log import mlp_log
 import npu_device as npu
 
+from absl import flags
+FLAGS = flags.FLAGS
+
 _SUMMARY_TXT = 'training_summary.txt'
 _MIN_SUMMARY_STEPS = 10
@@ -211,6 +215,8 @@ def run_customized_training_loop(
       by `model_fn` is None.
   """
   mlperf_block_number = 1
+  if FLAGS.use_packed_model:
+    eval_steps = int(math.floor(eval_steps / FLAGS.average_sequences_per_eval_sample))
 
   if _sentinel is not None:
     raise ValueError('only call `run_customized_training_loop()` '
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf2_encoder_checkpoint_converter.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf2_encoder_checkpoint_converter.py
index 42c1a9717f87edc4361b55fb9c1ef94d37a7ef61..c4710ff2039d6ea4c90e2f9610c068cab2bfb4ee 100644
--- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf2_encoder_checkpoint_converter.py
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/bert/tf2_encoder_checkpoint_converter.py
@@ -31,6 +31,8 @@ from tf2_common.modeling import activations
 import configs
 import tf1_checkpoint_converter_lib
 from modeling import networks
+from modeling.networks import bert_pretrainer
+import tf2_common.modeling.tf_utils as tf_utils
 
 FLAGS = flags.FLAGS
@@ -43,6 +45,21 @@ flags.DEFINE_string(
 flags.DEFINE_string("converted_checkpoint_path", None,
                     "Name for the created object-based V2 checkpoint.")
 
+flags.DEFINE_boolean(name='use_packed_model', default=True,
+                     help="whether to enable the packed model; default is True.")
+
+flags.DEFINE_integer(
+    "max_sequences_per_pack", 3,
+    "Maximum number of sequences per pack. "
+    "Must match data generation.")
+flags.DEFINE_float(
+    "average_sequences_per_sample", 1.999,
+    "average number of sequences per sample. "
+    "Must match data generation.")
+flags.DEFINE_boolean(name='use_fastgelu', default=True,
+                     help='whether to enable fastgelu, default is True.')
+flags.DEFINE_integer('max_predictions_per_seq', 20,
+                     'Maximum predictions per sequence_output. ')
 
 def _create_bert_model(cfg):
   """Creates a BERT keras core model from BERT configuration.
@@ -65,8 +82,20 @@ def _create_bert_model(cfg):
       type_vocab_size=cfg.type_vocab_size,
       initializer=tf.keras.initializers.TruncatedNormal(
           stddev=cfg.initializer_range))
+  max_predictions_per_seq = FLAGS.max_predictions_per_seq
 
-  return bert_encoder
+  initializer = tf.keras.initializers.TruncatedNormal(
+      stddev=cfg.initializer_range)
+  pretrainer_model = bert_pretrainer.BertPretrainer(
+      network=bert_encoder,
+      num_classes=2,  # The next sentence prediction label has two classes.
+      num_token_predictions=max_predictions_per_seq,
+      activation=tf_utils.get_activation(cfg.hidden_act),
+      initializer=initializer,
+      output='predictions')
+  return pretrainer_model
+
+  #return bert_encoder
 
 def convert_checkpoint(bert_config, output_path, v1_checkpoint):
@@ -82,7 +111,7 @@ def convert_checkpoint(bert_config, output_path, v1_checkpoint):
       num_heads=bert_config.num_attention_heads,
       name_replacements=tf1_checkpoint_converter_lib.BERT_V2_NAME_REPLACEMENTS,
       permutations=tf1_checkpoint_converter_lib.BERT_V2_PERMUTATIONS,
-      exclude_patterns=["adam", "Adam"])
+      exclude_patterns=["lamb", "Lamb"])
 
   # Create a V2 checkpoint from the temporary checkpoint.
model = _create_bert_model(bert_config) diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/set_ranktable.py b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/set_ranktable.py new file mode 100644 index 0000000000000000000000000000000000000000..c25b51462c5df2325462786688d4a206ee29fb9a --- /dev/null +++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/set_ranktable.py @@ -0,0 +1,1740 @@ +import argparse +parser = argparse.ArgumentParser() +parser.add_argument('-n', '--npu_nums', type=int, default='2', help='nums of npu') +parser.add_argument('-c', '--conf_path', type=str, default='./', help='the path of server_info') +FLAGS = parser.parse_args() + +import json +import os +server = [] +server_conf = [] +server_list = ["0", "1", "2", "3", "4", "5", "6", "7"] +if os.path.isdir(FLAGS.conf_path): + for f in os.listdir(FLAGS.conf_path): + if (f.split("_")[-1]).split(".")[0] in server_list and (f.split("_")[-1]).split(".")[1] == 'info' and f.split("_")[0] == 'server': + server_conf.append(f) + + + + + + +rank_address = [] +for i in range(FLAGS.npu_nums): + for x in server_conf: + if (x.split("_")[-1]).split(".")[0] == str(i): + server.append(x.split("_")[1]) + l = FLAGS.conf_path + "/" + x + with open(l, "r") as a: + s = a.readlines() + for s_ in s: + if 'address_0' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_1' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_2' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_3' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_4' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_5' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_6' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + for s_ in s: + if 'address_7' in s_: + rank_address.append(s_.split("=")[-1][:-1]) + +if FLAGS.npu_nums == 1: + rank = { + "server_count":"1", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}], + "status":"completed", + "version":"1.0" + } +elif FLAGS.npu_nums == 2: + rank = { + "server_count":"2", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", 
+ "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]} + ], + + "status":"completed", + "version":"1.0" + } + + +elif FLAGS.npu_nums == 3: + rank = { + "server_count":"3", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]} + ], + "status":"completed", + "version":"1.0" + } +elif FLAGS.npu_nums == 4: + rank = { + "server_count":"4", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + 
"device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]} + ], + "status":"completed", + "version":"1.0" + } +elif FLAGS.npu_nums == 5: + rank = { + "server_count":"5", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + 
{ + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + + +elif FLAGS.npu_nums == 6: + rank = { + "server_count":"6", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + 
"device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]}, + { + "server_id":server[5], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[40], + "rank_id":"40" + }, + { + "device_id":"1", + "device_ip":rank_address[41], + "rank_id":"41" + }, + { + "device_id":"2", + "device_ip":rank_address[42], + "rank_id":"42" + }, + { + "device_id":"3", + "device_ip":rank_address[43], + "rank_id":"43" + }, + { + "device_id":"4", + "device_ip":rank_address[44], + "rank_id":"44" + }, + { + "device_id":"5", + "device_ip":rank_address[45], + "rank_id":"45" + }, + { + "device_id":"6", + "device_ip":rank_address[46], + "rank_id":"46" + }, + { + "device_id":"7", + "device_ip":rank_address[47], + "rank_id":"47" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + +elif FLAGS.npu_nums == 7: + rank = { + "server_count":"7", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" 
+ }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]}, + { + "server_id":server[5], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[40], + "rank_id":"40" + }, + { + "device_id":"1", + "device_ip":rank_address[41], + "rank_id":"41" + }, + { + "device_id":"2", + "device_ip":rank_address[42], + "rank_id":"42" + }, + { + "device_id":"3", + "device_ip":rank_address[43], + "rank_id":"43" + }, + { + "device_id":"4", + "device_ip":rank_address[44], + "rank_id":"44" + }, + { + "device_id":"5", + "device_ip":rank_address[45], + "rank_id":"45" + }, + { + "device_id":"6", + "device_ip":rank_address[46], + "rank_id":"46" + }, + { + "device_id":"7", + "device_ip":rank_address[47], + "rank_id":"47" + } + ]}, + { + "server_id":server[6], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[48], + "rank_id":"48" + }, + { + "device_id":"1", + "device_ip":rank_address[49], + "rank_id":"49" + }, + { + "device_id":"2", + "device_ip":rank_address[50], + "rank_id":"50" + }, + { + "device_id":"3", + "device_ip":rank_address[51], + "rank_id":"51" + }, + { + "device_id":"4", + "device_ip":rank_address[52], + "rank_id":"52" + }, + { + "device_id":"5", + "device_ip":rank_address[53], + "rank_id":"53" + }, + { + "device_id":"6", + "device_ip":rank_address[54], + "rank_id":"54" + }, + { + "device_id":"7", + "device_ip":rank_address[55], + "rank_id":"55" + } + ]} 
+ ], + "status":"completed", + "version":"1.0" + } + + + + +elif FLAGS.npu_nums == 8: + rank = { + "server_count":"8", + "server_list":[ + { + "server_id":server[0], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[0], + "rank_id":"0" + }, + { + "device_id":"1", + "device_ip":rank_address[1], + "rank_id":"1" + }, + { + "device_id":"2", + "device_ip":rank_address[2], + "rank_id":"2" + }, + { + "device_id":"3", + "device_ip":rank_address[3], + "rank_id":"3" + }, + { + "device_id":"4", + "device_ip":rank_address[4], + "rank_id":"4" + }, + { + "device_id":"5", + "device_ip":rank_address[5], + "rank_id":"5" + }, + { + "device_id":"6", + "device_ip":rank_address[6], + "rank_id":"6" + }, + { + "device_id":"7", + "device_ip":rank_address[7], + "rank_id":"7" + } + ]}, + + + { + "server_id":server[1], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[8], + "rank_id":"8" + }, + { + "device_id":"1", + "device_ip":rank_address[9], + "rank_id":"9" + }, + { + "device_id":"2", + "device_ip":rank_address[10], + "rank_id":"10" + }, + { + "device_id":"3", + "device_ip":rank_address[11], + "rank_id":"11" + }, + { + "device_id":"4", + "device_ip":rank_address[12], + "rank_id":"12" + }, + { + "device_id":"5", + "device_ip":rank_address[13], + "rank_id":"13" + }, + { + "device_id":"6", + "device_ip":rank_address[14], + "rank_id":"14" + }, + { + "device_id":"7", + "device_ip":rank_address[15], + "rank_id":"15" + } + ]}, + { + "server_id":server[2], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[16], + "rank_id":"16" + }, + { + "device_id":"1", + "device_ip":rank_address[17], + "rank_id":"17" + }, + { + "device_id":"2", + "device_ip":rank_address[18], + "rank_id":"18" + }, + { + "device_id":"3", + "device_ip":rank_address[19], + "rank_id":"19" + }, + { + "device_id":"4", + "device_ip":rank_address[20], + "rank_id":"20" + }, + { + "device_id":"5", + "device_ip":rank_address[21], + "rank_id":"21" + }, + { + "device_id":"6", + "device_ip":rank_address[22], + "rank_id":"22" + }, + { + "device_id":"7", + "device_ip":rank_address[23], + "rank_id":"23" + } + ]}, + { + "server_id":server[3], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[24], + "rank_id":"24" + }, + { + "device_id":"1", + "device_ip":rank_address[25], + "rank_id":"25" + }, + { + "device_id":"2", + "device_ip":rank_address[26], + "rank_id":"26" + }, + { + "device_id":"3", + "device_ip":rank_address[27], + "rank_id":"27" + }, + { + "device_id":"4", + "device_ip":rank_address[28], + "rank_id":"28" + }, + { + "device_id":"5", + "device_ip":rank_address[29], + "rank_id":"29" + }, + { + "device_id":"6", + "device_ip":rank_address[30], + "rank_id":"30" + }, + { + "device_id":"7", + "device_ip":rank_address[31], + "rank_id":"31" + } + ]}, + { + "server_id":server[4], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[32], + "rank_id":"32" + }, + { + "device_id":"1", + "device_ip":rank_address[33], + "rank_id":"33" + }, + { + "device_id":"2", + "device_ip":rank_address[34], + "rank_id":"34" + }, + { + "device_id":"3", + "device_ip":rank_address[35], + "rank_id":"35" + }, + { + "device_id":"4", + "device_ip":rank_address[36], + "rank_id":"36" + }, + { + "device_id":"5", + "device_ip":rank_address[37], + "rank_id":"37" + }, + { + "device_id":"6", + "device_ip":rank_address[38], + "rank_id":"38" + }, + { + "device_id":"7", + "device_ip":rank_address[39], + "rank_id":"39" + } + ]}, + { + "server_id":server[5], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[40], + "rank_id":"40" + }, 
+ { + "device_id":"1", + "device_ip":rank_address[41], + "rank_id":"41" + }, + { + "device_id":"2", + "device_ip":rank_address[42], + "rank_id":"42" + }, + { + "device_id":"3", + "device_ip":rank_address[43], + "rank_id":"43" + }, + { + "device_id":"4", + "device_ip":rank_address[44], + "rank_id":"44" + }, + { + "device_id":"5", + "device_ip":rank_address[45], + "rank_id":"45" + }, + { + "device_id":"6", + "device_ip":rank_address[46], + "rank_id":"46" + }, + { + "device_id":"7", + "device_ip":rank_address[47], + "rank_id":"47" + } + ]}, + { + "server_id":server[6], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[48], + "rank_id":"48" + }, + { + "device_id":"1", + "device_ip":rank_address[49], + "rank_id":"49" + }, + { + "device_id":"2", + "device_ip":rank_address[50], + "rank_id":"50" + }, + { + "device_id":"3", + "device_ip":rank_address[51], + "rank_id":"51" + }, + { + "device_id":"4", + "device_ip":rank_address[52], + "rank_id":"52" + }, + { + "device_id":"5", + "device_ip":rank_address[53], + "rank_id":"53" + }, + { + "device_id":"6", + "device_ip":rank_address[54], + "rank_id":"54" + }, + { + "device_id":"7", + "device_ip":rank_address[55], + "rank_id":"55" + } + ]}, + { + "server_id":server[7], + "device":[ + { + "device_id":"0", + "device_ip":rank_address[56], + "rank_id":"56" + }, + { + "device_id":"1", + "device_ip":rank_address[57], + "rank_id":"57" + }, + { + "device_id":"2", + "device_ip":rank_address[58], + "rank_id":"58" + }, + { + "device_id":"3", + "device_ip":rank_address[59], + "rank_id":"59" + }, + { + "device_id":"4", + "device_ip":rank_address[60], + "rank_id":"60" + }, + { + "device_id":"5", + "device_ip":rank_address[61], + "rank_id":"61" + }, + { + "device_id":"6", + "device_ip":rank_address[62], + "rank_id":"62" + }, + { + "device_id":"7", + "device_ip":rank_address[63], + "rank_id":"63" + } + ]} + ], + "status":"completed", + "version":"1.0" + } + + + + +with open("rank_table.json", "w") as f: + json.dump(rank, f) + + + + + + diff --git a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_full_1p.sh b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_full_1p_24bs_packed.sh similarity index 51% rename from TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_full_1p.sh rename to TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_full_1p_24bs_packed.sh index f6d490715962f2d7da47144fb144f11ff9609f6c..60d5c60352100e5a344766175e44272efbd988a1 100644 --- a/TensorFlow/built-in/nlp/BertNV_Series_for_TensorFlow/test/train_ID3067_BertLarge-128_full_1p.sh +++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_full_1p_24bs_packed.sh @@ -1,171 +1,218 @@ -#!/bin/bash - -#当前路径,不需要修改 -cur_path=`pwd` - -#集合通信参数,不需要修改 -export RANK_SIZE=1 -export JOB_ID=99990001 -RANK_ID_START=0 - -# 数据集路径,保持为空,不需要修改 -data_path="" - -#基础参数,需要模型审视修改 -#网络名称,同目录名称 -Network="BertLarge-128_ID3067_for_TensorFlow" -#训练epoch -train_epochs=1 -#训练batch_size -batch_size=24 -#训练step -train_steps=100000 -#学习率 -learning_rate= - -#维测参数,precision_mode需要模型审视修改 -#precision_mode="allow_mix_precision" -#维持参数,以下不需要修改 -over_dump=False -data_dump_flag=False -data_dump_step="10" -profiling=False -autotune=False - -# 帮助信息,不需要修改 -if [[ $1 == --help || $1 == -h ]];then - echo"usage:./train_full_1p.sh " - echo " " - echo "parameter explain: - --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) - --over_dump if or not over 
-    --data_dump_flag         data dump flag, default is False
-    --data_dump_step         data dump step, default is 10
-    --profiling              if or not profiling for performance debug, default is False
-    --autotune               whether to enable autotune, default is False
-    --data_path              source data of training
-    -h/--help                show help message
-    "
-    exit 1
-fi
-
-#Parameter validation; no need to modify
-for para in $*
-do
-    if [[ $para == --precision_mode* ]];then
-        precision_mode=`echo ${para#*=}`
-    elif [[ $para == --over_dump* ]];then
-        over_dump=`echo ${para#*=}`
-        over_dump_path=${cur_path}/output/overflow_dump
-        mkdir -p ${over_dump_path}
-    elif [[ $para == --data_dump_flag* ]];then
-        data_dump_flag=`echo ${para#*=}`
-        data_dump_path=${cur_path}/output/data_dump
-        mkdir -p ${data_dump_path}
-    elif [[ $para == --data_dump_step* ]];then
-        data_dump_step=`echo ${para#*=}`
-    elif [[ $para == --profiling* ]];then
-        profiling=`echo ${para#*=}`
-        profiling_dump_path=${cur_path}/output/profiling
-        mkdir -p ${profiling_dump_path}
-    elif [[ $para == --data_path* ]];then
-        data_path=`echo ${para#*=}`
-    fi
-done
-
-#Check that data_path was passed in; no need to modify
-if [[ $data_path == "" ]];then
-    echo "[Error] para \"data_path\" must be confing"
-    exit 1
-fi
-
-#Training start time; no need to modify
-start_time=$(date +%s)
-#Enter the training script directory; review and modify per model
-for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
-do
-    #Set environment variables; no need to modify
-    echo "Device ID: $ASCEND_DEVICE_ID"
-    export RANK_ID=$RANK_ID
-
-    #Create the DeviceID output directory; no need to modify
-    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
-        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
-        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID}
-    else
-        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt${ASCEND_DEVICE_ID}
-    fi
-
-    #Run the training script; the following arguments need no modification, others need per-model review
-    #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune
-    nohup python3.7 $cur_path/../src/run_pretraining.py --bert_config_file=${cur_path}/../configs/bert_large_config.json \
-        --max_seq_length=128 \
-        --max_predictions_per_seq=20 \
-        --train_batch_size=${batch_size} \
-        --learning_rate=1e-4 \
-        --num_warmup_steps=10000 \
-        --num_train_steps=${train_steps} \
-        --optimizer_type=adam \
-        --manual_fp16=True \
-        --use_fp16_cls=True \
-        --input_files_dir=${data_path}/train_phase1 \
-        --eval_files_dir=${data_path}/eval_phase1 \
-        --npu_bert_debug=False \
-        --npu_bert_use_tdt=True \
-        --do_train=True \
-        --num_accumulation_steps=1 \
-        --npu_bert_job_start_file= \
-        --iterations_per_loop=1000 \
-        --save_checkpoints_steps=1000 \
-        --npu_bert_clip_by_global_norm=False \
-        --distributed=False \
-        --npu_bert_loss_scale=0 \
-        --init_loss_scale_value=1 \
-        --over_dump=${over_dump} \
-        --over_dump_path=${over_dump_path} \
-        --output_dir=${cur_path}/output/${ASCEND_DEVICE_ID}/ckpt${ASCEND_DEVICE_ID} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
-done
-wait
-
-#Training end time; no need to modify
-end_time=$(date +%s)
-e2e_time=$(( $end_time - $start_time ))
-
-#Print results; no need to modify
-echo "------------------ Final result ------------------"
-#Output performance FPS; review and modify per model
-ActualFPS=`grep Throughput ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk 'END {print $6}'`
-TrainingTime=`awk 'BEGIN{printf "%.2f\n", '${batch_size}' * '${RANK_SIZE}' / '${ActualFPS}'}'`
-#Print; no need to modify
-echo "Final Performance images/sec : $ActualFPS"
-
-#Output training accuracy; review and modify per model
-TrainAccuracy=`grep -A 1 top1 $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $3}'`
-#Print; no need to modify
-echo "Final Train Accuracy : ${TrainAccuracy}" -echo "E2E Training Duration sec : $e2e_time" - -#稳定性精度看护结果汇总 -#训练用例信息,不需要修改 -BatchSize=${batch_size} -DeviceType=`uname -m` -CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'acc' - - -#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 -grep "tensorflow:loss =" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log | awk -F "loss = " '{print $2}' | awk -F "," '{print $1}' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt - -#最后一个迭代loss值,不需要修改 -ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` - -#关键信息打印到${CaseName}.log中,不需要修改 -echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log -echo "TrainAccuracy = ${TrainAccuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 + +export RANK_SIZE=1 +export JOB_ID=10087 +RANK_ID_START=0 + + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge_ID0634_for_TensorFlow2.X" + +#训练batch_size +eval_batch_size=4 +batch_size=24 +average_sequences_per_sample=2 +#训练step +train_steps=1000 +#训练epoch +train_epochs=`expr 768 / ${batch_size}` +#学习率 +learning_rate=0.000058711 + +#TF2.X独有,不需要修改 +#export NPU_LOOP_SIZE=${train_steps} +export NPU_LOOP_SIZE=1000 +export GE_USE_STATIC_MEMORY=1 + +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_fp32_to_fp16" +#维持参数,以下不需要修改 +over_dump=False +data_dump_flag=False +data_dump_step="10" +profiling=False + + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_full_1p.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + over_dump_path=${cur_path}/output/overflow_dump + mkdir -p ${over_dump_path} + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + data_dump_path=${cur_path}/output/data_dump + mkdir -p ${data_dump_path} + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must 
be confing" + exit 1 +fi + +init_ckpt_path=${data_path}/'output_ckpt/model.ckpt-28252' #need modify to actual path +train_files_path=${data_path}/'train_packed/*' #need modify to actual path +eval_files_path=${data_path}/'eval_packed/*' #need modify to actual path + +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 + +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + + + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} + fi + + #绑核,不需要绑核的模型删除,需要绑核的模型根据实际修改 + cpucount=`lscpu | grep "CPU(s):" | head -n 1 | awk '{print $2}'` + cpustep=`expr $cpucount / 8` + echo "taskset c steps:" $cpustep + let a=RANK_ID*$cpustep + let b=RANK_ID+1 + let c=b*$cpustep-1 + + #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 + #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune + nohup taskset -c $a-$c python3 ../bert/run_pretraining.py \ + --use_packed_model=True \ + --all_reduce_alg=nccl \ + --bert_config_file=../configs/bert_config.json \ + --beta_1=0.91063 \ + --beta_2=0.96497 \ + --device_warmup=False \ + --do_eval=True \ + --dtype=fp16 \ + --eval_batch_size=${eval_batch_size} \ + --init_checkpoint=${init_ckpt_path} \ + --train_files=${train_files_path} \ + --eval_files=${eval_files_path} \ + --learning_rate=${learning_rate} \ + --loss_scale=dynamic \ + --max_predictions_per_seq=79 \ + --max_seq_length=512 \ + --model_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} \ + --num_accumulation_steps=1 \ + --distribution_strategy=one_device \ + --num_gpus=1 \ + --num_steps_per_epoch=8000 \ + --num_train_epochs=${train_epochs} \ + --optimizer_type=lamb \ + --scale_loss=False \ + --stop_threshold=0.95 \ + --steps_between_eval=1000 \ + --steps_per_loop=${NPU_LOOP_SIZE} \ + --stop_steps=100000 \ + --enable_checkpoint_and_summary=True \ + --train_batch_size=${batch_size} \ + --verbosity=0 \ + --warmup_steps=0 \ + --precision_mode=${precision_mode} \ + --over_dump=${over_dump} \ + --over_dump_path=${over_dump_path} \ + --data_dump_flag=${data_dump_flag} \ + --data_dump_step=${data_dump_step} \ + --data_dump_path=${data_dump_path} \ + --profiling=${profiling} \ + --profiling_dump_path=${profiling_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#结果打印,不需要修改 +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +single_batch_step_sec=`grep TimeHistory $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'` +FPS=`awk 'BEGIN{printf "%.2f\n",'${single_batch_step_sec}'*'${batch_size}'*'${average_sequences_per_sample}'}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +train_accuracy=`grep eval_accuracy $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|grep -v mlp_log|awk 'END {print $5}'|sed 's/,//g'|cut -c 1-5` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#############冒烟看护######################### +BatchSize=${batch_size} +#设备类型 +DeviceType=`uname -m` +#用例名称 
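+# The case name concatenates network name, batch size, rank count and case type;
+# with the defaults above it resolves to BertLarge_ID0634_for_TensorFlow2.X_bs24_1p_packed_acc.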
+CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p_packed'_'acc' + +##获取性能数据 +#吞吐量,不需要修改 +ActualFPS=${FPS} +#单迭代训练时长,不需要修改 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*'${RANK_SIZE}'*1000/'${FPS}'}'` + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中 +grep loss $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print$11}'|grep -v instead|grep -v masked_lm_loss|sed 's/,//g'|sed '/^$/d' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log \ No newline at end of file diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_full_8p_192bs_bucket.sh b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_full_8p_192bs_bucket.sh index c6e9d5be8bddb513a44ef59641a610de85af041c..9f6dac2b527452cd9eebf9cb5aa1135a68e7648a 100644 --- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_full_8p_192bs_bucket.sh +++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_full_8p_192bs_bucket.sh @@ -177,9 +177,14 @@ e2e_time=$(( $end_time - $start_time )) #############结果处理######################### echo "------------------ Final result ------------------" #输出性能FPS,需要模型审视修改 -single_batch_step_sec=`grep TimeHistory $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'` -avg_bs=`grep avg_bs $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'` -FPS=`awk 'BEGIN{printf "%.2f\n",'${single_batch_step_sec}'*'${avg_bs}'*8}'` +FPS=0.0 +for((ID=0; ID<8; ID++)) +do + single_batch_step_sec=`grep TimeHistory $cur_path/output/${ID}/train_${ID}.log|awk 'END {print $8}'` + avg_bs=`grep avg_bs $cur_path/output/${ID}/train_${ID}.log|awk 'END {print $8}'` + PER_FPS=`awk 'BEGIN{printf "%.2f\n",'${single_batch_step_sec}'*'${avg_bs}'}'` + FPS=`awk 'BEGIN{printf "%.2f\n",'${PER_FPS}'+'${FPS}'}'` +done #打印,不需要修改 echo "Final Performance images/sec : $FPS" diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_full_8p_192bs_packed.sh b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_full_8p_192bs_packed.sh new file mode 100644 index 0000000000000000000000000000000000000000..e7157da722666ba45a73ef09471b16a8fd4b8ff3 --- /dev/null +++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_full_8p_192bs_packed.sh @@ -0,0 +1,229 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +#保证rank table file 文件rank_table_8p.json存放在和test同级的configs目录下 
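+# Note: the rank table is consumed by HCCL collective communication; the export
+# below assumes rank_table_8p.json sits in configs/ one level above this test/ directory.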
+export RANK_SIZE=8
+export RANK_TABLE_FILE=${cur_path}/../configs/rank_table_8p.json
+export JOB_ID=10087
+RANK_ID_START=0
+
+# Dataset path; keep empty here, no modification needed
+data_path=""
+
+# Base parameters; review and adjust per model
+# Network name, same as the directory name
+Network="BertLarge_ID0634_for_TensorFlow2.X"
+
+
+# Training batch_size
+batch_size=192
+eval_batch_size=16
+average_sequences_per_sample=2
+# Training steps
+train_steps=1000
+# Training epochs
+train_epochs=`expr 768 / ${batch_size}`
+# Learning rate
+learning_rate=0.0002
+
+# TF2.X specific; review and adjust per model
+export NPU_LOOP_SIZE=1000
+export GE_USE_STATIC_MEMORY=1
+
+# Debug parameters; precision_mode needs review per model
+precision_mode="allow_fp32_to_fp16"
+# Maintenance parameters; no modification needed
+over_dump=False
+data_dump_flag=False
+data_dump_step="10"
+profiling=False
+
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage:./train_full_8p_192bs_packed.sh "
+
+    echo " "
+    echo "parameter explain:
+    --precision_mode         precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump              if or not over detection, default is False
+    --data_dump_flag         data dump flag, default is False
+    --data_dump_step         data dump step, default is 10
+    --profiling              if or not profiling for performance debug, default is False
+    --data_path              source data of training
+    -h/--help                show help message
+    "
+    exit 1
+fi
+
+# Argument parsing; review per model
+for para in $*
+do
+    if [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+        over_dump_path=${cur_path}/output/overflow_dump
+        mkdir -p ${over_dump_path}
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+        data_dump_path=${cur_path}/output/data_dump
+        mkdir -p ${data_dump_path}
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    elif [[ $para == --bind_core* ]]; then
+        bind_core=`echo ${para#*=}`
+        name_bind="_bindcore"
+    fi
+done
+
+# Verify that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+
+init_ckpt_path=${data_path}/'output_ckpt/model.ckpt-28252'   #need modify to actual path
+train_files_path=${data_path}/'train_packed/*'               #need modify to actual path
+eval_files_path=${data_path}/'eval_packed/*'                 #need modify to actual path
+
+
+
+start_time=$(date +%s)
+############# Run training #########################
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $RANK_ID"
+    export RANK_ID=$RANK_ID
+    export ASCEND_DEVICE_ID=$RANK_ID
+    ASCEND_DEVICE_ID=$RANK_ID
+
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate}
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate}
+    fi
+
+    # Core binding: delete for models that do not need it; adjust as required
+    cpucount=`lscpu | grep "CPU(s):" | head -n 1 | awk '{print $2}'`
+    cpustep=`expr $cpucount / 8`
+    echo "taskset cpu step size:" $cpustep
+    let a=RANK_ID*$cpustep
+    let b=RANK_ID+1
+    let c=b*$cpustep-1
+
+    # Launch the training script; the arguments below stay fixed, others need review per model
+    #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune
+    if [ "x${bind_core}" != x ];then
+        bind_core="taskset -c $a-$c"
+    fi
+    nohup ${bind_core} python3 ../bert/run_pretraining.py \
+    --use_packed_model=True \
+    
--all_reduce_alg=nccl \ + --bert_config_file=../configs/bert_config.json \ + --beta_1=0.91063 \ + --beta_2=0.96497 \ + --device_warmup=False \ + --do_eval=True \ + --dtype=fp16 \ + --eval_batch_size=${eval_batch_size} \ + --init_checkpoint=${init_ckpt_path} \ + --train_files=${train_files_path} \ + --eval_files=${eval_files_path} \ + --learning_rate=${learning_rate} \ + --loss_scale=dynamic \ + --max_predictions_per_seq=79 \ + --max_seq_length=512 \ + --model_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} \ + --num_accumulation_steps=1 \ + --distribution_strategy=one_device \ + --num_gpus=1 \ + --use_npu_lamb=True \ + --use_mixlist=True \ + --num_steps_per_epoch=8000 \ + --stop_threshold=0.95 \ + --num_train_epochs=${train_epochs} \ + --optimizer_type=lamb \ + --enable_checkpoint_and_summary=True \ + --scale_loss=False \ + --steps_between_eval=1000 \ + --steps_per_loop=${NPU_LOOP_SIZE} \ + --stop_steps=32000 \ + --train_batch_size=${batch_size} \ + --verbosity=0 \ + --warmup_steps=0 \ + --precision_mode=${precision_mode} \ + --over_dump=${over_dump} \ + --over_dump_path=${over_dump_path} \ + --data_dump_flag=${data_dump_flag} \ + --data_dump_step=${data_dump_step} \ + --data_dump_path=${data_dump_path} \ + --profiling=${profiling} \ + --profiling_dump_path=${profiling_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############结果处理######################### +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +single_batch_step_sec=`grep TimeHistory $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'` +FPS=`awk 'BEGIN{printf "%.2f\n",'${single_batch_step_sec}'*'${batch_size}'*'${average_sequences_per_sample}'}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +train_accuracy=`grep eval_accuracy $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|grep -v mlp_log|awk 'END {print $5}'|sed 's/,//g'|cut -c 1-5` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#############冒烟看护######################### +BatchSize=${batch_size} +#设备类型 +DeviceType=`uname -m` +#用例名称 +CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p_packed'_'acc' + +##获取性能数据 +#吞吐量,不需要修改 +ActualFPS=${FPS} +#单迭代训练时长,不需要修改 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*'${RANK_SIZE}'*1000/'${FPS}'}'` + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中 +grep loss $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print$11}'|grep -v instead|grep -v masked_lm_loss|sed 's/,//g'|sed '/^$/d' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> 
$cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + sed -i "/AttributeError/d" $cur_path/output/${RANK_ID}/train_${RANK_ID}.log + sed -i "/ModuleNotFoundError/d" $cur_path/output/${RANK_ID}/train_${RANK_ID}.log +done \ No newline at end of file diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_16p_384bs.sh b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_16p_384bs.sh new file mode 100644 index 0000000000000000000000000000000000000000..f2b82938dc2a36324d04e30a1ec70b9c73c6f02c --- /dev/null +++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_16p_384bs.sh @@ -0,0 +1,231 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +#保证rank table file 文件rank_table_8p.json存放在和test同级的configs目录下 +export RANK_SIZE=16 +#export RANK_TABLE_FILE=${cur_path}/../configs/rank_table_8p.json +export JOB_ID=10087 +RANK_ID_START=0 + +# 数据集路径,保持为空,不需要修改 +data_path="" +server_index="" +conf_path="" +#基础参数 需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge_ID0634_for_TensorFlow2.X" +#训练batch_size +batch_size=384 +eval_batch_size=32 +#训练step +train_steps=1000 +#训练epoch +train_epochs=`expr 768 / ${batch_size}` +#学习率 +learning_rate=0.0007 + +#TF2.X独有,需要模型审视修改 +export NPU_LOOP_SIZE=100 +export GE_USE_STATIC_MEMORY=1 + +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,不需要修改 +over_dump=False +data_dump_flag=False +data_dump_step="10" +profiling=False + +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_full_8p_32bs.sh " + + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is 0 + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,需要模型审视修改 +for para in $* +do + if [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + over_dump_path=${cur_path}/output/overflow_dump + mkdir -p ${over_dump_path} + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + data_dump_path=${cur_path}/output/data_dump + mkdir -p ${data_dump_path} + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]]; then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + elif [[ $para == --server_index* ]];then + server_index=`echo ${para#*=}` + elif [[ $para == --conf_path* ]];then + conf_path=`echo ${para#*=}` + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be confing" + exit 1 +fi + +init_ckpt_path=${data_path}/tf2_ckpt/model.ckpt-28252 #need 
modify to actual path +train_files_path=${data_path}/'train/*' #need modify to actual path +eval_files_path=${data_path}/'eval/eval.tfrecord' #need modify to actual path + +rank_size=8 +nohup python3 set_ranktable.py --npu_nums=$((RANK_SIZE/rank_size)) --conf_path=$conf_path +export RANK_TABLE_FILE=${cur_path}/rank_table.json +export HCCL_CONNECT_TIMEOUT=600 +RANK_ID_START=0 + +start_time=$(date +%s) +#############执行训练######################### +for((RANK_ID=$((rank_size*server_index));RANK_ID<$((((server_index+1))*rank_size));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=`expr ${RANK_ID} - $((rank_size*server_index))` + ASCEND_DEVICE_ID=`expr ${RANK_ID} - $((rank_size*server_index))` + + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} + fi + + #绑核,不需要绑核的模型删除,需要绑核的模型根据实际修改 + cpucount=`lscpu | grep "CPU(s):" | head -n 1 | awk '{print $2}'` + cpustep=`expr $cpucount / 8` + echo "taskset c steps:" $cpustep + let a=RANK_ID*$cpustep + let b=RANK_ID+1 + let c=b*$cpustep-1 + + #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 + #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune + if [ "x${bind_core}" != x ];then + bind_core="taskset -c $a-$c" + fi + nohup ${bind_core} python3 ../bert/run_pretraining.py \ + --all_reduce_alg=nccl \ + --bert_config_file=../configs/bert_config.json \ + --beta_1=0.91063 \ + --beta_2=0.96497 \ + --device_warmup=False \ + --do_eval=True \ + --dtype=fp16 \ + --init_checkpoint=${init_ckpt_path} \ + --eval_batch_size=${eval_batch_size} \ + --train_files=${train_files_path} \ + --eval_files=${eval_files_path} \ + --learning_rate=${learning_rate} \ + --loss_scale=dynamic \ + --max_predictions_per_seq=76 \ + --max_seq_length=512 \ + --model_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} \ + --num_accumulation_steps=1 \ + --distribution_strategy=one_device \ + --num_gpus=1 \ + --enable_checkpoint_and_summary=True \ + --num_steps_per_epoch=1000 \ + --num_train_epochs=${train_epochs} \ + --optimizer_type=lamb \ + --scale_loss=False \ + --steps_between_eval=100 \ + --steps_per_loop=${NPU_LOOP_SIZE} \ + --stop_steps=200 \ + --train_batch_size=${batch_size} \ + --verbosity=0 \ + --warmup_steps=0 \ + --precision_mode=${precision_mode} \ + --over_dump=${over_dump} \ + --over_dump_path=${over_dump_path} \ + --data_dump_flag=${data_dump_flag} \ + --data_dump_step=${data_dump_step} \ + --data_dump_path=${data_dump_path} \ + --profiling=${profiling} \ + --profiling_dump_path=${profiling_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############结果处理######################### +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +single_batch_step_sec=`grep TimeHistory $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'` +FPS=`awk 'BEGIN{printf "%.2f\n",'${single_batch_step_sec}'*'${batch_size}'}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +train_accuracy=`grep eval_accuracy $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|grep -v mlp_log|awk 'END {print $5}'|sed 
's/,//g'|cut -c 1-5` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#############冒烟看护######################### +BatchSize=${batch_size} +#设备类型 +DeviceType=`uname -m` +#用例名称 +CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +##获取性能数据 +#吞吐量,不需要修改 +ActualFPS=${FPS} +#单迭代训练时长,不需要修改 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*'${RANK_SIZE}'*1000/'${FPS}'}'` + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中 +grep loss $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print$11}'|grep -v instead|grep -v masked_lm_loss|sed 's/,//g'|sed '/^$/d' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + sed -i "/AttributeError/d" $cur_path/output/${RANK_ID}/train_${RANK_ID}.log + sed -i "/ModuleNotFoundError/d" $cur_path/output/${RANK_ID}/train_${RANK_ID}.log +done diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_1p_24bs_packed.sh b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_1p_24bs_packed.sh new file mode 100644 index 0000000000000000000000000000000000000000..3ddc8aef045b7a786518437cead3bf1fc43b5c6f --- /dev/null +++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_1p_24bs_packed.sh @@ -0,0 +1,219 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 + +export RANK_SIZE=1 +export JOB_ID=10087 +RANK_ID_START=0 + + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge_ID0634_for_TensorFlow2.X" +#训练batch_size +eval_batch_size=4 +batch_size=24 +average_sequences_per_sample=2 +#训练step +train_steps=1000 +#训练epoch +train_epochs=`expr 768 / ${batch_size}` +#学习率 +learning_rate=0.000058711 + +#TF2.X独有,不需要修改 +#export NPU_LOOP_SIZE=${train_steps} +export NPU_LOOP_SIZE=100 +export GE_USE_STATIC_MEMORY=1 +export NPU_ENABLE_PERF=true + +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_fp32_to_fp16" +#维持参数,以下不需要修改 +over_dump=False +data_dump_flag=False +data_dump_step="10" +profiling=False + + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_full_1p.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, 
default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + over_dump_path=${cur_path}/output/overflow_dump + mkdir -p ${over_dump_path} + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + data_dump_path=${cur_path}/output/data_dump + mkdir -p ${data_dump_path} + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be confing" + exit 1 +fi + +init_ckpt_path=${data_path}/'output_ckpt/model.ckpt-28252' #need modify to actual path +train_files_path=${data_path}/'train_packed/*' #need modify to actual path +eval_files_path=${data_path}/'eval_packed/*' #need modify to actual path + +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 + +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + + + + #创建DeviceID输出目录,不需要修改 + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} + fi + + #绑核,不需要绑核的模型删除,需要绑核的模型根据实际修改 + cpucount=`lscpu | grep "CPU(s):" | head -n 1 | awk '{print $2}'` + cpustep=`expr $cpucount / 8` + echo "taskset c steps:" $cpustep + let a=RANK_ID*$cpustep + let b=RANK_ID+1 + let c=b*$cpustep-1 + + #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 + #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune + nohup taskset -c $a-$c python3 ../bert/run_pretraining.py \ + --use_packed_model=True \ + --all_reduce_alg=nccl \ + --bert_config_file=../configs/bert_config.json \ + --beta_1=0.91063 \ + --beta_2=0.96497 \ + --device_warmup=False \ + --do_eval=True \ + --dtype=fp16 \ + --eval_batch_size=${eval_batch_size} \ + --init_checkpoint=${init_ckpt_path} \ + --train_files=${train_files_path} \ + --eval_files=${eval_files_path} \ + --learning_rate=${learning_rate} \ + --loss_scale=dynamic \ + --max_predictions_per_seq=79 \ + --max_seq_length=512 \ + --model_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} \ + --num_accumulation_steps=1 \ + --distribution_strategy=one_device \ + --num_gpus=1 \ + --num_steps_per_epoch=1000 \ + --num_train_epochs=${train_epochs} \ + --optimizer_type=lamb \ + --scale_loss=False \ + --steps_between_eval=100 \ + --steps_per_loop=${NPU_LOOP_SIZE} \ + --stop_steps=200 \ + --enable_checkpoint_and_summary=True \ + --train_batch_size=${batch_size} \ + --verbosity=0 \ + --warmup_steps=0 \ + --precision_mode=${precision_mode} \ + --over_dump=${over_dump} \ + --over_dump_path=${over_dump_path} \ + --data_dump_flag=${data_dump_flag} \ + --data_dump_step=${data_dump_step} \ + --data_dump_path=${data_dump_path} \ + --profiling=${profiling} \ 
+    --profiling_dump_path=${profiling_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+
+# Training end time; no modification needed
+end_time=$(date +%s)
+e2e_time=$(( $end_time - $start_time ))
+
+# Print results; no modification needed
+echo "------------------ Final result ------------------"
+# Output performance FPS; review per model
+single_batch_step_sec=`grep TimeHistory $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'`
+FPS=`awk 'BEGIN{printf "%.2f\n",'${single_batch_step_sec}'*'${batch_size}'*'${average_sequences_per_sample}'}'`
+# Print; no modification needed
+echo "Final Performance images/sec : $FPS"
+
+# Output training accuracy; review per model
+train_accuracy=`grep eval_accuracy $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|grep -v mlp_log|awk 'END {print $5}'|sed 's/,//g'|cut -c 1-5`
+# Print; no modification needed
+echo "Final Train Accuracy : ${train_accuracy}"
+echo "E2E Training Duration sec : $e2e_time"
+
+############# Smoke monitoring #########################
+BatchSize=${batch_size}
+# Device type
+DeviceType=`uname -m`
+# Case name
+CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p_packed'_'perf'
+
+## Collect performance data
+# Throughput; no modification needed
+ActualFPS=${FPS}
+# Time per training step; no modification needed
+TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*'${RANK_SIZE}'*1000/'${FPS}'}'`
+
+## Collect Loss
+# Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt
+grep loss $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print$11}'|grep -v instead|grep -v masked_lm_loss|sed 's/,//g'|sed '/^$/d' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss of the last step
+ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key info into ${CaseName}.log
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
+sed -i "/AttributeError/d" $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log
\ No newline at end of file
diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_8p_192bs.sh b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_8p_192bs.sh
index f40ad087a7c25549c789b02d7bf403961a0a15e5..b9832cc804195b14e5e6fae62ae2d204483da387 100644
--- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_8p_192bs.sh
+++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_8p_192bs.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/bash
 #当前路径,不需要修改
 cur_path=`pwd`
@@ -88,7 +88,7 @@ if [[ $data_path == "" ]];then
     exit 1
 fi
 
-init_ckpt_path=${data_path}/tf2_ckpt/model.ckpt-28252 #need modify to actual path
+init_ckpt_path=${data_path}/'tf2_ckpt/model.ckpt-28252' #need modify to actual path
 train_files_path=${data_path}/'train/*' #need modify to actual path
eval_files_path=${data_path}/'eval/eval.tfrecord' #need modify to actual path diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_8p_192bs_bucket.sh b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_8p_192bs_bucket.sh index b82b734fe13b7551a3e5f5c42b5392d102c45a4d..225afcb73aa0ae9ac7ee01ef069df6d96b1a0225 100644 --- a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_8p_192bs_bucket.sh +++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_8p_192bs_bucket.sh @@ -173,9 +173,14 @@ e2e_time=$(( $end_time - $start_time )) #############结果处理######################### echo "------------------ Final result ------------------" #输出性能FPS,需要模型审视修改 -single_batch_step_sec=`grep TimeHistory $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'` -avg_bs=`grep avg_bs $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'` -FPS=`awk 'BEGIN{printf "%.2f\n",'${single_batch_step_sec}'*'${avg_bs}'*8}'` +FPS=0.0 +for((ID=0; ID<8; ID++)) +do + single_batch_step_sec=`grep TimeHistory $cur_path/output/${ID}/train_${ID}.log|awk 'END {print $8}'` + avg_bs=`grep avg_bs $cur_path/output/${ID}/train_${ID}.log|awk 'END {print $8}'` + PER_FPS=`awk 'BEGIN{printf "%.2f\n",'${single_batch_step_sec}'*'${avg_bs}'}'` + FPS=`awk 'BEGIN{printf "%.2f\n",'${PER_FPS}'+'${FPS}'}'` +done #打印,不需要修改 echo "Final Performance images/sec : $FPS" diff --git a/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_8p_192bs_packed.sh b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_8p_192bs_packed.sh new file mode 100644 index 0000000000000000000000000000000000000000..25e660e708e4e9df418cd224847613ffe1800547 --- /dev/null +++ b/TensorFlow2/built-in/nlp/BertLarge_ID0634_for_TensorFlow2.X/test/train_performance_8p_192bs_packed.sh @@ -0,0 +1,225 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 +#保证rank table file 文件rank_table_8p.json存放在和test同级的configs目录下 +export RANK_SIZE=8 +export RANK_TABLE_FILE=${cur_path}/../configs/rank_table_8p.json +export JOB_ID=10087 +RANK_ID_START=0 + +export NPU_ENABLE_PERF=true +# 数据集路径,保持为空,不需要修改 +data_path="" + +#基础参数 需要模型审视修改 +#网络名称,同目录名称 +Network="BertLarge_ID0634_for_TensorFlow2.X" +#训练batch_size +batch_size=192 +eval_batch_size=16 +average_sequences_per_sample=2 +#训练step +train_steps=1000 +#训练epoch +train_epochs=`expr 768 / ${batch_size}` +#学习率 +learning_rate=0.000144 + +#TF2.X独有,需要模型审视修改 +export NPU_LOOP_SIZE=100 +export GE_USE_STATIC_MEMORY=1 + +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_fp32_to_fp16" +#维持参数,不需要修改 +over_dump=False +data_dump_flag=False +data_dump_step="10" +profiling=False + +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_full_8p_32bs.sh " + + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is 0 + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,需要模型审视修改 +for para in $* +do + if [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + 
over_dump_path=${cur_path}/output/overflow_dump + mkdir -p ${over_dump_path} + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + data_dump_path=${cur_path}/output/data_dump + mkdir -p ${data_dump_path} + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + elif [[ $para == --bind_core* ]]; then + bind_core=`echo ${para#*=}` + name_bind="_bindcore" + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be confing" + exit 1 +fi + +init_ckpt_path=${data_path}/'output_ckpt/model.ckpt-28252' #need modify to actual path +train_files_path=${data_path}/'train_packed/*' #need modify to actual path +eval_files_path=${data_path}/'eval_packed/*' #need modify to actual path + + + +start_time=$(date +%s) +#############执行训练######################### +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $RANK_ID" + export RANK_ID=$RANK_ID + export ASCEND_DEVICE_ID=$RANK_ID + ASCEND_DEVICE_ID=$RANK_ID + + if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} + fi + + #绑核,不需要绑核的模型删除,需要绑核的模型根据实际修改 + cpucount=`lscpu | grep "CPU(s):" | head -n 1 | awk '{print $2}'` + cpustep=`expr $cpucount / 8` + echo "taskset c steps:" $cpustep + let a=RANK_ID*$cpustep + let b=RANK_ID+1 + let c=b*$cpustep-1 + + #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 + #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune + if [ "x${bind_core}" != x ];then + bind_core="taskset -c $a-$c" + fi + nohup ${bind_core} python3 ../bert/run_pretraining.py \ + --use_packed_model=True \ + --all_reduce_alg=nccl \ + --bert_config_file=../configs/bert_config.json \ + --beta_1=0.91063 \ + --beta_2=0.96497 \ + --device_warmup=False \ + --do_eval=True \ + --dtype=fp16 \ + --eval_batch_size=${eval_batch_size} \ + --init_checkpoint=${init_ckpt_path} \ + --train_files=${train_files_path} \ + --eval_files=${eval_files_path} \ + --learning_rate=${learning_rate} \ + --loss_scale=dynamic \ + --max_predictions_per_seq=79 \ + --max_seq_length=512 \ + --model_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} \ + --num_accumulation_steps=1 \ + --distribution_strategy=one_device \ + --num_gpus=1 \ + --enable_checkpoint_and_summary=True \ + --num_steps_per_epoch=1000 \ + --num_train_epochs=${train_epochs} \ + --optimizer_type=lamb \ + --scale_loss=False \ + --steps_between_eval=100 \ + --steps_per_loop=${NPU_LOOP_SIZE} \ + --stop_steps=200 \ + --train_batch_size=${batch_size} \ + --verbosity=0 \ + --warmup_steps=0 \ + --precision_mode=${precision_mode} \ + --over_dump=${over_dump} \ + --over_dump_path=${over_dump_path} \ + --data_dump_flag=${data_dump_flag} \ + --data_dump_step=${data_dump_step} \ + --data_dump_path=${data_dump_path} \ + --profiling=${profiling} \ + --profiling_dump_path=${profiling_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - 
$start_time )) + +#############结果处理######################### +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +single_batch_step_sec=`grep TimeHistory $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'` +FPS=`awk 'BEGIN{printf "%.2f\n",'${single_batch_step_sec}'*'${batch_size}'*'${average_sequences_per_sample}'}'` +#打印,不需要修改 +echo "Final Performance images/sec : $FPS" + +#输出训练精度,需要模型审视修改 +train_accuracy=`grep eval_accuracy $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|grep -v mlp_log|awk 'END {print $5}'|sed 's/,//g'|cut -c 1-5` +#打印,不需要修改 +echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + +#############冒烟看护######################### +BatchSize=${batch_size} +#设备类型 +DeviceType=`uname -m` +#用例名称 +CaseName=${Network}${name_bind}_bs${BatchSize}_${RANK_SIZE}'p_packed'_'perf' + +##获取性能数据 +#吞吐量,不需要修改 +ActualFPS=${FPS} +#单迭代训练时长,不需要修改 +TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*'${RANK_SIZE}'*1000/'${FPS}'}'` + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中 +grep loss $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print$11}'|grep -v instead|grep -v masked_lm_loss|sed 's/,//g'|sed '/^$/d' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值 +ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + sed -i "/AttributeError/d" $cur_path/output/${RANK_ID}/train_${RANK_ID}.log + sed -i "/ModuleNotFoundError/d" $cur_path/output/${RANK_ID}/train_${RANK_ID}.log +done \ No newline at end of file diff --git a/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/test/train_full_1p_4096bs_dynamic_noeval.sh b/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/test/train_full_1p_4096bs_dynamic_noeval.sh new file mode 100644 index 0000000000000000000000000000000000000000..632d4525fb67313007945d08934b57988b6df877 --- /dev/null +++ b/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/test/train_full_1p_4096bs_dynamic_noeval.sh @@ -0,0 +1,220 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 + +export RANK_SIZE=1 +export JOB_ID=10087 +export RANK_ID_START=0 +export PYTHONPATH=../transformer:$PYTHONPATH + + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#设置默认日志级别,不需要修改 + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="Transformer_ID0633_for_TensorFlow2.X" +#训练batch_size +batch_size=4096 +#训练step +train_steps=400000 + +#TF2.X独有,不需要修改 +#export 
NPU_ENABLE_PERF=true
+
+# Debug parameters; precision_mode needs review per model
+precision_mode="allow_mix_precision"
+# Maintenance parameters; no modification needed
+over_dump=False
+data_dump_flag=False
+data_dump_step="10"
+profiling=False
+
+
+# Help message; no modification needed
+if [[ $1 == --help || $1 == -h ]];then
+    echo "usage:./train_full_1p_4096bs_dynamic_noeval.sh "
+    echo " "
+    echo "parameter explain:
+    --precision_mode         precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision)
+    --over_dump              if or not over detection, default is False
+    --data_dump_flag         data dump flag, default is False
+    --data_dump_step         data dump step, default is 10
+    --profiling              if or not profiling for performance debug, default is False
+    --data_path              source data of training
+    -h/--help                show help message
+    "
+    exit 1
+fi
+
+# Argument parsing; no modification needed
+for para in $*
+do
+    if [[ $para == --precision_mode* ]];then
+        precision_mode=`echo ${para#*=}`
+    elif [[ $para == --over_dump* ]];then
+        over_dump=`echo ${para#*=}`
+        over_dump_path=${cur_path}/output/overflow_dump
+        mkdir -p ${over_dump_path}
+    elif [[ $para == --data_dump_flag* ]];then
+        data_dump_flag=`echo ${para#*=}`
+        data_dump_path=${cur_path}/output/data_dump
+        mkdir -p ${data_dump_path}
+    elif [[ $para == --data_dump_step* ]];then
+        data_dump_step=`echo ${para#*=}`
+    elif [[ $para == --profiling* ]];then
+        profiling=`echo ${para#*=}`
+        profiling_dump_path=${cur_path}/output/profiling
+        mkdir -p ${profiling_dump_path}
+    elif [[ $para == --data_path* ]];then
+        data_path=`echo ${para#*=}`
+    fi
+done
+
+# Verify that data_path was passed in; no modification needed
+if [[ $data_path == "" ]];then
+    echo "[Error] para \"data_path\" must be config"
+    exit 1
+fi
+
+# Training start time; no modification needed
+start_time=$(date +%s)
+
+# Enter the training script directory; review per model
+
+for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++));
+do
+    # Set environment variables; no modification needed
+    echo "Device ID: $ASCEND_DEVICE_ID"
+    export RANK_ID=$RANK_ID
+
+
+
+    # Create the DeviceID output directory; no modification needed
+    # (this script sets no learning_rate, so the checkpoint directory is plain ckpt, matching --model_dir below)
+    if [ -d ${cur_path}/output/${ASCEND_DEVICE_ID} ];then
+        rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID}
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    else
+        mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt
+    fi
+
+    # Core binding: delete for models that do not need it; adjust as required
+    cpucount=`lscpu | grep "CPU(s):" | head -n 1 | awk '{print $2}'`
+    cpustep=`expr $cpucount / 8`
+    echo "taskset cpu step size:" $cpustep
+    let a=RANK_ID*$cpustep
+    let b=RANK_ID+1
+    let c=b*$cpustep-1
+
+    # Launch the training script; the arguments below stay fixed, others need review per model
+    #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune
+    nohup taskset -c $a-$c python3 ../transformer/official/nlp/transformer/transformer_main.py \
+    --data_dir=${data_path} \
+    --model_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt \
+    --vocab_file=${data_path}/vocab.ende.32768 \
+    --param_set=big \
+    --train_steps=${train_steps} \
+    --batch_size=${batch_size} \
+    --steps_between_evals=10000 \
+    --max_length=64 \
+    --mode=train \
+    --decode_batch_size=32 \
+    --decode_max_length=97 \
+    --padded_decode=False \
+    --num_gpus=1 \
+    --dtype=fp16 \
+    --distribution_strategy='one_device' \
+    --enable_time_history=true \
+    --log_steps=1000 \
+    --loss_scale='dynamic' \
+    --precision_mode=${precision_mode} \
+    --over_dump=${over_dump} \
+    --over_dump_path=${over_dump_path} \
+    --data_dump_flag=${data_dump_flag} \
+    --data_dump_step=${data_dump_step} \
+    --data_dump_path=${data_dump_path} \
+    --profiling=${profiling} \
+    --profiling_dump_path=${profiling_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 &
+done
+wait
+
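+# wait blocks until the nohup-backgrounded training process launched above exits,
+# so the end-to-end duration computed below covers the whole training run.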
+#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############冒烟看护######################### +BatchSize=${batch_size} +#设备类型 +DeviceType=`uname -m` +#用例名称 +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +# single_batch_step_sec=`grep TimeHistory $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'` +# FPS=`awk 'BEGIN{printf "%.2f\n",'${single_batch_step_sec}'}'` +#打印,不需要修改 +# echo "Final Performance images/sec : $FPS" + +# grep "Train history" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print$8}'|sed 's/,//g'|sed 's/\[//g'|sed 's/\]//g' |sed 's/\}//g'>> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +# ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` +#输出训练精度,需要模型审视修改 +# grep "Train history" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print$8}'|sed 's/,//g'|sed 's/\[//g'|sed 's/\]//g' |sed 's/\}//g'>> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_acc.txt +# train_accuracy=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_acc.txt` +#train_accuracy=`grep eval_accuracy $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|grep -v mlp_log|awk 'END {print $5}'|sed 's/,//g'|cut -c 1-5` +#打印,不需要修改 +# echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + + +#fail info +ModelStatus="图执行FAIL" +DTS_Number="DTS2022042410927" +error_msg="op\[SoftmaxCrossEntropyWithLogitsTiling\], not supported shape\[FUNC:DoNdTiling\]" +Status=`grep "${error_msg}" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|wc -l` +error_msg=`grep "${error_msg}" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|tail -1` +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中,需要根据模型审视 + +#最后一个迭代loss值,不需要修改 + +#关键信息打印到${CaseName}.log中,不需要修改 +echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "ModelStatus = ${ModelStatus}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "DTS_Number = ${DTS_Number}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "Status = ${Status}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +echo "error_msg = ${error_msg}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log + +##获取性能数据 +#吞吐量,不需要修改 +# ActualFPS=${FPS} +#单迭代训练时长,不需要修改 +# TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*'${RANK_SIZE}'*1000/'${FPS}'}'` + +##获取Loss +#从train_$ASCEND_DEVICE_ID.log提取Loss到train_${CaseName}_loss.txt中 +#grep loss $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print$11}'|grep -v instead|grep -v masked_lm_loss|sed 's/,//g'|sed '/^$/d' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt + +#最后一个迭代loss值 +#ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` + +#关键信息打印到${CaseName}.log中 +#echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log 
+#echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log +#echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log diff --git a/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/test/train_full_1p_6144bs_dynamic_noeval.sh b/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/test/train_full_1p_6144bs_dynamic_noeval.sh new file mode 100644 index 0000000000000000000000000000000000000000..adba4519454eeb9e1aca00cdfa9e12394192cc00 --- /dev/null +++ b/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/test/train_full_1p_6144bs_dynamic_noeval.sh @@ -0,0 +1,221 @@ +#!/bin/bash + +#当前路径,不需要修改 +cur_path=`pwd` + +#集合通信参数,不需要修改 + +export RANK_SIZE=1 +export JOB_ID=10087 +export RANK_ID_START=0 +export PYTHONPATH=../transformer:$PYTHONPATH + + +# 数据集路径,保持为空,不需要修改 +data_path="" + +#设置默认日志级别,不需要修改 + +#基础参数,需要模型审视修改 +#网络名称,同目录名称 +Network="Transformer_ID0633_for_TensorFlow2.X" +#训练batch_size +batch_size=6144 +#训练step +train_steps=250000 + +#TF2.X独有,不需要修改 +#export NPU_ENABLE_PERF=true + +#维测参数,precision_mode需要模型审视修改 +precision_mode="allow_mix_precision" +#维持参数,以下不需要修改 +over_dump=False +data_dump_flag=False +data_dump_step="10" +profiling=False + + +# 帮助信息,不需要修改 +if [[ $1 == --help || $1 == -h ]];then + echo"usage:./train_full_1p.sh " + echo " " + echo "parameter explain: + --precision_mode precision mode(allow_fp32_to_fp16/force_fp16/must_keep_origin_dtype/allow_mix_precision) + --over_dump if or not over detection, default is False + --data_dump_flag data dump flag, default is False + --data_dump_step data dump step, default is 10 + --profiling if or not profiling for performance debug, default is False + --data_path source data of training + -h/--help show help message + " + exit 1 +fi + +#参数校验,不需要修改 +for para in $* +do + if [[ $para == --precision_mode* ]];then + precision_mode=`echo ${para#*=}` + elif [[ $para == --over_dump* ]];then + over_dump=`echo ${para#*=}` + over_dump_path=${cur_path}/output/overflow_dump + mkdir -p ${over_dump_path} + elif [[ $para == --data_dump_flag* ]];then + data_dump_flag=`echo ${para#*=}` + data_dump_path=${cur_path}/output/data_dump + mkdir -p ${data_dump_path} + elif [[ $para == --data_dump_step* ]];then + data_dump_step=`echo ${para#*=}` + elif [[ $para == --profiling* ]];then + profiling=`echo ${para#*=}` + profiling_dump_path=${cur_path}/output/profiling + mkdir -p ${profiling_dump_path} + elif [[ $para == --data_path* ]];then + data_path=`echo ${para#*=}` + fi +done + +#校验是否传入data_path,不需要修改 +if [[ $data_path == "" ]];then + echo "[Error] para \"data_path\" must be confing" + exit 1 +fi + +#训练开始时间,不需要修改 +start_time=$(date +%s) + +#进入训练脚本目录,需要模型审视修改 + +for((RANK_ID=$RANK_ID_START;RANK_ID<$((RANK_SIZE+RANK_ID_START));RANK_ID++)); +do + #设置环境变量,不需要修改 + echo "Device ID: $ASCEND_DEVICE_ID" + export RANK_ID=$RANK_ID + + + + #创建DeviceID输出目录,不需要修改 + if [ -d 
${cur_path}/output/${ASCEND_DEVICE_ID} ];then + rm -rf ${cur_path}/output/${ASCEND_DEVICE_ID} + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} + else + mkdir -p ${cur_path}/output/$ASCEND_DEVICE_ID/ckpt_${learning_rate} + fi + + #绑核,不需要绑核的模型删除,需要绑核的模型根据实际修改 + cpucount=`lscpu | grep "CPU(s):" | head -n 1 | awk '{print $2}'` + cpustep=`expr $cpucount / 8` + echo "taskset c steps:" $cpustep + let a=RANK_ID*$cpustep + let b=RANK_ID+1 + let c=b*$cpustep-1 + + #执行训练脚本,以下传参不需要修改,其他需要模型审视修改 + #--data_dir, --model_dir, --precision_mode, --over_dump, --over_dump_path,--data_dump_flag,--data_dump_step,--data_dump_path,--profiling,--profiling_dump_path,--autotune + nohup taskset -c $a-$c python3 ../transformer/official/nlp/transformer/transformer_main.py \ + --data_dir=${data_path} \ + --model_dir=${cur_path}/output/$ASCEND_DEVICE_ID/ckpt \ + --vocab_file=${data_path}/vocab.ende.32768 \ + --param_set=big \ + --train_steps=${train_steps} \ + --batch_size=${batch_size} \ + --steps_between_evals=10000 \ + --max_length=64 \ + --mode=train \ + --decode_batch_size=32 \ + --decode_max_length=97 \ + --padded_decode=False \ + --num_gpus=1 \ + --dtype=fp32 \ + --enable_metrics_in_training=true \ + --distribution_strategy='one_device' \ + --enable_time_history=true \ + --log_steps=1000 \ + --loss_scale='dynamic' \ + --precision_mode=${precision_mode} \ + --over_dump=${over_dump} \ + --over_dump_path=${over_dump_path} \ + --data_dump_flag=${data_dump_flag} \ + --data_dump_step=${data_dump_step} \ + --data_dump_path=${data_dump_path} \ + --profiling=${profiling} \ + --profiling_dump_path=${profiling_dump_path} > ${cur_path}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log 2>&1 & +done +wait + +#训练结束时间,不需要修改 +end_time=$(date +%s) +e2e_time=$(( $end_time - $start_time )) + +#############冒烟看护######################### +BatchSize=${batch_size} +#设备类型 +DeviceType=`uname -m` +#用例名称 +CaseName=${Network}_bs${BatchSize}_${RANK_SIZE}'p'_'perf' + +echo "------------------ Final result ------------------" +#输出性能FPS,需要模型审视修改 +# single_batch_step_sec=`grep TimeHistory $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|awk 'END {print $8}'` +# FPS=`awk 'BEGIN{printf "%.2f\n",'${single_batch_step_sec}'}'` +#打印,不需要修改 +# echo "Final Performance images/sec : $FPS" + +# grep "Train history" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print$8}'|sed 's/,//g'|sed 's/\[//g'|sed 's/\]//g' |sed 's/\}//g'>> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt +# ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt` +#输出训练精度,需要模型审视修改 +# grep "Train history" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print$8}'|sed 's/,//g'|sed 's/\[//g'|sed 's/\]//g' |sed 's/\}//g'>> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_acc.txt +# train_accuracy=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_acc.txt` +#train_accuracy=`grep eval_accuracy $cur_path/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log|grep -v mlp_log|awk 'END {print $5}'|sed 's/,//g'|cut -c 1-5` +#打印,不需要修改 +# echo "Final Train Accuracy : ${train_accuracy}" +echo "E2E Training Duration sec : $e2e_time" + + +#fail info +ModelStatus="图执行FAIL" +DTS_Number="DTS2022042214040" +error_msg="Param:owner_graph is nullptr, check invalid" +Status=`grep "${error_msg}" $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|wc -l` +error_msg=`grep "${error_msg}" 
+# Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt; review per model
+
+# Loss value of the last iteration; no changes needed
+
+# Print key information into ${CaseName}.log; no changes needed
+echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "ModelStatus = ${ModelStatus}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "DTS_Number = ${DTS_Number}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "Status = ${Status}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+echo "error_msg = ${error_msg}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+
+## Collect performance data
+# Throughput; no changes needed
+# ActualFPS=${FPS}
+# Training time per iteration; no changes needed
+# TrainingTime=`awk 'BEGIN{printf "%.2f\n",'${BatchSize}'*'${RANK_SIZE}'*1000/'${FPS}'}'`
+
+## Collect Loss
+# Extract Loss from train_$ASCEND_DEVICE_ID.log into train_${CaseName}_loss.txt
+#grep loss $cur_path/output/$ASCEND_DEVICE_ID/train_$ASCEND_DEVICE_ID.log|awk '{print$11}'|grep -v instead|grep -v masked_lm_loss|sed 's/,//g'|sed '/^$/d' >> $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt
+
+# Loss value of the last iteration
+#ActualLoss=`awk 'END {print}' $cur_path/output/$ASCEND_DEVICE_ID/train_${CaseName}_loss.txt`
+
+# Print key information into ${CaseName}.log
+#echo "Network = ${Network}" > $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "RankSize = ${RANK_SIZE}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "BatchSize = ${BatchSize}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "DeviceType = ${DeviceType}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "CaseName = ${CaseName}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "ActualFPS = ${ActualFPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "TrainingTime = ${TrainingTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+#echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
diff --git a/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/transformer/official/nlp/transformer/misc.py b/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/transformer/official/nlp/transformer/misc.py
index fd20e215d96aedacf4afd8d1f5ac9d02adf39e64..0678b396efb3f45c3a751d0ba1c9aa9e0f5807d7 100644
--- a/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/transformer/official/nlp/transformer/misc.py
+++ b/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/transformer/official/nlp/transformer/misc.py
@@ -217,6 +217,11 @@ def define_transformer_flags():
           'If True, then only the model\'s weights will be saved '
           '(`model.save_weights(filepath)`), else the full model is saved '
           '(`model.save(filepath)`)'))
+  flags.DEFINE_bool(
+      name='dynamic_eval',
+      default=False,
+      help=flags_core.help_wrap(
+          'Whether to use dynamic evaluation inputs.'))
 
   flags_core.set_defaults(
       data_dir='/tmp/translate_ende',
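The misc.py hunk above and the transformer_main.py hunk below follow the standard absl flags pattern: misc.py declares the boolean flag, transformer_main.py copies the parsed value into the params dictionary, and translate.py later reads it from params. As a minimal standalone sketch of that pattern, assuming a hypothetical demo.py that is not part of this change:

    from absl import app, flags

    # Declare the flag, as misc.py does for 'dynamic_eval'.
    flags.DEFINE_bool('dynamic_eval', False, 'Whether to use dynamic evaluation inputs.')
    FLAGS = flags.FLAGS

    def main(_):
        # Mirror the parsed flag into a plain params dict, as TransformerTask does.
        params = {'dynamic_eval': FLAGS.dynamic_eval}
        print(params)

    if __name__ == '__main__':
        app.run(main)  # python3 demo.py --dynamic_eval=true prints {'dynamic_eval': True}
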
diff --git a/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/transformer/official/nlp/transformer/transformer_main.py b/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/transformer/official/nlp/transformer/transformer_main.py
index 5b650f074a2a5e7c2022438a2893e99af6102c19..8f52dec383785e603a5935962df6790d44cf3717 100644
--- a/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/transformer/official/nlp/transformer/transformer_main.py
+++ b/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/transformer/official/nlp/transformer/transformer_main.py
@@ -233,6 +233,7 @@ class TransformerTask(object):
     params["steps_between_evals"] = flags_obj.steps_between_evals
     params["enable_checkpointing"] = flags_obj.enable_checkpointing
     params["save_weights_only"] = flags_obj.save_weights_only
+    params["dynamic_eval"] = flags_obj.dynamic_eval
 
     self.distribution_strategy = distribute_utils.get_distribution_strategy(
         distribution_strategy=flags_obj.distribution_strategy,
diff --git a/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/transformer/official/nlp/transformer/translate.py b/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/transformer/official/nlp/transformer/translate.py
index b0fb14827275b3434ab47d4afbd03102752c2aa7..485db5d75cace7a31b3d0fe18b795a12aa19b73c 100644
--- a/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/transformer/official/nlp/transformer/translate.py
+++ b/TensorFlow2/built-in/nlp/Transformer_ID0633_for_TensorFlow2.X/transformer/official/nlp/transformer/translate.py
@@ -65,6 +65,31 @@ def _get_sorted_inputs(filename,batch_size):
 
   return sorted_inputs, sorted_keys
 
+def _get_sorted_inputs_dynamic(filename):
+  """Read lines from the file and sort them by decreasing token length.
+  Args:
+    filename: String name of file to read inputs from.
+  Returns:
+    Sorted list of inputs, and a list mapping each original index to the
+    sorted position of that element.
+  """
+  with tf.io.gfile.GFile(filename) as f:
+    records = f.read().split("\n")
+    inputs = [record.strip() for record in records]
+    if not inputs[-1]:
+      inputs.pop()
+
+  input_lens = [(i, len(line.split())) for i, line in enumerate(inputs)]
+  sorted_input_lens = sorted(input_lens, key=lambda x: x[1], reverse=True)
+
+  sorted_inputs = [None] * len(sorted_input_lens)
+  sorted_keys = [0] * len(sorted_input_lens)
+  for i, (index, _) in enumerate(sorted_input_lens):
+    sorted_inputs[i] = inputs[index]
+    sorted_keys[index] = i
+  return sorted_inputs, sorted_keys
+
+
 def _encode_and_add_eos(line, subtokenizer):
   """Encode line with subtokenizer, and add EOS id to the end."""
   return subtokenizer.encode(line) + [tokenizer.EOS_ID]
@@ -107,7 +132,11 @@ def translate_file(model,
 
   # Read and sort inputs by length. Keep dictionary (original index-->new index
   # in sorted list) to write translations in the original order.
-  sorted_inputs, sorted_keys = _get_sorted_inputs(input_file,batch_size)
+
+  if params['dynamic_eval']:
+    sorted_inputs, sorted_keys = _get_sorted_inputs_dynamic(input_file)
+  else:
+    sorted_inputs, sorted_keys = _get_sorted_inputs(input_file,batch_size)
   total_samples = len(sorted_inputs)
   num_decode_batches = (total_samples - 1) // batch_size + 1
   #static input modify
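For clarity, here is a small worked example of the bookkeeping _get_sorted_inputs_dynamic performs; the input strings are hypothetical, not taken from the change. Inputs are reordered by decreasing token count, and sorted_keys maps each original index to that input's position in the sorted list, which is what later lets translate_file write translations back out in the original order.

    # Standalone sketch of the sort-and-remap logic in _get_sorted_inputs_dynamic.
    inputs = ["a b c", "x", "p q"]  # hypothetical source lines

    input_lens = [(i, len(line.split())) for i, line in enumerate(inputs)]
    sorted_input_lens = sorted(input_lens, key=lambda x: x[1], reverse=True)

    sorted_inputs = [None] * len(sorted_input_lens)
    sorted_keys = [0] * len(sorted_input_lens)
    for new_pos, (orig_idx, _) in enumerate(sorted_input_lens):
        sorted_inputs[new_pos] = inputs[orig_idx]  # longest line first
        sorted_keys[orig_idx] = new_pos            # original index -> sorted position

    assert sorted_inputs == ["a b c", "p q", "x"]
    assert sorted_keys == [0, 2, 1]  # "x" (original index 1) now sits at position 2

The only visible difference from _get_sorted_inputs is that the dynamic variant takes no batch_size argument, consistent with the dynamic-shape, no-static-padding intent suggested by the script name and the "#static input modify" note above.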